1 (* Copyright (C) 2005, HELM Team.
3 * This file is part of HELM, an Hypertextual, Electronic
4 * Library of Mathematics, developed at the Computer Science
5 * Department, University of Bologna, Italy.
7 * HELM is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU General Public License
9 * as published by the Free Software Foundation; either version 2
10 * of the License, or (at your option) any later version.
12 * HELM is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 * GNU General Public License for more details.
17 * You should have received a copy of the GNU General Public License
18 * along with HELM; if not, write to the Free Software
19 * Foundation, Inc., 59 Temple Place - Suite 330, Boston,
22 * For details, see the HELM World-Wide-Web page,
23 * http://helm.cs.unibo.it/
(* Lexing error: (begin char offset, end char offset, message).
 * Offsets are taken from [Ulexing.loc] — see [error]/[error_at_end] below. *)
30 exception Error of int * int * string

(* --- ulex "let regexp" definitions (preprocessor syntax, not plain OCaml) --- *)

(* One or more XML digits: numeric literals and meta indices. *)
32 let regexp number = xml_digit+
33 let regexp utf8_blank = " " | "\r\n" | "\n" | "\t" | [160] (* this is a nbsp *)
(* Optionally negated integer followed by '%', e.g. "-42%". *)
34 let regexp percentage =
35 ('-' | "") [ '0' - '9' ] + '%'
(* Decimal number with a mandatory fractional part and an optional
 * lowercase alphabetic unit suffix, e.g. "1.5em". *)
36 let regexp floatwithunit =
37 ('-' | "") [ '0' - '9' ] + ["."] [ '0' - '9' ] + ([ 'a' - 'z' ] + | "" )
(* "#RRGGBB": exactly six hex digits after the hash. *)
38 let regexp color = "#" [ '0' - '9' 'a' - 'f' 'A' - 'F' ] [ '0' - '9' 'a' - 'f'
39 'A' - 'F' ] [ '0' - '9' 'a' - 'f' 'A' - 'F' ] [ '0' - '9' 'a' - 'f' 'A' - 'F' ]
40 [ '0' - '9' 'a' - 'f' 'A' - 'F' ] [ '0' - '9' 'a' - 'f' 'A' - 'F' ]

42 (* ZACK: breaks unicode's binder followed by an ascii letter without blank *)
43 (* let regexp ident_letter = xml_letter *)

(* Identifiers are deliberately ASCII-only (see ZACK's note above). *)
45 let regexp ident_letter = [ 'a' - 'z' 'A' - 'Z' ]

47 (* must be in sync with "is_ligature_char" below *)
(* A ligature is two or more consecutive punctuation characters; candidates
 * are looked up in the [ligatures] table further down. *)
48 let regexp ligature_char = [ "'`~!?@*()[]<>-+=|:;.,/\"" ]
49 let regexp ligature = ligature_char ligature_char+
(* Multi-word keywords: the words may be separated by any run of blanks,
 * so e.g. "we   proved" still lexes as a single WEPROVED token. *)
51 let regexp we_proved = "we" utf8_blank+ "proved"
52 let regexp we_have = "we" utf8_blank+ "have"
53 let regexp let_rec = "let" utf8_blank+ "rec"
54 let regexp let_corec = "let" utf8_blank+ "corec"
(* "n"-prefixed variants — presumably for the "new" kernel syntax; TODO confirm. *)
55 let regexp nlet_rec = "nlet" utf8_blank+ "rec"
56 let regexp nlet_corec = "nlet" utf8_blank+ "corec"
(* Trailing decorations allowed on identifiers: prime, question mark, backquote. *)
57 let regexp ident_decoration = '\'' | '?' | '`'
58 let regexp ident_cont = ident_letter | xml_digit | '_'
59 let regexp ident_start = ident_letter
(* letter, then letters/digits/underscores, then optional decorations. *)
60 let regexp ident = ident_letter ident_cont* ident_decoration*
(* "__<number>": machine-generated variable names. *)
61 let regexp variable_ident = '_' '_' number
(* Identifier with a single leading underscore. *)
62 let regexp pident = '_' ident

(* TeX-style macro: backslash followed by an identifier, e.g. "\lambda". *)
64 let regexp tex_token = '\\' ident

66 let regexp delim_begin = "\\["
67 let regexp delim_end = "\\]"

(* Quoted keyword literal used in notations, e.g. 'foo' or '_bar'. *)
69 let regexp qkeyword = "'" ( ident | pident ) "'"
(* '?' alone is an implicit argument; '?' followed by digits is a metavariable. *)
71 let regexp implicit = '?'
72 let regexp implicit_vector = "..."
73 let regexp placeholder = '%'
74 let regexp meta = implicit number

(* Quote-prefixed concrete symbol, e.g. 'eq. *)
76 let regexp csymbol = '\'' ident

(* "@{...}" / "${...}" open unparsed AST / meta groups (see read_unparsed_group). *)
78 let regexp begin_group = "@{" | "${"
79 let regexp end_group = '}'
80 let regexp wildcard = "$_"
81 let regexp ast_ident = "@" ident
82 let regexp ast_csymbol = "@" csymbol
83 let regexp meta_ident = "$" ident
(* NOTE(review): same pattern as [wildcard] above — which arm wins depends on
 * rule order in the lexer that uses them. *)
84 let regexp meta_anonymous = "$_"
(* Double-quoted string; no escape sequences, so no embedded '"'. *)
85 let regexp qstring = '"' [^ '"']* '"'

(* "(**" followed by a blank starts a structured comment; "(*" starts a note. *)
87 let regexp begincomment = "(**" utf8_blank
88 let regexp beginnote = "(*"
89 let regexp endcomment = "*)"
90 (* let regexp comment_char = [^'*'] | '*'[^')']
91 let regexp note = "|+" ([^'*'] | "**") comment_char* "+|" *)
(* NOTE(review): the two lines below are the tail of a keyword list whose
 * "let ... = [" header (and closer) fall outside this extract — the entries
 * are MathML/layout operator names. *)
96 "over"; "atop"; "frac";
97 "sqrt"; "root"; "mstyle" ; "mpadded"; "maction"

(* Keywords reserved in level-1 (presentation) patterns; consulted by
 * [level1_pattern_token] via [handle_keywords] below.
 * NOTE(review): the closing "]" of this list is missing from this extract. *)
101 let level1_keywords =
102 [ "hbox"; "hvbox"; "hovbox"; "vbox";
104 "list0"; "list1"; "sep";
106 "term"; "ident"; "number";
(* Keywords reserved in level-2 (meta) patterns; consulted by
 * [level2_meta_token] via [handle_keywords] below.
 * FIX(review): "elCicNotationParser.se" was a text-extraction garble — the
 * module name "CicNotationParser." was fused into the keyword "else"
 * (completing the "if"/"then"/"else" triple).
 * NOTE(review): the closing "]" of this list is missing from this extract. *)
109 let level2_meta_keywords =
110 [ "if"; "then"; "else";
111 "fold"; "left"; "right"; "rec";
114 "anonymous"; "ident"; "number"; "term"; "fresh"
117 (* (string, unit) Hashtbl.t, to exploit multiple bindings *)
(* Fresh keyword table; 23 is just an initial bucket-count hint. *)
118 let initial_level2_ast_keywords () = Hashtbl.create 23;;

(* Mutable so [push]/[pop] (end of file) can save and restore the table. *)
120 let level2_ast_keywords = ref (initial_level2_ast_keywords ())

(* (Re)populate the current table with the built-in level-2 AST keywords.
 * Uses Hashtbl.add, so repeated calls stack bindings rather than replace. *)
122 let initialize_keywords () =
123 List.iter (fun k -> Hashtbl.add !level2_ast_keywords k ())
124 [ "CProp"; "Prop"; "Type"; "Set"; "let"; "match";
125 "with"; "in"; "and"; "to"; "as"; "on"; "return"; "done" ]

128 let _ = initialize_keywords ();;

(* Dynamic (un)registration of keywords; Hashtbl multiple bindings make
 * remove undo exactly one matching add. *)
130 let add_level2_ast_keyword k = Hashtbl.add !level2_ast_keywords k ()
131 let remove_level2_ast_keyword k = Hashtbl.remove !level2_ast_keywords k
133 (* (string, int) Hashtbl.t, with multiple bindings.
134 * int is the unicode codepoint *)
135 let ligatures = Hashtbl.create 23

(* NOTE(review): the "List.iter" application line that drives this table
 * initialization (and the list's closing bracket) are missing from this
 * extract. The <:unicode<...>> quotations expand to unicode symbols. *)
139 (fun (ligature, symbol) -> Hashtbl.add ligatures ligature symbol)
140 [ ("->", <:unicode<to>>); ("=>", <:unicode<Rightarrow>>);
141 (":=", <:unicode<def>>);

(* One path component of a HELM URI: alphanumerics, '_', '-', and "'". *)
144 let regexp uri_step = [ 'a' - 'z' 'A' - 'Z' '0' - '9' '_' '-' ''' ]+

(* NOTE(review): the "let regexp uri =" header is missing from this extract;
 * the pieces below are its schema/path/ext/xpointer components. *)
147 ("cic:/" | "theory:/") (* schema *)
148 (* ident ('/' ident)* |+ path +| *)
149 uri_step ('/' uri_step)* (* path *)
150 ('.' ident)+ (* ext *)
151 ("#xpointer(" number ('/' number)+ ")")? (* xpointer *)

(* NCic object reference: path plus a typed specifier (decl/def/fix/...).
 * NOTE(review): the schema line and the opening of the alternation are
 * missing from this extract. *)
153 let regexp nreference =
155 uri_step ('/' uri_step)* (* path *)
158 | "def" "(" number ")"
159 | "fix" "(" number "," number "," number ")"
160 | "cfx" "(" number ")"
161 | "ind" "(" number "," number "," number ")"
162 | "con" "(" number "," number "," number ")") (* ext + reference *)
(* Raise [Error] spanning the current lexeme's character offsets. *)
164 let error lexbuf msg =
165 let begin_cnum, end_cnum = Ulexing.loc lexbuf in
166 raise (Error (begin_cnum, end_cnum, msg))
(* NOTE(review): currently identical to [error]; kept as a distinct name,
 * presumably for call-site clarity at end-of-input failures. *)
167 let error_at_end lexbuf msg =
168 let begin_cnum, end_cnum = Ulexing.loc lexbuf in
169 raise (Error (begin_cnum, end_cnum, msg))

(* Pair a token with a camlp5 location built from explicit char offsets.
 * NOTE(review): the line returning (token, flocation) is missing from
 * this extract. *)
171 let return_with_loc token begin_cnum end_cnum =
172 let flocation = HExtlib.floc_of_loc (begin_cnum,end_cnum) in

(* Same, but take the offsets from the lexbuf's current lexeme. *)
175 let return lexbuf token =
176 let begin_cnum, end_cnum = Ulexing.loc lexbuf in
177 return_with_loc token begin_cnum end_cnum

(* Convenience constructors for (name, value) tokens. *)
179 let return_lexeme lexbuf name = return lexbuf (name, Ulexing.utf8_lexeme lexbuf)

181 let return_symbol lexbuf s = return lexbuf ("SYMBOL", s)
182 let return_eoi lexbuf = return lexbuf ("EOI", "")

(* Strip one character from each end, e.g. 'foo' -> foo. Assumes length >= 2.
 * NOTE(review): redefined verbatim further down (original line 229). *)
184 let remove_quotes s = String.sub s 1 (String.length s - 2)
(* camlp5 token function: wraps the ulex lexer into the stream/location pair
 * camlp5 expects. See the Obj.magic rationale below. *)
187 let tok_func stream =
188 (* let lexbuf = Ulexing.from_utf8_stream stream in *)
189 (** XXX Obj.magic rationale.
191 * camlp5 constraints the tok_func field of Token.glexer to have type:
192 * Stream.t char -> (Stream.t 'te * flocation_function)
193 * In order to use ulex we have (in theory) to instantiate a new lexbuf each
194 * time a char Stream.t is passed, destroying the previous lexbuf which may
195 * have consumed a character from the old stream which is lost forever :-(
197 * Instead of passing to camlp5 a char Stream.t we pass a lexbuf, casting it to
198 * char Stream.t with Obj.magic where needed.
 *)
(* The "stream" really is a Ulexing.lexbuf smuggled through camlp5's type. *)
200 let lexbuf = Obj.magic stream in
201 Token.make_stream_and_location
(* NOTE(review): the thunk body between here and the exception handlers is
 * missing from this extract. Ulex failures are converted into our [Error]. *)
206 | Ulexing.Error -> error_at_end lexbuf "Unexpected character"
207 | Ulexing.InvalidCodepoint p ->
208 error_at_end lexbuf (sprintf "Invalid code point: %d" p))

(* Fields of the Token.glexer record built around [tok_func].
 * NOTE(review): the "let mk_lexer token = {" header and closing "}" are
 * missing from this extract. *)
211 Token.tok_func = tok_func;
212 Token.tok_using = (fun _ -> ());
213 Token.tok_removing = (fun _ -> ());
214 Token.tok_match = Token.default_match;
215 Token.tok_text = Token.lexer_text;
216 Token.tok_comm = None;

(* Expand a TeX macro lexeme ("\name") into a SYMBOL token.
 * NOTE(review): the lines binding [macro] and opening the try are missing
 * from this extract. *)
219 let expand_macro lexbuf =
(* Drop the leading backslash to get the macro name. *)
221 Ulexing.utf8_sub_lexeme lexbuf 1 (Ulexing.lexeme_length lexbuf - 1)
224 ("SYMBOL", Utf8Macro.expand macro)
225 with Utf8Macro.Macro_not_found _ ->
226 (* FG: unexpanded TeX macros are terminated by a space for rendering *)
227 "SYMBOL", (Ulexing.utf8_lexeme lexbuf ^ " ")

(* NOTE(review): duplicate of the earlier definition (original line 184). *)
229 let remove_quotes s = String.sub s 1 (String.length s - 2)
(* Strip only the leading character, e.g. 'eq -> eq. *)
230 let remove_left_quote s = String.sub s 1 (String.length s - 1)
(* Accumulate the raw text of a brace-delimited group into [buffer],
 * tracking nesting via [counter]; returns the end char offset.
 * NOTE(review): the ulex pattern ("lexer ...") lines for each arm below are
 * missing from this extract — only the action bodies remain. *)
232 let rec level2_pattern_token_group counter buffer =
(* Closing brace of a nested group is kept in the output; the outermost
 * one (counter = 0) is not. *)
235 if (counter > 0) then
236 Buffer.add_string buffer (Ulexing.utf8_lexeme lexbuf) ;
237 snd (Ulexing.loc lexbuf)
(* Opening brace: recurse one level deeper, then continue at this level. *)
239 Buffer.add_string buffer (Ulexing.utf8_lexeme lexbuf) ;
240 ignore (level2_pattern_token_group (counter + 1) buffer lexbuf) ;
241 level2_pattern_token_group counter buffer lexbuf
(* Any other lexeme: copy through verbatim. *)
243 Buffer.add_string buffer (Ulexing.utf8_lexeme lexbuf) ;
244 level2_pattern_token_group counter buffer lexbuf

(* Read everything up to the matching close brace as one opaque token
 * named [token_name] (e.g. "UNPARSED_AST"), spanning the full group. *)
246 let read_unparsed_group token_name lexbuf =
247 let buffer = Buffer.create 16 in
248 let begin_cnum, _ = Ulexing.loc lexbuf in
249 let end_cnum = level2_pattern_token_group 0 buffer lexbuf in
250 return_with_loc (token_name, Buffer.contents buffer) begin_cnum end_cnum

(* If predicate [k] says the lexeme is a keyword, emit it as a ("", s)
 * keyword token; otherwise as a (name, s) token (e.g. IDENT).
 * NOTE(review): the "if k s then"/"else" lines are missing from this
 * extract. *)
252 let handle_keywords lexbuf k name =
253 let s = Ulexing.utf8_lexeme lexbuf in
255 return lexbuf ("", s)
257 return lexbuf (name, s)
(* Tokenizer for level-2 meta patterns.
 * NOTE(review): the "lexer" keyword and several pattern lines (ident,
 * pident, ast_ident, ast_csymbol) are missing from this extract — only
 * some arms remain. *)
260 let rec level2_meta_token =
(* Skip blanks. *)
262 | utf8_blank+ -> level2_meta_token lexbuf
264 handle_keywords lexbuf (fun x -> List.mem x level2_meta_keywords) "IDENT"
265 | variable_ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf)
267 handle_keywords lexbuf (fun x -> List.mem x level2_meta_keywords) "PIDENT"
268 | "@{" -> read_unparsed_group "UNPARSED_AST" lexbuf
(* "@"-prefixed forms: strip the '@' and hand back the payload. *)
270 return lexbuf ("UNPARSED_AST",
271 remove_left_quote (Ulexing.utf8_lexeme lexbuf))
273 return lexbuf ("UNPARSED_AST",
274 remove_left_quote (Ulexing.utf8_lexeme lexbuf))
275 | eof -> return_eoi lexbuf

(* Consume a (possibly nested) "(* ... *)" comment, accumulating its text.
 * NOTE(review): the "lexer" header and the beginnote/endcomment pattern
 * lines are missing from this extract; string concatenation in the
 * accumulator is O(n^2) but comments are short. *)
277 let rec comment_token acc depth =
(* Nested open: go one level deeper. *)
280 let acc = acc ^ Ulexing.utf8_lexeme lexbuf in
281 comment_token acc (depth + 1) lexbuf
(* Close: either done (missing "then" branch) or pop one level. *)
283 let acc = acc ^ Ulexing.utf8_lexeme lexbuf in
286 else comment_token acc (depth - 1) lexbuf
(* Any other character: copy through. *)
288 let acc = acc ^ Ulexing.utf8_lexeme lexbuf in
289 comment_token acc depth lexbuf
291 (** @param k continuation to be invoked when no ligature has been found *)
(* Try to lex a ligature first; on failure rewind the lexbuf and delegate
 * to continuation [k] (one of the token functions below).
 * NOTE(review): the "lexer" header and the "k lexbuf" fallthrough lines
 * are missing from this extract. *)
292 let rec ligatures_token k =
295 let lexeme = Ulexing.utf8_lexeme lexbuf in
(* Hashtbl.find_all returns newest-first; List.rev puts the first-registered
 * ("default") binding at the head. *)
296 (match List.rev (Hashtbl.find_all ligatures lexeme) with
297 | [] -> (* ligature not found, rollback and try default lexer *)
298 Ulexing.rollback lexbuf;
300 | default_lig :: _ -> (* ligatures found, use the default one *)
301 return_symbol lexbuf default_lig)
302 | eof -> return_eoi lexbuf
303 | _ -> (* not a ligature, rollback and try default lexer *)
304 Ulexing.rollback lexbuf;

(* Main tokenizer for level-2 AST terms.
 * NOTE(review): the "lexer" header and several arms/patterns (e.g. meta,
 * csymbol, qstring, meta_ident) are missing from this extract. *)
307 and level2_ast_token =
(* Multi-word keywords first, so "let rec" never splits into LET + IDENT. *)
309 | let_rec -> return lexbuf ("LETREC","")
310 | let_corec -> return lexbuf ("LETCOREC","")
311 | nlet_rec -> return lexbuf ("NLETREC","")
312 | nlet_corec -> return lexbuf ("NLETCOREC","")
313 | we_proved -> return lexbuf ("WEPROVED","")
314 | we_have -> return lexbuf ("WEHAVE","")
(* After blanks, restart through the ligature filter. *)
315 | utf8_blank+ -> ligatures_token level2_ast_token lexbuf
(* Metavariable: drop the leading '?' from the lexeme. *)
317 let s = Ulexing.utf8_lexeme lexbuf in
318 return lexbuf ("META", String.sub s 1 (String.length s - 1))
319 | implicit -> return lexbuf ("IMPLICIT", "")
320 | implicit_vector -> return lexbuf ("IMPLICITVECTOR", "")
321 | placeholder -> return lexbuf ("PLACEHOLDER", "")
(* Identifiers may be dynamically-registered keywords (see the table above). *)
322 | ident -> handle_keywords lexbuf (Hashtbl.mem !level2_ast_keywords) "IDENT"
323 | variable_ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf)
324 | pident -> handle_keywords lexbuf (Hashtbl.mem !level2_ast_keywords) "PIDENT"
325 | number -> return lexbuf ("NUMBER", Ulexing.utf8_lexeme lexbuf)
326 | tex_token -> return lexbuf (expand_macro lexbuf)
327 | nreference -> return lexbuf ("NREF", Ulexing.utf8_lexeme lexbuf)
328 | uri -> return lexbuf ("URI", Ulexing.utf8_lexeme lexbuf)
330 return lexbuf ("QSTRING", remove_quotes (Ulexing.utf8_lexeme lexbuf))
332 return lexbuf ("CSYMBOL", remove_left_quote (Ulexing.utf8_lexeme lexbuf))
333 | "${" -> read_unparsed_group "UNPARSED_META" lexbuf
334 | "@{" -> read_unparsed_group "UNPARSED_AST" lexbuf
335 | '(' -> return lexbuf ("LPAREN", "")
336 | ')' -> return lexbuf ("RPAREN", "")
338 return lexbuf ("UNPARSED_META",
339 remove_left_quote (Ulexing.utf8_lexeme lexbuf))
340 | meta_anonymous -> return lexbuf ("UNPARSED_META", "anonymous")
(* Notes "(*...*)" are consumed and discarded (result ignored), then lexing
 * resumes; the NOTE-token alternative is commented out. *)
342 let _comment = comment_token (Ulexing.utf8_lexeme lexbuf) 0 lexbuf in
344 Ulexing.utf8_sub_lexeme lexbuf 2 (Ulexing.lexeme_length lexbuf - 4)
346 return lexbuf ("NOTE", comment) *)
347 ligatures_token level2_ast_token lexbuf
348 | begincomment -> return lexbuf ("BEGINCOMMENT","")
349 | endcomment -> return lexbuf ("ENDCOMMENT","")
350 | eof -> return_eoi lexbuf
(* Anything else is a single SYMBOL. *)
351 | _ -> return_symbol lexbuf (Ulexing.utf8_lexeme lexbuf)
(* Tokenizer for level-1 (presentation) patterns.
 * NOTE(review): the "lexer" header and the percentage/floatwithunit/qkeyword
 * pattern lines are missing from this extract. *)
353 and level1_pattern_token =
355 | utf8_blank+ -> ligatures_token level1_pattern_token lexbuf
356 | number -> return lexbuf ("NUMBER", Ulexing.utf8_lexeme lexbuf)
(* Level-1 keywords come from the static [level1_keywords] list. *)
357 | ident ->handle_keywords lexbuf (fun x -> List.mem x level1_keywords) "IDENT"
358 | variable_ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf)
359 | pident->handle_keywords lexbuf (fun x->List.mem x level1_keywords) "PIDENT"
360 | color -> return lexbuf ("COLOR", Ulexing.utf8_lexeme lexbuf)
362 return lexbuf ("PERCENTAGE", Ulexing.utf8_lexeme lexbuf)
364 return lexbuf ("FLOATWITHUNIT", Ulexing.utf8_lexeme lexbuf)
365 | tex_token -> return lexbuf (expand_macro lexbuf)
(* Quoted keyword: strip the surrounding single quotes. *)
367 return lexbuf ("QKEYWORD", remove_quotes (Ulexing.utf8_lexeme lexbuf))
368 | '(' -> return lexbuf ("LPAREN", "")
369 | ')' -> return lexbuf ("RPAREN", "")
370 | eof -> return_eoi lexbuf
371 | _ -> return_symbol lexbuf (Ulexing.utf8_lexeme lexbuf)
(* Public entry points: every token stream starts by attempting ligature
 * recognition, falling back to the raw tokenizers above. *)
373 let level1_pattern_token = ligatures_token level1_pattern_token
374 let level2_ast_token = ligatures_token level2_ast_token

376 (* API implementation *)

(* Fresh camlp5 lexers built around each token function.
 * NOTE(review): [mk_lexer] is defined around the Token record fragment
 * above (its header is missing from this extract). *)
378 let initial_level1_pattern_lexer () = mk_lexer level1_pattern_token
379 let initial_level2_ast_lexer () = mk_lexer level2_ast_token
380 let initial_level2_meta_lexer () = mk_lexer level2_meta_token

(* Mutable so [push]/[pop] below can save and restore whole lexers. *)
383 let level1_pattern_lexer_ref = ref (initial_level1_pattern_lexer ())
384 let level2_ast_lexer_ref = ref (initial_level2_ast_lexer ())
385 let level2_meta_lexer_ref = ref (initial_level2_meta_lexer ())

(* Accessors returning the currently-active lexers. *)
387 let level1_pattern_lexer () = !level1_pattern_lexer_ref
388 let level2_ast_lexer () = !level2_ast_lexer_ref
389 let level2_meta_lexer () = !level2_meta_lexer_ref
(* Stack of saved (keyword table, three lexers) snapshots. *)
391 let history = ref [];;

(* Push: snapshot the current keyword table and lexers, then reinstall
 * fresh ones.
 * NOTE(review): the "let push () =" header and the "history :=" line are
 * missing from this extract. *)
395 (!level2_ast_keywords,!level1_pattern_lexer_ref,
396 !level2_ast_lexer_ref,!level2_meta_lexer_ref) :: !history;
397 level2_ast_keywords := initial_level2_ast_keywords ();
398 initialize_keywords ();
399 level1_pattern_lexer_ref := initial_level1_pattern_lexer ();
400 level2_ast_lexer_ref := initial_level2_ast_lexer ();
401 level2_meta_lexer_ref := initial_level2_meta_lexer ();

(* Pop: restore the most recent snapshot.
 * NOTE(review): the "let pop () =" header, the match on !history, the
 * empty-stack case, and the "history := tl" line are missing from this
 * extract. *)
407 | (kwd,pl,al,ml) :: tl ->
408 level2_ast_keywords := kwd;
409 level1_pattern_lexer_ref := pl;
410 level2_ast_lexer_ref := al;
411 level2_meta_lexer_ref := ml;