(* Copyright (C) 2005, HELM Team.
 *
 * This file is part of HELM, an Hypertextual, Electronic
 * Library of Mathematics, developed at the Computer Science
 * Department, University of Bologna, Italy.
 *
 * HELM is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * HELM is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HELM; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston,
 * MA 02111-1307, USA.
 *
 * For details, see the HELM World-Wide-Web page,
 * http://helm.cs.unibo.it/
 *)

open Printf

exception Error of int * int * string
let regexp number = xml_digit+
let regexp utf8_blank = " " | "\r\n" | "\n" | "\t" | [160] (* this is a nbsp *)
let regexp percentage =
  ('-' | "") [ '0' - '9' ] + '%'
let regexp floatwithunit =
  ('-' | "") [ '0' - '9' ] + ["."] [ '0' - '9' ] + ([ 'a' - 'z' ] + | "" )
let regexp color = "#" [ '0' - '9' 'a' - 'f' 'A' - 'F' ] [ '0' - '9' 'a' - 'f'
  'A' - 'F' ] [ '0' - '9' 'a' - 'f' 'A' - 'F' ] [ '0' - '9' 'a' - 'f' 'A' - 'F' ]
  [ '0' - '9' 'a' - 'f' 'A' - 'F' ] [ '0' - '9' 'a' - 'f' 'A' - 'F' ]

(* ZACK: breaks unicode's binder followed by an ascii letter without blank *)
(* let regexp ident_letter = xml_letter *)

let regexp ident_letter = [ 'a' - 'z' 'A' - 'Z' ]

(* must be in sync with "is_ligature_char" below *)
let regexp ligature_char = [ "'`~!?@*()[]<>-+=|:;.,/\"" ]
let regexp ligature = ligature_char ligature_char+
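
(* Illustrative sketch: with the definitions above, a run of two or more
 * ligature characters such as "->" or ":=" matches the "ligature" regexp,
 * while a single character such as "-" does not; single characters fall
 * through to the catch-all symbol case of the token lexers below. *)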
let regexp we_proved = "we" utf8_blank+ "proved"
let regexp we_have = "we" utf8_blank+ "have"
let regexp let_rec = "let" utf8_blank+ "rec"
let regexp let_corec = "let" utf8_blank+ "corec"
let regexp nlet_rec = "nlet" utf8_blank+ "rec"
let regexp nlet_corec = "nlet" utf8_blank+ "corec"
let regexp ident_decoration = '\'' | '?' | '`'
let regexp ident_cont = ident_letter | xml_digit | '_'
let regexp ident_start = ident_letter
let regexp ident = ident_letter ident_cont* ident_decoration*
let regexp variable_ident = '_' '_' number
let regexp pident = '_' ident

let regexp tex_token = '\\' ident

let regexp delim_begin = "\\["
let regexp delim_end = "\\]"

let regexp qkeyword = "'" ( ident | pident ) "'"

let regexp implicit = '?'
let regexp placeholder = '%'
let regexp meta = implicit number

let regexp csymbol = '\'' ident

let regexp begin_group = "@{" | "${"
let regexp end_group = '}'
let regexp wildcard = "$_"
let regexp ast_ident = "@" ident
let regexp ast_csymbol = "@" csymbol
let regexp meta_ident = "$" ident
let regexp meta_anonymous = "$_"
let regexp qstring = '"' [^ '"']* '"'

let regexp begincomment = "(**" utf8_blank
let regexp beginnote = "(*"
let regexp endcomment = "*)"

(* let regexp comment_char = [^'*'] | '*'[^')']
let regexp note = "|+" ([^'*'] | "**") comment_char* "+|" *)
95 "over"; "atop"; "frac";
96 "sqrt"; "root"; "mstyle" ; "mpadded"; "maction"
100 let level1_keywords =
101 [ "hbox"; "hvbox"; "hovbox"; "vbox";
103 "list0"; "list1"; "sep";
105 "term"; "ident"; "number";
108 let level2_meta_keywords =
109 [ "if"; "then"; "elCicNotationParser.se";
110 "fold"; "left"; "right"; "rec";
113 "anonymous"; "ident"; "number"; "term"; "fresh"
(* (string, unit) Hashtbl.t, to exploit multiple bindings *)
let initial_level2_ast_keywords () = Hashtbl.create 23;;

let level2_ast_keywords = ref (initial_level2_ast_keywords ())

let initialize_keywords () =
  List.iter (fun k -> Hashtbl.add !level2_ast_keywords k ())
    [ "CProp"; "Prop"; "Type"; "Set"; "let"; "match";
      "with"; "in"; "and"; "to"; "as"; "on"; "return"; "done" ]
;;

let _ = initialize_keywords ();;

let add_level2_ast_keyword k = Hashtbl.add !level2_ast_keywords k ()
let remove_level2_ast_keyword k = Hashtbl.remove !level2_ast_keywords k
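
(* Illustrative sketch of the multiple-bindings behaviour: Hashtbl.add
 * stacks bindings and Hashtbl.remove pops only the most recent one, so
 * adding a keyword twice and removing it once leaves it registered, e.g.
 *
 *   add_level2_ast_keyword "lemma";
 *   add_level2_ast_keyword "lemma";
 *   remove_level2_ast_keyword "lemma";
 *   assert (Hashtbl.mem !level2_ast_keywords "lemma")
 *
 * "lemma" here is only a made-up example keyword. *)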
(* (string, string) Hashtbl.t, with multiple bindings;
 * the value is the unicode symbol the ligature expands to *)
let ligatures = Hashtbl.create 23

let _ =
  List.iter
    (fun (ligature, symbol) -> Hashtbl.add ligatures ligature symbol)
    [ ("->", <:unicode<to>>); ("=>", <:unicode<Rightarrow>>);
      (":=", <:unicode<def>>);
    ]
let regexp uri_step = [ 'a' - 'z' 'A' - 'Z' '0' - '9' '_' '-' ''' ]+

let regexp uri =
  ("cic:/" | "theory:/")                    (* schema *)
(*   ident ('/' ident)*                        |+ path +| *)
  uri_step ('/' uri_step)*                  (* path *)
  ('.' ident)+                              (* ext *)
  ("#xpointer(" number ('/' number)+ ")")?  (* xpointer *)

let regexp nreference =
  "cic:/"                                   (* schema *)
  uri_step ('/' uri_step)*                  (* path *)
  "#"
  ( "dec"
  | "def" "(" number ")"
  | "fix" "(" number "," number "," number ")"
  | "cfx" "(" number ")"
  | "ind" "(" number "," number "," number ")"
  | "con" "(" number "," number "," number ")") (* ext + reference *)
let error lexbuf msg =
  let begin_cnum, end_cnum = Ulexing.loc lexbuf in
  raise (Error (begin_cnum, end_cnum, msg))
let error_at_end lexbuf msg =
  let begin_cnum, end_cnum = Ulexing.loc lexbuf in
  raise (Error (begin_cnum, end_cnum, msg))

let return_with_loc token begin_cnum end_cnum =
  let flocation = HExtlib.floc_of_loc (begin_cnum,end_cnum) in
  (token, flocation)

let return lexbuf token =
  let begin_cnum, end_cnum = Ulexing.loc lexbuf in
  return_with_loc token begin_cnum end_cnum

let return_lexeme lexbuf name = return lexbuf (name, Ulexing.utf8_lexeme lexbuf)

let return_symbol lexbuf s = return lexbuf ("SYMBOL", s)
let return_eoi lexbuf = return lexbuf ("EOI", "")

let remove_quotes s = String.sub s 1 (String.length s - 2)
let mk_lexer token =
  let tok_func stream =
(*     let lexbuf = Ulexing.from_utf8_stream stream in *)
(** XXX Obj.magic rationale.
 * camlp5 constrains the tok_func field of Token.glexer to have type:
 *   Stream.t char -> (Stream.t 'te * flocation_function)
 * In order to use ulex we have (in theory) to instantiate a new lexbuf each
 * time a char Stream.t is passed, destroying the previous lexbuf which may
 * have consumed a character from the old stream which is lost forever :-(
 * Instead of passing to camlp5 a char Stream.t we pass a lexbuf, casting it to
 * char Stream.t with Obj.magic where needed.
 *)
    let lexbuf = Obj.magic stream in
    Token.make_stream_and_location
      (fun () ->
        try
          token lexbuf
        with
        | Ulexing.Error -> error_at_end lexbuf "Unexpected character"
        | Ulexing.InvalidCodepoint p ->
            error_at_end lexbuf (sprintf "Invalid code point: %d" p))
  in
  {
    Token.tok_func = tok_func;
    Token.tok_using = (fun _ -> ());
    Token.tok_removing = (fun _ -> ());
    Token.tok_match = Token.default_match;
    Token.tok_text = Token.lexer_text;
    Token.tok_comm = None;
  }
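
(* Usage sketch (an assumption, not something this file does itself): the
 * record built by mk_lexer is a camlp5 Token.glexer, so the parser can
 * instantiate a grammar from one of the lexers exported below along the
 * lines of
 *
 *   let grammar = Grammar.gcreate (level2_ast_lexer ())
 *
 * the actual call site lives in the notation parser, not here. *)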
let expand_macro lexbuf =
  let macro =
    Ulexing.utf8_sub_lexeme lexbuf 1 (Ulexing.lexeme_length lexbuf - 1)
  in
  try
    ("SYMBOL", Utf8Macro.expand macro)
  with Utf8Macro.Macro_not_found _ ->
(* FG: unexpanded TeX macros are terminated by a space for rendering *)
    "SYMBOL", (Ulexing.utf8_lexeme lexbuf ^ " ")

let remove_quotes s = String.sub s 1 (String.length s - 2)
let remove_left_quote s = String.sub s 1 (String.length s - 1)
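
(* Behaviour sketch: a TeX-like token such as \to is stripped of its
 * leading backslash and looked up with Utf8Macro.expand; a known macro is
 * returned as a SYMBOL token carrying the corresponding unicode symbol,
 * while an unknown macro (say \foobar, a made-up name) is returned
 * verbatim with a trailing blank, as the FG comment above explains. *)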
let rec level2_pattern_token_group counter buffer =
  lexer
  | end_group ->
      if (counter > 0) then
        Buffer.add_string buffer (Ulexing.utf8_lexeme lexbuf) ;
      snd (Ulexing.loc lexbuf)
  | begin_group ->
      Buffer.add_string buffer (Ulexing.utf8_lexeme lexbuf) ;
      ignore (level2_pattern_token_group (counter + 1) buffer lexbuf) ;
      level2_pattern_token_group counter buffer lexbuf
  | _ ->
      Buffer.add_string buffer (Ulexing.utf8_lexeme lexbuf) ;
      level2_pattern_token_group counter buffer lexbuf

let read_unparsed_group token_name lexbuf =
  let buffer = Buffer.create 16 in
  let begin_cnum, _ = Ulexing.loc lexbuf in
  let end_cnum = level2_pattern_token_group 0 buffer lexbuf in
  return_with_loc (token_name, Buffer.contents buffer) begin_cnum end_cnum
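
(* Sketch: read_unparsed_group is invoked right after the opening "@{" or
 * "${" has been consumed; it copies everything up to the matching "}"
 * (tracking nested groups via the counter) into the buffer and returns a
 * single token, e.g. the input @{ eq x y } yields
 *   ("UNPARSED_AST", " eq x y ")
 * located from the opening delimiter to the closing brace. *)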
let handle_keywords lexbuf k name =
  let s = Ulexing.utf8_lexeme lexbuf in
  if k s then
    return lexbuf ("", s)
  else
    return lexbuf (name, s)
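
(* Sketch: handle_keywords turns a lexeme that the predicate k recognises
 * as a keyword into a token with an empty token class (the camlp5
 * convention for keywords), and any other lexeme into a (name, lexeme)
 * token; e.g. for the level 2 AST lexer "match" becomes ("", "match")
 * while a plain identifier like "foo" becomes ("IDENT", "foo"). *)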
let rec level2_meta_token =
  lexer
  | utf8_blank+ -> level2_meta_token lexbuf
  | ident ->
      handle_keywords lexbuf (fun x -> List.mem x level2_meta_keywords) "IDENT"
  | variable_ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf)
  | pident ->
      handle_keywords lexbuf (fun x -> List.mem x level2_meta_keywords) "PIDENT"
  | "@{" -> read_unparsed_group "UNPARSED_AST" lexbuf
  | ast_ident ->
      return lexbuf ("UNPARSED_AST",
        remove_left_quote (Ulexing.utf8_lexeme lexbuf))
  | ast_csymbol ->
      return lexbuf ("UNPARSED_AST",
        remove_left_quote (Ulexing.utf8_lexeme lexbuf))
  | eof -> return_eoi lexbuf
let rec comment_token acc depth =
  lexer
  | beginnote ->
      let acc = acc ^ Ulexing.utf8_lexeme lexbuf in
      comment_token acc (depth + 1) lexbuf
  | endcomment ->
      let acc = acc ^ Ulexing.utf8_lexeme lexbuf in
      if depth = 0
      then acc
      else comment_token acc (depth - 1) lexbuf
  | _ ->
      let acc = acc ^ Ulexing.utf8_lexeme lexbuf in
      comment_token acc depth lexbuf
(** @param k continuation to be invoked when no ligature has been found *)
let rec ligatures_token k =
  lexer
  | ligature ->
      let lexeme = Ulexing.utf8_lexeme lexbuf in
      (match List.rev (Hashtbl.find_all ligatures lexeme) with
      | [] -> (* ligature not found, rollback and try default lexer *)
          Ulexing.rollback lexbuf;
          k lexbuf
      | default_lig :: _ -> (* ligatures found, use the default one *)
          return_symbol lexbuf default_lig)
  | eof -> return_eoi lexbuf
  | _ -> (* not a ligature, rollback and try default lexer *)
      Ulexing.rollback lexbuf;
      k lexbuf
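
(* Sketch: ligatures_token peeks at a maximal run of ligature characters.
 * If the run is a known ligature (e.g. "->" or ":="), the default symbol
 * registered for it is returned; otherwise the lexbuf is rolled back and
 * the continuation k (one of the token lexers below) re-reads the same
 * characters one token at a time. *)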
and level2_ast_token =
  lexer
  | let_rec -> return lexbuf ("LETREC","")
  | let_corec -> return lexbuf ("LETCOREC","")
  | nlet_rec -> return lexbuf ("NLETREC","")
  | nlet_corec -> return lexbuf ("NLETCOREC","")
  | we_proved -> return lexbuf ("WEPROVED","")
  | we_have -> return lexbuf ("WEHAVE","")
  | utf8_blank+ -> ligatures_token level2_ast_token lexbuf
  | meta ->
      let s = Ulexing.utf8_lexeme lexbuf in
      return lexbuf ("META", String.sub s 1 (String.length s - 1))
  | implicit -> return lexbuf ("IMPLICIT", "")
  | placeholder -> return lexbuf ("PLACEHOLDER", "")
  | ident -> handle_keywords lexbuf (Hashtbl.mem !level2_ast_keywords) "IDENT"
  | variable_ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf)
  | pident -> handle_keywords lexbuf (Hashtbl.mem !level2_ast_keywords) "PIDENT"
  | number -> return lexbuf ("NUMBER", Ulexing.utf8_lexeme lexbuf)
  | tex_token -> return lexbuf (expand_macro lexbuf)
  | nreference -> return lexbuf ("NREF", Ulexing.utf8_lexeme lexbuf)
  | uri -> return lexbuf ("URI", Ulexing.utf8_lexeme lexbuf)
  | qstring ->
      return lexbuf ("QSTRING", remove_quotes (Ulexing.utf8_lexeme lexbuf))
  | csymbol ->
      return lexbuf ("CSYMBOL", remove_left_quote (Ulexing.utf8_lexeme lexbuf))
  | "${" -> read_unparsed_group "UNPARSED_META" lexbuf
  | "@{" -> read_unparsed_group "UNPARSED_AST" lexbuf
  | '(' -> return lexbuf ("LPAREN", "")
  | ')' -> return lexbuf ("RPAREN", "")
  | meta_ident ->
      return lexbuf ("UNPARSED_META",
        remove_left_quote (Ulexing.utf8_lexeme lexbuf))
  | meta_anonymous -> return lexbuf ("UNPARSED_META", "anonymous")
  | beginnote ->
      let _comment = comment_token (Ulexing.utf8_lexeme lexbuf) 0 lexbuf in
(*       let comment =
        Ulexing.utf8_sub_lexeme lexbuf 2 (Ulexing.lexeme_length lexbuf - 4)
      in
      return lexbuf ("NOTE", comment) *)
      ligatures_token level2_ast_token lexbuf
  | begincomment -> return lexbuf ("BEGINCOMMENT","")
  | endcomment -> return lexbuf ("ENDCOMMENT","")
  | eof -> return_eoi lexbuf
  | _ -> return_symbol lexbuf (Ulexing.utf8_lexeme lexbuf)
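
(* Illustrative sketch (made-up input): lexing the level 2 AST text
 *   let rec f x := ?1 in f
 * produces a token stream along the lines of
 *   ("LETREC", "") ("IDENT", "f") ("IDENT", "x") ("SYMBOL", ...)
 *   ("META", "1") ("", "in") ("IDENT", "f") ("EOI", "")
 * where ":=" goes through the ligature table above and "in" is a
 * registered level 2 AST keyword, hence its empty token class. *)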
and level1_pattern_token =
  lexer
  | utf8_blank+ -> ligatures_token level1_pattern_token lexbuf
  | number -> return lexbuf ("NUMBER", Ulexing.utf8_lexeme lexbuf)
  | ident -> handle_keywords lexbuf (fun x -> List.mem x level1_keywords) "IDENT"
  | variable_ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf)
  | pident -> handle_keywords lexbuf (fun x -> List.mem x level1_keywords) "PIDENT"
  | color -> return lexbuf ("COLOR", Ulexing.utf8_lexeme lexbuf)
  | percentage ->
      return lexbuf ("PERCENTAGE", Ulexing.utf8_lexeme lexbuf)
  | floatwithunit ->
      return lexbuf ("FLOATWITHUNIT", Ulexing.utf8_lexeme lexbuf)
  | tex_token -> return lexbuf (expand_macro lexbuf)
  | qkeyword ->
      return lexbuf ("QKEYWORD", remove_quotes (Ulexing.utf8_lexeme lexbuf))
  | '(' -> return lexbuf ("LPAREN", "")
  | ')' -> return lexbuf ("RPAREN", "")
  | eof -> return_eoi lexbuf
  | _ -> return_symbol lexbuf (Ulexing.utf8_lexeme lexbuf)
let level1_pattern_token = ligatures_token level1_pattern_token
let level2_ast_token = ligatures_token level2_ast_token

(* API implementation *)

let initial_level1_pattern_lexer () = mk_lexer level1_pattern_token
let initial_level2_ast_lexer () = mk_lexer level2_ast_token
let initial_level2_meta_lexer () = mk_lexer level2_meta_token

let level1_pattern_lexer_ref = ref (initial_level1_pattern_lexer ())
let level2_ast_lexer_ref = ref (initial_level2_ast_lexer ())
let level2_meta_lexer_ref = ref (initial_level2_meta_lexer ())

let level1_pattern_lexer () = !level1_pattern_lexer_ref
let level2_ast_lexer () = !level2_ast_lexer_ref
let level2_meta_lexer () = !level2_meta_lexer_ref
let history = ref [];;

let push () =
  history :=
    (!level2_ast_keywords,!level1_pattern_lexer_ref,
     !level2_ast_lexer_ref,!level2_meta_lexer_ref) :: !history;
  level2_ast_keywords := initial_level2_ast_keywords ();
  initialize_keywords ();
  level1_pattern_lexer_ref := initial_level1_pattern_lexer ();
  level2_ast_lexer_ref := initial_level2_ast_lexer ();
  level2_meta_lexer_ref := initial_level2_meta_lexer ();
;;

let pop () =
  match !history with
  | [] -> assert false
  | (kwd,pl,al,ml) :: tl ->
      level2_ast_keywords := kwd;
      level1_pattern_lexer_ref := pl;
      level2_ast_lexer_ref := al;
      level2_meta_lexer_ref := ml;
      history := tl