X-Git-Url: http://matita.cs.unibo.it/gitweb/?a=blobdiff_plain;f=helm%2Fsoftware%2Fcomponents%2Fcontent_pres%2FcicNotationLexer.ml;h=00696cc00d2cec8b36e4a3a4a1c8d45000331a52;hb=8e6a30412bbac3acc0a020e0fe90d492b0fc6776;hp=f157af40696876a7f38b65946cb6c72acc588590;hpb=66929b8edb58d468a134b648466f3e9c45ba5c0e;p=helm.git

diff --git a/helm/software/components/content_pres/cicNotationLexer.ml b/helm/software/components/content_pres/cicNotationLexer.ml
index f157af406..00696cc00 100644
--- a/helm/software/components/content_pres/cicNotationLexer.ml
+++ b/helm/software/components/content_pres/cicNotationLexer.ml
@@ -30,7 +30,7 @@ open Printf
 exception Error of int * int * string
 
 let regexp number = xml_digit+
-let regexp utf8_blank = " " | "\n" | "\t" | [160] (* this is a nbsp *)
+let regexp utf8_blank = " " | "\r\n" | "\n" | "\t" | [160] (* this is a nbsp *)
 
 (* ZACK: breaks unicode's binder followed by an ascii letter without blank *)
 (* let regexp ident_letter = xml_letter *)
@@ -50,6 +50,10 @@ let is_ligature_char =
       true
     with Not_found -> false))
 
+let regexp we_proved = "we" utf8_blank+ "proved"
+let regexp we_have = "we" utf8_blank+ "have"
+let regexp let_rec = "let" utf8_blank+ "rec"
+let regexp let_corec = "let" utf8_blank+ "corec"
 let regexp ident_decoration = '\'' | '?' | '`'
 let regexp ident_cont = ident_letter | xml_digit | '_'
 let regexp ident = ident_letter ident_cont* ident_decoration*
@@ -109,8 +113,8 @@ let level2_meta_keywords =
 let level2_ast_keywords = Hashtbl.create 23
 let _ =
   List.iter (fun k -> Hashtbl.add level2_ast_keywords k ())
-  [ "CProp"; "Prop"; "Type"; "Set"; "let"; "rec"; "corec"; "match";
-    "with"; "in"; "and"; "to"; "as"; "on"; "return" ]
+  [ "CProp"; "Prop"; "Type"; "Set"; "let"; "match";
+    "with"; "in"; "and"; "to"; "as"; "on"; "return"; "done" ]
 
 let add_level2_ast_keyword k = Hashtbl.add level2_ast_keywords k ()
 let remove_level2_ast_keyword k = Hashtbl.remove level2_ast_keywords k
@@ -144,14 +148,8 @@ let error_at_end lexbuf msg =
   raise (Error (begin_cnum, end_cnum, msg))
 
 let return_with_loc token begin_cnum end_cnum =
-  (* TODO handle line/column numbers *)
-  let flocation_begin =
-    { Lexing.pos_fname = "";
-      Lexing.pos_lnum = -1; Lexing.pos_bol = -1;
-      Lexing.pos_cnum = begin_cnum }
-  in
-  let flocation_end = { flocation_begin with Lexing.pos_cnum = end_cnum } in
-  (token, (flocation_begin, flocation_end))
+  let flocation = HExtlib.floc_of_loc (begin_cnum,end_cnum) in
+  token, flocation
 
 let return lexbuf token =
   let begin_cnum, end_cnum = Ulexing.loc lexbuf in
@@ -169,17 +167,17 @@ let mk_lexer token =
 (*   let lexbuf = Ulexing.from_utf8_stream stream in *)
 (** XXX Obj.magic rationale.
  * The problem.
- * camlp4 constraints the tok_func field of Token.glexer to have type:
+ * camlp5 constraints the tok_func field of Token.glexer to have type:
  *   Stream.t char -> (Stream.t 'te * flocation_function)
  * In order to use ulex we have (in theory) to instantiate a new lexbuf each
  * time a char Stream.t is passed, destroying the previous lexbuf which may
  * have consumed a character from the old stream which is lost forever :-(
  * The "solution".
- * Instead of passing to camlp4 a char Stream.t we pass a lexbuf, casting it to
+ * Instead of passing to camlp5 a char Stream.t we pass a lexbuf, casting it to
  * char Stream.t with Obj.magic where needed.
  *)
   let lexbuf = Obj.magic stream in
-  Token.make_stream_and_flocation
+  Token.make_stream_and_location
     (fun () ->
      try
       token lexbuf
@@ -280,6 +278,10 @@ let rec ligatures_token k =
 
 and level2_ast_token =
   lexer
+  | let_rec -> return lexbuf ("LETREC","")
+  | let_corec -> return lexbuf ("LETCOREC","")
+  | we_proved -> return lexbuf ("WEPROVED","")
+  | we_have -> return lexbuf ("WEHAVE","")
   | utf8_blank+ -> ligatures_token level2_ast_token lexbuf
   | meta ->
      let s = Ulexing.utf8_lexeme lexbuf in
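
Note on the return_with_loc change above: the hand-rolled pair of Lexing.position records is replaced by a call to HExtlib.floc_of_loc. The following is a minimal sketch, reconstructed from the deleted lines, of what such a helper has to compute; it is an assumption about HExtlib's behaviour for illustration only, not the actual HExtlib implementation.

  (* Hypothetical floc_of_loc-style helper: build a (begin, end) pair of
     Lexing.position values carrying only character offsets, exactly as the
     inline code removed by this patch did. *)
  let floc_of_loc (begin_cnum, end_cnum) =
    let flocation_begin =
      { Lexing.pos_fname = "";
        Lexing.pos_lnum = -1; Lexing.pos_bol = -1;
        Lexing.pos_cnum = begin_cnum }
    in
    flocation_begin, { flocation_begin with Lexing.pos_cnum = end_cnum }

With a helper of this shape, return_with_loc reduces to the two lines introduced by the patch: token, floc_of_loc (begin_cnum, end_cnum).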