X-Git-Url: http://matita.cs.unibo.it/gitweb/?a=blobdiff_plain;f=helm%2Focaml%2Fcic_notation%2FcicNotationLexer.ml;h=33fb8fd78494085a819c9d65fb11e2020a654c42;hb=97c2d258a5c524eb5c4b85208899d80751a2c82f;hp=9897412b95a21b63e291f486e3421b057d6e0ebf;hpb=e20f3963028a966fc93ba0d611c4aa8341d20e2c;p=helm.git

diff --git a/helm/ocaml/cic_notation/cicNotationLexer.ml b/helm/ocaml/cic_notation/cicNotationLexer.ml
index 9897412b9..33fb8fd78 100644
--- a/helm/ocaml/cic_notation/cicNotationLexer.ml
+++ b/helm/ocaml/cic_notation/cicNotationLexer.ml
@@ -34,7 +34,20 @@ let regexp number = xml_digit+
 let regexp ident_letter = [ 'a' - 'z' 'A' - 'Z' ]
 
-let regexp ident_decoration = '\'' | '!' | '?' | '`'
+  (* must be in sync with "is_ligature_char" below *)
+let regexp ligature_char = [ "'`~!?@*()[]<>-+=|:;.,/\"" ]
+let regexp ligature = ligature_char ligature_char+
+
+let is_ligature_char =
+  (* must be in sync with "regexp ligature_char" above *)
+  let chars = "'`~!?@*()[]<>-+=|:;.,/\"" in
+  (fun char ->
+    (try
+      ignore (String.index chars char);
+      true
+    with Not_found -> false))
+
+let regexp ident_decoration = '\'' | '?' | '`'
 let regexp ident_cont = ident_letter | xml_digit | '_'
 let regexp ident = ident_letter ident_cont* ident_decoration*
 
@@ -61,9 +74,10 @@ let regexp meta_anonymous = "$_"
 let regexp qstring = '"' [^ '"']* '"'
 
 let regexp begincomment = "(**" xml_blank
+let regexp beginnote = "(*"
 let regexp endcomment = "*)"
-let regexp comment_char = [^'*'] | '*'[^')']
-let regexp note = "(*" ([^'*'] | "**") comment_char* "*)"
+(* let regexp comment_char = [^'*'] | '*'[^')']
+let regexp note = "|+" ([^'*'] | "**") comment_char* "+|" *)
 
 let level1_layouts =
   [ "sub"; "sup";
@@ -91,18 +105,30 @@ let level2_meta_keywords =
   (* (string, unit) Hashtbl.t, to exploit multiple bindings *)
 let level2_ast_keywords = Hashtbl.create 23
 let _ =
-  (* TODO ZACK: keyword list almost cut and paste from cicTextualLexer2.ml, to
-   * be reviewed *)
   List.iter (fun k -> Hashtbl.add level2_ast_keywords k ())
-  [ "Prop"; "Type"; "Set"; "let"; "rec"; "corec"; "using"; "match"; "with";
-    "in"; "and"; "to"; "as"; "on"; "names" ]
+  [ "CProp"; "Prop"; "Type"; "Set"; "let"; "rec"; "corec"; "match";
+    "with"; "in"; "and"; "to"; "as"; "on"; "return" ]
 
 let add_level2_ast_keyword k = Hashtbl.add level2_ast_keywords k ()
 let remove_level2_ast_keyword k = Hashtbl.remove level2_ast_keywords k
 
+  (* (string, int) Hashtbl.t, with multiple bindings.
+   * int is the unicode codepoint *)
+let ligatures = Hashtbl.create 23
+let _ =
+  List.iter
+    (fun (ligature, symbol) -> Hashtbl.add ligatures ligature symbol)
+    [ ("->", <:unicode>); ("=>", <:unicode>);
+      ("<=", <:unicode>); (">=", <:unicode>);
+      ("<>", <:unicode>); (":=", <:unicode>);
+    ]
+
+let regexp uri_step = [ 'a' - 'z' 'A' - 'Z' '0' - '9' '_' '-' ]+
+
 let regexp uri =
   ("cic:/" | "theory:/")                    (* schema *)
-  ident ('/' ident)*                        (* path *)
+(*   ident ('/' ident)*                     |+ path +| *)
+  uri_step ('/' uri_step)*                  (* path *)
   ('.' ident)+                              (* ext *)
   ("#xpointer(" number ('/' number)+ ")")?  (* xpointer *)
 
@@ -129,11 +155,26 @@ let return lexbuf token =
 let return_lexeme lexbuf name = return lexbuf (name, Ulexing.utf8_lexeme lexbuf)
 
+let return_symbol lexbuf s = return lexbuf ("SYMBOL", s)
+let return_eoi lexbuf = return lexbuf ("EOI", "")
+
 let remove_quotes s = String.sub s 1 (String.length s - 2)
 
 let mk_lexer token =
   let tok_func stream =
-    let lexbuf = Ulexing.from_utf8_stream stream in
+(*     let lexbuf = Ulexing.from_utf8_stream stream in *)
+(** XXX Obj.magic rationale.
+ * The problem.
+ * camlp4 constrains the tok_func field of Token.glexer to have type:
+ *   Stream.t char -> (Stream.t 'te * flocation_function)
+ * In order to use ulex we have (in theory) to instantiate a new lexbuf each
+ * time a char Stream.t is passed, destroying the previous lexbuf which may
+ * have consumed a character from the old stream which is lost forever :-(
+ * The "solution".
+ * Instead of passing to camlp4 a char Stream.t we pass a lexbuf, casting it to
+ * char Stream.t with Obj.magic where needed.
+ *)
+    let lexbuf = Obj.magic stream in
     Token.make_stream_and_flocation
       (fun () ->
         try
@@ -163,7 +204,8 @@ let expand_macro lexbuf =
 let remove_quotes s = String.sub s 1 (String.length s - 2)
 let remove_left_quote s = String.sub s 1 (String.length s - 1)
 
-let rec level2_pattern_token_group counter buffer = lexer
+let rec level2_pattern_token_group counter buffer =
+  lexer
   | end_group ->
       if (counter > 0) then
         Buffer.add_string buffer (Ulexing.utf8_lexeme lexbuf) ;
@@ -182,7 +224,8 @@ let read_unparsed_group token_name lexbuf =
     let end_cnum = level2_pattern_token_group 0 buffer lexbuf in
     return_with_loc (token_name, Buffer.contents buffer) begin_cnum end_cnum
 
-let rec level2_meta_token = lexer
+let rec level2_meta_token =
+  lexer
   | xml_blank+ -> level2_meta_token lexbuf
   | ident ->
       let s = Ulexing.utf8_lexeme lexbuf in
@@ -199,10 +242,41 @@ let rec level2_meta_token = lexer
   | ast_csymbol ->
       return lexbuf ("UNPARSED_AST",
        remove_left_quote (Ulexing.utf8_lexeme lexbuf))
-  | eof -> return lexbuf ("EOI", "")
-
-let rec level2_ast_token = lexer
-  | xml_blank+ -> level2_ast_token lexbuf
+  | eof -> return_eoi lexbuf
+
+let rec comment_token acc depth =
+  lexer
+  | beginnote ->
+      let acc = acc ^ Ulexing.utf8_lexeme lexbuf in
+      comment_token acc (depth + 1) lexbuf
+  | endcomment ->
+      let acc = acc ^ Ulexing.utf8_lexeme lexbuf in
+      if depth = 0
+      then acc
+      else comment_token acc (depth - 1) lexbuf
+  | _ ->
+      let acc = acc ^ Ulexing.utf8_lexeme lexbuf in
+      comment_token acc depth lexbuf
+
+  (** @param k continuation to be invoked when no ligature has been found *)
+let rec ligatures_token k =
+  lexer
+  | ligature ->
+      let lexeme = Ulexing.utf8_lexeme lexbuf in
+      (match List.rev (Hashtbl.find_all ligatures lexeme) with
+      | [] -> (* ligature not found, rollback and try default lexer *)
+          Ulexing.rollback lexbuf;
+          k lexbuf
+      | default_lig :: _ -> (* ligatures found, use the default one *)
+          return_symbol lexbuf default_lig)
+  | eof -> return_eoi lexbuf
+  | _ -> (* not a ligature, rollback and try default lexer *)
+      Ulexing.rollback lexbuf;
+      k lexbuf
+
+and level2_ast_token =
+  lexer
+  | xml_blank+ -> ligatures_token level2_ast_token lexbuf
   | meta -> return lexbuf ("META", Ulexing.utf8_lexeme lexbuf)
   | implicit -> return lexbuf ("IMPLICIT", "")
   | placeholder -> return lexbuf ("PLACEHOLDER", "")
@@ -227,18 +301,21 @@ let rec level2_ast_token = lexer
       return lexbuf ("UNPARSED_META",
         remove_left_quote (Ulexing.utf8_lexeme lexbuf))
   | meta_anonymous -> return lexbuf ("UNPARSED_META", "anonymous")
-  | note ->
-      let comment =
+  | beginnote ->
+      let comment = comment_token (Ulexing.utf8_lexeme lexbuf) 0 lexbuf in
+(*       let comment =
         Ulexing.utf8_sub_lexeme lexbuf 2 (Ulexing.lexeme_length lexbuf - 4)
       in
-      return lexbuf ("NOTE", comment)
+      return lexbuf ("NOTE", comment) *)
+      ligatures_token level2_ast_token lexbuf
   | begincomment -> return lexbuf ("BEGINCOMMENT","")
   | endcomment -> return lexbuf ("ENDCOMMENT","")
-  | eof -> return lexbuf ("EOI", "")
-  | _ -> return lexbuf ("SYMBOL", Ulexing.utf8_lexeme lexbuf)
+  | eof -> return_eoi lexbuf
+  | _ -> return_symbol lexbuf (Ulexing.utf8_lexeme lexbuf)
 
-let rec level1_pattern_token = lexer
-  | xml_blank+ -> level1_pattern_token lexbuf
+and level1_pattern_token =
+  lexer
+  | xml_blank+ -> ligatures_token level1_pattern_token lexbuf
   | number -> return lexbuf ("NUMBER", Ulexing.utf8_lexeme lexbuf)
   | ident ->
       let s = Ulexing.utf8_lexeme lexbuf in
@@ -253,8 +330,11 @@ let rec level1_pattern_token = lexer
       return lexbuf ("QKEYWORD", remove_quotes (Ulexing.utf8_lexeme lexbuf))
   | '(' -> return lexbuf ("LPAREN", "")
   | ')' -> return lexbuf ("RPAREN", "")
-  | eof -> return lexbuf ("EOI", "")
-  | _ -> return lexbuf ("SYMBOL", Ulexing.utf8_lexeme lexbuf)
+  | eof -> return_eoi lexbuf
+  | _ -> return_symbol lexbuf (Ulexing.utf8_lexeme lexbuf)
+
+let level1_pattern_token = ligatures_token level1_pattern_token
+let level2_ast_token = ligatures_token level2_ast_token
 
 (* API implementation *)
 
@@ -262,3 +342,10 @@ let level1_pattern_lexer = mk_lexer level1_pattern_token
 let level2_ast_lexer = mk_lexer level2_ast_token
 let level2_meta_lexer = mk_lexer level2_meta_token
 
+let lookup_ligatures lexeme =
+  try
+    if lexeme.[0] = '\\'
+    then [ Utf8Macro.expand (String.sub lexeme 1 (String.length lexeme - 1)) ]
+    else List.rev (Hashtbl.find_all ligatures lexeme)
+  with Invalid_argument _ | Utf8Macro.Macro_not_found _ as exn -> []
+
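
The ligature support added above leans on OCaml's multi-binding hash tables: a lexeme may be bound to several symbols, Hashtbl.find_all returns those bindings most-recently-added first, and List.rev puts the first-registered binding at the head so ligatures_token and lookup_ligatures can treat it as the default. A minimal standalone sketch of that lookup discipline, with hypothetical table contents and plain UTF-8 strings standing in for the <:unicode> quotations of the patch:

(* Sketch only: mirrors the Hashtbl.add / Hashtbl.find_all / List.rev idiom of
 * the patch; demo_ligatures and demo_lookup are illustrative names, not part
 * of cicNotationLexer.ml. *)
let demo_ligatures : (string, string) Hashtbl.t = Hashtbl.create 23

let () =
  List.iter
    (fun (lig, sym) -> Hashtbl.add demo_ligatures lig sym)
    [ ("->", "\xe2\x86\x92");   (* first binding registered: the default *)
      ("->", "\xe2\x87\x92");   (* a later, alternative binding for the demo *)
      (":=", "\xe2\x89\x9d") ]

(* All symbols bound to a lexeme, default (first-registered) first. *)
let demo_lookup lexeme = List.rev (Hashtbl.find_all demo_ligatures lexeme)

let () =
  match demo_lookup "->" with
  | default :: _ -> print_endline default   (* the lexer would emit this one *)
  | [] -> prerr_endline "no ligature for this lexeme"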
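
comment_token above swallows a whole, possibly nested, "(* ... *)" note by tracking the nesting depth: beginnote pushes the depth up, endcomment either returns the accumulated text (depth 0) or pops one level. The same bookkeeping can be shown without ulex; scan_comment below is a hypothetical helper over a plain string, written only to illustrate the depth logic and assuming the opening "(*" has already been consumed:

(* Sketch only: accumulate text until the "*)" that matches an already
 * consumed "(*", counting nested "(*" / "*)" pairs exactly as comment_token
 * does on its ulex lexbuf. *)
let scan_comment (s : string) (start : int) : string * int =
  let buf = Buffer.create 16 in
  let len = String.length s in
  let rec go i depth =
    if i >= len then (Buffer.contents buf, i)         (* unterminated comment *)
    else if i + 1 < len && s.[i] = '(' && s.[i + 1] = '*' then begin
      Buffer.add_string buf "(*";                     (* nested opener: deeper *)
      go (i + 2) (depth + 1)
    end else if i + 1 < len && s.[i] = '*' && s.[i + 1] = ')' then begin
      Buffer.add_string buf "*)";
      if depth = 0 then (Buffer.contents buf, i + 2)  (* matching closer found *)
      else go (i + 2) (depth - 1)
    end else begin
      Buffer.add_char buf s.[i];
      go (i + 1) depth
    end
  in
  go start 0

let () =
  let text, stop = scan_comment "a (* nested *) note *) rest" 0 in
  Printf.printf "%S consumed up to %d\n" text stop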