X-Git-Url: http://matita.cs.unibo.it/gitweb/?a=blobdiff_plain;f=helm%2Focaml%2Fcic_notation%2FcicNotationLexer.ml;h=9c0efc8bb99a8b06c6b1e684637c1da562ba7c55;hb=08ecc780b3b0a4cac7ed72cf68c310e4eeffa2c1;hp=6ff589384a707e7db30b878c8433e2b9791d7c35;hpb=879797d6505bc39489009d9ae1e2506022bde9e2;p=helm.git diff --git a/helm/ocaml/cic_notation/cicNotationLexer.ml b/helm/ocaml/cic_notation/cicNotationLexer.ml index 6ff589384..9c0efc8bb 100644 --- a/helm/ocaml/cic_notation/cicNotationLexer.ml +++ b/helm/ocaml/cic_notation/cicNotationLexer.ml @@ -23,58 +23,132 @@ * http://helm.cs.unibo.it/ *) -exception Error of int * int * string +open Printf -exception Not_an_extended_ident +exception Error of int * int * string let regexp number = xml_digit+ + (* ZACK: breaks unicode's binder followed by an ascii letter without blank *) +(* let regexp ident_letter = xml_letter *) + +let regexp ident_letter = [ 'a' - 'z' 'A' - 'Z' ] + let regexp ident_decoration = '\'' | '!' | '?' | '`' -let regexp ident_cont = xml_letter | xml_digit | '_' -let regexp ident = xml_letter ident_cont* ident_decoration* +let regexp ident_cont = ident_letter | xml_digit | '_' +let regexp ident = ident_letter ident_cont* ident_decoration* let regexp tex_token = '\\' ident -let regexp keyword = '"' ident '"' +let regexp delim_begin = "\\[" +let regexp delim_end = "\\]" + +let regexp qkeyword = "'" ident "'" let regexp implicit = '?' +let regexp placeholder = '%' let regexp meta = implicit number +let regexp csymbol = '\'' ident + +let regexp begin_group = "@{" | "${" +let regexp end_group = '}' +let regexp wildcard = "$_" +let regexp ast_ident = "@" ident +let regexp ast_csymbol = "@" csymbol +let regexp meta_ident = "$" ident +let regexp meta_anonymous = "$_" +let regexp qstring = '"' [^ '"']* '"' + +let regexp begincomment = "(**" xml_blank +let regexp endcomment = "*)" +let regexp comment_char = [^'*'] | '*'[^')'] +let regexp note = "(*" ([^'*'] | "**") comment_char* "*)" + +let level1_layouts = + [ "sub"; "sup"; + "below"; "above"; + "over"; "atop"; "frac"; + "sqrt"; "root" + ] + +let level1_keywords = + [ "hbox"; "hvbox"; "hovbox"; "vbox"; + "break"; + "list0"; "list1"; "sep"; + "opt"; + "term"; "ident"; "number" + ] @ level1_layouts + +let level2_meta_keywords = + [ "if"; "then"; "else"; + "fold"; "left"; "right"; "rec"; + "fail"; + "default"; + "anonymous"; "ident"; "number"; "term"; "fresh" + ] + + (* (string, unit) Hashtbl.t, to exploit multiple bindings *) +let level2_ast_keywords = Hashtbl.create 23 +let _ = + List.iter (fun k -> Hashtbl.add level2_ast_keywords k ()) + [ "CProp"; "Prop"; "Type"; "Set"; "let"; "rec"; "corec"; "using"; "match"; + "with"; "in"; "and"; "to"; "as"; "on"; "names" ] + +let add_level2_ast_keyword k = Hashtbl.add level2_ast_keywords k () +let remove_level2_ast_keyword k = Hashtbl.remove level2_ast_keywords k + +let regexp uri = + ("cic:/" | "theory:/") (* schema *) + ident ('/' ident)* (* path *) + ('.' ident)+ (* ext *) + ("#xpointer(" number ('/' number)+ ")")? 
(* xpointer *) + let error lexbuf msg = - raise (Error (Ulexing.lexeme_start lexbuf, Ulexing.lexeme_end lexbuf, msg)) + let begin_cnum, end_cnum = Ulexing.loc lexbuf in + raise (Error (begin_cnum, end_cnum, msg)) let error_at_end lexbuf msg = - raise (Error (Ulexing.lexeme_end lexbuf, Ulexing.lexeme_end lexbuf, msg)) + let begin_cnum, end_cnum = Ulexing.loc lexbuf in + raise (Error (begin_cnum, end_cnum, msg)) -let return lexbuf token = +let return_with_loc token begin_cnum end_cnum = + (* TODO handle line/column numbers *) let flocation_begin = - { Lexing.pos_fname = ""; Lexing.pos_lnum = -1; Lexing.pos_bol = -1; - Lexing.pos_cnum = Ulexing.lexeme_start lexbuf } - in - let flocation_end = - { flocation_begin with Lexing.pos_cnum = Ulexing.lexeme_end lexbuf } + { Lexing.pos_fname = ""; + Lexing.pos_lnum = -1; Lexing.pos_bol = -1; + Lexing.pos_cnum = begin_cnum } in + let flocation_end = { flocation_begin with Lexing.pos_cnum = end_cnum } in (token, (flocation_begin, flocation_end)) +let return lexbuf token = + let begin_cnum, end_cnum = Ulexing.loc lexbuf in + return_with_loc token begin_cnum end_cnum + +let return_lexeme lexbuf name = return lexbuf (name, Ulexing.utf8_lexeme lexbuf) + let remove_quotes s = String.sub s 1 (String.length s - 2) -let tok_func token stream = - let lexbuf = Ulexing.from_utf8_stream stream in - Token.make_stream_and_flocation - (fun () -> - try - token lexbuf - with - | Ulexing.Error -> error_at_end lexbuf "Unexpected character" - | Ulexing.InvalidCodepoint i -> error_at_end lexbuf "Invalid code point") - -let mk_lexer token = { - Token.tok_func = token; - Token.tok_using = (fun _ -> ()); - Token.tok_removing = (fun _ -> ()); - Token.tok_match = Token.default_match; - Token.tok_text = Token.lexer_text; - Token.tok_comm = None; -} +let mk_lexer token = + let tok_func stream = + let lexbuf = Ulexing.from_utf8_stream stream in + Token.make_stream_and_flocation + (fun () -> + try + token lexbuf + with + | Ulexing.Error -> error_at_end lexbuf "Unexpected character" + | Ulexing.InvalidCodepoint p -> + error_at_end lexbuf (sprintf "Invalid code point: %d" p)) + in + { + Token.tok_func = tok_func; + Token.tok_using = (fun _ -> ()); + Token.tok_removing = (fun _ -> ()); + Token.tok_match = Token.default_match; + Token.tok_text = Token.lexer_text; + Token.tok_comm = None; + } let expand_macro lexbuf = let macro = @@ -84,35 +158,105 @@ let expand_macro lexbuf = ("SYMBOL", Utf8Macro.expand macro) with Utf8Macro.Macro_not_found _ -> "SYMBOL", Ulexing.utf8_lexeme lexbuf -let keyword lexbuf = "KEYWORD", remove_quotes (Ulexing.utf8_lexeme lexbuf) +let remove_quotes s = String.sub s 1 (String.length s - 2) +let remove_left_quote s = String.sub s 1 (String.length s - 1) + +let rec level2_pattern_token_group counter buffer = lexer + | end_group -> + if (counter > 0) then + Buffer.add_string buffer (Ulexing.utf8_lexeme lexbuf) ; + snd (Ulexing.loc lexbuf) + | begin_group -> + Buffer.add_string buffer (Ulexing.utf8_lexeme lexbuf) ; + ignore (level2_pattern_token_group (counter + 1) buffer lexbuf) ; + level2_pattern_token_group counter buffer lexbuf + | _ -> + Buffer.add_string buffer (Ulexing.utf8_lexeme lexbuf) ; + level2_pattern_token_group counter buffer lexbuf -(** {2 Level 1 lexer} *) +let read_unparsed_group token_name lexbuf = + let buffer = Buffer.create 16 in + let begin_cnum, _ = Ulexing.loc lexbuf in + let end_cnum = level2_pattern_token_group 0 buffer lexbuf in + return_with_loc (token_name, Buffer.contents buffer) begin_cnum end_cnum -let rec level1_token = lexer - | 
xml_blank+ -> level1_token lexbuf - | ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf) +let rec level2_meta_token = lexer + | xml_blank+ -> level2_meta_token lexbuf + | ident -> + let s = Ulexing.utf8_lexeme lexbuf in + begin + if List.mem s level2_meta_keywords then + return lexbuf ("", s) + else + return lexbuf ("IDENT", s) + end + | "@{" -> read_unparsed_group "UNPARSED_AST" lexbuf + | ast_ident -> + return lexbuf ("UNPARSED_AST", + remove_left_quote (Ulexing.utf8_lexeme lexbuf)) + | ast_csymbol -> + return lexbuf ("UNPARSED_AST", + remove_left_quote (Ulexing.utf8_lexeme lexbuf)) + | eof -> return lexbuf ("EOI", "") + +let rec level2_ast_token = lexer + | xml_blank+ -> level2_ast_token lexbuf + | meta -> return lexbuf ("META", Ulexing.utf8_lexeme lexbuf) + | implicit -> return lexbuf ("IMPLICIT", "") + | placeholder -> return lexbuf ("PLACEHOLDER", "") + | ident -> + let lexeme = Ulexing.utf8_lexeme lexbuf in + if Hashtbl.mem level2_ast_keywords lexeme then + return lexbuf ("", lexeme) + else + return lexbuf ("IDENT", lexeme) | number -> return lexbuf ("NUMBER", Ulexing.utf8_lexeme lexbuf) - | keyword -> return lexbuf (keyword lexbuf) | tex_token -> return lexbuf (expand_macro lexbuf) + | uri -> return lexbuf ("URI", Ulexing.utf8_lexeme lexbuf) + | qstring -> + return lexbuf ("QSTRING", remove_quotes (Ulexing.utf8_lexeme lexbuf)) + | csymbol -> + return lexbuf ("CSYMBOL", remove_left_quote (Ulexing.utf8_lexeme lexbuf)) + | "${" -> read_unparsed_group "UNPARSED_META" lexbuf + | "@{" -> read_unparsed_group "UNPARSED_AST" lexbuf + | '(' -> return lexbuf ("LPAREN", "") + | ')' -> return lexbuf ("RPAREN", "") + | meta_ident -> + return lexbuf ("UNPARSED_META", + remove_left_quote (Ulexing.utf8_lexeme lexbuf)) + | meta_anonymous -> return lexbuf ("UNPARSED_META", "anonymous") + | note -> + let comment = + Ulexing.utf8_sub_lexeme lexbuf 2 (Ulexing.lexeme_length lexbuf - 4) + in + return lexbuf ("NOTE", comment) + | begincomment -> return lexbuf ("BEGINCOMMENT","") + | endcomment -> return lexbuf ("ENDCOMMENT","") | eof -> return lexbuf ("EOI", "") | _ -> return lexbuf ("SYMBOL", Ulexing.utf8_lexeme lexbuf) -let level1_tok_func stream = tok_func level1_token stream -let level1_lexer = mk_lexer level1_tok_func - -(** {2 Level 2 lexer} *) - -let rec level2_token = lexer - | xml_blank+ -> level1_token lexbuf - | meta -> return lexbuf ("META", Ulexing.utf8_lexeme lexbuf) - | implicit -> return lexbuf ("IMPLICIT", Ulexing.utf8_lexeme lexbuf) - | ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf) +let rec level1_pattern_token = lexer + | xml_blank+ -> level1_pattern_token lexbuf | number -> return lexbuf ("NUMBER", Ulexing.utf8_lexeme lexbuf) - | keyword -> return lexbuf (keyword lexbuf) + | ident -> + let s = Ulexing.utf8_lexeme lexbuf in + begin + if List.mem s level1_keywords then + return lexbuf ("", s) + else + return lexbuf ("IDENT", s) + end | tex_token -> return lexbuf (expand_macro lexbuf) + | qkeyword -> + return lexbuf ("QKEYWORD", remove_quotes (Ulexing.utf8_lexeme lexbuf)) + | '(' -> return lexbuf ("LPAREN", "") + | ')' -> return lexbuf ("RPAREN", "") | eof -> return lexbuf ("EOI", "") | _ -> return lexbuf ("SYMBOL", Ulexing.utf8_lexeme lexbuf) -let level2_tok_func stream = tok_func level2_token stream -let level2_lexer = mk_lexer level2_tok_func +(* API implementation *) + +let level1_pattern_lexer = mk_lexer level1_pattern_token +let level2_ast_lexer = mk_lexer level2_ast_token +let level2_meta_lexer = mk_lexer level2_meta_token
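
After this change the module's interface consists of three camlp4 lexers: level1_pattern_lexer for level-1 presentation patterns, level2_ast_lexer for level-2 content-level terms, and level2_meta_lexer for the level-2 meta language. All three are built by mk_lexer over (string * string) tokens. The sketch below, which is not part of the commit, shows how one of them might be driven directly over a string to dump the tokens it yields. It assumes the Token.glexer record shape used by mk_lexer above, i.e. that tok_func returns a token stream paired with a location function; the helper name dump_tokens and the sample input are purely illustrative.

(* Illustrative driver, not part of the commit: lex a UTF-8 string with the
   level-2 AST lexer and print each (name, value) token until EOI is reached.
   The shape of tok_func's result (token stream, location function) is an
   assumption based on the camlp4 Token interface targeted by mk_lexer. *)
let dump_tokens s =
  let tokens, _loc_func =
    CicNotationLexer.level2_ast_lexer.Token.tok_func (Stream.of_string s)
  in
  let rec aux () =
    match Stream.next tokens with
    | ("EOI", _) -> ()
    | (name, value) ->
        Printf.printf "%s\t%S\n" name value;
        aux ()
  in
  aux ()

let () = dump_tokens "\\forall x. x = x"

On this input "\forall" goes through expand_macro and comes back as a SYMBOL token, "x" is an IDENT, and "=" and "." fall through to the catch-all SYMBOL rule; identifiers registered in level2_ast_keywords would instead be returned with an empty token name, the conventional way for the grammar consuming this lexer to match them as keywords.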