X-Git-Url: http://matita.cs.unibo.it/gitweb/?a=blobdiff_plain;f=helm%2Focaml%2Fcic_disambiguation%2FcicTextualLexer2.ml;h=6f179505d749c8782e7a83b2ff58edc1bde3b964;hb=4cb4d286a1fdcb150c2848a9d21ac3486906c317;hp=8a6faa19339fe2979b75b5407ebeca108f551ed6;hpb=c5cd7e02a5f443fdbd1151cd861e4723b91c8bce;p=helm.git

diff --git a/helm/ocaml/cic_disambiguation/cicTextualLexer2.ml b/helm/ocaml/cic_disambiguation/cicTextualLexer2.ml
index 8a6faa193..6f179505d 100644
--- a/helm/ocaml/cic_disambiguation/cicTextualLexer2.ml
+++ b/helm/ocaml/cic_disambiguation/cicTextualLexer2.ml
@@ -33,8 +33,12 @@ let regexp blank = [ ' ' '\t' '\n' ]
 let regexp paren = [ '(' '[' '{' ')' ']' '}' ]
 let regexp implicit = '?'
 let regexp symbol_char =
-  [^ 'a' - 'z' 'A' - 'Z' '0' - '9' ' ' '\t' '\n' '\\' '(' '[' '{' ')' ']' '}' ]
-
+  [^ 'a' - 'z' 'A' - 'Z' '0' - '9'
+     ' ' '\t' '\n'
+     '\\' '(' '[' '{' ')' ']' '}' '?'
+  ]
+let regexp comment_char = [^ '\n' ]
+let regexp comment = "%%" comment_char*
 let regexp blanks = blank+
 let regexp num = digit+
 let regexp tex_token = '\\' alpha+
@@ -46,8 +50,10 @@ let regexp ident' = ((alpha | tex_token) ident_cont'*) | ('_' ident_cont'+)
 let regexp meta = implicit num
 let regexp qstring = '"' [^ '"']* '"'
 let regexp uri =
-  (* schema *) (* path *) (* ext *) (* xpointer *)
-  ("cic:/" | "theory:/") ident ('/' ident)* ('.' ident)+ ('#' num ('/' num)*)?
+  ("cic:/" | "theory:/") (* schema *)
+  ident ('/' ident)*     (* path *)
+  ('.' ident)+           (* ext *)
+  ("#xpointer(" num ('/' num)+ ")")? (* xpointer *)
 (* let regexp catchall = .* *)
 
 let keywords = Hashtbl.create 17
@@ -100,8 +106,8 @@ let rec token' = lexer
 and token = lexer
 *)
 
-let rec token = lexer
-  | blanks -> token lexbuf
+let rec token comments = lexer
+  | blanks -> token comments lexbuf
   | uri -> return lexbuf ("URI", Ulexing.utf8_lexeme lexbuf)
   | ident ->
       let lexeme = Ulexing.utf8_lexeme lexbuf in
@@ -125,22 +131,30 @@
           return lexbuf ("SYMBOL", Utf8Macro.expand macro)
         with Utf8Macro.Macro_not_found _ ->
           return lexbuf ("SYMBOL", Ulexing.utf8_lexeme lexbuf))
+  | comment ->
+      if comments then
+        let comment =
+          Ulexing.utf8_sub_lexeme lexbuf 2 (Ulexing.lexeme_length lexbuf - 2)
+        in
+        return lexbuf ("COMMENT", comment)
+      else
+        token comments lexbuf
   | eof -> return lexbuf ("EOI", "")
   | _ -> error lexbuf "Invalid character"
 
-let tok_func stream =
+let tok_func comments stream =
   let lexbuf = Ulexing.from_utf8_stream stream in
   Token.make_stream_and_flocation
     (fun () ->
       try
-        token lexbuf
+        token comments lexbuf
       with
       | Ulexing.Error -> error_at_end lexbuf "Unexpected character"
       | Ulexing.InvalidCodepoint i -> error_at_end lexbuf "Invalid code point")
 
-let cic_lexer =
-  {
-    Token.tok_func = tok_func;
+let cic_lexer ?(comments = false) () =
+  {
+    Token.tok_func = tok_func comments;
     Token.tok_using = (fun _ -> ());
     Token.tok_removing = (fun _ -> ());
     Token.tok_match = Token.default_match;
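
The hunks above turn cic_lexer from a plain lexer record into a function with an optional ~comments flag, threaded through tok_func and token so that "%%" comments either surface as COMMENT tokens or are skipped as before. A minimal usage sketch, assuming clients see this file as the module CicTextualLexer2 and hand the returned record to camlp4's Grammar.gcreate; the let-bindings and the grammar line are illustrative, not part of the patch:

(* Sketch only: CicTextualLexer2 is assumed to be how clients refer to this
   file; Grammar.gcreate is camlp4's usual way to build a grammar from a lexer. *)

(* Old call sites read cic_lexer as a value; after this patch they apply it,
   getting the old behaviour (comments skipped) by default. *)
let plain_lexer = CicTextualLexer2.cic_lexer ()

(* With ~comments:true, a "%%..." line comes back as a ("COMMENT", text)
   token, the leading "%%" stripped by utf8_sub_lexeme in the lexer. *)
let commented_lexer = CicTextualLexer2.cic_lexer ~comments:true ()

(* Hypothetical: plugging the lexer into a camlp4 grammar. *)
let grammar = Grammar.gcreate commented_lexer

The trailing () argument is what lets OCaml commit to the default false for ~comments when the flag is omitted at a call site.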