From: Wilmer Ricciotti Date: Fri, 13 May 2011 08:55:12 +0000 (+0000) Subject: interim version (added smallLexer) X-Git-Tag: make_still_working~2519 X-Git-Url: http://matita.cs.unibo.it/gitweb/?a=commitdiff_plain;h=42680d47c033d751738fd0f84af7b45b2a91a5b8;p=helm.git interim version (added smallLexer) --- diff --git a/matitaB/components/content/notationEnv.ml b/matitaB/components/content/notationEnv.ml index cca2e3799..986c9b63c 100644 --- a/matitaB/components/content/notationEnv.ml +++ b/matitaB/components/content/notationEnv.ml @@ -37,7 +37,7 @@ type value = | NumValue of string | OptValue of value option | ListValue of value list - | LocValue of Stdpp.location + | DisambiguationValue of (Stdpp.location * string option * string option) type value_type = | TermType of int (* the level of the expected term *) diff --git a/matitaB/components/content/notationEnv.mli b/matitaB/components/content/notationEnv.mli index 9c83e1a83..311889321 100644 --- a/matitaB/components/content/notationEnv.mli +++ b/matitaB/components/content/notationEnv.mli @@ -35,7 +35,7 @@ type value = | NumValue of string | OptValue of value option | ListValue of value list - | LocValue of Stdpp.location + | DisambiguationValue of (Stdpp.location * string option * string option) type value_type = | TermType of int (* the level of the expected term *) diff --git a/matitaB/components/content/notationPp.ml b/matitaB/components/content/notationPp.ml index d120fde68..8f1db4a62 100644 --- a/matitaB/components/content/notationPp.ml +++ b/matitaB/components/content/notationPp.ml @@ -347,7 +347,7 @@ let rec pp_value (status: #NCic.status) = function | Env.OptValue (Some v) -> "Some " ^ pp_value status v | Env.OptValue None -> "None" | Env.ListValue l -> sprintf "[%s]" (String.concat "; " (List.map (pp_value status) l)) - | Env.LocValue l -> sprintf "#" + | Env.DisambiguationValue _ -> sprintf "#" let rec pp_value_type = function diff --git a/matitaB/components/content_pres/cicNotationLexer.ml b/matitaB/components/content_pres/cicNotationLexer.ml index 524575d03..2f428babc 100644 --- a/matitaB/components/content_pres/cicNotationLexer.ml +++ b/matitaB/components/content_pres/cicNotationLexer.ml @@ -73,7 +73,9 @@ let regexp uri = ("(" number (',' number)* ")")? 
(* reference spec *) let regexp qstring = '"' [^ '"']* '"' -let regexp hreftag = "" +let regexp hreftag = " level2_meta_token lexbuf - | hreftag -> prerr_endline ("****** HREF : " ^ (Ulexing.utf8_lexeme lexbuf));return lexbuf ("HREF", get_uri lexbuf) - | hrefclose -> return lexbuf ("HREFEND","") + | hreftag -> return lexbuf ("ATAG","") + | hrefclose -> return lexbuf ("ATAGEND","") | ident -> handle_keywords lexbuf (fun x -> List.mem x level2_meta_keywords) "IDENT" | variable_ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf) @@ -298,57 +299,106 @@ let ligatures_token k = Ulexing.rollback lexbuf; k lexbuf -let rec level2_ast_token status = - lexer - | let_rec -> return lexbuf ("LETREC","") - | let_corec -> return lexbuf ("LETCOREC","") - | we_proved -> return lexbuf ("WEPROVED","") - | we_have -> return lexbuf ("WEHAVE","") - | utf8_blank+ -> ligatures_token (level2_ast_token status) lexbuf - | meta -> - let s = Ulexing.utf8_lexeme lexbuf in - return lexbuf ("META", String.sub s 1 (String.length s - 1)) - | implicit -> return lexbuf ("IMPLICIT", "") - | placeholder -> return lexbuf ("PLACEHOLDER", "") - | hreftag -> prerr_endline ("****** HREF : " ^ (Ulexing.utf8_lexeme lexbuf));return lexbuf ("HREF", get_uri lexbuf) - | hrefclose -> return lexbuf ("HREFEND","") - | ident -> handle_keywords lexbuf (fun x -> StringSet.mem x status) "IDENT" - | variable_ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf) - | pident -> handle_keywords lexbuf (fun x -> StringSet.mem x status) "PIDENT" - | number -> return lexbuf ("NUMBER", Ulexing.utf8_lexeme lexbuf) - | tex_token -> return lexbuf (expand_macro lexbuf) - | nreference -> return lexbuf ("NREF", Ulexing.utf8_lexeme lexbuf) - | uri -> return lexbuf ("URI", Ulexing.utf8_lexeme lexbuf) - | qstring -> - return lexbuf ("QSTRING", remove_quotes (Ulexing.utf8_lexeme lexbuf)) - | csymbol -> - return lexbuf ("CSYMBOL", remove_left_quote (Ulexing.utf8_lexeme lexbuf)) - | "${" -> read_unparsed_group "UNPARSED_META" lexbuf - | "@{" -> read_unparsed_group "UNPARSED_AST" lexbuf - | '(' -> return lexbuf ("LPAREN", "") - | ')' -> return lexbuf ("RPAREN", "") - | meta_ident -> - return lexbuf ("UNPARSED_META", - remove_left_quote (Ulexing.utf8_lexeme lexbuf)) - | meta_anonymous -> return lexbuf ("UNPARSED_META", "anonymous") - | beginnote -> - let _comment = comment_token (Ulexing.utf8_lexeme lexbuf) 0 lexbuf in -(* let comment = - Ulexing.utf8_sub_lexeme lexbuf 2 (Ulexing.lexeme_length lexbuf - 4) - in - return lexbuf ("NOTE", comment) *) - ligatures_token (level2_ast_token status) lexbuf - | begincomment -> return lexbuf ("BEGINCOMMENT","") - | endcomment -> return lexbuf ("ENDCOMMENT","") - | eof -> return_eoi lexbuf - | _ -> return_symbol lexbuf (Ulexing.utf8_lexeme lexbuf) +module LocalizeOD = + struct + type t = Stdpp.location + let compare = Pervasives.compare + end + +module LocalizeEnv = Map.Make (LocalizeOD) + +let so_pp = function + | None -> "()" + | Some s -> "*" ^ s +;; + +let update_table loc desc href loctable = + if desc <> None || href <> None + then + (let s,e = HExtlib.loc_of_floc loc in + prerr_endline (Printf.sprintf "*** [%d,%d] \"%s\",\"%s\"" + s e (so_pp href) (so_pp desc)); + LocalizeEnv.add loc (href,desc) loctable) + else loctable +;; -and level1_pattern_token = +let level2_ast_token loctable status = + let rec aux desc href = + lexer + | let_rec -> return lexbuf ("LETREC","") + | let_corec -> return lexbuf ("LETCOREC","") + | we_proved -> return lexbuf ("WEPROVED","") + | we_have -> return lexbuf ("WEHAVE","") + | 
utf8_blank+ -> ligatures_token (aux desc href) lexbuf + | meta -> + let s = Ulexing.utf8_lexeme lexbuf in + return lexbuf ("META", String.sub s 1 (String.length s - 1)) + | implicit -> return lexbuf ("IMPLICIT", "") + | placeholder -> return lexbuf ("PLACEHOLDER", "") + | hreftag -> aux_in_tag None None lexbuf + | hrefclose -> aux None None lexbuf + | ident -> loctable := + update_table (loc_of_buf lexbuf) desc href !loctable; + handle_keywords lexbuf (fun x -> StringSet.mem x status) "IDENT" + | variable_ident -> + return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf) + | pident -> loctable := + update_table (loc_of_buf lexbuf) desc href !loctable; + handle_keywords lexbuf (fun x -> StringSet.mem x status) "PIDENT" + | number -> loctable := + update_table (loc_of_buf lexbuf) desc href !loctable; + return lexbuf ("NUMBER", Ulexing.utf8_lexeme lexbuf) + | tex_token -> loctable := + update_table (loc_of_buf lexbuf) desc href !loctable; + return lexbuf (expand_macro lexbuf) + | nreference -> return lexbuf ("NREF", Ulexing.utf8_lexeme lexbuf) + | uri -> return lexbuf ("URI", Ulexing.utf8_lexeme lexbuf) + | qstring -> + return lexbuf ("QSTRING", remove_quotes (Ulexing.utf8_lexeme lexbuf)) + | csymbol -> + return lexbuf ("CSYMBOL", remove_left_quote (Ulexing.utf8_lexeme lexbuf)) + | "${" -> read_unparsed_group "UNPARSED_META" lexbuf + | "@{" -> read_unparsed_group "UNPARSED_AST" lexbuf + | '(' -> return lexbuf ("LPAREN", "") + | ')' -> return lexbuf ("RPAREN", "") + | meta_ident -> + return lexbuf ("UNPARSED_META", + remove_left_quote (Ulexing.utf8_lexeme lexbuf)) + | meta_anonymous -> return lexbuf ("UNPARSED_META", "anonymous") + | beginnote -> + let _comment = comment_token (Ulexing.utf8_lexeme lexbuf) 0 lexbuf in + (* let comment = + Ulexing.utf8_sub_lexeme lexbuf 2 (Ulexing.lexeme_length lexbuf - 4) + in + return lexbuf ("NOTE", comment) *) + ligatures_token (aux desc href) lexbuf + | begincomment -> return lexbuf ("BEGINCOMMENT","") + | endcomment -> return lexbuf ("ENDCOMMENT","") + | eof -> return_eoi lexbuf + | _ -> loctable := + update_table (loc_of_buf lexbuf) desc href !loctable; + return_symbol lexbuf (Ulexing.utf8_lexeme lexbuf) + + and aux_in_tag desc href = lexer + | utf8_blank+ -> ligatures_token (aux_in_tag desc href) lexbuf + | href -> + aux_in_tag desc + (Some (Ulexing.utf8_sub_lexeme lexbuf 6 (Ulexing.lexeme_length lexbuf - 7))) + lexbuf + | hreftitle -> + aux_in_tag + (Some (Ulexing.utf8_sub_lexeme lexbuf 7 (Ulexing.lexeme_length lexbuf - 8))) + href lexbuf + | ">" -> aux desc href lexbuf + | _ -> aux None None lexbuf + in aux None None + +let rec level1_pattern_token = lexer | utf8_blank+ -> ligatures_token level1_pattern_token lexbuf | number -> return lexbuf ("NUMBER", Ulexing.utf8_lexeme lexbuf) - | hreftag -> prerr_endline ("****** HREF : " ^ (Ulexing.utf8_lexeme lexbuf));return lexbuf ("HREF", get_uri lexbuf) - | hrefclose -> return lexbuf ("HREFEND","") + | hreftag -> return lexbuf ("ATAG", "") + | hrefclose -> return lexbuf ("ATAGEND","") | ident ->handle_keywords lexbuf (fun x -> List.mem x level1_keywords) "IDENT" | variable_ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf) | pident->handle_keywords lexbuf (fun x->List.mem x level1_keywords) "PIDENT" @@ -366,7 +416,8 @@ and level1_pattern_token = | _ -> return_symbol lexbuf (Ulexing.utf8_lexeme lexbuf) let level1_pattern_token = ligatures_token level1_pattern_token -let level2_ast_token status = ligatures_token (level2_ast_token status) +let level2_ast_token loctable status = + ligatures_token 
(level2_ast_token loctable status) (* API implementation *) type lexers = { @@ -375,7 +426,7 @@ type lexers = { level2_meta_lexer : (string * string) Token.glexer } -let mk_lexers keywords = +let mk_lexers loctable keywords = let initial_keywords = [ "CProp"; "Prop"; "Type"; "Set"; "let"; "match"; "with"; "in"; "and"; "to"; "as"; "on"; "return"; "done" ] @@ -385,7 +436,7 @@ let mk_lexers keywords = in { level1_pattern_lexer = mk_lexer level1_pattern_token; - level2_ast_lexer = mk_lexer (level2_ast_token status); + level2_ast_lexer = mk_lexer (level2_ast_token loctable status); level2_meta_lexer = mk_lexer level2_meta_token } ;; diff --git a/matitaB/components/content_pres/cicNotationLexer.mli b/matitaB/components/content_pres/cicNotationLexer.mli index 3bb42f5cb..13f0150a8 100644 --- a/matitaB/components/content_pres/cicNotationLexer.mli +++ b/matitaB/components/content_pres/cicNotationLexer.mli @@ -28,12 +28,18 @@ * error message *) exception Error of int * int * string +module LocalizeEnv : +sig + include Map.S with type key = Stdpp.location +end + type lexers = { - level1_pattern_lexer : (string * string) Token.glexer; - level2_ast_lexer : (string * string) Token.glexer; - level2_meta_lexer : (string * string) Token.glexer + level1_pattern_lexer : (string * string) Token.glexer; + level2_ast_lexer : (string * string) Token.glexer; + level2_meta_lexer : (string * string) Token.glexer } -val mk_lexers : string list -> lexers +val mk_lexers : + (string option * string option) LocalizeEnv.t ref -> string list -> lexers diff --git a/matitaB/components/content_pres/cicNotationParser.ml b/matitaB/components/content_pres/cicNotationParser.ml index 3ccd54141..dc4534c84 100644 --- a/matitaB/components/content_pres/cicNotationParser.ml +++ b/matitaB/components/content_pres/cicNotationParser.ml @@ -42,8 +42,9 @@ type ('a,'b,'c,'d,'e) grammars = { level2_ast_grammar : Grammar.g; term: 'b Grammar.Entry.e; ident: 'e Grammar.Entry.e; + sym_attributes: (string option * string option) Grammar.Entry.e; + sym_table: (string * Stdpp.location Grammar.Entry.e) list; let_defs: 'c Grammar.Entry.e; - located: Stdpp.location Grammar.Entry.e; protected_binder_vars: 'd Grammar.Entry.e; level2_meta: 'b Grammar.Entry.e; } @@ -69,7 +70,8 @@ type db = { Ast.term Ast.capture_variable * Ast.term * int) list, Ast.term list * Ast.term option, Env.ident_or_var) grammars; keywords: string list; - items: (string * Ast.term * (NotationEnv.t -> Ast.location -> Ast.term)) list + items: (string * Ast.term * (NotationEnv.t -> Ast.location -> Ast.term)) list; + loctable: (string option * string option) CicNotationLexer.LocalizeEnv.t ref } let int_of_string s = @@ -85,10 +87,59 @@ let level_of precedence = raise (Level_not_found precedence); string_of_int precedence -let gram_symbol s = - Gramext.srules - [ [ Gramext.Stoken ("SYMBOL",s) ], Gramext.action (fun _ loc -> loc) ] +let add_symbol_to_grammar_explicit level2_ast_grammar + sym_attributes sym_table s = + try + let _ = List.assoc s sym_table + in sym_table + with Not_found -> + let entry = Grammar.Entry.create level2_ast_grammar ("sym" ^ s) in + Grammar.extend + [ Grammar.Entry.obj entry, + None, + [ None, + Some (*Gramext.NonA*) Gramext.NonA, + [ [Gramext.Stoken ("SYMBOL",s)], (* concrete l1 syntax *) + (Gramext.action (fun _ loc -> None, loc)) + ; [Gramext.Stoken ("ATAG","") + ;Gramext.Snterm (Grammar.Entry.obj sym_attributes) + ;Gramext.Stoken ("SYMBOL",">") + ;Gramext.Stoken ("SYMBOL",s) + ;Gramext.Stoken ("ATAGEND","")], + (Gramext.action (fun _ uridesc _ _ _ loc -> (Some 
uridesc),loc)) + ]]]; + prerr_endline ("adding to grammar symbol " ^ s); + (s,entry)::sym_table + +let add_symbol_to_grammar status s = + let sym_attributes = status#notation_parser_db.grammars.sym_attributes in + let sym_table = status#notation_parser_db.grammars.sym_table in + let level2_ast_grammar = + status#notation_parser_db.grammars.level2_ast_grammar + in + let sym_table = + add_symbol_to_grammar_explicit level2_ast_grammar sym_attributes sym_table s + in + let grammars = + { status#notation_parser_db.grammars with sym_table = sym_table } + in + let notation_parser_db = + { status#notation_parser_db with grammars = grammars } in + status#set_notation_parser_db notation_parser_db + +let gram_symbol status s = + let sym_table = status#notation_parser_db.grammars.sym_table in + let entry = + try List.assoc s sym_table + with Not_found -> + (let syms = List.map fst (status#notation_parser_db.grammars.sym_table) in + let syms = List.map (fun x -> "\"" ^ x ^ "\"") syms in + prerr_endline ("new symbol non-terminals: " ^ (String.concat ", " syms)); + prerr_endline ("unable to find symbol \"" ^ s ^ "\""); assert false) + in + Gramext.Snterm (Grammar.Entry.obj entry) + let gram_ident status = Gramext.Snterm (Grammar.Entry.obj (status#notation_parser_db.grammars.ident : 'a Grammar.Entry.e)) @@ -104,19 +155,26 @@ let gram_term status = function level_of precedence) ;; -let gram_of_literal = +let gram_of_literal status = function - | `Symbol s -> gram_symbol s + | `Symbol s -> gram_symbol status s | `Keyword s -> gram_keyword s | `Number s -> gram_number s -let make_action action bindings = +let make_action status action bindings = let rec aux (vl : NotationEnv.t) = function [] -> Gramext.action (fun (loc: Ast.location) -> action vl loc) - | NoBinding :: tl -> Gramext.action - (fun (loc: Ast.location) -> - aux (("",(Env.NoType,Env.LocValue loc))::vl) tl) + | NoBinding :: tl -> + Gramext.action + (fun (_,(loc: Ast.location)) -> + let uri,desc = + try + CicNotationLexer.LocalizeEnv.find loc + !(status#notation_parser_db.loctable) + with Not_found -> None, None + in aux (("",(Env.NoType, + Env.DisambiguationValue (loc,uri,desc)))::vl) tl) (* LUCA: DEFCON 3 BEGIN *) | Binding (name, Env.TermType l) :: tl -> Gramext.action @@ -155,6 +213,53 @@ let flatten_opt = in aux [] +(* given a level 1 pattern, adds productions for symbols when needed *) +let update_sym_grammar status pattern = + let rec aux status = function + | Ast.AttributedTerm (_, t) -> aux status t + | Ast.Literal l -> aux_literal status l + | Ast.Layout l -> aux_layout status l + | Ast.Magic m -> aux_magic status m + | Ast.Variable v -> aux_variable status v + | t -> + prerr_endline (NotationPp.pp_term status t); + assert false + and aux_literal status = + function + | `Symbol s -> add_symbol_to_grammar status s + | `Keyword s -> status + | `Number s -> status + and aux_layout status = function + | Ast.Sub (p1, p2) -> aux (aux status p1) p2 + | Ast.Sup (p1, p2) -> aux (aux status p1) p2 + | Ast.Below (p1, p2) -> aux (aux status p1) p2 + | Ast.Above (p1, p2) -> aux (aux status p1) p2 + | Ast.Frac (p1, p2) -> aux (aux status p1) p2 + | Ast.InfRule (p1, p2, p3) -> aux (aux (aux status p1) p2) p3 + | Ast.Atop (p1, p2) -> aux (aux status p1) p2 + | Ast.Over (p1, p2) -> aux (aux status p1) p2 + | Ast.Root (p1, p2) -> aux (aux status p1) p2 + | Ast.Sqrt p -> aux status p + | Ast.Break -> status + | Ast.Box (_, pl) -> List.fold_left aux status pl + | Ast.Group pl -> List.fold_left aux status pl + | Ast.Mstyle (_,pl) -> List.fold_left aux status 
pl + | Ast.Mpadded (_,pl) -> List.fold_left aux status pl + | Ast.Maction l -> List.fold_left aux status l + and aux_magic status magic = + match magic with + | Ast.Opt p -> aux status p + | Ast.List0 (p, s) + | Ast.List1 (p, s) -> + let status = + match s with None -> status | Some s' -> aux_literal status s' + in + aux status p + | _ -> assert false + and aux_variable status _ = status + in + aux status pattern + (* given a level 1 pattern computes the new RHS of "term" grammar entry *) let extract_term_production status pattern = let rec aux = function @@ -168,24 +273,24 @@ let extract_term_production status pattern = assert false and aux_literal = function - | `Symbol s -> [NoBinding, gram_symbol s] + | `Symbol s -> [NoBinding, gram_symbol status s] | `Keyword s -> (* assumption: s will be registered as a keyword with the lexer *) [NoBinding, gram_keyword s] | `Number s -> [NoBinding, gram_number s] and aux_layout = function - | Ast.Sub (p1, p2) -> aux p1 @ [NoBinding, gram_symbol "\\sub "] @ aux p2 - | Ast.Sup (p1, p2) -> aux p1 @ [NoBinding, gram_symbol "\\sup "] @ aux p2 - | Ast.Below (p1, p2) -> aux p1 @ [NoBinding, gram_symbol "\\below "] @ aux p2 - | Ast.Above (p1, p2) -> aux p1 @ [NoBinding, gram_symbol "\\above "] @ aux p2 - | Ast.Frac (p1, p2) -> aux p1 @ [NoBinding, gram_symbol "\\frac "] @ aux p2 - | Ast.InfRule (p1, p2, p3) -> [NoBinding, gram_symbol "\\infrule "] @ aux p1 @ aux p2 @ aux p3 - | Ast.Atop (p1, p2) -> aux p1 @ [NoBinding, gram_symbol "\\atop "] @ aux p2 - | Ast.Over (p1, p2) -> aux p1 @ [NoBinding, gram_symbol "\\over "] @ aux p2 + | Ast.Sub (p1, p2) -> aux p1 @ [NoBinding, gram_symbol status "\\sub "] @ aux p2 + | Ast.Sup (p1, p2) -> aux p1 @ [NoBinding, gram_symbol status "\\sup "] @ aux p2 + | Ast.Below (p1, p2) -> aux p1 @ [NoBinding, gram_symbol status "\\below "] @ aux p2 + | Ast.Above (p1, p2) -> aux p1 @ [NoBinding, gram_symbol status "\\above "] @ aux p2 + | Ast.Frac (p1, p2) -> aux p1 @ [NoBinding, gram_symbol status "\\frac "] @ aux p2 + | Ast.InfRule (p1, p2, p3) -> [NoBinding, gram_symbol status "\\infrule "] @ aux p1 @ aux p2 @ aux p3 + | Ast.Atop (p1, p2) -> aux p1 @ [NoBinding, gram_symbol status "\\atop "] @ aux p2 + | Ast.Over (p1, p2) -> aux p1 @ [NoBinding, gram_symbol status "\\over "] @ aux p2 | Ast.Root (p1, p2) -> - [NoBinding, gram_symbol "\\root "] @ aux p2 - @ [NoBinding, gram_symbol "\\of "] @ aux p1 - | Ast.Sqrt p -> [NoBinding, gram_symbol "\\sqrt "] @ aux p + [NoBinding, gram_symbol status "\\root "] @ aux p2 + @ [NoBinding, gram_symbol status "\\of "] @ aux p1 + | Ast.Sqrt p -> [NoBinding, gram_symbol status "\\sqrt "] @ aux p | Ast.Break -> [] | Ast.Box (_, pl) -> List.flatten (List.map aux pl) | Ast.Group pl -> List.flatten (List.map aux pl) @@ -215,8 +320,10 @@ let extract_term_production status pattern = match magic with | Ast.List0 (_, None) -> Gramext.Slist0 s | Ast.List1 (_, None) -> Gramext.Slist1 s - | Ast.List0 (_, Some l) -> Gramext.Slist0sep (s, gram_of_literal l) - | Ast.List1 (_, Some l) -> Gramext.Slist1sep (s, gram_of_literal l) + | Ast.List0 (_, Some l) -> + Gramext.Slist0sep (s, gram_of_literal status l) + | Ast.List1 (_, Some l) -> + Gramext.Slist1sep (s, gram_of_literal status l) | _ -> assert false in [ Env (List.map Env.list_declaration p_names), @@ -236,8 +343,8 @@ let extract_term_production status pattern = let p_bindings, p_atoms = List.split (aux p) in let p_names = flatten_opt p_bindings in let action = - make_action (fun (env : NotationEnv.t) (loc : Ast.location) -> env) - p_bindings + make_action 
status + (fun (env : NotationEnv.t) (loc : Ast.location) -> env) p_bindings in p_bindings, p_atoms, p_names, action in @@ -376,14 +483,15 @@ let parse_level1_pattern grammars precedence lexbuf = let parse_level2_ast grammars lexbuf = exc_located_wrapper - (fun () -> Grammar.Entry.parse grammars.level2_ast (Obj.magic lexbuf)) + (fun () -> + Grammar.Entry.parse grammars.level2_ast (Obj.magic lexbuf)) let parse_level2_meta grammars lexbuf = exc_located_wrapper (fun () -> Grammar.Entry.parse grammars.level2_meta (Obj.magic lexbuf)) (* create empty precedence level for "term" *) -let initialize_grammars grammars = +let initialize_grammars loctable grammars = let dummy_action = Gramext.action (fun _ -> failwith "internal error, lexer generated a dummy token") @@ -562,11 +670,12 @@ END begin let level2_ast = grammars.level2_ast in let term = grammars.term in + let atag_attributes = grammars.sym_attributes in let let_defs = grammars.let_defs in let ident = grammars.ident in let protected_binder_vars = grammars.protected_binder_vars in EXTEND - GLOBAL: level2_ast term let_defs protected_binder_vars ident; + GLOBAL: level2_ast term let_defs protected_binder_vars ident atag_attributes; level2_ast: [ [ p = term -> p ] ]; sort: [ [ "Prop" -> `Prop @@ -609,13 +718,16 @@ EXTEND | SYMBOL <:unicode> (* λ *) -> `Lambda ] ]; - tident: [ - [ uri = HREF; - id = IDENT; - HREFEND -> (id, `Uri uri) ]]; gident: [ - [ fullident = tident -> fullident; - | id = IDENT -> (id, `Ambiguous) ]]; + [ id = IDENT -> + try + let uri,_ = CicNotationLexer.LocalizeEnv.find loc + !loctable in + match uri with + | Some u -> id, `Uri u + | None -> id, `Ambiguous + with + | Not_found -> id, `Ambiguous ]]; arg: [ [ LPAREN; names = LIST1 gident SEP SYMBOL ","; SYMBOL ":"; ty = term; RPAREN -> @@ -653,9 +765,6 @@ EXTEND | _ -> failwith ("Invalid index name: " ^ blob) ] ]; - located: [ - [ s = SYMBOL -> loc ] - ]; let_defs: [ [ defs = LIST1 [ name = single_arg; @@ -789,8 +898,8 @@ END grammars ;; -let initial_grammars keywords = - let lexers = CicNotationLexer.mk_lexers keywords in +let initial_grammars loctable keywords = + let lexers = CicNotationLexer.mk_lexers loctable keywords in let level1_pattern_grammar = Grammar.gcreate lexers.CicNotationLexer.level1_pattern_lexer in let level2_ast_grammar = @@ -802,17 +911,30 @@ let initial_grammars keywords = let level2_ast = Grammar.Entry.create level2_ast_grammar "level2_ast" in let term = Grammar.Entry.create level2_ast_grammar "term" in let ident = Grammar.Entry.create level2_ast_grammar "ident" in - let located = Grammar.Entry.create level2_ast_grammar "located" in + (* unexpanded TeX macros terminated by a space (see comment in + * CicNotationLexer) *) + let initial_symbols = + ["\\sub ";"\\sup ";"\\below ";"\\above ";"\\frac " + ;"\\infrule ";"\\atop ";"\\over ";"\\root ";"\\of ";"\\sqrt "] in + let sym_attributes = + Grammar.Entry.create level2_ast_grammar "atag_attributes" in + let sym_table = + List.fold_left + (add_symbol_to_grammar_explicit level2_ast_grammar sym_attributes) + [] initial_symbols + in let let_defs = Grammar.Entry.create level2_ast_grammar "let_defs" in let protected_binder_vars = Grammar.Entry.create level2_ast_grammar "protected_binder_vars" in let level2_meta = Grammar.Entry.create level2_meta_grammar "level2_meta" in - initialize_grammars { level1_pattern=level1_pattern; + initialize_grammars loctable + { level1_pattern=level1_pattern; level2_ast=level2_ast; term=term; ident=ident; + sym_table=sym_table; + sym_attributes=sym_attributes; let_defs=let_defs; - 
located=located; protected_binder_vars=protected_binder_vars; level2_meta=level2_meta; level2_ast_grammar=level2_ast_grammar; @@ -826,12 +948,17 @@ class type g_status = class status0 ~keywords:kwds = object - val db = { grammars = initial_grammars kwds; keywords = kwds; items = [] } + val db = + let lt = ref CicNotationLexer.LocalizeEnv.empty in + { grammars = initial_grammars lt kwds; keywords = kwds; + items = []; loctable = lt } method notation_parser_db = db method set_notation_parser_db v = {< db = v >} method set_notation_parser_status : 'status. #g_status as 'status -> 'self = fun o -> {< db = o#notation_parser_db >} + method reset_loctable () = + db.loctable := CicNotationLexer.LocalizeEnv.empty end class virtual status ~keywords:kwds = @@ -843,8 +970,10 @@ class virtual status ~keywords:kwds = let extend (status : #status) (CL1P (level1_pattern,precedence)) action = (* move inside constructor XXX *) let add1item status (level, level1_pattern, action) = + let status = update_sym_grammar status level1_pattern in let p_bindings, p_atoms = - List.split (extract_term_production status level1_pattern) in + List.split (extract_term_production status level1_pattern) + in Grammar.extend [ Grammar.Entry.obj (status#notation_parser_db.grammars.term : 'a Grammar.Entry.e), @@ -852,7 +981,7 @@ let extend (status : #status) (CL1P (level1_pattern,precedence)) action = [ None, Some (*Gramext.NonA*) Gramext.NonA, [ p_atoms, (* concrete l1 syntax *) - (make_action + (make_action status (fun (env: NotationEnv.t) (loc: Ast.location) -> (action env loc)) p_bindings) ]]]; diff --git a/matitaB/components/content_pres/cicNotationParser.mli b/matitaB/components/content_pres/cicNotationParser.mli index 1ec1fd59c..f5daf20b0 100644 --- a/matitaB/components/content_pres/cicNotationParser.mli +++ b/matitaB/components/content_pres/cicNotationParser.mli @@ -39,6 +39,7 @@ class virtual status: keywords:string list -> inherit g_status method set_notation_parser_db: db -> 'self method set_notation_parser_status: 'status. #g_status as 'status -> 'self + method reset_loctable: unit -> unit end type checked_l1_pattern = private CL1P of NotationPt.term * int diff --git a/matitaB/components/content_pres/interpTable.ml b/matitaB/components/content_pres/interpTable.ml new file mode 100644 index 000000000..8ec85c1ee --- /dev/null +++ b/matitaB/components/content_pres/interpTable.ml @@ -0,0 +1,84 @@ +(* Copyright (C) 2011, HELM Team. + * + * This file is part of HELM, an Hypertextual, Electronic + * Library of Mathematics, developed at the Computer Science + * Department, University of Bologna, Italy. + * + * HELM is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * HELM is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with HELM; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, + * MA 02111-1307, USA. 
+ * + * For details, see the HELM World-Wide-Web page, + * http://helm.cs.unibo.it/ + *) + +module Domain = + struct + type t = Stdpp.location * string (* location, name *) + let compare = Pervasives.compare + end + +module Table = Map.Make (Domain) + +module Ast = NotationPt + +let populate = + let rec aux lastloc acc = function + | Ast.AttributedTerm (`Loc l,u) -> aux l acc u + | Ast.Appl tl -> + List.fold_left (aux lastloc) acc tl + | Ast.Binder (k,ty,body) -> + let acc' = aux_var lastloc acc ty in + aux lastloc acc' body + | Ast.Case (t,ity,oty,pl) -> + (* FIXME: ity contains interpretation info + * but we don't have the location for it + * this must be rethought + *) + let acc = aux lastloc acc t in + let acc = match oty with + | None -> acc + | Some oty' -> aux lastloc acc oty' + in + List.fold_left (aux_pattern lastloc) acc pl + | Ast.Cast (u,v) -> + aux lastloc (aux lastloc acc v) u + | Ast.LetIn (ty,u,v) -> + let acc = aux_var lastloc acc ty in + let acc = aux lastloc acc u in + aux lastloc acc v + | Ast.LetRec (ik,l,t) -> + let acc = aux lastloc acc t in + List.fold_left (aux_letrec lastloc) acc l + | Ast.Ident (id,`Uri u) -> Table.add (lastloc,id) u acc + | _ -> acc + and aux_pattern lastloc acc = function + | Ast.Pattern (_,_,tys),t -> + (* FIXME: patterns may contain disambiguation info + * for constructors *) + let acc = aux lastloc acc t in + List.fold_left (aux_var lastloc) acc tys + | _,t -> aux lastloc acc t + and aux_letrec lastloc acc (args,ty,body,_) = + let acc = aux lastloc acc body in + let acc = List.fold_left (aux_var lastloc) acc args + in + aux_var lastloc acc ty + and aux_var lastloc acc (_,ty) = + match ty with + | None -> acc + | Some ty' -> aux lastloc acc ty' + + in aux Stdpp.dummy_loc (Table.empty) +;; diff --git a/matitaB/components/content_pres/interpTable.mli b/matitaB/components/content_pres/interpTable.mli new file mode 100644 index 000000000..7fa2f1b11 --- /dev/null +++ b/matitaB/components/content_pres/interpTable.mli @@ -0,0 +1,6 @@ +module Table : +sig + include Map.S with type key = (Stdpp.location * string) +end + +val populate : NotationPt.term -> string Table.t diff --git a/matitaB/components/content_pres/matitaScriptLexer.ml b/matitaB/components/content_pres/matitaScriptLexer.ml new file mode 100644 index 000000000..e1ab73b53 --- /dev/null +++ b/matitaB/components/content_pres/matitaScriptLexer.ml @@ -0,0 +1,354 @@ +(* Copyright (C) 2005, HELM Team. + * + * This file is part of HELM, an Hypertextual, Electronic + * Library of Mathematics, developed at the Computer Science + * Department, University of Bologna, Italy. + * + * HELM is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * HELM is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with HELM; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, + * MA 02111-1307, USA. 
+ * + * For details, see the HELM World-Wide-Web page, + * http://helm.cs.unibo.it/ + *) + +(* $Id: cicNotationLexer.ml 11231 2011-03-30 11:52:27Z ricciott $ *) + +open Printf + +exception Error of int * int * string + +module StringSet = Set.Make(String) + +(* Lexer *) +let regexp number = xml_digit+ +let regexp utf8_blank = " " | "\r\n" | "\n" | "\t" | [160] (* this is a nbsp *) +let regexp percentage = + ('-' | "") [ '0' - '9' ] + '%' +let regexp floatwithunit = + ('-' | "") [ '0' - '9' ] + ["."] [ '0' - '9' ] + ([ 'a' - 'z' ] + | "" ) +let regexp color = "#" [ '0' - '9' 'a' - 'f' 'A' - 'F' ] [ '0' - '9' 'a' - 'f' +'A' - 'F' ] [ '0' - '9' 'a' - 'f' 'A' - 'F' ] [ '0' - '9' 'a' - 'f' 'A' - 'F' ] +[ '0' - '9' 'a' - 'f' 'A' - 'F' ] [ '0' - '9' 'a' - 'f' 'A' - 'F' ] + + (* ZACK: breaks unicode's binder followed by an ascii letter without blank *) +(* let regexp ident_letter = xml_letter *) + +let regexp ident_letter = [ 'a' - 'z' 'A' - 'Z' ] + + (* must be in sync with "is_ligature_char" below *) +let regexp ligature_char = [ "'`~!?@*()[]<>-+=|:;.,/\"" ] +let regexp ligature = ligature_char ligature_char+ + +let regexp we_proved = "we" utf8_blank+ "proved" +let regexp we_have = "we" utf8_blank+ "have" +let regexp let_rec = "let" utf8_blank+ "rec" +let regexp let_corec = "let" utf8_blank+ "corec" +let regexp ident_decoration = '\'' | '?' | '`' +let regexp ident_cont = ident_letter | xml_digit | '_' +let regexp ident_start = ident_letter +let regexp ident = ident_letter ident_cont* ident_decoration* +let regexp variable_ident = '_' '_' number +let regexp pident = '_' ident + +let regexp uri_step = [ 'a' - 'z' 'A' - 'Z' '0' - '9' '_' '-' ''' ]+ + +let regexp uri = + ("cic:/" | "theory:/") (* schema *) +(* ident ('/' ident)* |+ path +| *) + uri_step ('/' uri_step)* (* path *) + ('.' ident)+ (* ext *) +(* ("#xpointer(" number ('/' number)+ ")")? (* xpointer *) *) + ("(" number (',' number)* ")")? (* reference spec *) + +let regexp qstring = '"' [^ '"']* '"' +let regexp hreftag = " Hashtbl.add ligatures ligature symbol) + [ ("->", <:unicode>); ("=>", <:unicode>); + (":=", <:unicode>); + ] + +let regexp nreference = + "cic:/" (* schema *) + uri_step ('/' uri_step)* (* path *) + '.' + ( "dec" + | "def" "(" number ")" + | "fix" "(" number "," number "," number ")" + | "cfx" "(" number ")" + | "ind" "(" number "," number "," number ")" + | "con" "(" number "," number "," number ")") (* ext + reference *) + +let error lexbuf msg = + let begin_cnum, end_cnum = Ulexing.loc lexbuf in + raise (Error (begin_cnum, end_cnum, msg)) +let error_at_end lexbuf msg = + let begin_cnum, end_cnum = Ulexing.loc lexbuf in + raise (Error (begin_cnum, end_cnum, msg)) + +let loc_of_buf lexbuf = + HExtlib.floc_of_loc (Ulexing.loc lexbuf) + +let return_with_loc token begin_cnum end_cnum = + let flocation = HExtlib.floc_of_loc (begin_cnum,end_cnum) in + token, flocation + +let return lexbuf token = + let begin_cnum, end_cnum = Ulexing.loc lexbuf in + return_with_loc token begin_cnum end_cnum + +let return_lexeme lexbuf name = return lexbuf (name, Ulexing.utf8_lexeme lexbuf) + +let return_symbol lexbuf s = return lexbuf ("SYMBOL", s) +let return_eoi lexbuf = return lexbuf ("EOI", "") + +let remove_quotes s = String.sub s 1 (String.length s - 2) + +let mk_lexer token = + let tok_func stream = +(* let lexbuf = Ulexing.from_utf8_stream stream in *) +(** XXX Obj.magic rationale. + * The problem. 
+ * camlp5 constraints the tok_func field of Token.glexer to have type: + * Stream.t char -> (Stream.t 'te * flocation_function) + * In order to use ulex we have (in theory) to instantiate a new lexbuf each + * time a char Stream.t is passed, destroying the previous lexbuf which may + * have consumed a character from the old stream which is lost forever :-( + * The "solution". + * Instead of passing to camlp5 a char Stream.t we pass a lexbuf, casting it to + * char Stream.t with Obj.magic where needed. + *) + let lexbuf = Obj.magic stream in + Token.make_stream_and_location + (fun () -> + try + token lexbuf + with + | Ulexing.Error -> error_at_end lexbuf "Unexpected character" + | Ulexing.InvalidCodepoint p -> + error_at_end lexbuf (sprintf "Invalid code point: %d" p)) + in + { + Token.tok_func = tok_func; + Token.tok_using = (fun _ -> ()); + Token.tok_removing = (fun _ -> ()); + Token.tok_match = Token.default_match; + Token.tok_text = Token.lexer_text; + Token.tok_comm = None; + } + +let expand_macro lexbuf = + let macro = + Ulexing.utf8_sub_lexeme lexbuf 1 (Ulexing.lexeme_length lexbuf - 1) + in + try + ("SYMBOL", Utf8Macro.expand macro) + with Utf8Macro.Macro_not_found _ -> +(* FG: unexpanded TeX macros are terminated by a space for rendering *) + "SYMBOL", (Ulexing.utf8_lexeme lexbuf ^ " ") + +let remove_quotes s = String.sub s 1 (String.length s - 2) +let remove_left_quote s = String.sub s 1 (String.length s - 1) + +let rec level2_pattern_token_group counter buffer = + lexer + | end_group -> + if (counter > 0) then + Buffer.add_string buffer (Ulexing.utf8_lexeme lexbuf) ; + snd (Ulexing.loc lexbuf) + | begin_group -> + Buffer.add_string buffer (Ulexing.utf8_lexeme lexbuf) ; + ignore (level2_pattern_token_group (counter + 1) buffer lexbuf) ; + level2_pattern_token_group counter buffer lexbuf + | _ -> + Buffer.add_string buffer (Ulexing.utf8_lexeme lexbuf) ; + level2_pattern_token_group counter buffer lexbuf + +let read_unparsed_group token_name lexbuf = + let buffer = Buffer.create 16 in + let begin_cnum, _ = Ulexing.loc lexbuf in + let end_cnum = level2_pattern_token_group 0 buffer lexbuf in + return_with_loc (token_name, Buffer.contents buffer) begin_cnum end_cnum + +let handle_keywords lexbuf k name = + let s = Ulexing.utf8_lexeme lexbuf in + if k s then + return lexbuf ("", s) + else + return lexbuf (name, s) +;; + +let rec level2_meta_token = + lexer + | utf8_blank+ -> level2_meta_token lexbuf + | hreftag -> return lexbuf ("ATAG","") + | hrefclose -> return lexbuf ("ATAGEND","") + | ident -> + handle_keywords lexbuf (fun x -> List.mem x level2_meta_keywords) "IDENT" + | variable_ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf) + | pident -> + handle_keywords lexbuf (fun x -> List.mem x level2_meta_keywords) "PIDENT" + | "@{" -> read_unparsed_group "UNPARSED_AST" lexbuf + | ast_ident -> + return lexbuf ("UNPARSED_AST", + remove_left_quote (Ulexing.utf8_lexeme lexbuf)) + | ast_csymbol -> + return lexbuf ("UNPARSED_AST", + remove_left_quote (Ulexing.utf8_lexeme lexbuf)) + | eof -> return_eoi lexbuf + +let rec comment_token acc depth = + lexer + | beginnote -> + let acc = acc ^ Ulexing.utf8_lexeme lexbuf in + comment_token acc (depth + 1) lexbuf + | endcomment -> + let acc = acc ^ Ulexing.utf8_lexeme lexbuf in + if depth = 0 + then acc + else comment_token acc (depth - 1) lexbuf + | _ -> + let acc = acc ^ Ulexing.utf8_lexeme lexbuf in + comment_token acc depth lexbuf + + (** @param k continuation to be invoked when no ligature has been found *) +let ligatures_token k = + 
lexer + | ligature -> + let lexeme = Ulexing.utf8_lexeme lexbuf in + (match List.rev (Hashtbl.find_all ligatures lexeme) with + | [] -> (* ligature not found, rollback and try default lexer *) + Ulexing.rollback lexbuf; + k lexbuf + | default_lig :: _ -> (* ligatures found, use the default one *) + return_symbol lexbuf default_lig) + | eof -> return_eoi lexbuf + | _ -> (* not a ligature, rollback and try default lexer *) + Ulexing.rollback lexbuf; + k lexbuf + +module LocalizeOD = + struct + type t = Stdpp.location + let compare = Pervasives.compare + end + +let update_table loc desc href loctable = + if desc <> None || href <> None + then + (let s,e = HExtlib.loc_of_floc loc in + prerr_endline (Printf.sprintf "*** [%d,%d] \"%s\",\"%s\"" + s e (so_pp href) (so_pp desc)); + LocalizeEnv.add loc (href,desc) loctable) + else loctable +;; + +let get_hot_spots = + let rec aux loc1 desc href = + lexer + | hreftag -> aux_in_tag (Ulexing.loc lexbuf) None None lexbuf + | hrefclose -> + try + let loc1 = HExtlib.floc_of_loc (HExtlib.unopt loc1) in + let loc2 = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in + (loc1,loc2,href,desc) :: aux None None None lexbuf + with Failure _ -> aux None None None lexbuf + | beginnote -> (* FIXME commenti come in smallLexer *) + let _comment = comment_token (Ulexing.utf8_lexeme lexbuf) 0 lexbuf in + (* let comment = + Ulexing.utf8_sub_lexeme lexbuf 2 (Ulexing.lexeme_length lexbuf - 4) + in + return lexbuf ("NOTE", comment) *) + ligatures_token (aux desc href) lexbuf + | begincomment -> return lexbuf ("BEGINCOMMENT","") + | endcomment -> return lexbuf ("ENDCOMMENT","") + | eof -> [] + | _ -> aux loc1 desc href lexbuf + and aux_in_tag loc1 desc href = lexer + | utf8_blank+ -> aux_in_tag loc1 desc href lexbuf + | href -> + aux_in_tag loc1 desc + (Some (Ulexing.utf8_sub_lexeme lexbuf 6 (Ulexing.lexeme_length lexbuf - 7))) + lexbuf + | hreftitle -> + aux_in_tag loc1 + (Some (Ulexing.utf8_sub_lexeme lexbuf 7 (Ulexing.lexeme_length lexbuf - 8))) + href lexbuf + | ">" -> + let merge (a,b) (c,d) = (a,d) in + aux (Some (merge loc1 (Ulexing.loc lexbuf))) desc href lexbuf + | _ -> aux None None None lexbuf + in aux None None None + diff --git a/matitaB/components/content_pres/smallLexer.ml b/matitaB/components/content_pres/smallLexer.ml new file mode 100644 index 000000000..7e850dfe7 --- /dev/null +++ b/matitaB/components/content_pres/smallLexer.ml @@ -0,0 +1,269 @@ +(* Copyright (C) 2005, HELM Team. + * + * This file is part of HELM, an Hypertextual, Electronic + * Library of Mathematics, developed at the Computer Science + * Department, University of Bologna, Italy. + * + * HELM is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * HELM is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with HELM; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, + * MA 02111-1307, USA. 
+ * + * For details, see the HELM World-Wide-Web page, + * http://helm.cs.unibo.it/ + *) + +(* $Id: cicNotationLexer.ml 11028 2010-11-02 17:08:43Z tassi $ *) + +open Printf + +exception Error of int * int * string + +module StringSet = Set.Make(String) + + + +(* Lexer *) +let regexp number = xml_digit+ + +let regexp ident_letter = [ 'a' - 'z' 'A' - 'Z' ] + + (* must be in sync with "is_ligature_char" below *) +let regexp ligature_char = [ "'`~!?@*()[]<>-+=|:;.,/\"" ] +let regexp ligature = ligature_char ligature_char+ + +let regexp ident_decoration = '\'' | '?' | '`' +let regexp ident_cont = ident_letter | xml_digit | '_' +let regexp ident_start = ident_letter +let regexp ident = ident_letter ident_cont* ident_decoration* +let regexp variable_ident = '_' '_' number +let regexp pident = '_' ident + +let regexp tex_token = '\\' ident + +let regexp csymbol = '\'' ident + +let regexp begincom = "(*" +let regexp endcom = "*)" +let regexp qstring = '"' [^ '"']* '"' + + + + + (* (string, int) Hashtbl.t, with multiple bindings. + * int is the unicode codepoint *) +let ligatures = Hashtbl.create 23 + +let _ = + List.iter + (fun (ligature, symbol) -> Hashtbl.add ligatures ligature symbol) + [ ("->", <:unicode>); ("=>", <:unicode>); + (":=", <:unicode>); + ] + +let error lexbuf msg = + let begin_cnum, end_cnum = Ulexing.loc lexbuf in + raise (Error (begin_cnum, end_cnum, msg)) +let error_at_end lexbuf msg = + let begin_cnum, end_cnum = Ulexing.loc lexbuf in + raise (Error (begin_cnum, end_cnum, msg)) + + +(* +let return_symbol lexbuf s = return lexbuf ("SYMBOL", s) +let return_eoi lexbuf = return lexbuf ("EOI", "") +*) + +let remove_quotes s = String.sub s 1 (String.length s - 2) + +let mk_lexer token = + let tok_func stream = +(* let lexbuf = Ulexing.from_utf8_stream stream in *) +(** XXX Obj.magic rationale. + * The problem. + * camlp5 constraints the tok_func field of Token.glexer to have type: + * Stream.t char -> (Stream.t 'te * flocation_function) + * In order to use ulex we have (in theory) to instantiate a new lexbuf each + * time a char Stream.t is passed, destroying the previous lexbuf which may + * have consumed a character from the old stream which is lost forever :-( + * The "solution". + * Instead of passing to camlp5 a char Stream.t we pass a lexbuf, casting it to + * char Stream.t with Obj.magic where needed. 
+ *) + let lexbuf = Obj.magic stream in + Token.make_stream_and_location + (fun () -> + try + token lexbuf + with + | Ulexing.Error -> error_at_end lexbuf "Unexpected character" + | Ulexing.InvalidCodepoint p -> + error_at_end lexbuf (sprintf "Invalid code point: %d" p)) + in + { + Token.tok_func = tok_func; + Token.tok_using = (fun _ -> ()); + Token.tok_removing = (fun _ -> ()); + Token.tok_match = (* Token.default_match *) (fun (a,b) () -> a); + Token.tok_text = Token.lexer_text; + Token.tok_comm = None; + } + +let expand_macro lexbuf = + let macro = + Ulexing.utf8_sub_lexeme lexbuf 1 (Ulexing.lexeme_length lexbuf - 1) + in + try + ("SYMBOL" ^ (Utf8Macro.expand macro)) + with Utf8Macro.Macro_not_found _ -> +(* FG: unexpanded TeX macros are terminated by a space for rendering *) + "SYMBOL" ^ (Ulexing.utf8_lexeme lexbuf ^ " ") + +let remove_quotes s = String.sub s 1 (String.length s - 2) +let remove_left_quote s = String.sub s 1 (String.length s - 1) + +let print = Printf.fprintf + +let handle_full_id ch id uri = + print ch "%s" uri id + +let handle_full_sym ch s uri desc = + let uri = try HExtlib.unopt uri + with Failure _ -> "cic:/fakeuri.def(1)" + in print ch "%s" desc uri s + +let handle_full_num ch num uri desc = + let uri = try HExtlib.unopt uri + with Failure _ -> "cic:/fakeuri.def(1)" + in print ch "%s" desc uri num + +let handle_tex ch s = + (* print "TEX:%s" (remove_left_quote (Ulexing.utf8_lexeme lexbuf)) *) + print ch "%s" s + +let return_with_loc token begin_cnum end_cnum = + let flocation = HExtlib.floc_of_loc (begin_cnum,end_cnum) in + token, flocation + +let return lexbuf token = + let begin_cnum, end_cnum = Ulexing.loc lexbuf in + return_with_loc token begin_cnum end_cnum + + (** @param k continuation to be invoked when no ligature has been found *) +let ligatures_token k = + lexer +(* | ligature -> + let lexeme = Ulexing.utf8_lexeme lexbuf in + (match List.rev (Hashtbl.find_all ligatures lexeme) with + | [] -> (* ligature not found, rollback and try default lexer *) + Ulexing.rollback lexbuf; + k lexbuf + | default_lig :: _ -> (* ligatures found, use the default one *) + return_symbol lexbuf default_lig) *) + | eof -> return lexbuf () + | _ -> (* not a ligature, rollback and try default lexer *) + Ulexing.rollback lexbuf; + k lexbuf + +let handle_interpr loc literal interp outch = + try + (match DisambiguateTypes.InterprEnv.find loc interp with + | GrafiteAst.Ident_alias (_,uri) -> + handle_full_id outch literal uri + | GrafiteAst.Symbol_alias (_,uri,desc) -> + handle_full_sym outch literal uri desc + | GrafiteAst.Number_alias (uri,desc) -> + handle_full_num outch literal uri desc) + with Not_found -> print outch "%s" literal +;; + +let level2_ast_token interp outch = + let rec aux nesting = + lexer + | begincom -> (print outch "%s" (Ulexing.utf8_lexeme lexbuf); + aux (nesting+1) lexbuf) + | endcom -> (print outch "%s" (Ulexing.utf8_lexeme lexbuf); + aux (max 0 (nesting-1)) lexbuf) + | eof -> return lexbuf () + | _ -> if nesting > 0 + then (print outch "%s" (Ulexing.utf8_lexeme lexbuf); + aux nesting lexbuf) + else (Ulexing.rollback lexbuf; + basic_lexer lexbuf) + and basic_lexer = + lexer + | ident -> let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in + let id = Ulexing.utf8_lexeme lexbuf in + handle_interpr loc id interp outch; + aux 0 lexbuf + | variable_ident -> + let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in + let id = Ulexing.utf8_lexeme lexbuf in + handle_interpr loc id interp outch; + aux 0 lexbuf + | pident -> let loc = HExtlib.floc_of_loc (Ulexing.loc 
lexbuf) in + let id = Ulexing.utf8_lexeme lexbuf in + handle_interpr loc id interp outch; + aux 0 lexbuf + | number -> + let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in + let num = Ulexing.utf8_lexeme lexbuf in + handle_interpr loc num interp outch; + aux 0 lexbuf + | tex_token -> + let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in + let s = Ulexing.utf8_lexeme lexbuf in + handle_interpr loc s interp outch; + aux 0 lexbuf + | qstring -> (print outch "%s" (Ulexing.utf8_lexeme lexbuf); aux 0 lexbuf) + | csymbol -> + let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in + let s = Ulexing.utf8_lexeme lexbuf in + handle_interpr loc s interp outch; + aux 0 lexbuf + | eof -> return lexbuf () + | _ -> + let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in + let s = Ulexing.utf8_lexeme lexbuf in + handle_interpr loc s interp outch; + aux 0 lexbuf + in aux 0 + + +(* API implementation *) + +(*let mk_small_lexer interpr keywords = + let initial_keywords = + [ "CProp"; "Prop"; "Type"; "Set"; "let"; "match"; + "with"; "in"; "and"; "to"; "as"; "on"; "return"; "done"; + "rec"; "corec" ] + in + (*let status = + List.fold_right StringSet.add (initial_keywords @ keywords) StringSet.empty + in*) + mk_lexer (level2_ast_token outch interpr) +;;*) + +let mk_small_printer interpr outch = + let initial_keywords = + [ "CProp"; "Prop"; "Type"; "Set"; "let"; "match"; + "with"; "in"; "and"; "to"; "as"; "on"; "return"; "done"; + "rec"; "corec" ] + in + (* let status = + List.fold_right StringSet.add initial_keywords StringSet.empty + in *) + level2_ast_token interpr outch +;; + +(* let mk_small_lexer interpr = mk_small_lexer interpr [];;*) diff --git a/matitaB/components/content_pres/smallLexer.mli b/matitaB/components/content_pres/smallLexer.mli new file mode 100644 index 000000000..c4f4565bc --- /dev/null +++ b/matitaB/components/content_pres/smallLexer.mli @@ -0,0 +1,5 @@ +(* val mk_small_lexer : GrafiteAst.alias_spec DisambiguateTypes.InterprEnv.t -> + * unit Token.glexer*) +val mk_small_printer : + GrafiteAst.alias_spec DisambiguateTypes.InterprEnv.t -> + out_channel -> Ulexing.lexbuf -> unit * Stdpp.location diff --git a/matitaB/components/content_pres/termContentPres.ml b/matitaB/components/content_pres/termContentPres.ml index 53f0265b7..27b55ebc1 100644 --- a/matitaB/components/content_pres/termContentPres.ml +++ b/matitaB/components/content_pres/termContentPres.ml @@ -467,7 +467,7 @@ let rec pp_ast1 status term = NotationEnv.OptValue (Some (pp_value v)) | NotationEnv.ListValue vl -> NotationEnv.ListValue (List.map pp_value vl) - | NotationEnv.LocValue _ as v -> v + | NotationEnv.DisambiguationValue _ as v -> v in let ast_env_of_env env = List.map (fun (var, (ty, value)) -> (var, (ty, pp_value value))) env @@ -575,7 +575,7 @@ let tail_names names env = in aux [] env -let instantiate_level2 status env loc term = +let instantiate_level2 status env (loc,uri,desc) term = (* prerr_endline ("istanzio: " ^ NotationPp.pp_term term); *) let fresh_env = ref [] in let lookup_fresh_name n = @@ -607,7 +607,7 @@ let instantiate_level2 status env loc term = | Ast.Num _ | Ast.Sort _ | Ast.UserInput -> term - | Ast.Symbol _ -> Ast.AttributedTerm (`Loc loc, term) + | Ast.Symbol (s,_) -> aux_symbol s loc (uri,desc) | Ast.Magic magic -> aux_magic env magic | Ast.Variable var -> aux_variable env var @@ -615,6 +615,10 @@ let instantiate_level2 status env loc term = | Ast.Cast (t, ty) -> Ast.Cast (aux env t, aux env ty) | _ -> assert false + and aux_symbol s loc = function + | _, None -> Ast.AttributedTerm (`Loc loc, 
Ast.Symbol (s, None)) + | uri, Some desc -> + Ast.AttributedTerm (`Loc loc, Ast.Symbol (s, Some (uri,desc))) and aux_opt env = function | Some term -> Some (aux env term) | None -> None diff --git a/matitaB/components/content_pres/termContentPres.mli b/matitaB/components/content_pres/termContentPres.mli index cd7d1ee3f..4d0bb909b 100644 --- a/matitaB/components/content_pres/termContentPres.mli +++ b/matitaB/components/content_pres/termContentPres.mli @@ -54,4 +54,6 @@ val pp_ast: #status -> NotationPt.term -> NotationPt.term (** fills a term pattern instantiating variable magics *) val instantiate_level2: - #NCic.status -> NotationEnv.t -> Stdpp.location -> NotationPt.term -> NotationPt.term + #NCic.status -> NotationEnv.t -> + Stdpp.location * string option * string option -> + NotationPt.term -> NotationPt.term diff --git a/matitaB/components/grafite_engine/grafiteEngine.ml b/matitaB/components/grafite_engine/grafiteEngine.ml index d2f12dc9d..26ba7ffd0 100644 --- a/matitaB/components/grafite_engine/grafiteEngine.ml +++ b/matitaB/components/grafite_engine/grafiteEngine.ml @@ -134,13 +134,12 @@ let eval_alias status data= let basic_eval_input_notation (l1,l2) status = GrafiteParser.extend status l1 (fun env loc -> - let rec inner_loc default = function - | [] -> default - | (_,(NotationEnv.NoType,NotationEnv.LocValue l))::_ -> l - | _::tl -> inner_loc default tl + let rec get_disamb = function + | [] -> Stdpp.dummy_loc,None,None + | (_,(NotationEnv.NoType,NotationEnv.DisambiguationValue dv))::_ -> dv + | _::tl -> get_disamb tl in - let inner_loc = inner_loc loc env in - let l2' = TermContentPres.instantiate_level2 status env inner_loc l2 in + let l2' = TermContentPres.instantiate_level2 status env (get_disamb env) l2 in prerr_endline ("new l2 ast : " ^ (NotationPp.pp_term status l2')); NotationPt.AttributedTerm (`Loc loc,l2')) ;; diff --git a/matitaB/components/grafite_parser/grafiteParser.ml b/matitaB/components/grafite_parser/grafiteParser.ml index 2c9f44554..ca2224ff5 100644 --- a/matitaB/components/grafite_parser/grafiteParser.ml +++ b/matitaB/components/grafite_parser/grafiteParser.ml @@ -662,7 +662,8 @@ let extend status l1 action = ;; -let parse_statement status = +let parse_statement status = + status#reset_loctable (); parse_statement status#parser_db (* vim:set foldmethod=marker: *) diff --git a/matitaB/matita/Makefile b/matitaB/matita/Makefile index 0271b714a..25f5d38fe 100644 --- a/matitaB/matita/Makefile +++ b/matitaB/matita/Makefile @@ -20,7 +20,7 @@ OCAML_DEBUG_FLAGS = -g #OCAML_PROF=p -p a #OCAMLOPT_DEBUG_FLAGS = -p OCAMLC_FLAGS = $(OCAML_FLAGS) $(OCAML_THREADS_FLAGS) -OCAMLC = $(OCAMLFIND) ocamlc$(OCAML_PROF) $(OCAMLC_FLAGS) $(OCAML_DEBUG_FLAGS) +OCAMLC = $(OCAMLFIND) ocamlc$(OCAML_PROF) $(OCAMLC_FLAGS) $(OCAML_DEBUG_FLAGS) $(SYNTAXOPTIONS) OCAMLOPT = $(OCAMLFIND) opt $(OCAMLC_FLAGS) $(OCAMLOPT_DEBUG_FLAGS) OCAMLDEP = $(OCAMLFIND) ocamldep $(OCAMLDEP_FLAGS) INSTALL_PROGRAMS= matita matitac @@ -34,6 +34,7 @@ ifeq ($(NODB),true) endif MLI = \ + matitaScriptLexer.mli \ lablGraphviz.mli \ matitaTypes.mli \ matitaMisc.mli \ @@ -67,7 +68,7 @@ CML = buildTimeConf.ml $(CMLI:%.mli=%.ml) MAINCML = $(MAINCMLI:%.mli=%.ml) PROGRAMS_BYTE = \ - matita matitac matitaclean + matita matitac matitadaemon matitaclean PROGRAMS = $(PROGRAMS_BYTE) PROGRAMS_OPT = $(patsubst %,%.opt,$(PROGRAMS_BYTE)) NOINST_PROGRAMS = @@ -76,6 +77,11 @@ NOINST_PROGRAMS_OPT = $(patsubst %,%.opt,$(EXTRA_PROGRAMS)) .PHONY: all all: $(PROGRAMS) $(NOINST_PROGRAMS) +UTF8DIR := $(shell $(OCAMLFIND) query 
helm-syntax_extensions) +ULEXDIR := $(shell $(OCAMLFIND) query ulex08) + +matitaScriptLexer.cmo: SYNTAXOPTIONS = -pp "camlp5o -I $(UTF8DIR) -I $(ULEXDIR) pa_extend.cmo pa_ulex.cma pa_unicode_macro.cma -loc loc" + CMOS = $(ML:%.ml=%.cmo) CCMOS = $(CML:%.ml=%.cmo) MAINCMOS = $(MAINCML:%.ml=%.cmo) @@ -113,6 +119,9 @@ linkonly: $(H)$(OCAMLC) $(PKGS) -linkpkg -o matita $(CMOS) $(OCAML_DEBUG_FLAGS) matita.ml $(H)echo " OCAMLC matitac.ml" $(H)$(OCAMLC) $(CPKGS) -linkpkg -o matitac $(CCMOS) $(MAINCMOS) $(OCAML_DEBUG_FLAGS) matitac.ml + $(H)echo " OCAMLC matitadaemon.ml" + $(H)$(OCAMLC) $(CPKGS) -linkpkg -o matitadaemon $(CCMOS) $(MAINCMOS) $(OCAML_DEBUG_FLAGS) matitadaemon.ml + .PHONY: linkonly matita: matita.ml $(LIB_DEPS) $(CMOS) $(H)echo " OCAMLC $<" @@ -128,6 +137,13 @@ matitac.opt: matitac.ml $(CLIBX_DEPS) $(CCMXS) $(MAINCMXS) $(H)echo " OCAMLOPT $<" $(H)$(OCAMLOPT) $(CPKGS) -linkpkg -o $@ $(CCMXS) $(MAINCMXS) matitac.ml +matitadaemon: matitadaemon.ml $(CLIB_DEPS) $(CMOS) $(MAINCMOS) + $(H)echo " OCAMLC $<" + $(H)$(OCAMLC) $(PKGS) -linkpkg -o $@ $(CMOS) $(MAINCMOS) matitadaemon.ml +matitadaemon.opt: matitadaemon.ml $(CLIBX_DEPS) $(CMXS) $(MAINCMXS) + $(H)echo " OCAMLOPT $<" + $(H)$(OCAMLOPT) $(PKGS) -linkpkg -o $@ $(CMXS) $(MAINCMXS) matitadaemon.ml + rottener: rottener.ml $(CLIB_DEPS) $(CCMOS) $(MAINCMOS) $(H)echo " OCAMLC $<" $(H)$(OCAMLC) $(CPKGS) -package lablgtk2 -linkpkg -o $@ $(CCMOS) $(MAINCMOS) rottener.ml diff --git a/matitaB/matita/lib/basics/core_notation.ma b/matitaB/matita/lib/basics/core_notation.ma index cdb497371..098b5d419 100644 --- a/matitaB/matita/lib/basics/core_notation.ma +++ b/matitaB/matita/lib/basics/core_notation.ma @@ -2,7 +2,7 @@ notation "hvbox(∃ _ break . p)" with precedence 20 -for @{'exists $p }. +for @{'exists $p }. notation < "hvbox(\exists ident i : ty break . p)" right associative with precedence 20 diff --git a/matitaB/matita/lib/basics/logic.ma b/matitaB/matita/lib/basics/logic.ma index fd137d662..34299aff0 100644 --- a/matitaB/matita/lib/basics/logic.ma +++ b/matitaB/matita/lib/basics/logic.ma @@ -10,52 +10,52 @@ V_______________________________________________________________ *) include "basics/pts.ma". -include "hints_declaration.ma". +(*include "hints_declaration.ma".*) (* propositional equality *) inductive eq (A:Type[1]) (x:A) : A → Prop ≝ refl: eq A x x. - -interpretation "leibnitz's equality" 'eq t x y = (eq t x y). +interpretation "leibnitz's equality" 'eq t x y = (eq t x y). lemma eq_rect_r: - ∀A.∀a,x.∀p:eq ? x a.∀P: ∀x:A. x = a → Type[2]. P a (refl A a) → P x p. + ∀A.∀a,x.∀p:eq ? x a.∀P: + ∀x:A. x = a → Type[2]. P a (refl A a) → P x p. #A #a #x #p (cases p) // qed. lemma eq_ind_r : - ∀A.∀a.∀P: ∀x:A. x = a → Prop. P a (refl A a) → - ∀x.∀p:eq ? x a.P x p. - #A #a #P #p #x0 #p0; @(eq_rect_r ? ? ? p0) //; qed. + ∀A.∀a.∀P: ∀x:A. x = a → Prop. P a (refl A a) → + ∀x.∀p:eq ? x a.P x p. + #A #a #P #p #x0 #p0; @(eq_rect_r ? ? ? p0) //; qed. lemma eq_rect_Type2_r: - ∀A.∀a.∀P: ∀x:A. eq ? x a → Type[2]. P a (refl A a) → - ∀x.∀p:eq ? x a.P x p. + ∀A.∀a.∀P: ∀x:A. eq ? x a → Type[2]. P a (refl A a) → + ∀x.∀p:eq ? x a.P x p. #A #a #P #H #x #p (generalize in match H) (generalize in match P) cases p; //; qed. -theorem rewrite_l: ∀A:Type[1].∀x.∀P:A → Type[1]. P x → ∀y. x = y → P y. +theorem rewrite_l: ∀A:Type[1].∀x.∀P:A → Type[1]. P x → ∀y. x = y → P y. #A #x #P #Hx #y #Heq (cases Heq); //; qed. -theorem sym_eq: ∀A.∀x,y:A. x = y → y = x. -#A #x #y #Heq @(rewrite_l A x (λz.z=x)); //; qed. +theorem sym_eq: ∀A.∀x,y:A. x = y → y = x. 
+#A #x #y #Heq @(rewrite_l A x (λz.z=x)); //; qed. -theorem rewrite_r: ∀A:Type[1].∀x.∀P:A → Type[1]. P x → ∀y. y = x → P y. -#A #x #P #Hx #y #Heq (cases (sym_eq ? ? ? Heq)); //; qed. +theorem rewrite_r: ∀A:Type[1].∀x.∀P:A → Type[1]. P x → ∀y. y = x → P y. +#A #x #P #Hx #y #Heq (cases (sym_eq ? ? ? Heq)); //; qed. -theorem eq_coerc: ∀A,B:Type[0].A→(A=B)→B. +theorem eq_coerc: ∀A,B:Type[0].A→(A=B)→B. #A #B #Ha #Heq (elim Heq); //; qed. -theorem trans_eq : ∀A.∀x,y,z:A. x = y → y = z → x = z. +theorem trans_eq : ∀A.∀x,y,z:A. x = y → y = z → x = z. #A #x #y #z #H1 #H2 >H1; //; qed. -theorem eq_f: ∀A,B.∀f:A→B.∀x,y:A. x=y → f x = f y. +theorem eq_f: ∀A,B.∀f:A→B.∀x,y:A. x=y → f x = f y. #A #B #f #x #y #H >H; //; qed. (* deleterio per auto? *) theorem eq_f2: ∀A,B,C.∀f:A→B→C. -∀x1,x2:A.∀y1,y2:B. x1=x2 → y1=y2 → f x1 y1 = f x2 y2. +∀x1,x2:A.∀y1,y2:B. x1=x2 → y1=y2 → f x1 y1 = f x2 y2. #A #B #C #f #x1 #x2 #y1 #y2 #E1 #E2 >E1; >E2; //; qed. (* hint to genereric equality @@ -80,11 +80,12 @@ inductive False: Prop ≝ . λA. A → False. *) inductive Not (A:Prop): Prop ≝ -nmk: (A → False) → Not A. +nmk: (A → False) → Not A. + interpretation "logical not" 'not x = (Not x). -theorem absurd : ∀A:Prop. A → ¬A → False. +theorem absurd : ∀A:Prop. A → ¬A → False. #A #H #Hn (elim Hn); /2/; qed. (* @@ -92,13 +93,13 @@ ntheorem absurd : ∀ A,C:Prop. A → ¬A → C. #A; #C; #H; #Hn; nelim (Hn H). nqed. *) -theorem not_to_not : ∀A,B:Prop. (A → B) → ¬B →¬A. +theorem not_to_not : ∀A,B:Prop. (A → B) → ¬B →¬A. /4/; qed. (* inequality *) interpretation "leibnitz's non-equality" 'neq t x y = (Not (eq t x y)). -theorem sym_not_eq: ∀A.∀x,y:A. x ≠ y → y ≠ x. +theorem sym_not_eq: ∀A.∀x,y:A. x ≠ y → y ≠ x. /3/; qed. (* and *) @@ -107,10 +108,10 @@ inductive And (A,B:Prop) : Prop ≝ interpretation "logical and" 'and x y = (And x y). -theorem proj1: ∀A,B:Prop. A ∧ B → A. +theorem proj1: ∀A,B:Prop. A ∧ B → A. #A #B #AB (elim AB) //; qed. -theorem proj2: ∀ A,B:Prop. A ∧ B → B. +theorem proj2: ∀ A,B:Prop. A ∧ B → B. #A #B #AB (elim AB) //; qed. (* or *) @@ -121,7 +122,7 @@ inductive Or (A,B:Prop) : Prop ≝ interpretation "logical or" 'or x y = (Or x y). definition decidable : Prop → Prop ≝ -λ A:Prop. A ∨ ¬ A. +λ A:Prop. A ∨ ¬ A. (* exists *) inductive ex (A:Type[0]) (P:A → Prop) : Prop ≝ @@ -134,7 +135,7 @@ inductive ex2 (A:Type[0]) (P,Q:A →Prop) : Prop ≝ (* iff *) definition iff := - λ A,B. (A → B) ∧ (B → A). + λ A,B. (A → B) ∧ (B → A). interpretation "iff" 'iff a b = (iff a b). @@ -142,84 +143,84 @@ interpretation "iff" 'iff a b = (iff a b). definition R0 ≝ λT:Type[0].λt:T.t. -definition R1 ≝ eq_rect_Type0. +definition R1 ≝ eq_rect_Type0. (* useless stuff *) definition R2 : ∀T0:Type[0]. ∀a0:T0. - ∀T1:∀x0:T0. a0=x0 → Type[0]. - ∀a1:T1 a0 (refl ? a0). - ∀T2:∀x0:T0. ∀p0:a0=x0. ∀x1:T1 x0 p0. R1 ?? T1 a1 ? p0 = x1 → Type[0]. - ∀a2:T2 a0 (refl ? a0) a1 (refl ? a1). + ∀T1:∀x0:T0. a0=x0 → Type[0]. + ∀a1:T1 a0 (refl ? a0). + ∀T2:∀x0:T0. ∀p0:a0=x0. ∀x1:T1 x0 p0. R1 ?? T1 a1 ? p0 = x1 → Type[0]. + ∀a2:T2 a0 (refl ? a0) a1 (refl ? a1). ∀b0:T0. - ∀e0:a0 = b0. + ∀e0:a0 = b0. ∀b1: T1 b0 e0. - ∀e1:R1 ?? T1 a1 ? e0 = b1. + ∀e1:R1 ?? T1 a1 ? e0 = b1. T2 b0 e0 b1 e1. #T0 #a0 #T1 #a1 #T2 #a2 #b0 #e0 #b1 #e1 -@(eq_rect_Type0 ????? e1) -@(R1 ?? ? ?? e0) +@(eq_rect_Type0 ????? e1) +@(R1 ?? ? ?? e0) @a2 qed. definition R3 : ∀T0:Type[0]. ∀a0:T0. - ∀T1:∀x0:T0. a0=x0 → Type[0]. - ∀a1:T1 a0 (refl ? a0). - ∀T2:∀x0:T0. ∀p0:a0=x0. ∀x1:T1 x0 p0. R1 ?? T1 a1 ? p0 = x1 → Type[0]. - ∀a2:T2 a0 (refl ? a0) a1 (refl ? a1). - ∀T3:∀x0:T0. ∀p0:a0=x0. ∀x1:T1 x0 p0.∀p1:R1 ?? T1 a1 ? 
p0 = x1. - ∀x2:T2 x0 p0 x1 p1.R2 ???? T2 a2 x0 p0 ? p1 = x2 → Type[0]. - ∀a3:T3 a0 (refl ? a0) a1 (refl ? a1) a2 (refl ? a2). + ∀T1:∀x0:T0. a0=x0 → Type[0]. + ∀a1:T1 a0 (refl ? a0). + ∀T2:∀x0:T0. ∀p0:a0=x0. ∀x1:T1 x0 p0. R1 ?? T1 a1 ? p0 = x1 → Type[0]. + ∀a2:T2 a0 (refl ? a0) a1 (refl ? a1). + ∀T3:∀x0:T0. ∀p0:a0=x0. ∀x1:T1 x0 p0.∀p1:R1 ?? T1 a1 ? p0 = x1. + ∀x2:T2 x0 p0 x1 p1.R2 ???? T2 a2 x0 p0 ? p1 = x2 → Type[0]. + ∀a3:T3 a0 (refl ? a0) a1 (refl ? a1) a2 (refl ? a2). ∀b0:T0. - ∀e0:a0 = b0. + ∀e0:a0 = b0. ∀b1: T1 b0 e0. - ∀e1:R1 ?? T1 a1 ? e0 = b1. + ∀e1:R1 ?? T1 a1 ? e0 = b1. ∀b2: T2 b0 e0 b1 e1. - ∀e2:R2 ???? T2 a2 b0 e0 ? e1 = b2. + ∀e2:R2 ???? T2 a2 b0 e0 ? e1 = b2. T3 b0 e0 b1 e1 b2 e2. #T0 #a0 #T1 #a1 #T2 #a2 #T3 #a3 #b0 #e0 #b1 #e1 #b2 #e2 -@(eq_rect_Type0 ????? e2) -@(R2 ?? ? ???? e0 ? e1) +@(eq_rect_Type0 ????? e2) +@(R2 ?? ? ???? e0 ? e1) @a3 qed. definition R4 : ∀T0:Type[0]. ∀a0:T0. - ∀T1:∀x0:T0. eq T0 a0 x0 → Type[0]. - ∀a1:T1 a0 (refl T0 a0). - ∀T2:∀x0:T0. ∀p0:eq (T0 …) a0 x0. ∀x1:T1 x0 p0.eq (T1 …) (R1 T0 a0 T1 a1 x0 p0) x1 → Type[0]. - ∀a2:T2 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1). - ∀T3:∀x0:T0. ∀p0:eq (T0 …) a0 x0. ∀x1:T1 x0 p0.∀p1:eq (T1 …) (R1 T0 a0 T1 a1 x0 p0) x1. - ∀x2:T2 x0 p0 x1 p1.eq (T2 …) (R2 T0 a0 T1 a1 T2 a2 x0 p0 x1 p1) x2 → Type[0]. - ∀a3:T3 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1) - a2 (refl (T2 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1)) a2). - ∀T4:∀x0:T0. ∀p0:eq (T0 …) a0 x0. ∀x1:T1 x0 p0.∀p1:eq (T1 …) (R1 T0 a0 T1 a1 x0 p0) x1. - ∀x2:T2 x0 p0 x1 p1.∀p2:eq (T2 …) (R2 T0 a0 T1 a1 T2 a2 x0 p0 x1 p1) x2. - ∀x3:T3 x0 p0 x1 p1 x2 p2.∀p3:eq (T3 …) (R3 T0 a0 T1 a1 T2 a2 T3 a3 x0 p0 x1 p1 x2 p2) x3. + ∀T1:∀x0:T0. eq T0 a0 x0 → Type[0]. + ∀a1:T1 a0 (refl T0 a0). + ∀T2:∀x0:T0. ∀p0:eq (T0 …) a0 x0. ∀x1:T1 x0 p0.eq (T1 …) (R1 T0 a0 T1 a1 x0 p0) x1 → Type[0]. + ∀a2:T2 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1). + ∀T3:∀x0:T0. ∀p0:eq (T0 …) a0 x0. ∀x1:T1 x0 p0.∀p1:eq (T1 …) (R1 T0 a0 T1 a1 x0 p0) x1. + ∀x2:T2 x0 p0 x1 p1.eq (T2 …) (R2 T0 a0 T1 a1 T2 a2 x0 p0 x1 p1) x2 → Type[0]. + ∀a3:T3 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1) + a2 (refl (T2 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1)) a2). + ∀T4:∀x0:T0. ∀p0:eq (T0 …) a0 x0. ∀x1:T1 x0 p0.∀p1:eq (T1 …) (R1 T0 a0 T1 a1 x0 p0) x1. + ∀x2:T2 x0 p0 x1 p1.∀p2:eq (T2 …) (R2 T0 a0 T1 a1 T2 a2 x0 p0 x1 p1) x2. + ∀x3:T3 x0 p0 x1 p1 x2 p2.∀p3:eq (T3 …) (R3 T0 a0 T1 a1 T2 a2 T3 a3 x0 p0 x1 p1 x2 p2) x3. Type[0]. - ∀a4:T4 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1) - a2 (refl (T2 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1)) a2) - a3 (refl (T3 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1) - a2 (refl (T2 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1)) a2)) + ∀a4:T4 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1) + a2 (refl (T2 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1)) a2) + a3 (refl (T3 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1) + a2 (refl (T2 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1)) a2)) a3). ∀b0:T0. - ∀e0:eq (T0 …) a0 b0. + ∀e0:eq (T0 …) a0 b0. ∀b1: T1 b0 e0. - ∀e1:eq (T1 …) (R1 T0 a0 T1 a1 b0 e0) b1. + ∀e1:eq (T1 …) (R1 T0 a0 T1 a1 b0 e0) b1. ∀b2: T2 b0 e0 b1 e1. - ∀e2:eq (T2 …) (R2 T0 a0 T1 a1 T2 a2 b0 e0 b1 e1) b2. + ∀e2:eq (T2 …) (R2 T0 a0 T1 a1 T2 a2 b0 e0 b1 e1) b2. ∀b3: T3 b0 e0 b1 e1 b2 e2. - ∀e3:eq (T3 …) (R3 T0 a0 T1 a1 T2 a2 T3 a3 b0 e0 b1 e1 b2 e2) b3. + ∀e3:eq (T3 …) (R3 T0 a0 T1 a1 T2 a2 T3 a3 b0 e0 b1 e1 b2 e2) b3. T4 b0 e0 b1 e1 b2 e2 b3 e3. 
#T0 #a0 #T1 #a1 #T2 #a2 #T3 #a3 #T4 #a4 #b0 #e0 #b1 #e1 #b2 #e2 #b3 #e3 -@(eq_rect_Type0 ????? e3) -@(R3 ????????? e0 ? e1 ? e2) +@(eq_rect_Type0 ????? e3) +@(R3 ????????? e0 ? e1 ? e2) @a4 qed. (* TODO concrete definition by means of proof irrelevance *) -axiom streicherK : ∀T:Type[1].∀t:T.∀P:t = t → Type[2].P (refl ? t) → ∀p.P p. +axiom streicherK : ∀T:Type[1].∀t:T.∀P:t = t → Type[2].P (refl ? t) → ∀p.P p. \ No newline at end of file diff --git a/matitaB/matita/matitaEngine.ml b/matitaB/matita/matitaEngine.ml index 68cb16c84..437121b73 100644 --- a/matitaB/matita/matitaEngine.ml +++ b/matitaB/matita/matitaEngine.ml @@ -116,44 +116,9 @@ let activate_extraction baseuri fname = ;; -let eval_ast ~include_paths ?do_heavy_checks status (text,prefix_len,ast) = +let eval_ast ~include_paths ?do_heavy_checks status (text,prefix_len,ast as ast') = let baseuri = status#baseuri in - (* - let new_aliases,new_status = - GrafiteDisambiguate.eval_with_new_aliases status - (fun status -> - GrafiteEngine.eval_ast ~include_paths ?do_heavy_checks status - (text,prefix_len,ast)) in - *) - let new_status = - GrafiteEngine.eval_ast ~include_paths ?do_heavy_checks status - (text,prefix_len,ast) in - (* - let _,intermediate_states = - List.fold_left - (fun (status,acc) (k,value) -> - let v = GrafiteAst.description_of_alias value in - let b = - try - let NReference.Ref (uri,_) = NReference.reference_of_string v in - NUri.baseuri_of_uri uri = baseuri - with - NReference.IllFormedReference _ -> - false (* v is a description, not a URI *) - in - if b then - status,acc - else - let status = - GrafiteDisambiguate.set_proof_aliases status ~implicit_aliases:false - GrafiteAst.WithPreferences [k,value] - in - status, (status ,Some (k,value))::acc - ) (status,[]) new_aliases (* WARNING: this must be the old status! *) - in - (new_status,None)::intermediate_states - *) - [new_status,None] + GrafiteEngine.eval_ast ~include_paths ?do_heavy_checks status ast' ;; let baseuri_of_script ~include_paths fname = @@ -192,14 +157,8 @@ and eval_from_stream ~compiling ~asserted ~include_paths ?do_heavy_checks status | None -> asserted, true, status | Some (asserted,ast) -> cb status ast; - let new_statuses = - eval_ast ~include_paths ?do_heavy_checks status ("",0,ast) in let status = - match new_statuses with - [s,None] -> s - | _::(_,Some (_,value))::_ -> - raise (TryingToAdd (lazy (GrafiteAstPp.pp_alias value))) - | _ -> assert false + eval_ast ~include_paths ?do_heavy_checks status ("",0,ast) in asserted, false, status with exn when not matita_debug -> @@ -209,7 +168,7 @@ and eval_from_stream ~compiling ~asserted ~include_paths ?do_heavy_checks status in loop asserted status -and compile ~compiling ~asserted ~include_paths fname = +and compile ~compiling ~asserted ~include_paths ~outch fname = if List.mem fname compiling then raise (CircularDependency fname); let compiling = fname::compiling in let matita_debug = Helm_registry.get_bool "matita.debug" in @@ -279,10 +238,8 @@ and compile ~compiling ~asserted ~include_paths fname = pp_times fname true big_bang big_bang_u big_bang_s; (* XXX: update script -- currently to stdout *) let origbuf = Ulexing.from_utf8_channel (open_in fname) in - let outfile = open_out (fname ^ ".mad") in let interpr = GrafiteDisambiguate.get_interpr status#disambiguate_db in - ignore (SmallLexer.mk_small_printer interpr outfile origbuf); - close_out outfile; + ignore (SmallLexer.mk_small_printer interpr outch origbuf); asserted (* MATITA 1.0: debbo fare time_travel sulla ng_library? 
LexiconSync.time_travel @@ -297,7 +254,7 @@ and compile ~compiling ~asserted ~include_paths fname = pp_times fname false big_bang big_bang_u big_bang_s; clean_exit baseuri exn -and assert_ng ~already_included ~compiling ~asserted ~include_paths mapath = +and assert_ng ~already_included ~compiling ~asserted ~include_paths ?outch mapath = let _,baseuri,fullmapath,_ = Librarian.baseuri_of_script ~include_paths mapath in if List.mem fullmapath asserted then asserted,false else @@ -342,13 +299,19 @@ and assert_ng ~already_included ~compiling ~asserted ~include_paths mapath = (* maybe recompiling it I would get the same... *) raise (AlreadyLoaded (lazy mapath)) else - let asserted = compile ~compiling ~asserted ~include_paths fullmapath in + let outch, is_file_ch = + match outch with + | None -> open_out (fullmapath ^ ".mad"), true + | Some c -> c, false + in + let asserted = compile ~compiling ~asserted ~include_paths ~outch fullmapath in + if is_file_ch then close_out outch; fullmapath::asserted,true end ;; -let assert_ng ~include_paths mapath = +let assert_ng ~include_paths ?outch mapath = snd (assert_ng ~include_paths ~already_included:[] ~compiling:[] ~asserted:[] - mapath) + ?outch mapath) let get_ast status ~include_paths strm = snd (get_ast status ~compiling:[] ~asserted:[] ~include_paths strm) diff --git a/matitaB/matita/matitaEngine.mli b/matitaB/matita/matitaEngine.mli index d412d3ad0..ceffc6f7f 100644 --- a/matitaB/matita/matitaEngine.mli +++ b/matitaB/matita/matitaEngine.mli @@ -48,7 +48,6 @@ val eval_ast : GrafiteTypes.status -> string * int * GrafiteAst.statement -> - (GrafiteTypes.status * - (DisambiguateTypes.domain_item * GrafiteAst.alias_spec) option) list + GrafiteTypes.status -val assert_ng: include_paths:string list -> string -> bool +val assert_ng: include_paths:string list -> ?outch:out_channel -> string -> bool diff --git a/matitaB/matita/matitaScript.ml b/matitaB/matita/matitaScript.ml index af3cdf12e..5b794a6d8 100644 --- a/matitaB/matita/matitaScript.ml +++ b/matitaB/matita/matitaScript.ml @@ -65,30 +65,23 @@ let first_line s = String.sub s 0 nl_pos with Not_found -> s +(* WR: skipped_txt, nonskipped_txt servono ancora? + * (ora non abbiamo più gli alias e il codice sembra più complicato + * del necessario) + * (rif. 
nota 11/05/11 *) let eval_with_engine include_paths status skipped_txt nonskipped_txt st = let parsed_text_length = - String.length skipped_txt + String.length nonskipped_txt + String.length skipped_txt + String.length nonskipped_txt in let text = skipped_txt ^ nonskipped_txt in let prefix_len = MatitaGtkMisc.utf8_string_length skipped_txt in - let enriched_history_fragment = + let new_status = MatitaEngine.eval_ast ~include_paths ~do_heavy_checks:(Helm_registry.get_bool "matita.do_heavy_checks") status (text,prefix_len,st) in - let enriched_history_fragment = List.rev enriched_history_fragment in - (* really fragile *) - let res,_ = - List.fold_left - (fun (acc, to_prepend) (status,alias) -> - match alias with - | None -> (status,to_prepend ^ nonskipped_txt)::acc,"" - | Some (k,value) -> - let newtxt = GrafiteAstPp.pp_alias value in - (status,to_prepend ^ newtxt ^ "\n")::acc, "") - ([],skipped_txt) enriched_history_fragment - in + let res = new_status,skipped_txt ^ nonskipped_txt in res,"",parsed_text_length ;; @@ -106,7 +99,7 @@ let eval_nmacro include_paths (buffer : GText.buffer) status unparsed_text parse List.filter (fun x,_ -> List.mem x selected) menv in CicMathView.screenshot status sequents menv subst name; - [status, parsed_text], "", parsed_text_length + (status, parsed_text), "", parsed_text_length | TA.NCheck (_,t) -> let status = script#status in let _,_,menv,subst,_ = status#obj in @@ -126,13 +119,13 @@ let eval_nmacro include_paths (buffer : GText.buffer) status unparsed_text parse (* XXX use the metasenv, if possible *) in MatitaMathView.cicBrowser (Some (`NCic (t,ctx,m,s))); - [status, parsed_text], "", parsed_text_length + (status, parsed_text), "", parsed_text_length | TA.NIntroGuess _loc -> let names_ref = ref [] in let s = NTactics.intros_tac ~names_ref [] script#status in let rex = Pcre.regexp ~flags:[`MULTILINE] "\\A([\\n\\t\\r ]*).*\\Z" in let nl = Pcre.replace ~rex ~templ:"$1" parsed_text in - [s, nl ^ "#" ^ String.concat " " !names_ref ^ ";"], "", parsed_text_length + (s, nl ^ "#" ^ String.concat " " !names_ref ^ ";"), "", parsed_text_length | TA.NAutoInteractive (_loc, (None,a)) -> let trace_ref = ref [] in let s = NnAuto.auto_tac ~params:(None,a) ~trace_ref script#status in @@ -153,7 +146,7 @@ let eval_nmacro include_paths (buffer : GText.buffer) status unparsed_text parse in let rex = Pcre.regexp ~flags:[`MULTILINE] "\\A([\\n\\t\\r ]*).*\\Z" in let nl = Pcre.replace ~rex ~templ:"$1" parsed_text in - [s, nl ^ trace ^ thms ^ ";"], "", parsed_text_length + (s, nl ^ trace ^ thms ^ ";"), "", parsed_text_length | TA.NAutoInteractive (_, (Some _,_)) -> assert false let rec eval_executable include_paths (buffer : GText.buffer) @@ -166,7 +159,7 @@ status unparsed_text skipped_txt nonskipped_txt script ex loc eval_with_engine include_paths status skipped_txt nonskipped_txt (TA.Executable (loc, ex)) with - MatitaTypes.Cancel -> [], "", 0 + MatitaTypes.Cancel -> (status,skipped_txt), "", 0 | GrafiteEngine.NMacro (_loc,macro) -> eval_nmacro include_paths buffer status unparsed_text (skipped_txt ^ nonskipped_txt) script macro @@ -222,10 +215,8 @@ and eval_statement include_paths (buffer : GText.buffer) status script (offset+parsed_text_length, errorll)) in assert (text=""); (* no macros inside comments, please! 
*) - (match s with - | (statuses,text)::tl -> - (statuses,parsed_text ^ text)::tl,"",parsed_text_length + len - | [] -> [], "", 0) + let st,text = s in + (st,parsed_text ^ text),"",parsed_text_length + len let fresh_script_id = let i = ref 0 in @@ -341,6 +332,10 @@ object (self) ~callback:(fun () -> self#clean_dirty_lock)); ignore (GMain.Timeout.add ~ms:300000 ~callback:(fun _ -> self#_saveToBackupFile ();true)); + ignore (buffer#connect#changed + (fun _ -> + let curpos = buffer#cursor_position in + HLog.debug ("cursor at " ^ (string_of_int curpos)))); ignore (buffer#connect#modified_changed (fun _ -> self#set_star buffer#modified)); (* clean_locked is set to true only "during" a PRIMARY paste @@ -673,26 +668,23 @@ object (self) in let time2 = Unix.gettimeofday () in HLog.debug ("... done in " ^ string_of_float (time2 -. time1) ^ "s"); - let new_statuses, new_statements = - let statuses, texts = List.split entries in - statuses, texts - in - history <- new_statuses @ history; - statements <- new_statements @ statements; + let new_statuses, new_statements = entries in + history <- new_statuses :: history; + statements <- new_statements :: statements; let start = buffer#get_iter_at_mark (`MARK locked_mark) in - let new_text = String.concat "" (List.rev new_statements) in if statement <> None then - buffer#insert ~iter:start new_text + buffer#insert ~iter:start new_statements else begin let parsed_text = String.sub s 0 parsed_len in - if new_text <> parsed_text then begin + if new_statements <> parsed_text then begin let stop = start#copy#forward_chars (Glib.Utf8.length parsed_text) in buffer#delete ~start ~stop; - buffer#insert ~iter:start new_text; + buffer#insert ~iter:start new_statements; end; end; - self#moveMark (Glib.Utf8.length new_text); - buffer#insert ~iter:(buffer#get_iter_at_mark (`MARK locked_mark)) newtext + self#moveMark (Glib.Utf8.length new_statements); + buffer#insert ~iter:(buffer#get_iter_at_mark (`MARK locked_mark)) + newtext method private _retract offset status new_statements new_history = NCicLibrary.time_travel status; @@ -807,8 +799,78 @@ object (self) self#notify method loadFromFile f = - buffer#set_text (HExtlib.input_file f); + let script = HExtlib.input_file f in + let hs = MatitaScriptLexer.get_hot_spots script in + let hstext = + String.concat "\n" (List.map (fun (l1,l2,uri,desc) -> + let a,b = HExtlib.loc_of_floc l1 in + let c,d = HExtlib.loc_of_floc l2 in + let uri = match uri with None -> "()" | Some s -> s in + let desc = match desc with None -> "()" | Some s -> s in + Printf.sprintf "[%d,%d] [%d,%d] '%s' '%s'" a b c d uri desc) hs) + in + HLog.debug hstext; + buffer#set_text script; + + (* this is the default tag for the whole script (mainly used for + * setting the default behaviour when needed *) + let all_tag = buffer#create_tag [] in + buffer#apply_tag all_tag ~start:(buffer#get_iter `START) + ~stop:(buffer#get_iter `END); + ignore(all_tag#connect#event + ~callback:(fun ~origin event pos -> + match GdkEvent.get_type event with + | `MOTION_NOTIFY -> + Gdk.Window.set_cursor + (match source_view#get_window `TEXT with None -> assert false | Some x -> x) + (Gdk.Cursor.create `ARROW); + (* code for removing message from statusbar *) + false + | _ -> false)); + + let invisible_tag = buffer#create_tag [ `INVISIBLE true ] in + let apply_tag (l1,l2,uri,desc) = + let hyperlink_tag = + buffer#create_tag [ `FOREGROUND "blue" ; `UNDERLINE `SINGLE] in + (* hyperlink_tag#connect#after#changed ~callback:(fun _ -> HLog.debug "***"); *) + let a,b = HExtlib.loc_of_floc 
l1 in + let c,d = HExtlib.loc_of_floc l2 in + (* setting invisibility for and *) + buffer#apply_tag invisible_tag + ~start:(buffer#get_iter_at_char a) + ~stop:(buffer#get_iter_at_char b); + buffer#apply_tag invisible_tag + ~start:(buffer#get_iter_at_char c) + ~stop:(buffer#get_iter_at_char d); + (* setting hyperlink inside and *) + (* XXX: I'm not sure why I'm required to put those -1 and +1 + * otherwise I get too small a clickable area, especially for + * single characters hyperlinks. *) + buffer#apply_tag hyperlink_tag + ~start:(buffer#get_iter_at_char (b-1)) + ~stop:(buffer#get_iter_at_char (c+1)); + match uri with + | None -> () + | Some uri -> + ignore(hyperlink_tag#connect#event + ~callback:(fun ~origin event pos -> + match GdkEvent.get_type event with + `BUTTON_PRESS -> + let uri' = NReference.reference_of_string uri in + MatitaMathView.cicBrowser (Some (`NRef uri')); + true + | `MOTION_NOTIFY -> + Gdk.Window.set_cursor + (match source_view#get_window `TEXT with None -> assert false | Some x -> x) + (Gdk.Cursor.create `HAND1); + (* let ctxt = (MatitaMisc.get_gui ())#main#statusBar#new_context ~name:"href" in + let msg = ctxt#push uri in + (* href_statusbar_msg <- Some (ctxt, msg);*) *) + false + | _ -> false)) + in self#reset_buffer; + List.iter apply_tag hs; buffer#set_modified false method assignFileName file =
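(* Illustrative sketch, not part of the patch above: loadFromFile derives three
   character ranges from every hot spot returned by
   MatitaScriptLexer.get_hot_spots -- the opening markup [a,b) and the closing
   markup [c,d) are made invisible, while the text in between receives the
   hyperlink tag.  The helper below reproduces only that range arithmetic on
   plain integer positions (the real code then maps the ranges onto
   GtkTextBuffer tags); the -1/+1 widening copies the workaround mentioned in
   the comment of the patched code. *)

type link_ranges = {
  hidden : (int * int) list;  (* markup spans to hide *)
  link   : int * int;         (* span that receives the hyperlink tag *)
}

let ranges_of_hot_spot ~a ~b ~c ~d =
  (* a,b delimit the opening tag and c,d the closing one, with a <= b <= c <= d *)
  { hidden = [ (a, b); (c, d) ]; link = (b - 1, c + 1) }

(* e.g. ranges_of_hot_spot ~a:10 ~b:25 ~c:31 ~d:35
   = { hidden = [(10, 25); (31, 35)]; link = (24, 32) } *)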
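(* Illustrative sketch, not part of the patch: with LocValue replaced by
   DisambiguationValue, the notation environment now carries a
   (location, uri option, description option) triple, and
   TermContentPres.instantiate_level2 receives that triple instead of a bare
   location.  The stand-alone fragment below mimics how grafiteEngine picks
   the first such triple out of an environment, falling back to a dummy
   location; the value type is a cut-down stand-in, not the real
   NotationEnv.value. *)

type 'loc env_value =
  | TermValue of string                                         (* simplified payload *)
  | DisambiguationValue of ('loc * string option * string option)

let first_disambiguation ~dummy_loc env =
  (* scan the bindings and return the first disambiguation triple, if any *)
  let rec scan = function
    | [] -> (dummy_loc, None, None)
    | (_name, DisambiguationValue dv) :: _ -> dv
    | _ :: tl -> scan tl
  in
  scan env

(* e.g.
   first_disambiguation ~dummy_loc:0
     [ "t", TermValue "x"; "_", DisambiguationValue (42, Some "cic:/foo", None) ]
   = (42, Some "cic:/foo", None)                 -- "cic:/foo" is a made-up URI *)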
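(* Illustrative sketch, not part of the patch: compile now takes the output
   channel for the ".mad" pretty-printed script as a parameter, and assert_ng
   only opens (and closes) a per-file channel when the caller did not pass
   one.  A minimal version of that idiom, with [emit] standing in for the real
   SmallLexer-based printing step and with an added try/with that the patch
   itself does not have: *)

let with_mad_channel ?outch fname (emit : out_channel -> unit) =
  match outch with
  | Some ch -> emit ch                         (* caller owns and closes the channel *)
  | None ->
      let ch = open_out (fname ^ ".mad") in    (* same suffix used by the patch *)
      (try emit ch with e -> close_out ch; raise e);
      close_out ch

(* usage:
   with_mad_channel "foo.ma" (fun ch -> output_string ch "disambiguated text");
   with_mad_channel ~outch:stdout "foo.ma" (fun ch -> output_string ch "to stdout") *)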