From: Enrico Tassi
Date: Fri, 26 Sep 2008 16:38:29 +0000 (+0000)
Subject: more push/pop to avoid confusion with imperative data structures employed by
X-Git-Tag: make_still_working~4727
X-Git-Url: http://matita.cs.unibo.it/gitweb/?a=commitdiff_plain;h=01001c883a8151edba81cd03a6f254d24a81c867;p=helm.git

More push/pop to avoid confusion with the imperative data structures employed by
notation. The idea is the same everywhere: the mutable state becomes a ref cell,
push saves the current value on a history stack and installs a fresh one, and pop
restores the previous value (a minimal sketch of the pattern is appended after the
patch). The patch is not well tested, but it already improves the situation and
makes the library compile again. I commit it so that on Monday we will again have
the livecd and the .deb package that the sysadmins need to install Matita on the
CS cluster.
---
diff --git a/helm/software/components/content_pres/cicNotationLexer.ml b/helm/software/components/content_pres/cicNotationLexer.ml
index 4221417a0..f80821a5f 100644
--- a/helm/software/components/content_pres/cicNotationLexer.ml
+++ b/helm/software/components/content_pres/cicNotationLexer.ml
@@ -107,7 +107,7 @@ let level1_keywords =
 ] @ level1_layouts

 let level2_meta_keywords =
-  [ "if"; "then"; "else";
   "fold"; "left"; "right"; "rec";
   "fail"; "default";
@@ -115,18 +115,25 @@ let level2_meta_keywords =
 ]

 (* (string, unit) Hashtbl.t, to exploit multiple bindings *)
-let level2_ast_keywords = Hashtbl.create 23
-let _ =
-  List.iter (fun k -> Hashtbl.add level2_ast_keywords k ())
+let initial_level2_ast_keywords () = Hashtbl.create 23;;
+
+let level2_ast_keywords = ref (initial_level2_ast_keywords ())
+
+let initialize_keywords () =
+  List.iter (fun k -> Hashtbl.add !level2_ast_keywords k ())
   [ "CProp"; "Prop"; "Type"; "Set"; "let"; "match";
     "with"; "in"; "and"; "to"; "as"; "on"; "return"; "done" ]
+;;
+
+let _ = initialize_keywords ();;

-let add_level2_ast_keyword k = Hashtbl.add level2_ast_keywords k ()
-let remove_level2_ast_keyword k = Hashtbl.remove level2_ast_keywords k
+let add_level2_ast_keyword k = Hashtbl.add !level2_ast_keywords k ()
+let remove_level2_ast_keyword k = Hashtbl.remove !level2_ast_keywords k

 (* (string, int) Hashtbl.t, with multiple bindings.
* int is the unicode codepoint *) let ligatures = Hashtbl.create 23 + let _ = List.iter (fun (ligature, symbol) -> Hashtbl.add ligatures ligature symbol) @@ -295,7 +302,7 @@ and level2_ast_token = | placeholder -> return lexbuf ("PLACEHOLDER", "") | ident -> let lexeme = Ulexing.utf8_lexeme lexbuf in - if Hashtbl.mem level2_ast_keywords lexeme then + if Hashtbl.mem !level2_ast_keywords lexeme then return lexbuf ("", lexeme) else return lexbuf ("IDENT", lexeme) @@ -354,9 +361,18 @@ let level2_ast_token = ligatures_token level2_ast_token (* API implementation *) -let level1_pattern_lexer = mk_lexer level1_pattern_token -let level2_ast_lexer = mk_lexer level2_ast_token -let level2_meta_lexer = mk_lexer level2_meta_token +let initial_level1_pattern_lexer () = mk_lexer level1_pattern_token +let initial_level2_ast_lexer () = mk_lexer level2_ast_token +let initial_level2_meta_lexer () = mk_lexer level2_meta_token + + +let level1_pattern_lexer_ref = ref (initial_level1_pattern_lexer ()) +let level2_ast_lexer_ref = ref (initial_level2_ast_lexer ()) +let level2_meta_lexer_ref = ref (initial_level2_meta_lexer ()) + +let level1_pattern_lexer () = !level1_pattern_lexer_ref +let level2_ast_lexer () = !level2_ast_lexer_ref +let level2_meta_lexer () = !level2_meta_lexer_ref let lookup_ligatures lexeme = try @@ -364,4 +380,29 @@ let lookup_ligatures lexeme = then [ Utf8Macro.expand (String.sub lexeme 1 (String.length lexeme - 1)) ] else List.rev (Hashtbl.find_all ligatures lexeme) with Invalid_argument _ | Utf8Macro.Macro_not_found _ -> [] +;; + +let history = ref [];; + +let push () = + history := + (!level2_ast_keywords,!level1_pattern_lexer_ref, + !level2_ast_lexer_ref,!level2_meta_lexer_ref) :: !history; + level2_ast_keywords := initial_level2_ast_keywords (); + initialize_keywords (); + level1_pattern_lexer_ref := initial_level1_pattern_lexer (); + level2_ast_lexer_ref := initial_level2_ast_lexer (); + level2_meta_lexer_ref := initial_level2_meta_lexer (); +;; + +let pop () = + match !history with + | [] -> assert false + | (kwd,pl,al,ml) :: tl -> + level2_ast_keywords := kwd; + level1_pattern_lexer_ref := pl; + level2_ast_lexer_ref := al; + level2_meta_lexer_ref := ml; + history := tl +;; diff --git a/helm/software/components/content_pres/cicNotationLexer.mli b/helm/software/components/content_pres/cicNotationLexer.mli index cd5f0876d..f04575f5f 100644 --- a/helm/software/components/content_pres/cicNotationLexer.mli +++ b/helm/software/components/content_pres/cicNotationLexer.mli @@ -32,9 +32,9 @@ exception Error of int * int * string * passing values of type char Stream.t, they should be in fact Ulexing.lexbuf * casted with Obj.magic :-/ Read the comment in the .ml for the rationale *) -val level1_pattern_lexer: (string * string) Token.glexer -val level2_ast_lexer: (string * string) Token.glexer -val level2_meta_lexer: (string * string) Token.glexer +val level1_pattern_lexer: unit -> (string * string) Token.glexer +val level2_ast_lexer: unit -> (string * string) Token.glexer +val level2_meta_lexer: unit -> (string * string) Token.glexer (** XXX ZACK DEFCON 4 END *) @@ -46,3 +46,5 @@ val remove_level2_ast_keyword: string -> unit (** non idempotent *) val is_ligature_char: char -> bool val lookup_ligatures: string -> string list +val push: unit -> unit +val pop: unit -> unit diff --git a/helm/software/components/content_pres/cicNotationParser.ml b/helm/software/components/content_pres/cicNotationParser.ml index bb6033981..e3e54f112 100644 --- 
a/helm/software/components/content_pres/cicNotationParser.ml +++ b/helm/software/components/content_pres/cicNotationParser.ml @@ -33,21 +33,45 @@ module Env = CicNotationEnv exception Parse_error of string exception Level_not_found of int -let level1_pattern_grammar = - Grammar.gcreate CicNotationLexer.level1_pattern_lexer -let level2_ast_grammar = Grammar.gcreate CicNotationLexer.level2_ast_lexer -let level2_meta_grammar = Grammar.gcreate CicNotationLexer.level2_meta_lexer - let min_precedence = 0 let max_precedence = 100 -let level1_pattern = - Grammar.Entry.create level1_pattern_grammar "level1_pattern" -let level2_ast = Grammar.Entry.create level2_ast_grammar "level2_ast" -let term = Grammar.Entry.create level2_ast_grammar "term" -let let_defs = Grammar.Entry.create level2_ast_grammar "let_defs" -let protected_binder_vars = Grammar.Entry.create level2_ast_grammar "protected_binder_vars" -let level2_meta = Grammar.Entry.create level2_meta_grammar "level2_meta" +type ('a,'b,'c,'d) grammars = { + level1_pattern: 'a Grammar.Entry.e; + level2_ast: 'b Grammar.Entry.e; + level2_ast_grammar : Grammar.g; + term: 'b Grammar.Entry.e; + let_defs: 'c Grammar.Entry.e; + protected_binder_vars: 'd Grammar.Entry.e; + level2_meta: 'b Grammar.Entry.e; +} + +let initial_grammars () = + let level1_pattern_grammar = + Grammar.gcreate (CicNotationLexer.level1_pattern_lexer ()) in + let level2_ast_grammar = + Grammar.gcreate (CicNotationLexer.level2_ast_lexer ()) in + let level2_meta_grammar = + Grammar.gcreate (CicNotationLexer.level2_meta_lexer ()) in + let level1_pattern = + Grammar.Entry.create level1_pattern_grammar "level1_pattern" in + let level2_ast = Grammar.Entry.create level2_ast_grammar "level2_ast" in + let term = Grammar.Entry.create level2_ast_grammar "term" in + let let_defs = Grammar.Entry.create level2_ast_grammar "let_defs" in + let protected_binder_vars = + Grammar.Entry.create level2_ast_grammar "protected_binder_vars" in + let level2_meta = Grammar.Entry.create level2_meta_grammar "level2_meta" in + { level1_pattern=level1_pattern; + level2_ast=level2_ast; + term=term; + let_defs=let_defs; + protected_binder_vars=protected_binder_vars; + level2_meta=level2_meta; + level2_ast_grammar=level2_ast_grammar; +} +;; + +let grammars = ref (initial_grammars ());; let int_of_string s = try @@ -70,7 +94,8 @@ let gram_term = function | Ast.Self _ -> Gramext.Sself | Ast.Level precedence -> Gramext.Snterml - (Grammar.Entry.obj (term: 'a Grammar.Entry.e), level_of precedence) + (Grammar.Entry.obj (!grammars.term: 'a Grammar.Entry.e), + level_of precedence) ;; let gram_of_literal = @@ -227,24 +252,8 @@ let compare_rule_id x y = (* mapping: rule_id -> owned keywords. 
(rule_id, string list) Hashtbl.t *) let initial_owned_keywords () = Hashtbl.create 23 - let owned_keywords = ref (initial_owned_keywords ()) -let history = ref [];; - -let push () = - history := !owned_keywords :: !history; - owned_keywords := (initial_owned_keywords ()) -;; - -let pop () = - match !history with - | [] -> assert false - | hd :: old_history -> - owned_keywords := hd; - history := old_history -;; - type checked_l1_pattern = CL1P of CicNotationPt.term * int let check_l1_pattern level1_pattern level associativity = @@ -326,7 +335,7 @@ let extend (CL1P (level1_pattern,precedence)) action = let level = level_of precedence in let _ = Grammar.extend - [ Grammar.Entry.obj (term: 'a Grammar.Entry.e), + [ Grammar.Entry.obj (!grammars.term: 'a Grammar.Entry.e), Some (Gramext.Level level), [ None, Some (*Gramext.NonA*) Gramext.NonA, @@ -348,7 +357,7 @@ let delete rule_id = let keywords = Hashtbl.find !owned_keywords rule_id in List.iter CicNotationLexer.remove_level2_ast_keyword keywords with Not_found -> assert false); - Grammar.delete_rule term atoms + Grammar.delete_rule !grammars.term atoms (** {2 Grammar} *) @@ -378,7 +387,7 @@ let return_term_of_level loc term l = Ast.AttributedTerm (`Loc loc, term l) (* create empty precedence level for "term" *) -let _ = +let initialize_grammars () = let dummy_action = Gramext.action (fun _ -> failwith "internal error, lexer generated a dummy token") @@ -398,11 +407,12 @@ let _ = aux [] last in Grammar.extend - [ Grammar.Entry.obj (term: 'a Grammar.Entry.e), + [ Grammar.Entry.obj (!grammars.term: 'a Grammar.Entry.e), None, - mk_level_list min_precedence max_precedence ] - + mk_level_list min_precedence max_precedence ]; (* {{{ Grammar for concrete syntax patterns, notation level 1 *) + begin + let level1_pattern = !grammars.level1_pattern in EXTEND GLOBAL: level1_pattern; @@ -501,9 +511,11 @@ EXTEND ] ]; END + end; (* }}} *) - (* {{{ Grammar for ast magics, notation level 2 *) + begin + let level2_meta = !grammars.level2_meta in EXTEND GLOBAL: level2_meta; l2_variable: [ @@ -537,9 +549,14 @@ EXTEND ] ]; END + end; (* }}} *) - (* {{{ Grammar for ast patterns, notation level 2 *) + begin + let level2_ast = !grammars.level2_ast in + let term = !grammars.term in + let let_defs = !grammars.let_defs in + let protected_binder_vars = !grammars.protected_binder_vars in EXTEND GLOBAL: level2_ast term let_defs protected_binder_vars; level2_ast: [ [ p = term -> p ] ]; @@ -732,7 +749,31 @@ EXTEND ] ]; END + end (* }}} *) +;; + +let _ = initialize_grammars ();; + +let history = ref [];; + +let push () = + CicNotationLexer.push (); + history := (!owned_keywords,!grammars) :: !history; + owned_keywords := (initial_owned_keywords ()); + grammars := initial_grammars (); + initialize_grammars () +;; + +let pop () = + CicNotationLexer.pop (); + match !history with + | [] -> assert false + | (kw,gram) :: old_history -> + owned_keywords := kw; + grammars := gram; + history := old_history +;; (** {2 API implementation} *) @@ -747,15 +788,15 @@ let exc_located_wrapper f = let parse_level1_pattern precedence lexbuf = exc_located_wrapper - (fun () -> Grammar.Entry.parse level1_pattern (Obj.magic lexbuf) precedence) + (fun () -> Grammar.Entry.parse !grammars.level1_pattern (Obj.magic lexbuf) precedence) let parse_level2_ast lexbuf = exc_located_wrapper - (fun () -> Grammar.Entry.parse level2_ast (Obj.magic lexbuf)) + (fun () -> Grammar.Entry.parse !grammars.level2_ast (Obj.magic lexbuf)) let parse_level2_meta lexbuf = exc_located_wrapper - (fun () -> Grammar.Entry.parse 
level2_meta (Obj.magic lexbuf)) + (fun () -> Grammar.Entry.parse !grammars.level2_meta (Obj.magic lexbuf)) let _ = parse_level1_pattern_ref := parse_level1_pattern; @@ -764,13 +805,19 @@ let _ = let parse_term lexbuf = exc_located_wrapper - (fun () -> (Grammar.Entry.parse term (Obj.magic lexbuf))) + (fun () -> (Grammar.Entry.parse !grammars.term (Obj.magic lexbuf))) + +let level2_ast_grammar () = !grammars.level2_ast_grammar +let term () = !grammars.term +let let_defs () = !grammars.let_defs +let protected_binder_vars () = !grammars.protected_binder_vars + (** {2 Debugging} *) let print_l2_pattern () = - Grammar.print_entry Format.std_formatter (Grammar.Entry.obj term); + Grammar.print_entry Format.std_formatter (Grammar.Entry.obj !grammars.term); Format.pp_print_flush Format.std_formatter (); - flush stdout + flush stdout (* vim:set encoding=utf8 foldmethod=marker: *) diff --git a/helm/software/components/content_pres/cicNotationParser.mli b/helm/software/components/content_pres/cicNotationParser.mli index 6c9b3f5ec..433711edd 100644 --- a/helm/software/components/content_pres/cicNotationParser.mli +++ b/helm/software/components/content_pres/cicNotationParser.mli @@ -57,15 +57,15 @@ val delete: rule_id -> unit (** {2 Grammar entries} * needed by grafite parser *) -val level2_ast_grammar: Grammar.g +val level2_ast_grammar: unit -> Grammar.g -val term : CicNotationPt.term Grammar.Entry.e +val term : unit -> CicNotationPt.term Grammar.Entry.e -val let_defs : +val let_defs : unit -> (CicNotationPt.term CicNotationPt.capture_variable list * CicNotationPt.term CicNotationPt.capture_variable * CicNotationPt.term * int) list Grammar.Entry.e -val protected_binder_vars : +val protected_binder_vars : unit -> (CicNotationPt.term list * CicNotationPt.term option) Grammar.Entry.e val parse_term: Ulexing.lexbuf -> CicNotationPt.term diff --git a/helm/software/components/content_pres/test_lexer.ml b/helm/software/components/content_pres/test_lexer.ml index e76756e0f..587d87bd8 100644 --- a/helm/software/components/content_pres/test_lexer.ml +++ b/helm/software/components/content_pres/test_lexer.ml @@ -37,9 +37,9 @@ let _ = Arg.parse arg_spec open_file usage; let lexer = match !level with - "1" -> CicNotationLexer.level1_pattern_lexer - | "2@" -> CicNotationLexer.level2_ast_lexer - | "2$" -> CicNotationLexer.level2_meta_lexer + "1" -> CicNotationLexer.level1_pattern_lexer () + | "2@" -> CicNotationLexer.level2_ast_lexer () + | "2$" -> CicNotationLexer.level2_meta_lexer () | l -> prerr_endline (Printf.sprintf "Unsupported level %s" l); exit 2 diff --git a/helm/software/components/grafite_parser/dependenciesParser.ml b/helm/software/components/grafite_parser/dependenciesParser.ml index b44ad4994..9e5936dad 100644 --- a/helm/software/components/grafite_parser/dependenciesParser.ml +++ b/helm/software/components/grafite_parser/dependenciesParser.ml @@ -40,7 +40,7 @@ let pp_dependency = function let parse_dependencies lexbuf = let tok_stream,_ = - CicNotationLexer.level2_ast_lexer.Token.tok_func (Obj.magic lexbuf) + (CicNotationLexer.level2_ast_lexer ()).Token.tok_func (Obj.magic lexbuf) in let rec parse acc = let continue, acc = diff --git a/helm/software/components/grafite_parser/grafiteParser.ml b/helm/software/components/grafite_parser/grafiteParser.ml index 0dd5c0885..976bea700 100644 --- a/helm/software/components/grafite_parser/grafiteParser.ml +++ b/helm/software/components/grafite_parser/grafiteParser.ml @@ -48,10 +48,20 @@ type statement = LexiconEngine.status -> LexiconEngine.status * ast_statement 
localized_option -let grammar = CicNotationParser.level2_ast_grammar +type parser_status = { + grammar : Grammar.g; + term : CicNotationPt.term Grammar.Entry.e; + statement : statement Grammar.Entry.e; +} -let term = CicNotationParser.term -let statement = Grammar.Entry.create grammar "statement" +let initial_parser () = + let grammar = CicNotationParser.level2_ast_grammar () in + let term = CicNotationParser.term () in + let statement = Grammar.Entry.create grammar "statement" in + { grammar = grammar; term = term; statement = statement } +;; + +let grafite_parser = ref (initial_parser ()) let add_raw_attribute ~text t = Ast.AttributedTerm (`Raw text, t) @@ -92,6 +102,12 @@ type by_continuation = | BYC_letsuchthat of string * CicNotationPt.term * string * CicNotationPt.term | BYC_wehaveand of string * CicNotationPt.term * string * CicNotationPt.term +let initialize_parser () = + (* {{{ parser initialization *) + let term = !grafite_parser.term in + let statement = !grafite_parser.statement in + let let_defs = CicNotationParser.let_defs () in + let protected_binder_vars = CicNotationParser.protected_binder_vars () in EXTEND GLOBAL: term statement; constructor: [ [ name = IDENT; SYMBOL ":"; typ = term -> (name, typ) ] ]; @@ -459,7 +475,8 @@ EXTEND ] ]; inductive_spec: [ [ - fst_name = IDENT; params = LIST0 CicNotationParser.protected_binder_vars; + fst_name = IDENT; + params = LIST0 protected_binder_vars; SYMBOL ":"; fst_typ = term; SYMBOL <:unicode>; OPT SYMBOL "|"; fst_constructors = LIST0 constructor SEP SYMBOL "|"; tl = OPT [ "with"; @@ -481,7 +498,8 @@ EXTEND ] ]; record_spec: [ [ - name = IDENT; params = LIST0 CicNotationParser.protected_binder_vars ; + name = IDENT; + params = LIST0 protected_binder_vars; SYMBOL ":"; typ = term; SYMBOL <:unicode>; SYMBOL "{" ; fields = LIST0 [ name = IDENT ; @@ -653,9 +671,9 @@ EXTEND Ast.Theorem (flavour, name, Ast.Implicit, Some body)) | IDENT "axiom"; name = IDENT; SYMBOL ":"; typ = term -> GrafiteAst.Obj (loc, Ast.Theorem (`Axiom, name, typ, None)) - | LETCOREC ; defs = CicNotationParser.let_defs -> + | LETCOREC ; defs = let_defs -> mk_rec_corec `CoInductive defs loc - | LETREC ; defs = CicNotationParser.let_defs -> + | LETREC ; defs = let_defs -> mk_rec_corec `Inductive defs loc | IDENT "inductive"; spec = inductive_spec -> let (params, ind_types) = spec in @@ -746,6 +764,10 @@ EXTEND ] ]; END +(* }}} *) +;; + +let _ = initialize_parser () ;; let exc_located_wrapper f = try @@ -763,5 +785,28 @@ let exc_located_wrapper f = let parse_statement lexbuf = exc_located_wrapper - (fun () -> (Grammar.Entry.parse statement (Obj.magic lexbuf))) + (fun () -> (Grammar.Entry.parse !grafite_parser.statement (Obj.magic lexbuf))) + +let statement () = !grafite_parser.statement + +let history = ref [] ;; + +let push () = + LexiconSync.push (); + history := !grafite_parser :: !history; + grafite_parser := initial_parser (); + initialize_parser () +;; + +let pop () = + LexiconSync.pop (); + match !history with + | [] -> assert false + | gp :: tail -> + grafite_parser := gp; + history := tail +;; + +(* vim:set foldmethod=marker: *) + diff --git a/helm/software/components/grafite_parser/grafiteParser.mli b/helm/software/components/grafite_parser/grafiteParser.mli index 47f0af02b..b0dc6e8fd 100644 --- a/helm/software/components/grafite_parser/grafiteParser.mli +++ b/helm/software/components/grafite_parser/grafiteParser.mli @@ -44,7 +44,10 @@ type statement = val parse_statement: Ulexing.lexbuf -> statement (** @raise End_of_file *) -val statement: statement 
Grammar.Entry.e +val statement: unit -> statement Grammar.Entry.e (* this callback is called on every include command *) val set_callback: (string -> unit) -> unit + +val push : unit -> unit +val pop : unit -> unit diff --git a/helm/software/components/grafite_parser/print_grammar.ml b/helm/software/components/grafite_parser/print_grammar.ml index d2eb817ad..bb2a68b32 100644 --- a/helm/software/components/grafite_parser/print_grammar.ml +++ b/helm/software/components/grafite_parser/print_grammar.ml @@ -263,7 +263,7 @@ let rec visit_entries fmt todo pped = ;; let ebnf_of_term () = - let g_entry = Grammar.Entry.obj CicNotationParser.term in + let g_entry = Grammar.Entry.obj (CicNotationParser.term ()) in let buff = Buffer.create 100 in let fmt = Format.formatter_of_buffer buff in visit_entries fmt [g_entry] []; diff --git a/helm/software/components/lexicon/cicNotation.ml b/helm/software/components/lexicon/cicNotation.ml index 32cb3f638..2b963e94e 100644 --- a/helm/software/components/lexicon/cicNotation.ml +++ b/helm/software/components/lexicon/cicNotation.ml @@ -39,8 +39,11 @@ let compare_notation_id x y = | _, RuleId _ -> 1 | x,y -> Pervasives.compare x y -let parser_ref_counter = RefCounter.create () -let rule_ids_to_items = Hashtbl.create 113 +let initial_parser_ref_counter () = RefCounter.create () +let initial_rule_ids_to_items ()= Hashtbl.create 113 + +let parser_ref_counter = ref (initial_parser_ref_counter ());; +let rule_ids_to_items = ref (initial_rule_ids_to_items ());; let process_notation st = match st with @@ -59,9 +62,9 @@ let process_notation st = CicNotationPt.AttributedTerm (`Loc loc,TermContentPres.instantiate_level2 env l2)) in rule_id := [ RuleId id ]; - Hashtbl.add rule_ids_to_items id item + Hashtbl.add !rule_ids_to_items id item in - RefCounter.incr ~create_cb parser_ref_counter item + RefCounter.incr ~create_cb !parser_ref_counter item in let pp_id = if dir <> Some `LeftToRight then @@ -81,10 +84,10 @@ let remove_notation = function | RuleId id -> let item = try - Hashtbl.find rule_ids_to_items id + Hashtbl.find !rule_ids_to_items id with Not_found -> assert false in RefCounter.decr ~delete_cb:(fun _ -> CicNotationParser.delete id) - parser_ref_counter item + !parser_ref_counter item | PrettyPrinterId id -> TermContentPres.remove_pretty_printer id | InterpretationId id -> TermAcicContent.remove_interpretation id @@ -106,7 +109,12 @@ let set_active_notations ids = in TermAcicContent.set_active_interpretations interp_ids +let history = ref [];; + let push () = + history := (!parser_ref_counter,!rule_ids_to_items) :: !history; + parser_ref_counter := initial_parser_ref_counter (); + rule_ids_to_items := initial_rule_ids_to_items (); TermContentPres.push (); TermAcicContent.push (); CicNotationParser.push () @@ -115,5 +123,11 @@ let push () = let pop () = TermContentPres.pop (); TermAcicContent.pop (); - CicNotationParser.pop () + CicNotationParser.pop (); + match !history with + | [] -> assert false + | (prc,riti) :: tail -> + parser_ref_counter := prc; + rule_ids_to_items := riti; + history := tail; ;; diff --git a/helm/software/matita/matitaScript.ml b/helm/software/matita/matitaScript.ml index 0234c4824..cbaa08721 100644 --- a/helm/software/matita/matitaScript.ml +++ b/helm/software/matita/matitaScript.ml @@ -118,19 +118,7 @@ let wrap_with_make include_paths (f : GrafiteParser.statement) x = HLog.error "please create it."; raise (Failure ("No root file for "^mafilename)) in - let b = - try - GrafiteSync.push (); - LexiconSync.push (); - let rc = MatitacLib.Make.make 
root [tgt] in - LexiconSync.pop (); - GrafiteSync.pop (); - rc - with - | exn -> - HLog.error ("Unexpected " ^ snd(MatitaExcPp.to_string exn)); - assert false - in + let b = MatitacLib.Make.make root [tgt] in if b then try f ~include_paths x with LexiconEngine.IncludedFileNotCompiled _ -> raise diff --git a/helm/software/matita/matitacLib.ml b/helm/software/matita/matitacLib.ml index 9c331d2ee..70ed5766d 100644 --- a/helm/software/matita/matitacLib.ml +++ b/helm/software/matita/matitacLib.ml @@ -198,7 +198,6 @@ let compile options fname = CicNotation2.load_notation ~include_paths:[] BuildTimeConf.core_notation_script in - let initial_lexicon_status = lexicon_status in let big_bang = Unix.gettimeofday () in let { Unix.tms_utime = big_bang_u ; Unix.tms_stime = big_bang_s} = Unix.times () @@ -281,8 +280,10 @@ let compile options fname = (HLog.error "there are still incomplete proofs at the end of the script"; pp_times fname false big_bang big_bang_u big_bang_s; +(* LexiconSync.time_travel ~present:lexicon_status ~past:initial_lexicon_status; +*) clean_exit baseuri false) else (if not (Helm_registry.get_bool "matita.moo" && @@ -302,15 +303,19 @@ let compile options fname = HLog.message (sprintf "execution of %s completed in %s." fname (hou^min^sec)); pp_times fname true big_bang big_bang_u big_bang_s; +(* LexiconSync.time_travel ~present:lexicon_status ~past:initial_lexicon_status; +*) true) with (* all exceptions should be wrapped to allow lexicon-undo (LS.time_travel) *) | AttemptToInsertAnAlias lexicon_status -> pp_times fname false big_bang big_bang_u big_bang_s; +(* LexiconSync.time_travel ~present:lexicon_status ~past:initial_lexicon_status; +*) clean_exit baseuri false | MatitaEngine.EnrichedWithLexiconStatus (exn, lex_stat) as exn' -> (match exn with @@ -320,7 +325,8 @@ let compile options fname = HLog.error (sprintf "Parse error at %d-%d: %s" x y err) | exn when matita_debug -> raise exn' | exn -> HLog.error (snd (MatitaExcPp.to_string exn))); - LexiconSync.time_travel ~present:lex_stat ~past:initial_lexicon_status; +(* LexiconSync.time_travel ~present:lex_stat ~past:initial_lexicon_status; + * *) pp_times fname false big_bang big_bang_u big_bang_s; clean_exit baseuri false | Sys.Break when not matita_debug -> @@ -343,6 +349,7 @@ module F = let is_readonly_buri_of opts file = let buri = List.assoc "baseuri" opts in Http_getter_storage.is_read_only (Librarian.mk_baseuri buri file) + ;; let root_and_target_of opts mafile = try @@ -350,38 +357,59 @@ module F = let root,baseuri,_,_ = Librarian.baseuri_of_script ~include_paths mafile in - let obj = - if Filename.check_suffix mafile ".mma" then - Filename.chop_suffix mafile ".mma" ^ ".ma" - else - LibraryMisc.obj_file_of_baseuri - ~must_exist:false ~baseuri ~writable:true - in + let obj = + if Filename.check_suffix mafile ".mma" then + Filename.chop_suffix mafile ".mma" ^ ".ma" + else + LibraryMisc.obj_file_of_baseuri + ~must_exist:false ~baseuri ~writable:true + in Some root, obj with Librarian.NoRootFor x -> None, "" + ;; let mtime_of_source_object s = try Some (Unix.stat s).Unix.st_mtime with Unix.Unix_error (Unix.ENOENT, "stat", f) when f = s -> None + ;; let mtime_of_target_object s = try Some (Unix.stat s).Unix.st_mtime with Unix.Unix_error (Unix.ENOENT, "stat", f) when f = s -> None + ;; let build options fname = - if Filename.check_suffix fname ".mma" then - let generated = Filename.chop_suffix fname ".mma" ^ ".ma" in - let atexit = dump generated in - let res = compile options fname in - let r = atexit res in - if r then r else 
begin - Sys.remove generated; - Printf.printf "rm %s\n" generated; flush stdout; r - end - else - compile options fname + let compile opts fname = + try + GrafiteSync.push (); + GrafiteParser.push (); + let rc = compile opts fname in + GrafiteParser.pop (); + GrafiteSync.pop (); + rc + with + | Sys.Break -> + GrafiteParser.pop (); + GrafiteSync.pop (); + false + | exn -> + HLog.error ("Unexpected " ^ snd(MatitaExcPp.to_string exn)); + assert false + in + if Filename.check_suffix fname ".mma" then + let generated = Filename.chop_suffix fname ".mma" ^ ".ma" in + let atexit = dump generated in + let res = compile options fname in + let r = atexit res in + if r then r else begin + Sys.remove generated; + Printf.printf "rm %s\n" generated; flush stdout; r + end + else + compile options fname + ;; - let load_deps_file = Librarian.load_deps_file + let load_deps_file = Librarian.load_deps_file;; end
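
For reference, a minimal, self-contained sketch of the save/restore pattern this
patch applies to CicNotationLexer, CicNotationParser, GrafiteParser and
CicNotation: the mutable state is held in ref cells, push saves the current value
on a history stack and installs a fresh one, pop restores the most recent
snapshot. The names below (keywords, initial_keywords, ...) are illustrative
placeholders, not the actual Matita identifiers; the file can be run standalone
with "ocaml sketch.ml".

(* sketch.ml *)

let initial_keywords () : (string, unit) Hashtbl.t = Hashtbl.create 23

(* the current state: a ref cell, like level2_ast_keywords, grammars and
   grafite_parser in the patch *)
let keywords = ref (initial_keywords ())

let history : (string, unit) Hashtbl.t list ref = ref []

let push () =
  history := !keywords :: !history;    (* save the current state ...       *)
  keywords := initial_keywords ()      (* ... and install a fresh one      *)

let pop () =
  match !history with
  | [] -> assert false                 (* pop without a matching push      *)
  | k :: tl -> keywords := k; history := tl

let () =
  Hashtbl.add !keywords "let" ();
  push ();                             (* e.g. before an included file     *)
  assert (not (Hashtbl.mem !keywords "let"));
  Hashtbl.add !keywords "match" ();
  pop ();                              (* back to the outer file's state   *)
  assert (Hashtbl.mem !keywords "let");
  assert (not (Hashtbl.mem !keywords "match"));
  print_endline "push/pop sketch ok"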