] @ level1_layouts
let level2_meta_keywords =
- [ "if"; "then"; "else";
+ [ "if"; "then"; "else";
"fold"; "left"; "right"; "rec";
"fail";
"default";
]
(* (string, unit) Hashtbl.t, to exploit multiple bindings *)
-let level2_ast_keywords = Hashtbl.create 23
-let _ =
- List.iter (fun k -> Hashtbl.add level2_ast_keywords k ())
+let initial_level2_ast_keywords () = Hashtbl.create 23;;
+
+let level2_ast_keywords = ref (initial_level2_ast_keywords ())
+
+let initialize_keywords () =
+ List.iter (fun k -> Hashtbl.add !level2_ast_keywords k ())
[ "CProp"; "Prop"; "Type"; "Set"; "let"; "match";
"with"; "in"; "and"; "to"; "as"; "on"; "return"; "done" ]
+;;
+
+let _ = initialize_keywords ();;
-let add_level2_ast_keyword k = Hashtbl.add level2_ast_keywords k ()
-let remove_level2_ast_keyword k = Hashtbl.remove level2_ast_keywords k
+let add_level2_ast_keyword k = Hashtbl.add !level2_ast_keywords k ()
+let remove_level2_ast_keyword k = Hashtbl.remove !level2_ast_keywords k
(* (string, int) Hashtbl.t, with multiple bindings.
* int is the unicode codepoint *)
let ligatures = Hashtbl.create 23
+
let _ =
List.iter
(fun (ligature, symbol) -> Hashtbl.add ligatures ligature symbol)
| placeholder -> return lexbuf ("PLACEHOLDER", "")
| ident ->
let lexeme = Ulexing.utf8_lexeme lexbuf in
- if Hashtbl.mem level2_ast_keywords lexeme then
+ if Hashtbl.mem !level2_ast_keywords lexeme then
return lexbuf ("", lexeme)
else
return lexbuf ("IDENT", lexeme)
(* API implementation *)
-let level1_pattern_lexer = mk_lexer level1_pattern_token
-let level2_ast_lexer = mk_lexer level2_ast_token
-let level2_meta_lexer = mk_lexer level2_meta_token
+let initial_level1_pattern_lexer () = mk_lexer level1_pattern_token
+let initial_level2_ast_lexer () = mk_lexer level2_ast_token
+let initial_level2_meta_lexer () = mk_lexer level2_meta_token
+
+
+let level1_pattern_lexer_ref = ref (initial_level1_pattern_lexer ())
+let level2_ast_lexer_ref = ref (initial_level2_ast_lexer ())
+let level2_meta_lexer_ref = ref (initial_level2_meta_lexer ())
+
+let level1_pattern_lexer () = !level1_pattern_lexer_ref
+let level2_ast_lexer () = !level2_ast_lexer_ref
+let level2_meta_lexer () = !level2_meta_lexer_ref
let lookup_ligatures lexeme =
try
then [ Utf8Macro.expand (String.sub lexeme 1 (String.length lexeme - 1)) ]
else List.rev (Hashtbl.find_all ligatures lexeme)
with Invalid_argument _ | Utf8Macro.Macro_not_found _ -> []
+;;
+
+let history = ref [];;
+
+let push () =
+ history :=
+ (!level2_ast_keywords,!level1_pattern_lexer_ref,
+ !level2_ast_lexer_ref,!level2_meta_lexer_ref) :: !history;
+ level2_ast_keywords := initial_level2_ast_keywords ();
+ initialize_keywords ();
+ level1_pattern_lexer_ref := initial_level1_pattern_lexer ();
+ level2_ast_lexer_ref := initial_level2_ast_lexer ();
+ level2_meta_lexer_ref := initial_level2_meta_lexer ();
+;;
+
+let pop () =
+ match !history with
+ | [] -> assert false
+ | (kwd,pl,al,ml) :: tl ->
+ level2_ast_keywords := kwd;
+ level1_pattern_lexer_ref := pl;
+ level2_ast_lexer_ref := al;
+ level2_meta_lexer_ref := ml;
+ history := tl
+;;
* passing values of type char Stream.t, they should be in fact Ulexing.lexbuf
* casted with Obj.magic :-/ Read the comment in the .ml for the rationale *)
-val level1_pattern_lexer: (string * string) Token.glexer
-val level2_ast_lexer: (string * string) Token.glexer
-val level2_meta_lexer: (string * string) Token.glexer
+val level1_pattern_lexer: unit -> (string * string) Token.glexer
+val level2_ast_lexer: unit -> (string * string) Token.glexer
+val level2_meta_lexer: unit -> (string * string) Token.glexer
(** XXX ZACK DEFCON 4 END *)
val is_ligature_char: char -> bool
val lookup_ligatures: string -> string list
+val push: unit -> unit
+val pop: unit -> unit
exception Parse_error of string
exception Level_not_found of int
-let level1_pattern_grammar =
- Grammar.gcreate CicNotationLexer.level1_pattern_lexer
-let level2_ast_grammar = Grammar.gcreate CicNotationLexer.level2_ast_lexer
-let level2_meta_grammar = Grammar.gcreate CicNotationLexer.level2_meta_lexer
-
let min_precedence = 0
let max_precedence = 100
-let level1_pattern =
- Grammar.Entry.create level1_pattern_grammar "level1_pattern"
-let level2_ast = Grammar.Entry.create level2_ast_grammar "level2_ast"
-let term = Grammar.Entry.create level2_ast_grammar "term"
-let let_defs = Grammar.Entry.create level2_ast_grammar "let_defs"
-let protected_binder_vars = Grammar.Entry.create level2_ast_grammar "protected_binder_vars"
-let level2_meta = Grammar.Entry.create level2_meta_grammar "level2_meta"
+type ('a,'b,'c,'d) grammars = {
+ level1_pattern: 'a Grammar.Entry.e;
+ level2_ast: 'b Grammar.Entry.e;
+ level2_ast_grammar : Grammar.g;
+ term: 'b Grammar.Entry.e;
+ let_defs: 'c Grammar.Entry.e;
+ protected_binder_vars: 'd Grammar.Entry.e;
+ level2_meta: 'b Grammar.Entry.e;
+}
+
+let initial_grammars () =
+ let level1_pattern_grammar =
+ Grammar.gcreate (CicNotationLexer.level1_pattern_lexer ()) in
+ let level2_ast_grammar =
+ Grammar.gcreate (CicNotationLexer.level2_ast_lexer ()) in
+ let level2_meta_grammar =
+ Grammar.gcreate (CicNotationLexer.level2_meta_lexer ()) in
+ let level1_pattern =
+ Grammar.Entry.create level1_pattern_grammar "level1_pattern" in
+ let level2_ast = Grammar.Entry.create level2_ast_grammar "level2_ast" in
+ let term = Grammar.Entry.create level2_ast_grammar "term" in
+ let let_defs = Grammar.Entry.create level2_ast_grammar "let_defs" in
+ let protected_binder_vars =
+ Grammar.Entry.create level2_ast_grammar "protected_binder_vars" in
+ let level2_meta = Grammar.Entry.create level2_meta_grammar "level2_meta" in
+ { level1_pattern=level1_pattern;
+ level2_ast=level2_ast;
+ term=term;
+ let_defs=let_defs;
+ protected_binder_vars=protected_binder_vars;
+ level2_meta=level2_meta;
+ level2_ast_grammar=level2_ast_grammar;
+}
+;;
+
+let grammars = ref (initial_grammars ());;
let int_of_string s =
try
| Ast.Self _ -> Gramext.Sself
| Ast.Level precedence ->
Gramext.Snterml
- (Grammar.Entry.obj (term: 'a Grammar.Entry.e), level_of precedence)
+ (Grammar.Entry.obj (!grammars.term: 'a Grammar.Entry.e),
+ level_of precedence)
;;
let gram_of_literal =
(* mapping: rule_id -> owned keywords. (rule_id, string list) Hashtbl.t *)
let initial_owned_keywords () = Hashtbl.create 23
-
let owned_keywords = ref (initial_owned_keywords ())
-let history = ref [];;
-
-let push () =
- history := !owned_keywords :: !history;
- owned_keywords := (initial_owned_keywords ())
-;;
-
-let pop () =
- match !history with
- | [] -> assert false
- | hd :: old_history ->
- owned_keywords := hd;
- history := old_history
-;;
-
type checked_l1_pattern = CL1P of CicNotationPt.term * int
let check_l1_pattern level1_pattern level associativity =
let level = level_of precedence in
let _ =
Grammar.extend
- [ Grammar.Entry.obj (term: 'a Grammar.Entry.e),
+ [ Grammar.Entry.obj (!grammars.term: 'a Grammar.Entry.e),
Some (Gramext.Level level),
[ None,
Some (*Gramext.NonA*) Gramext.NonA,
let keywords = Hashtbl.find !owned_keywords rule_id in
List.iter CicNotationLexer.remove_level2_ast_keyword keywords
with Not_found -> assert false);
- Grammar.delete_rule term atoms
+ Grammar.delete_rule !grammars.term atoms
(** {2 Grammar} *)
Ast.AttributedTerm (`Loc loc, term l)
(* create empty precedence level for "term" *)
-let _ =
+let initialize_grammars () =
let dummy_action =
Gramext.action (fun _ ->
failwith "internal error, lexer generated a dummy token")
aux [] last
in
Grammar.extend
- [ Grammar.Entry.obj (term: 'a Grammar.Entry.e),
+ [ Grammar.Entry.obj (!grammars.term: 'a Grammar.Entry.e),
None,
- mk_level_list min_precedence max_precedence ]
-
+ mk_level_list min_precedence max_precedence ];
(* {{{ Grammar for concrete syntax patterns, notation level 1 *)
+ begin
+ let level1_pattern = !grammars.level1_pattern in
EXTEND
GLOBAL: level1_pattern;
]
];
END
+ end;
(* }}} *)
-
(* {{{ Grammar for ast magics, notation level 2 *)
+ begin
+ let level2_meta = !grammars.level2_meta in
EXTEND
GLOBAL: level2_meta;
l2_variable: [
]
];
END
+ end;
(* }}} *)
-
(* {{{ Grammar for ast patterns, notation level 2 *)
+ begin
+ let level2_ast = !grammars.level2_ast in
+ let term = !grammars.term in
+ let let_defs = !grammars.let_defs in
+ let protected_binder_vars = !grammars.protected_binder_vars in
EXTEND
GLOBAL: level2_ast term let_defs protected_binder_vars;
level2_ast: [ [ p = term -> p ] ];
]
];
END
+ end
(* }}} *)
+;;
+
+let _ = initialize_grammars ();;
+
+let history = ref [];;
+
+let push () =
+ CicNotationLexer.push ();
+ history := (!owned_keywords,!grammars) :: !history;
+ owned_keywords := (initial_owned_keywords ());
+ grammars := initial_grammars ();
+ initialize_grammars ()
+;;
+
+let pop () =
+ CicNotationLexer.pop ();
+ match !history with
+ | [] -> assert false
+ | (kw,gram) :: old_history ->
+ owned_keywords := kw;
+ grammars := gram;
+ history := old_history
+;;
(** {2 API implementation} *)
let parse_level1_pattern precedence lexbuf =
exc_located_wrapper
- (fun () -> Grammar.Entry.parse level1_pattern (Obj.magic lexbuf) precedence)
+ (fun () -> Grammar.Entry.parse !grammars.level1_pattern (Obj.magic lexbuf) precedence)
let parse_level2_ast lexbuf =
exc_located_wrapper
- (fun () -> Grammar.Entry.parse level2_ast (Obj.magic lexbuf))
+ (fun () -> Grammar.Entry.parse !grammars.level2_ast (Obj.magic lexbuf))
let parse_level2_meta lexbuf =
exc_located_wrapper
- (fun () -> Grammar.Entry.parse level2_meta (Obj.magic lexbuf))
+ (fun () -> Grammar.Entry.parse !grammars.level2_meta (Obj.magic lexbuf))
let _ =
parse_level1_pattern_ref := parse_level1_pattern;
let parse_term lexbuf =
exc_located_wrapper
- (fun () -> (Grammar.Entry.parse term (Obj.magic lexbuf)))
+ (fun () -> (Grammar.Entry.parse !grammars.term (Obj.magic lexbuf)))
+
+let level2_ast_grammar () = !grammars.level2_ast_grammar
+let term () = !grammars.term
+let let_defs () = !grammars.let_defs
+let protected_binder_vars () = !grammars.protected_binder_vars
+
(** {2 Debugging} *)
let print_l2_pattern () =
- Grammar.print_entry Format.std_formatter (Grammar.Entry.obj term);
+ Grammar.print_entry Format.std_formatter (Grammar.Entry.obj !grammars.term);
Format.pp_print_flush Format.std_formatter ();
- flush stdout
+ flush stdout
(* vim:set encoding=utf8 foldmethod=marker: *)
(** {2 Grammar entries}
* needed by grafite parser *)
-val level2_ast_grammar: Grammar.g
+val level2_ast_grammar: unit -> Grammar.g
-val term : CicNotationPt.term Grammar.Entry.e
+val term : unit -> CicNotationPt.term Grammar.Entry.e
-val let_defs :
+val let_defs : unit ->
(CicNotationPt.term CicNotationPt.capture_variable list * CicNotationPt.term CicNotationPt.capture_variable * CicNotationPt.term * int) list
Grammar.Entry.e
-val protected_binder_vars :
+val protected_binder_vars : unit ->
(CicNotationPt.term list * CicNotationPt.term option) Grammar.Entry.e
val parse_term: Ulexing.lexbuf -> CicNotationPt.term
Arg.parse arg_spec open_file usage;
let lexer =
match !level with
- "1" -> CicNotationLexer.level1_pattern_lexer
- | "2@" -> CicNotationLexer.level2_ast_lexer
- | "2$" -> CicNotationLexer.level2_meta_lexer
+ "1" -> CicNotationLexer.level1_pattern_lexer ()
+ | "2@" -> CicNotationLexer.level2_ast_lexer ()
+ | "2$" -> CicNotationLexer.level2_meta_lexer ()
| l ->
prerr_endline (Printf.sprintf "Unsupported level %s" l);
exit 2
let parse_dependencies lexbuf =
let tok_stream,_ =
- CicNotationLexer.level2_ast_lexer.Token.tok_func (Obj.magic lexbuf)
+ (CicNotationLexer.level2_ast_lexer ()).Token.tok_func (Obj.magic lexbuf)
in
let rec parse acc =
let continue, acc =
LexiconEngine.status ->
LexiconEngine.status * ast_statement localized_option
-let grammar = CicNotationParser.level2_ast_grammar
+type parser_status = {
+ grammar : Grammar.g;
+ term : CicNotationPt.term Grammar.Entry.e;
+ statement : statement Grammar.Entry.e;
+}
-let term = CicNotationParser.term
-let statement = Grammar.Entry.create grammar "statement"
+let initial_parser () =
+ let grammar = CicNotationParser.level2_ast_grammar () in
+ let term = CicNotationParser.term () in
+ let statement = Grammar.Entry.create grammar "statement" in
+ { grammar = grammar; term = term; statement = statement }
+;;
+
+let grafite_parser = ref (initial_parser ())
let add_raw_attribute ~text t = Ast.AttributedTerm (`Raw text, t)
| BYC_letsuchthat of string * CicNotationPt.term * string * CicNotationPt.term
| BYC_wehaveand of string * CicNotationPt.term * string * CicNotationPt.term
+let initialize_parser () =
+ (* {{{ parser initialization *)
+ let term = !grafite_parser.term in
+ let statement = !grafite_parser.statement in
+ let let_defs = CicNotationParser.let_defs () in
+ let protected_binder_vars = CicNotationParser.protected_binder_vars () in
EXTEND
GLOBAL: term statement;
constructor: [ [ name = IDENT; SYMBOL ":"; typ = term -> (name, typ) ] ];
]
];
inductive_spec: [ [
- fst_name = IDENT; params = LIST0 CicNotationParser.protected_binder_vars;
+ fst_name = IDENT;
+ params = LIST0 protected_binder_vars;
SYMBOL ":"; fst_typ = term; SYMBOL <:unicode<def>>; OPT SYMBOL "|";
fst_constructors = LIST0 constructor SEP SYMBOL "|";
tl = OPT [ "with";
] ];
record_spec: [ [
- name = IDENT; params = LIST0 CicNotationParser.protected_binder_vars ;
+ name = IDENT;
+ params = LIST0 protected_binder_vars;
SYMBOL ":"; typ = term; SYMBOL <:unicode<def>>; SYMBOL "{" ;
fields = LIST0 [
name = IDENT ;
Ast.Theorem (flavour, name, Ast.Implicit, Some body))
| IDENT "axiom"; name = IDENT; SYMBOL ":"; typ = term ->
GrafiteAst.Obj (loc, Ast.Theorem (`Axiom, name, typ, None))
- | LETCOREC ; defs = CicNotationParser.let_defs ->
+ | LETCOREC ; defs = let_defs ->
mk_rec_corec `CoInductive defs loc
- | LETREC ; defs = CicNotationParser.let_defs ->
+ | LETREC ; defs = let_defs ->
mk_rec_corec `Inductive defs loc
| IDENT "inductive"; spec = inductive_spec ->
let (params, ind_types) = spec in
]
];
END
+(* }}} *)
+;;
+
+let _ = initialize_parser () ;;
let exc_located_wrapper f =
try
let parse_statement lexbuf =
exc_located_wrapper
- (fun () -> (Grammar.Entry.parse statement (Obj.magic lexbuf)))
+ (fun () -> (Grammar.Entry.parse !grafite_parser.statement (Obj.magic lexbuf)))
+
+let statement () = !grafite_parser.statement
+
+let history = ref [] ;;
+
+let push () =
+ LexiconSync.push ();
+ history := !grafite_parser :: !history;
+ grafite_parser := initial_parser ();
+ initialize_parser ()
+;;
+
+let pop () =
+ LexiconSync.pop ();
+ match !history with
+ | [] -> assert false
+ | gp :: tail ->
+ grafite_parser := gp;
+ history := tail
+;;
+
+(* vim:set foldmethod=marker: *)
+
val parse_statement: Ulexing.lexbuf -> statement (** @raise End_of_file *)
-val statement: statement Grammar.Entry.e
+val statement: unit -> statement Grammar.Entry.e
(* this callback is called on every include command *)
val set_callback: (string -> unit) -> unit
+
+val push : unit -> unit
+val pop : unit -> unit
;;
let ebnf_of_term () =
- let g_entry = Grammar.Entry.obj CicNotationParser.term in
+ let g_entry = Grammar.Entry.obj (CicNotationParser.term ()) in
let buff = Buffer.create 100 in
let fmt = Format.formatter_of_buffer buff in
visit_entries fmt [g_entry] [];
| _, RuleId _ -> 1
| x,y -> Pervasives.compare x y
-let parser_ref_counter = RefCounter.create ()
-let rule_ids_to_items = Hashtbl.create 113
+let initial_parser_ref_counter () = RefCounter.create ()
+let initial_rule_ids_to_items ()= Hashtbl.create 113
+
+let parser_ref_counter = ref (initial_parser_ref_counter ());;
+let rule_ids_to_items = ref (initial_rule_ids_to_items ());;
let process_notation st =
match st with
CicNotationPt.AttributedTerm
(`Loc loc,TermContentPres.instantiate_level2 env l2)) in
rule_id := [ RuleId id ];
- Hashtbl.add rule_ids_to_items id item
+ Hashtbl.add !rule_ids_to_items id item
in
- RefCounter.incr ~create_cb parser_ref_counter item
+ RefCounter.incr ~create_cb !parser_ref_counter item
in
let pp_id =
if dir <> Some `LeftToRight then
| RuleId id ->
let item =
try
- Hashtbl.find rule_ids_to_items id
+ Hashtbl.find !rule_ids_to_items id
with Not_found -> assert false in
RefCounter.decr ~delete_cb:(fun _ -> CicNotationParser.delete id)
- parser_ref_counter item
+ !parser_ref_counter item
| PrettyPrinterId id -> TermContentPres.remove_pretty_printer id
| InterpretationId id -> TermAcicContent.remove_interpretation id
in
TermAcicContent.set_active_interpretations interp_ids
+let history = ref [];;
+
let push () =
+ history := (!parser_ref_counter,!rule_ids_to_items) :: !history;
+ parser_ref_counter := initial_parser_ref_counter ();
+ rule_ids_to_items := initial_rule_ids_to_items ();
TermContentPres.push ();
TermAcicContent.push ();
CicNotationParser.push ()
let pop () =
TermContentPres.pop ();
TermAcicContent.pop ();
- CicNotationParser.pop ()
+ CicNotationParser.pop ();
+ match !history with
+ | [] -> assert false
+ | (prc,riti) :: tail ->
+ parser_ref_counter := prc;
+ rule_ids_to_items := riti;
+ history := tail;
;;
HLog.error "please create it.";
raise (Failure ("No root file for "^mafilename))
in
- let b =
- try
- GrafiteSync.push ();
- LexiconSync.push ();
- let rc = MatitacLib.Make.make root [tgt] in
- LexiconSync.pop ();
- GrafiteSync.pop ();
- rc
- with
- | exn ->
- HLog.error ("Unexpected " ^ snd(MatitaExcPp.to_string exn));
- assert false
- in
+ let b = MatitacLib.Make.make root [tgt] in
if b then
try f ~include_paths x with LexiconEngine.IncludedFileNotCompiled _ ->
raise
CicNotation2.load_notation ~include_paths:[]
BuildTimeConf.core_notation_script
in
- let initial_lexicon_status = lexicon_status in
let big_bang = Unix.gettimeofday () in
let { Unix.tms_utime = big_bang_u ; Unix.tms_stime = big_bang_s} =
Unix.times ()
(HLog.error
"there are still incomplete proofs at the end of the script";
pp_times fname false big_bang big_bang_u big_bang_s;
+(*
LexiconSync.time_travel
~present:lexicon_status ~past:initial_lexicon_status;
+*)
clean_exit baseuri false)
else
(if not (Helm_registry.get_bool "matita.moo" &&
HLog.message
(sprintf "execution of %s completed in %s." fname (hou^min^sec));
pp_times fname true big_bang big_bang_u big_bang_s;
+(*
LexiconSync.time_travel
~present:lexicon_status ~past:initial_lexicon_status;
+*)
true)
with
(* all exceptions should be wrapped to allow lexicon-undo (LS.time_travel) *)
| AttemptToInsertAnAlias lexicon_status ->
pp_times fname false big_bang big_bang_u big_bang_s;
+(*
LexiconSync.time_travel
~present:lexicon_status ~past:initial_lexicon_status;
+*)
clean_exit baseuri false
| MatitaEngine.EnrichedWithLexiconStatus (exn, lex_stat) as exn' ->
(match exn with
HLog.error (sprintf "Parse error at %d-%d: %s" x y err)
| exn when matita_debug -> raise exn'
| exn -> HLog.error (snd (MatitaExcPp.to_string exn)));
- LexiconSync.time_travel ~present:lex_stat ~past:initial_lexicon_status;
+(* LexiconSync.time_travel ~present:lex_stat ~past:initial_lexicon_status;
+ * *)
pp_times fname false big_bang big_bang_u big_bang_s;
clean_exit baseuri false
| Sys.Break when not matita_debug ->
let is_readonly_buri_of opts file =
let buri = List.assoc "baseuri" opts in
Http_getter_storage.is_read_only (Librarian.mk_baseuri buri file)
+ ;;
let root_and_target_of opts mafile =
try
let root,baseuri,_,_ =
Librarian.baseuri_of_script ~include_paths mafile
in
- let obj =
- if Filename.check_suffix mafile ".mma" then
- Filename.chop_suffix mafile ".mma" ^ ".ma"
- else
- LibraryMisc.obj_file_of_baseuri
- ~must_exist:false ~baseuri ~writable:true
- in
+ let obj =
+ if Filename.check_suffix mafile ".mma" then
+ Filename.chop_suffix mafile ".mma" ^ ".ma"
+ else
+ LibraryMisc.obj_file_of_baseuri
+ ~must_exist:false ~baseuri ~writable:true
+ in
Some root, obj
with Librarian.NoRootFor x -> None, ""
+ ;;
let mtime_of_source_object s =
try Some (Unix.stat s).Unix.st_mtime
with Unix.Unix_error (Unix.ENOENT, "stat", f) when f = s -> None
+ ;;
let mtime_of_target_object s =
try Some (Unix.stat s).Unix.st_mtime
with Unix.Unix_error (Unix.ENOENT, "stat", f) when f = s -> None
+ ;;
let build options fname =
- if Filename.check_suffix fname ".mma" then
- let generated = Filename.chop_suffix fname ".mma" ^ ".ma" in
- let atexit = dump generated in
- let res = compile options fname in
- let r = atexit res in
- if r then r else begin
- Sys.remove generated;
- Printf.printf "rm %s\n" generated; flush stdout; r
- end
- else
- compile options fname
+ let compile opts fname =
+ try
+ GrafiteSync.push ();
+ GrafiteParser.push ();
+ let rc = compile opts fname in
+ GrafiteParser.pop ();
+ GrafiteSync.pop ();
+ rc
+ with
+ | Sys.Break ->
+ GrafiteParser.pop ();
+ GrafiteSync.pop ();
+ false
+ | exn ->
+ HLog.error ("Unexpected " ^ snd(MatitaExcPp.to_string exn));
+ assert false
+ in
+ if Filename.check_suffix fname ".mma" then
+ let generated = Filename.chop_suffix fname ".mma" ^ ".ma" in
+ let atexit = dump generated in
+ let res = compile options fname in
+ let r = atexit res in
+ if r then r else begin
+ Sys.remove generated;
+ Printf.printf "rm %s\n" generated; flush stdout; r
+ end
+ else
+ compile options fname
+ ;;
- let load_deps_file = Librarian.load_deps_file
+ let load_deps_file = Librarian.load_deps_file;;
end