From: Enrico Tassi
Date: Tue, 2 Nov 2010 17:08:43 +0000 (+0000)
Subject: big change in parsing, trying to make all functional
X-Git-Tag: make_still_working~2743
X-Git-Url: http://matita.cs.unibo.it/gitweb/?a=commitdiff_plain;h=f3f6b451707a3feb8245717e3fa7ca25df0ce8ef;p=helm.git

big change in parsing, trying to make all functional
---

diff --git a/matita/components/content/.depend b/matita/components/content/.depend
index a4aeebd43..d5941b8ef 100644
--- a/matita/components/content/.depend
+++ b/matita/components/content/.depend
@@ -1,7 +1,7 @@
 content.cmi:
-notationUtil.cmi:
-notationEnv.cmi:
-notationPp.cmi:
+notationUtil.cmi: notationPt.cmo
+notationEnv.cmi: notationPt.cmo
+notationPp.cmi: notationPt.cmo notationEnv.cmi
 interpretations.cmi: notationPt.cmo
 notationPt.cmo:
 notationPt.cmx:
diff --git a/matita/components/content/interpretations.ml b/matita/components/content/interpretations.ml
index e9166c20a..aaa87d083 100644
--- a/matita/components/content/interpretations.ml
+++ b/matita/components/content/interpretations.ml
@@ -35,8 +35,6 @@ let debug_print s = if debug then prerr_endline (Lazy.force s) else ()
 let load_patterns32 = ref (fun _ -> assert false);;
 let set_load_patterns32 f = load_patterns32 := f;;
 
-type interpretation_id = int
-
 let idref id t = Ast.AttributedTerm (`IdRef id, t)
 
 type cic_id = string
@@ -51,11 +49,11 @@ module StringMap = Map.Make(String);;
 
 type db = {
   counter: int;
-  pattern32_matrix: (bool * NotationPt.cic_appl_pattern * interpretation_id) list;
+  pattern32_matrix: (bool * NotationPt.cic_appl_pattern * int) list;
   level2_patterns32:
    (string * string * NotationPt.argument_pattern list *
     NotationPt.cic_appl_pattern) IntMap.t;
-  interpretations: interpretation_id list StringMap.t; (* symb -> id list *)
+  interpretations: int list StringMap.t; (* symb -> id list *)
 }
 
 let initial_db = {
@@ -136,7 +134,7 @@ let add_interpretation (status : #status) dsc (symbol, args) appl_pattern =
    }
  in
  !load_patterns32 status#interp_db.pattern32_matrix;
- status,id
+ status
 
 let toggle_active_interpretations status b =
  status#set_interp_db { status#interp_db with
diff --git a/matita/components/content/interpretations.mli b/matita/components/content/interpretations.mli
index d04b56a8f..8ee3fc01f 100644
--- a/matita/components/content/interpretations.mli
+++ b/matita/components/content/interpretations.mli
@@ -40,8 +40,6 @@ class status :
   method set_interp_status: #g_status -> 'self
  end
 
-type interpretation_id
-
 type cic_id = string
 
 val add_interpretation:
@@ -49,7 +47,7 @@ val add_interpretation:
 string -> (* id / description *)
 string * NotationPt.argument_pattern list -> (* symbol, level 2 pattern *)
 NotationPt.cic_appl_pattern -> (* level 3 pattern *)
- 'status * interpretation_id
+ 'status
 
 val set_load_patterns32:
  ((bool * NotationPt.cic_appl_pattern * int) list -> unit) -> unit
diff --git a/matita/components/content_pres/.depend b/matita/components/content_pres/.depend
index 8d74439eb..7b0acd5dc 100644
--- a/matita/components/content_pres/.depend
+++ b/matita/components/content_pres/.depend
@@ -7,8 +7,8 @@ content2presMatcher.cmi:
 termContentPres.cmi: cicNotationParser.cmi
 boxPp.cmi: mpresentation.cmi cicNotationPres.cmi box.cmi
 cicNotationPres.cmi: mpresentation.cmi box.cmi
-content2pres.cmi: cicNotationPres.cmi
-sequent2pres.cmi: cicNotationPres.cmi
+content2pres.cmi: termContentPres.cmi cicNotationPres.cmi
+sequent2pres.cmi: termContentPres.cmi cicNotationPres.cmi
 renderingAttrs.cmo: renderingAttrs.cmi
 renderingAttrs.cmx: renderingAttrs.cmi
 cicNotationLexer.cmo:
cicNotationLexer.cmi diff --git a/matita/components/content_pres/cicNotationLexer.ml b/matita/components/content_pres/cicNotationLexer.ml index e0225d72e..eaabf61e7 100644 --- a/matita/components/content_pres/cicNotationLexer.ml +++ b/matita/components/content_pres/cicNotationLexer.ml @@ -29,6 +29,9 @@ open Printf exception Error of int * int * string +module StringSet = Set.Make(String) + +(* Lexer *) let regexp number = xml_digit+ let regexp utf8_blank = " " | "\r\n" | "\n" | "\t" | [160] (* this is a nbsp *) let regexp percentage = @@ -111,22 +114,6 @@ let level2_meta_keywords = "anonymous"; "ident"; "number"; "term"; "fresh" ] - (* (string, unit) Hashtbl.t, to exploit multiple bindings *) -let initial_level2_ast_keywords () = Hashtbl.create 23;; - -let level2_ast_keywords = ref (initial_level2_ast_keywords ()) - -let initialize_keywords () = - List.iter (fun k -> Hashtbl.add !level2_ast_keywords k ()) - [ "CProp"; "Prop"; "Type"; "Set"; "let"; "match"; - "with"; "in"; "and"; "to"; "as"; "on"; "return"; "done" ] -;; - -let _ = initialize_keywords ();; - -let add_level2_ast_keyword k = Hashtbl.add !level2_ast_keywords k () -let remove_level2_ast_keyword k = Hashtbl.remove !level2_ast_keywords k - (* (string, int) Hashtbl.t, with multiple bindings. * int is the unicode codepoint *) let ligatures = Hashtbl.create 23 @@ -286,7 +273,7 @@ let rec comment_token acc depth = comment_token acc depth lexbuf (** @param k continuation to be invoked when no ligature has been found *) -let rec ligatures_token k = +let ligatures_token k = lexer | ligature -> let lexeme = Ulexing.utf8_lexeme lexbuf in @@ -301,21 +288,21 @@ let rec ligatures_token k = Ulexing.rollback lexbuf; k lexbuf -and level2_ast_token = +let rec level2_ast_token status = lexer | let_rec -> return lexbuf ("LETREC","") | let_corec -> return lexbuf ("LETCOREC","") | we_proved -> return lexbuf ("WEPROVED","") | we_have -> return lexbuf ("WEHAVE","") - | utf8_blank+ -> ligatures_token level2_ast_token lexbuf + | utf8_blank+ -> ligatures_token (level2_ast_token status) lexbuf | meta -> let s = Ulexing.utf8_lexeme lexbuf in return lexbuf ("META", String.sub s 1 (String.length s - 1)) | implicit -> return lexbuf ("IMPLICIT", "") | placeholder -> return lexbuf ("PLACEHOLDER", "") - | ident -> handle_keywords lexbuf (Hashtbl.mem !level2_ast_keywords) "IDENT" + | ident -> handle_keywords lexbuf (fun x -> StringSet.mem x status) "IDENT" | variable_ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf) - | pident -> handle_keywords lexbuf (Hashtbl.mem !level2_ast_keywords) "PIDENT" + | pident -> handle_keywords lexbuf (fun x -> StringSet.mem x status) "PIDENT" | number -> return lexbuf ("NUMBER", Ulexing.utf8_lexeme lexbuf) | tex_token -> return lexbuf (expand_macro lexbuf) | nreference -> return lexbuf ("NREF", Ulexing.utf8_lexeme lexbuf) @@ -338,7 +325,7 @@ and level2_ast_token = Ulexing.utf8_sub_lexeme lexbuf 2 (Ulexing.lexeme_length lexbuf - 4) in return lexbuf ("NOTE", comment) *) - ligatures_token level2_ast_token lexbuf + ligatures_token (level2_ast_token status) lexbuf | begincomment -> return lexbuf ("BEGINCOMMENT","") | endcomment -> return lexbuf ("ENDCOMMENT","") | eof -> return_eoi lexbuf @@ -365,44 +352,26 @@ and level1_pattern_token = | _ -> return_symbol lexbuf (Ulexing.utf8_lexeme lexbuf) let level1_pattern_token = ligatures_token level1_pattern_token -let level2_ast_token = ligatures_token level2_ast_token +let level2_ast_token status = ligatures_token (level2_ast_token status) (* API implementation *) - -let 
initial_level1_pattern_lexer () = mk_lexer level1_pattern_token -let initial_level2_ast_lexer () = mk_lexer level2_ast_token -let initial_level2_meta_lexer () = mk_lexer level2_meta_token - - -let level1_pattern_lexer_ref = ref (initial_level1_pattern_lexer ()) -let level2_ast_lexer_ref = ref (initial_level2_ast_lexer ()) -let level2_meta_lexer_ref = ref (initial_level2_meta_lexer ()) - -let level1_pattern_lexer () = !level1_pattern_lexer_ref -let level2_ast_lexer () = !level2_ast_lexer_ref -let level2_meta_lexer () = !level2_meta_lexer_ref - -let history = ref [];; - -let push () = - history := - (!level2_ast_keywords,!level1_pattern_lexer_ref, - !level2_ast_lexer_ref,!level2_meta_lexer_ref) :: !history; - level2_ast_keywords := initial_level2_ast_keywords (); - initialize_keywords (); - level1_pattern_lexer_ref := initial_level1_pattern_lexer (); - level2_ast_lexer_ref := initial_level2_ast_lexer (); - level2_meta_lexer_ref := initial_level2_meta_lexer (); -;; - -let pop () = - match !history with - | [] -> assert false - | (kwd,pl,al,ml) :: tl -> - level2_ast_keywords := kwd; - level1_pattern_lexer_ref := pl; - level2_ast_lexer_ref := al; - level2_meta_lexer_ref := ml; - history := tl +type lexers = { + level1_pattern_lexer : (string * string) Token.glexer; + level2_ast_lexer : (string * string) Token.glexer; + level2_meta_lexer : (string * string) Token.glexer +} + +let mk_lexers keywords = + let initial_keywords = + [ "CProp"; "Prop"; "Type"; "Set"; "let"; "match"; + "with"; "in"; "and"; "to"; "as"; "on"; "return"; "done" ] + in + let status = + List.fold_right StringSet.add (initial_keywords @ keywords) StringSet.empty + in + { + level1_pattern_lexer = mk_lexer level1_pattern_token; + level2_ast_lexer = mk_lexer (level2_ast_token status); + level2_meta_lexer = mk_lexer level2_meta_token + } ;; - diff --git a/matita/components/content_pres/cicNotationLexer.mli b/matita/components/content_pres/cicNotationLexer.mli index 1edacd960..3bb42f5cb 100644 --- a/matita/components/content_pres/cicNotationLexer.mli +++ b/matita/components/content_pres/cicNotationLexer.mli @@ -28,20 +28,12 @@ * error message *) exception Error of int * int * string - (** XXX ZACK DEFCON 4 BEGIN: never use the tok_func field of the glexers below - * passing values of type char Stream.t, they should be in fact Ulexing.lexbuf - * casted with Obj.magic :-/ Read the comment in the .ml for the rationale *) +type lexers = { + level1_pattern_lexer : (string * string) Token.glexer; + level2_ast_lexer : (string * string) Token.glexer; + level2_meta_lexer : (string * string) Token.glexer +} -val level1_pattern_lexer: unit -> (string * string) Token.glexer -val level2_ast_lexer: unit -> (string * string) Token.glexer -val level2_meta_lexer: unit -> (string * string) Token.glexer +val mk_lexers : string list -> lexers - (** XXX ZACK DEFCON 4 END *) -val add_level2_ast_keyword: string -> unit (** non idempotent *) -val remove_level2_ast_keyword: string -> unit (** non idempotent *) - -(** {2 Ligatures} *) - -val push: unit -> unit -val pop: unit -> unit diff --git a/matita/components/content_pres/cicNotationParser.ml b/matita/components/content_pres/cicNotationParser.ml index 2d52f0b89..b828f8bc5 100644 --- a/matita/components/content_pres/cicNotationParser.ml +++ b/matita/components/content_pres/cicNotationParser.ml @@ -46,32 +46,17 @@ type ('a,'b,'c,'d) grammars = { level2_meta: 'b Grammar.Entry.e; } -let initial_grammars () = - let level1_pattern_grammar = - Grammar.gcreate (CicNotationLexer.level1_pattern_lexer ()) in - let 
level2_ast_grammar = - Grammar.gcreate (CicNotationLexer.level2_ast_lexer ()) in - let level2_meta_grammar = - Grammar.gcreate (CicNotationLexer.level2_meta_lexer ()) in - let level1_pattern = - Grammar.Entry.create level1_pattern_grammar "level1_pattern" in - let level2_ast = Grammar.Entry.create level2_ast_grammar "level2_ast" in - let term = Grammar.Entry.create level2_ast_grammar "term" in - let let_defs = Grammar.Entry.create level2_ast_grammar "let_defs" in - let protected_binder_vars = - Grammar.Entry.create level2_ast_grammar "protected_binder_vars" in - let level2_meta = Grammar.Entry.create level2_meta_grammar "level2_meta" in - { level1_pattern=level1_pattern; - level2_ast=level2_ast; - term=term; - let_defs=let_defs; - protected_binder_vars=protected_binder_vars; - level2_meta=level2_meta; - level2_ast_grammar=level2_ast_grammar; -} -;; +type checked_l1_pattern = CL1P of NotationPt.term * int -let grammars = ref (initial_grammars ());; +type db = { + grammars: + (int -> NotationPt.term, + Ast.term, + (Ast.term Ast.capture_variable list * + Ast.term Ast.capture_variable * Ast.term * int) list, + Ast.term list * Ast.term option) grammars; + items: (checked_l1_pattern * int * Gramext.g_assoc * NotationPt.term) list +} let int_of_string s = try @@ -90,11 +75,12 @@ let gram_symbol s = Gramext.Stoken ("SYMBOL", s) let gram_ident s = Gramext.Stoken ("IDENT", s) let gram_number s = Gramext.Stoken ("NUMBER", s) let gram_keyword s = Gramext.Stoken ("", s) -let gram_term = function +let gram_term status = function | Ast.Self _ -> Gramext.Sself | Ast.Level precedence -> Gramext.Snterml - (Grammar.Entry.obj (!grammars.term: 'a Grammar.Entry.e), + (Grammar.Entry.obj + (status#notation_parser_db.grammars.term: 'a Grammar.Entry.e), level_of precedence) ;; @@ -152,7 +138,7 @@ let flatten_opt = aux [] (* given a level 1 pattern computes the new RHS of "term" grammar entry *) -let extract_term_production pattern = +let extract_term_production status pattern = let rec aux = function | Ast.AttributedTerm (_, t) -> aux t | Ast.Literal l -> aux_literal l @@ -224,7 +210,7 @@ let extract_term_production pattern = function | Ast.NumVar s -> [Binding (s, Env.NumType), gram_number ""] | Ast.TermVar (s,(Ast.Self level|Ast.Level level as lv)) -> - [Binding (s, Env.TermType level), gram_term lv] + [Binding (s, Env.TermType level), gram_term status lv] | Ast.IdentVar s -> [Binding (s, Env.StringType), gram_ident ""] | Ast.Ascription (p, s) -> assert false (* TODO *) | Ast.FreshVar _ -> assert false @@ -252,11 +238,6 @@ let compare_rule_id x y = in aux (x,y) - (* mapping: rule_id -> owned keywords. 
(rule_id, string list) Hashtbl.t *) -let initial_owned_keywords () = Hashtbl.create 23 -let owned_keywords = ref (initial_owned_keywords ()) - -type checked_l1_pattern = CL1P of NotationPt.term * int let check_l1_pattern level1_pattern pponly level associativity = let variables = ref 0 in @@ -335,14 +316,28 @@ let check_l1_pattern level1_pattern pponly level associativity = CL1P (cp,level) ;; -let extend (CL1P (level1_pattern,precedence)) action = +let extend status (CL1P (level1_pattern,precedence)) action = + assert false +(* + CicNotationParser.new_parser (); + let create_cb_old (l1, precedence, associativity, l2) = + ignore( + CicNotationParser.extend l1 + (fun env loc -> + NotationPt.AttributedTerm + (`Loc loc,TermContentPres.instantiate_level2 env l2))) + in + RefCounter.iter create_cb_old !parser_ref_counter; +*) +(* let p_bindings, p_atoms = - List.split (extract_term_production level1_pattern) + List.split (extract_term_production status level1_pattern) in let level = level_of precedence in - let _ = + let () = Grammar.extend - [ Grammar.Entry.obj (!grammars.term: 'a Grammar.Entry.e), + [ Grammar.Entry.obj + (status#notation_parser_db.grammars.term: 'a Grammar.Entry.e), Some (Gramext.Level level), [ None, Some (*Gramext.NonA*) Gramext.NonA, @@ -355,23 +350,11 @@ let extend (CL1P (level1_pattern,precedence)) action = let keywords = NotationUtil.keywords_of_term level1_pattern in let rule_id = p_atoms in List.iter CicNotationLexer.add_level2_ast_keyword keywords; - Hashtbl.add !owned_keywords rule_id keywords; (* keywords may be [] *) rule_id - -let delete rule_id = - let atoms = rule_id in - (try - let keywords = Hashtbl.find !owned_keywords rule_id in - List.iter CicNotationLexer.remove_level2_ast_keyword keywords - with Not_found -> assert false); - Grammar.delete_rule !grammars.term atoms +*) (** {2 Grammar} *) -let parse_level1_pattern_ref = ref (fun _ _ -> assert false) -let parse_level2_ast_ref = ref (fun _ -> assert false) -let parse_level2_meta_ref = ref (fun _ -> assert false) - let fold_cluster binder terms ty body = List.fold_right (fun term body -> Ast.Binder (binder, (term, ty), body)) @@ -393,8 +376,37 @@ let return_term loc term = Ast.AttributedTerm (`Loc loc, term) let return_term_of_level loc term l = Ast.AttributedTerm (`Loc loc, term l) +(** {2 API implementation} *) + +let exc_located_wrapper f = + try + f () + with + | Stdpp.Exc_located (floc, Stream.Error msg) -> + raise (HExtlib.Localized (floc, Parse_error msg)) + | Stdpp.Exc_located (floc, HExtlib.Localized (_,exn)) -> + raise (HExtlib.Localized (floc, (Parse_error (Printexc.to_string exn)))) + | Stdpp.Exc_located (floc, exn) -> + raise (HExtlib.Localized (floc, (Parse_error (Printexc.to_string exn)))) + +let parse_level1_pattern grammars precedence lexbuf = + exc_located_wrapper + (fun () -> Grammar.Entry.parse grammars.level1_pattern (Obj.magic lexbuf) precedence) + +let parse_level2_ast grammars lexbuf = + exc_located_wrapper + (fun () -> Grammar.Entry.parse grammars.level2_ast (Obj.magic lexbuf)) + +let parse_level2_meta grammars lexbuf = + exc_located_wrapper + (fun () -> Grammar.Entry.parse grammars.level2_meta (Obj.magic lexbuf)) + +let parse_term grammars lexbuf = + exc_located_wrapper + (fun () -> (Grammar.Entry.parse grammars.term (Obj.magic lexbuf))) + (* create empty precedence level for "term" *) -let initialize_grammars () = +let initialize_grammars grammars = let dummy_action = Gramext.action (fun _ -> failwith "internal error, lexer generated a dummy token") @@ -414,12 +426,12 @@ let 
initialize_grammars () = aux [] last in Grammar.extend - [ Grammar.Entry.obj (!grammars.term: 'a Grammar.Entry.e), + [ Grammar.Entry.obj (grammars.term: 'a Grammar.Entry.e), None, mk_level_list min_precedence max_precedence ]; (* {{{ Grammar for concrete syntax patterns, notation level 1 *) begin - let level1_pattern = !grammars.level1_pattern in + let level1_pattern = grammars.level1_pattern in EXTEND GLOBAL: level1_pattern; @@ -533,7 +545,7 @@ EXTEND (* }}} *) (* {{{ Grammar for ast magics, notation level 2 *) begin - let level2_meta = !grammars.level2_meta in + let level2_meta = grammars.level2_meta in EXTEND GLOBAL: level2_meta; l2_variable: [ @@ -563,7 +575,7 @@ EXTEND [ magic = l2_magic -> Ast.Magic magic | var = l2_variable -> Ast.Variable var | blob = UNPARSED_AST -> - !parse_level2_ast_ref (Ulexing.from_utf8_string blob) + parse_level2_ast grammars (Ulexing.from_utf8_string blob) ] ]; END @@ -571,10 +583,10 @@ END (* }}} *) (* {{{ Grammar for ast patterns, notation level 2 *) begin - let level2_ast = !grammars.level2_ast in - let term = !grammars.term in - let let_defs = !grammars.let_defs in - let protected_binder_vars = !grammars.protected_binder_vars in + let level2_ast = grammars.level2_ast in + let term = grammars.term in + let let_defs = grammars.let_defs in + let protected_binder_vars = grammars.protected_binder_vars in EXTEND GLOBAL: level2_ast term let_defs protected_binder_vars; level2_ast: [ [ p = term -> p ] ]; @@ -635,7 +647,7 @@ EXTEND List.map (fun n -> Ast.Ident (n, None)) names, Some ty | name = IDENT -> [Ast.Ident (name, None)], None | blob = UNPARSED_META -> - let meta = !parse_level2_meta_ref (Ulexing.from_utf8_string blob) in + let meta = parse_level2_meta grammars (Ulexing.from_utf8_string blob) in match meta with | Ast.Variable (Ast.FreshVar _) -> [meta], None | Ast.Variable (Ast.TermVar ("_",_)) -> [Ast.Ident ("_", None)], None @@ -645,7 +657,7 @@ EXTEND single_arg: [ [ name = IDENT -> Ast.Ident (name, None) | blob = UNPARSED_META -> - let meta = !parse_level2_meta_ref (Ulexing.from_utf8_string blob) in + let meta = parse_level2_meta grammars (Ulexing.from_utf8_string blob) in match meta with | Ast.Variable (Ast.FreshVar _) | Ast.Variable (Ast.IdentVar _) -> meta @@ -776,80 +788,77 @@ EXTEND return_term loc (Ast.Cast (p1, p2)) | LPAREN; p = term; RPAREN -> p | blob = UNPARSED_META -> - !parse_level2_meta_ref (Ulexing.from_utf8_string blob) + parse_level2_meta grammars (Ulexing.from_utf8_string blob) ] ]; END - end + end; (* }}} *) + grammars ;; -let _ = initialize_grammars ();; - -let history = ref [];; - -let push () = - CicNotationLexer.push (); - history := (!owned_keywords,!grammars) :: !history; - owned_keywords := (initial_owned_keywords ()); - grammars := initial_grammars (); - initialize_grammars () -;; - -let pop () = - CicNotationLexer.pop (); - match !history with - | [] -> assert false - | (kw,gram) :: old_history -> - owned_keywords := kw; - grammars := gram; - history := old_history +let initial_grammars = + let lexers = CicNotationLexer.mk_lexers [] in + let level1_pattern_grammar = + Grammar.gcreate lexers.CicNotationLexer.level1_pattern_lexer in + let level2_ast_grammar = + Grammar.gcreate lexers.CicNotationLexer.level2_ast_lexer in + let level2_meta_grammar = + Grammar.gcreate lexers.CicNotationLexer.level2_meta_lexer in + let level1_pattern = + Grammar.Entry.create level1_pattern_grammar "level1_pattern" in + let level2_ast = Grammar.Entry.create level2_ast_grammar "level2_ast" in + let term = Grammar.Entry.create level2_ast_grammar 
"term" in + let let_defs = Grammar.Entry.create level2_ast_grammar "let_defs" in + let protected_binder_vars = + Grammar.Entry.create level2_ast_grammar "protected_binder_vars" in + let level2_meta = Grammar.Entry.create level2_meta_grammar "level2_meta" in + initialize_grammars { level1_pattern=level1_pattern; + level2_ast=level2_ast; + term=term; + let_defs=let_defs; + protected_binder_vars=protected_binder_vars; + level2_meta=level2_meta; + level2_ast_grammar=level2_ast_grammar; + } ;; -(** {2 API implementation} *) - -let exc_located_wrapper f = - try - f () - with - | Stdpp.Exc_located (floc, Stream.Error msg) -> - raise (HExtlib.Localized (floc, Parse_error msg)) - | Stdpp.Exc_located (floc, HExtlib.Localized (_,exn)) -> - raise (HExtlib.Localized (floc, (Parse_error (Printexc.to_string exn)))) - | Stdpp.Exc_located (floc, exn) -> - raise (HExtlib.Localized (floc, (Parse_error (Printexc.to_string exn)))) - -let parse_level1_pattern precedence lexbuf = - exc_located_wrapper - (fun () -> Grammar.Entry.parse !grammars.level1_pattern (Obj.magic lexbuf) precedence) - -let parse_level2_ast lexbuf = - exc_located_wrapper - (fun () -> Grammar.Entry.parse !grammars.level2_ast (Obj.magic lexbuf)) - -let parse_level2_meta lexbuf = - exc_located_wrapper - (fun () -> Grammar.Entry.parse !grammars.level2_meta (Obj.magic lexbuf)) - -let _ = - parse_level1_pattern_ref := parse_level1_pattern; - parse_level2_ast_ref := parse_level2_ast; - parse_level2_meta_ref := parse_level2_meta - -let parse_term lexbuf = - exc_located_wrapper - (fun () -> (Grammar.Entry.parse !grammars.term (Obj.magic lexbuf))) - -let level2_ast_grammar () = !grammars.level2_ast_grammar -let term () = !grammars.term -let let_defs () = !grammars.let_defs -let protected_binder_vars () = !grammars.protected_binder_vars - +class type g_status = + object + method notation_parser_db: db + end + +class status = + object(self) + val db = { grammars = initial_grammars ; items = [] } + method notation_parser_db = db + method set_notation_parser_db v = {< db = v >} + method set_notation_parser_status + : 'status. 
#g_status as 'status -> 'self + = fun o -> {< db = o#notation_parser_db >} + end + +let parse_level1_pattern status = + parse_level1_pattern status#notation_parser_db.grammars +let parse_level2_ast status = + parse_level2_ast status#notation_parser_db.grammars +let parse_level2_meta status = + parse_level2_meta status#notation_parser_db.grammars +let parse_term status = + parse_term status#notation_parser_db.grammars + +let level2_ast_grammar status = + status#notation_parser_db.grammars.level2_ast_grammar +let term status = status#notation_parser_db.grammars.term +let let_defs status = status#notation_parser_db.grammars.let_defs +let protected_binder_vars status = + status#notation_parser_db.grammars.protected_binder_vars (** {2 Debugging} *) -let print_l2_pattern () = - Grammar.print_entry Format.std_formatter (Grammar.Entry.obj !grammars.term); +let print_l2_pattern status = + Grammar.print_entry Format.std_formatter + (Grammar.Entry.obj status#notation_parser_db.grammars.term); Format.pp_print_flush Format.std_formatter (); flush stdout diff --git a/matita/components/content_pres/cicNotationParser.mli b/matita/components/content_pres/cicNotationParser.mli index c25ba1864..b70eb141f 100644 --- a/matita/components/content_pres/cicNotationParser.mli +++ b/matita/components/content_pres/cicNotationParser.mli @@ -26,54 +26,60 @@ exception Parse_error of string exception Level_not_found of int +type db + +class type g_status = + object + method notation_parser_db: db + end + +class status: + object('self) + inherit g_status + method set_notation_parser_db: db -> 'self + method set_notation_parser_status: 'status. #g_status as 'status -> 'self + end + type checked_l1_pattern = private CL1P of NotationPt.term * int (** {2 Parsing functions} *) (** concrete syntax pattern: notation level 1, the * integer is the precedence *) -val parse_level1_pattern: int -> Ulexing.lexbuf -> NotationPt.term +val parse_level1_pattern: #status -> int -> Ulexing.lexbuf -> NotationPt.term (** AST pattern: notation level 2 *) -val parse_level2_ast: Ulexing.lexbuf -> NotationPt.term -val parse_level2_meta: Ulexing.lexbuf -> NotationPt.term +val parse_level2_ast: #status -> Ulexing.lexbuf -> NotationPt.term +val parse_level2_meta: #status -> Ulexing.lexbuf -> NotationPt.term (** {2 Grammar extension} *) -type rule_id - -val compare_rule_id : rule_id -> rule_id -> int - val check_l1_pattern: (* level1_pattern, pponly, precedence, assoc *) NotationPt.term -> bool -> int -> Gramext.g_assoc -> checked_l1_pattern val extend: + #status as 'status -> checked_l1_pattern -> (NotationEnv.t -> NotationPt.location -> NotationPt.term) -> - rule_id - -val delete: rule_id -> unit + 'status (** {2 Grammar entries} * needed by grafite parser *) -val level2_ast_grammar: unit -> Grammar.g +val level2_ast_grammar: #status -> Grammar.g -val term : unit -> NotationPt.term Grammar.Entry.e +val term : #status -> NotationPt.term Grammar.Entry.e -val let_defs : unit -> +val let_defs : #status -> (NotationPt.term NotationPt.capture_variable list * NotationPt.term NotationPt.capture_variable * NotationPt.term * int) list Grammar.Entry.e -val protected_binder_vars : unit -> +val protected_binder_vars : #status -> (NotationPt.term list * NotationPt.term option) Grammar.Entry.e -val parse_term: Ulexing.lexbuf -> NotationPt.term +val parse_term: #status -> Ulexing.lexbuf -> NotationPt.term (** {2 Debugging} *) (** print "level2_pattern" entry on stdout, flushing afterwards *) -val print_l2_pattern: unit -> unit - -val push: unit -> unit -val pop: 
unit -> unit +val print_l2_pattern: #status -> unit diff --git a/matita/components/content_pres/termContentPres.ml b/matita/components/content_pres/termContentPres.ml index ae4474760..6b0d9d0c8 100644 --- a/matita/components/content_pres/termContentPres.ml +++ b/matita/components/content_pres/termContentPres.ml @@ -524,8 +524,7 @@ let add_pretty_printer status l2 (CicNotationParser.CL1P (l1,precedence)) = level1_patterns21 = IntMap.add id l1' status#content_pres_db.level1_patterns21; pattern21_matrix = (l2',id)::status#content_pres_db.pattern21_matrix } in - let status = load_patterns21 status status#content_pres_db.pattern21_matrix in - status,id + load_patterns21 status status#content_pres_db.pattern21_matrix (* presentation -> content *) diff --git a/matita/components/content_pres/termContentPres.mli b/matita/components/content_pres/termContentPres.mli index 41018d390..69cb38b99 100644 --- a/matita/components/content_pres/termContentPres.mli +++ b/matita/components/content_pres/termContentPres.mli @@ -39,13 +39,11 @@ class status : method set_content_pres_status: #g_status -> 'self end -type pretty_printer_id - val add_pretty_printer: #status as 'status -> NotationPt.term -> (* level 2 pattern *) CicNotationParser.checked_l1_pattern -> - 'status * pretty_printer_id + 'status (** {2 content -> pres} *) diff --git a/matita/components/extlib/.depend b/matita/components/extlib/.depend index dcc6377a0..f6168c1bc 100644 --- a/matita/components/extlib/.depend +++ b/matita/components/extlib/.depend @@ -6,7 +6,6 @@ hLog.cmi: trie.cmi: discrimination_tree.cmi: hTopoSort.cmi: -refCounter.cmi: graphvizPp.cmi: componentsConf.cmo: componentsConf.cmi componentsConf.cmx: componentsConf.cmi @@ -24,7 +23,5 @@ discrimination_tree.cmo: trie.cmi discrimination_tree.cmi discrimination_tree.cmx: trie.cmx discrimination_tree.cmi hTopoSort.cmo: hTopoSort.cmi hTopoSort.cmx: hTopoSort.cmi -refCounter.cmo: refCounter.cmi -refCounter.cmx: refCounter.cmi graphvizPp.cmo: graphvizPp.cmi graphvizPp.cmx: graphvizPp.cmi diff --git a/matita/components/extlib/Makefile b/matita/components/extlib/Makefile index 4aa36a488..8167ec3b4 100644 --- a/matita/components/extlib/Makefile +++ b/matita/components/extlib/Makefile @@ -10,7 +10,6 @@ INTERFACE_FILES = \ trie.mli \ discrimination_tree.mli \ hTopoSort.mli \ - refCounter.mli \ graphvizPp.mli \ $(NULL) IMPLEMENTATION_FILES = \ diff --git a/matita/components/extlib/refCounter.ml b/matita/components/extlib/refCounter.ml deleted file mode 100644 index f5edb69f5..000000000 --- a/matita/components/extlib/refCounter.ml +++ /dev/null @@ -1,49 +0,0 @@ -(* Copyright (C) 2006, HELM Team. - * - * This file is part of HELM, an Hypertextual, Electronic - * Library of Mathematics, developed at the Computer Science - * Department, University of Bologna, Italy. - * - * HELM is free software; you can redistribute it and/or - * modify it under the terms of the GNU General Public License - * as published by the Free Software Foundation; either version 2 - * of the License, or (at your option) any later version. - * - * HELM is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with HELM; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, - * MA 02111-1307, USA. 
- * - * For details, see the HELM World-Wide-Web page, - * http://helm.cs.unibo.it/ - *) - -type 'a t = ('a, int ref) Hashtbl.t - -let create () = Hashtbl.create 113 - -let incr ~create_cb counters item = - try - let counter = Hashtbl.find counters item in - incr counter - with Not_found -> - Hashtbl.add counters item (ref 1); - create_cb item - -let decr ~delete_cb counters item = - try - let counter = Hashtbl.find counters item in - decr counter; - if !counter = 0 then begin - Hashtbl.remove counters item; - delete_cb item - end - with Not_found -> - prerr_endline "RefCounter: attempt to decrease unexistent counter"; - assert false - diff --git a/matita/components/extlib/refCounter.mli b/matita/components/extlib/refCounter.mli deleted file mode 100644 index fc5216648..000000000 --- a/matita/components/extlib/refCounter.mli +++ /dev/null @@ -1,31 +0,0 @@ -(* Copyright (C) 2006, HELM Team. - * - * This file is part of HELM, an Hypertextual, Electronic - * Library of Mathematics, developed at the Computer Science - * Department, University of Bologna, Italy. - * - * HELM is free software; you can redistribute it and/or - * modify it under the terms of the GNU General Public License - * as published by the Free Software Foundation; either version 2 - * of the License, or (at your option) any later version. - * - * HELM is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with HELM; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, - * MA 02111-1307, USA. 
- * - * For details, see the HELM World-Wide-Web page, - * http://helm.cs.unibo.it/ - *) - -type 'a t - -val create: unit -> 'a t -val incr: create_cb:('a -> unit) -> 'a t -> 'a -> unit -val decr: delete_cb:('a -> unit) -> 'a t -> 'a -> unit - diff --git a/matita/components/grafite/.depend b/matita/components/grafite/.depend index f305b1580..0e7eba453 100644 --- a/matita/components/grafite/.depend +++ b/matita/components/grafite/.depend @@ -1,8 +1,5 @@ grafiteAstPp.cmi: grafiteAst.cmo -grafiteMarshal.cmi: grafiteAst.cmo grafiteAst.cmo: grafiteAst.cmx: grafiteAstPp.cmo: grafiteAst.cmo grafiteAstPp.cmi grafiteAstPp.cmx: grafiteAst.cmx grafiteAstPp.cmi -grafiteMarshal.cmo: grafiteAstPp.cmi grafiteAst.cmo grafiteMarshal.cmi -grafiteMarshal.cmx: grafiteAstPp.cmx grafiteAst.cmx grafiteMarshal.cmi diff --git a/matita/components/grafite/grafiteAst.ml b/matita/components/grafite/grafiteAst.ml index e32131a06..46dfef451 100644 --- a/matita/components/grafite/grafiteAst.ml +++ b/matita/components/grafite/grafiteAst.ml @@ -84,7 +84,7 @@ type nmacro = let magic = 35 type command = - | Include of loc * string + | Include of loc * string * unit * string | Set of loc * string * string | Print of loc * string diff --git a/matita/components/grafite/grafiteAstPp.ml b/matita/components/grafite/grafiteAstPp.ml index d38344824..202e1776c 100644 --- a/matita/components/grafite/grafiteAstPp.ml +++ b/matita/components/grafite/grafiteAstPp.ml @@ -130,7 +130,7 @@ let pp_ncommand = function ;; let pp_command = function - | Include (_,path) -> "include \"" ^ path ^ "\"" + | Include (_,path,_,_) -> "include \"" ^ path ^ "\"" | Print (_,s) -> "print " ^ s | Set (_, name, value) -> Printf.sprintf "set \"%s\" \"%s\"" name value diff --git a/matita/components/grafite_engine/grafiteEngine.ml b/matita/components/grafite_engine/grafiteEngine.ml index 8eb438e18..00160ac81 100644 --- a/matita/components/grafite_engine/grafiteEngine.ml +++ b/matita/components/grafite_engine/grafiteEngine.ml @@ -554,7 +554,18 @@ let eval_comment opts status (text,prefix_len,c) = status, [] let rec eval_command opts status (text,prefix_len,cmd) = let status,uris = match cmd with - | GrafiteAst.Include (loc, baseuri) -> + | GrafiteAst.Include (loc, fname, mode, _) -> + let include_paths = assert false in + let never_include = assert false in + let mode = assert false in + let baseuri = assert false in + let status = + let _root, buri, fullpath, _rrelpath = + Librarian.baseuri_of_script ~include_paths fname in + if never_include then raise (GrafiteParser.NoInclusionPerformed fullpath) + else + LexiconEngine.eval_command status (LexiconAst.Include (loc,buri,mode,fullpath)) + in let status,obj = GrafiteTypes.Serializer.require ~baseuri:(NUri.uri_of_string baseuri) status in diff --git a/matita/components/grafite_parser/.depend b/matita/components/grafite_parser/.depend index 2766b04d0..df2432b9f 100644 --- a/matita/components/grafite_parser/.depend +++ b/matita/components/grafite_parser/.depend @@ -1,8 +1,7 @@ dependenciesParser.cmi: grafiteParser.cmi: cicNotation2.cmi: -nEstatus.cmi: -grafiteDisambiguate.cmi: nEstatus.cmi +grafiteDisambiguate.cmi: print_grammar.cmi: dependenciesParser.cmo: dependenciesParser.cmi dependenciesParser.cmx: dependenciesParser.cmi @@ -10,8 +9,6 @@ grafiteParser.cmo: grafiteParser.cmi grafiteParser.cmx: grafiteParser.cmi cicNotation2.cmo: grafiteParser.cmi cicNotation2.cmi cicNotation2.cmx: grafiteParser.cmx cicNotation2.cmi -nEstatus.cmo: nEstatus.cmi -nEstatus.cmx: nEstatus.cmi grafiteDisambiguate.cmo: 
grafiteDisambiguate.cmi grafiteDisambiguate.cmx: grafiteDisambiguate.cmi print_grammar.cmo: print_grammar.cmi diff --git a/matita/components/grafite_parser/cicNotation2.ml b/matita/components/grafite_parser/cicNotation2.ml index e02517177..47a21ae62 100644 --- a/matita/components/grafite_parser/cicNotation2.ml +++ b/matita/components/grafite_parser/cicNotation2.ml @@ -26,12 +26,15 @@ (* $Id$ *) let load_notation status ~include_paths fname = + assert false +(* let ic = open_in fname in let lexbuf = Ulexing.from_utf8_channel ic in let status = ref status in try while true do - status := fst (GrafiteParser.parse_statement ~include_paths lexbuf !status) + status := fst (GrafiteParser.parse_statement lexbuf !status) done; assert false with End_of_file -> close_in ic; !status +*) diff --git a/matita/components/grafite_parser/dependenciesParser.ml b/matita/components/grafite_parser/dependenciesParser.ml index 57370660a..16c2a2c69 100644 --- a/matita/components/grafite_parser/dependenciesParser.ml +++ b/matita/components/grafite_parser/dependenciesParser.ml @@ -40,7 +40,8 @@ let pp_dependency = function let parse_dependencies lexbuf = let tok_stream,_ = - (CicNotationLexer.level2_ast_lexer ()).Token.tok_func (Obj.magic lexbuf) + let lexers = (CicNotationLexer.mk_lexers []) in + lexers.CicNotationLexer.level2_ast_lexer.Token.tok_func (Obj.magic lexbuf) in let rec parse acc = let continue, acc = diff --git a/matita/components/grafite_parser/grafiteParser.ml b/matita/components/grafite_parser/grafiteParser.ml index 0679c8895..29d52c01f 100644 --- a/matita/components/grafite_parser/grafiteParser.ml +++ b/matita/components/grafite_parser/grafiteParser.ml @@ -38,32 +38,23 @@ type 'a localized_option = type ast_statement = G.statement -type 'status statement = - ?never_include:bool -> - (* do not call LexiconEngine to do includes, always raise NoInclusionPerformed *) - include_paths:string list -> (#LE.status as 'status) -> - 'status * ast_statement localized_option - -type 'status parser_status = { - grammar : Grammar.g; - term : N.term Grammar.Entry.e; - statement : #LE.status as 'status statement Grammar.Entry.e; -} - -let grafite_callback = ref (fun _ -> ()) -let set_grafite_callback cb = grafite_callback := cb - -let lexicon_callback = ref (fun _ -> ()) -let set_lexicon_callback cb = lexicon_callback := cb - -let initial_parser () = - let grammar = CicNotationParser.level2_ast_grammar () in - let term = CicNotationParser.term () in - let statement = Grammar.Entry.create grammar "statement" in - { grammar = grammar; term = term; statement = statement } -;; +let exc_located_wrapper f = + try + f () + with + | Stdpp.Exc_located (_, End_of_file) -> raise End_of_file + | Stdpp.Exc_located (floc, Stream.Error msg) -> + raise (HExtlib.Localized (floc,CicNotationParser.Parse_error msg)) + | Stdpp.Exc_located (floc, HExtlib.Localized(_,exn)) -> + raise (HExtlib.Localized + (floc,CicNotationParser.Parse_error (Printexc.to_string exn))) + | Stdpp.Exc_located (floc, exn) -> + raise (HExtlib.Localized + (floc,CicNotationParser.Parse_error (Printexc.to_string exn))) -let grafite_parser = ref (initial_parser ()) +let parse_statement grafite_parser lexbuf = + exc_located_wrapper + (fun () -> (Grammar.Entry.parse (Obj.magic grafite_parser) (Obj.magic lexbuf))) let add_raw_attribute ~text t = N.AttributedTerm (`Raw text, t) @@ -102,31 +93,26 @@ type by_continuation = | BYC_letsuchthat of string * N.term * string * N.term | BYC_wehaveand of string * N.term * string * N.term -let initialize_parser () = +let mk_parser 
statement lstatus = +(* let grammar = CicNotationParser.level2_ast_grammar lstatus in *) + let term = CicNotationParser.term lstatus in + let let_defs = CicNotationParser.let_defs lstatus in + let protected_binder_vars = CicNotationParser.protected_binder_vars lstatus in (* {{{ parser initialization *) - let term = !grafite_parser.term in - let statement = !grafite_parser.statement in - let let_defs = CicNotationParser.let_defs () in - let protected_binder_vars = CicNotationParser.protected_binder_vars () in EXTEND GLOBAL: term statement; constructor: [ [ name = IDENT; SYMBOL ":"; typ = term -> (name, typ) ] ]; tactic_term: [ [ t = term LEVEL "90" -> t ] ]; +(* MATITA 1.0 new_name: [ [ SYMBOL "_" -> None | id = IDENT -> Some id ] ]; - ident_list0: [ [ LPAREN; idents = LIST0 new_name; RPAREN -> idents ] ]; +*) ident_list1: [ [ LPAREN; idents = LIST1 IDENT; RPAREN -> idents ] ]; tactic_term_list1: [ [ tactic_terms = LIST1 tactic_term SEP SYMBOL "," -> tactic_terms ] ]; - reduction_kind: [ - [ IDENT "normalize" -> `Normalize - | IDENT "simplify" -> `Simpl - | IDENT "unfold"; t = OPT tactic_term -> `Unfold t - | IDENT "whd" -> `Whd ] - ]; nreduction_kind: [ [ IDENT "normalize" ; delta = OPT [ IDENT "nodelta" -> () ] -> let delta = match delta with None -> true | _ -> false in @@ -191,11 +177,7 @@ EXTEND | SYMBOL "<" -> `RightToLeft ] ]; int: [ [ num = NUMBER -> int_of_string num ] ]; - intros_names: [ - [ idents = OPT ident_list0 -> - match idents with None -> [] | Some idents -> idents - ] - ]; +(* MATITA 1.0 intros_spec: [ [ OPT [ IDENT "names" ]; num = OPT [ num = int -> num ]; @@ -203,7 +185,8 @@ EXTEND num, idents ] ]; - using: [ [ using = OPT [ IDENT "using"; t = tactic_term -> t ] -> using ] ]; +*) +(* MATITA 1.0 using: [ [ using = OPT [ IDENT "using"; t = tactic_term -> t ] -> using ] ]; *) ntactic: [ [ SYMBOL "@"; t = tactic_term -> G.NTactic(loc,[G.NApply (loc, t)]) | IDENT "apply"; t = tactic_term -> G.NTactic(loc,[G.NApply (loc, t)]) @@ -314,6 +297,7 @@ EXTEND ] ]; +(* MATITA 1.0 by_continuation: [ [ WEPROVED; ty = tactic_term ; LPAREN ; id = IDENT ; RPAREN ; t1 = OPT [IDENT "that" ; IDENT "is" ; IDENT "equivalent" ; "to" ; t2 = tactic_term -> t2] -> BYC_weproved (ty,Some id,t1) | WEPROVED; ty = tactic_term ; t1 = OPT [IDENT "that" ; IDENT "is" ; IDENT "equivalent" ; "to" ; t2 = tactic_term -> t2] ; @@ -326,11 +310,14 @@ EXTEND BYC_wehaveand (id1,t1,id2,t2) ] ]; +*) +(* MATITA 1.0 rewriting_step_continuation : [ [ "done" -> true | -> false ] ]; +*) (* MATITA 1.0 atomic_tactical: [ "sequence" LEFTA @@ -498,11 +485,11 @@ EXTEND p2 = [ blob = UNPARSED_AST -> add_raw_attribute ~text:(Printf.sprintf "@{%s}" blob) - (CicNotationParser.parse_level2_ast + (CicNotationParser.parse_level2_ast lstatus (Ulexing.from_utf8_string blob)) | blob = UNPARSED_META -> add_raw_attribute ~text:(Printf.sprintf "${%s}" blob) - (CicNotationParser.parse_level2_meta + (CicNotationParser.parse_level2_meta lstatus (Ulexing.from_utf8_string blob)) ] -> let assoc = @@ -512,7 +499,7 @@ EXTEND in let p1 = add_raw_attribute ~text:s - (CicNotationParser.parse_level1_pattern prec + (CicNotationParser.parse_level1_pattern lstatus prec (Ulexing.from_utf8_string s)) in (dir, p1, assoc, prec, p2) @@ -630,86 +617,50 @@ EXTEND ]; statement: [ [ ex = executable -> - fun ?(never_include=false) ~include_paths status -> - let stm = G.Executable (loc, ex) in - !grafite_callback stm; - status, LSome stm + LSome (G.Executable (loc, ex)) | com = comment -> - fun ?(never_include=false) ~include_paths status -> - let stm = G.Comment 
(loc, com) in - !grafite_callback stm; - status, LSome stm + LSome (G.Comment (loc, com)) | (iloc,fname,normal,mode) = include_command ; SYMBOL "." -> - fun ?(never_include=false) ~include_paths status -> - let _root, buri, fullpath, _rrelpath = - Librarian.baseuri_of_script ~include_paths fname in - if never_include then raise (NoInclusionPerformed fullpath) - else - begin - let stm = - G.Executable - (loc, G.Command (loc, G.Include (iloc,fname))) in - !grafite_callback stm; - let status = - LE.eval_command status (L.Include (iloc,buri,mode,fullpath)) in - let stm = - G.Executable - (loc,G.Command (loc,G.Include (iloc,buri))) - in - status, LSome stm - end + LSome (G.Executable + (loc,G.Command (loc,G.Include (iloc,fname,(),"")))) | scom = lexicon_command ; SYMBOL "." -> + assert false +(* fun ?(never_include=false) ~include_paths status -> - !lexicon_callback scom; let status = LE.eval_command status scom in status, LNone loc +*) | EOI -> raise End_of_file ] ]; -END + END; (* }}} *) + statement ;; -let _ = initialize_parser () ;; - -let exc_located_wrapper f = - try - f () - with - | Stdpp.Exc_located (_, End_of_file) -> raise End_of_file - | Stdpp.Exc_located (floc, Stream.Error msg) -> - raise (HExtlib.Localized (floc,CicNotationParser.Parse_error msg)) - | Stdpp.Exc_located (floc, HExtlib.Localized(_,exn)) -> - raise - (HExtlib.Localized (floc,CicNotationParser.Parse_error (Printexc.to_string exn))) - | Stdpp.Exc_located (floc, exn) -> - raise - (HExtlib.Localized (floc,CicNotationParser.Parse_error (Printexc.to_string exn))) - -let parse_statement lexbuf = - exc_located_wrapper - (fun () -> (Grammar.Entry.parse (Obj.magic !grafite_parser.statement) (Obj.magic lexbuf))) - -let statement () = Obj.magic !grafite_parser.statement +type db = ast_statement localized_option Grammar.Entry.e ;; -let history = ref [] ;; +class type g_status = + object + inherit LexiconEngine.g_status + method parser_db: db + end -let push () = - LexiconSync.push (); - history := !grafite_parser :: !history; - grafite_parser := initial_parser (); - initialize_parser () -;; +class status = + let lstatus = assert false in + let grammar = CicNotationParser.level2_ast_grammar lstatus in + object + inherit LexiconEngine.status + val db = + mk_parser (Grammar.Entry.create grammar "statement") lstatus + method parser_db = db + method set_parser_db v = {< db = v >} + method set_parser_status + : 'status. 
#g_status as 'status -> 'self + = fun o -> {< db = o#parser_db >}#set_lexicon_engine_status o + end -let pop () = - LexiconSync.pop (); - match !history with - | [] -> assert false - | gp :: tail -> - grafite_parser := gp; - history := tail -;; +let parse_statement status = + parse_statement status#parser_db (* vim:set foldmethod=marker: *) - - diff --git a/matita/components/grafite_parser/grafiteParser.mli b/matita/components/grafite_parser/grafiteParser.mli index 98942e685..4e4b035ab 100644 --- a/matita/components/grafite_parser/grafiteParser.mli +++ b/matita/components/grafite_parser/grafiteParser.mli @@ -31,24 +31,26 @@ type ast_statement = GrafiteAst.statement exception NoInclusionPerformed of string (* full path *) -type 'status statement = - ?never_include:bool -> - (* do not call LexiconEngine to do includes, always raise NoInclusionPerformed *) - include_paths:string list -> - (#LexiconEngine.status as 'status) -> - 'status * ast_statement localized_option - -val parse_statement: Ulexing.lexbuf -> #LexiconEngine.status statement (** @raise End_of_file *) - -val statement: unit -> #LexiconEngine.status statement Grammar.Entry.e - -(* this callback is called before every grafite statement *) -val set_grafite_callback: - (ast_statement -> unit) -> unit - -(* this callback is called before every lexicon command *) -val set_lexicon_callback: - (LexiconAst.command -> unit) -> unit - -val push : unit -> unit -val pop : unit -> unit +type db + +class type g_status = + object + inherit LexiconEngine.g_status + method parser_db: db + end + +class status : + object('self) + inherit LexiconEngine.status + method parser_db : db + method set_parser_db : db -> 'self + method set_parser_status : 'status. #g_status as 'status -> 'self + end + + (* never_include: do not call LexiconEngine to do includes, + * always raise NoInclusionPerformed *) +(** @raise End_of_file *) +val parse_statement: + #status -> + Ulexing.lexbuf -> + ast_statement localized_option diff --git a/matita/components/grafite_parser/print_grammar.ml b/matita/components/grafite_parser/print_grammar.ml index 893a3f53c..5bc87f247 100644 --- a/matita/components/grafite_parser/print_grammar.ml +++ b/matita/components/grafite_parser/print_grammar.ml @@ -264,8 +264,8 @@ let rec visit_entries fmt todo pped = visit_entries fmt todo pped ;; -let ebnf_of_term () = - let g_entry = Grammar.Entry.obj (CicNotationParser.term ()) in +let ebnf_of_term status = + let g_entry = Grammar.Entry.obj (CicNotationParser.term status) in let buff = Buffer.create 100 in let fmt = Format.formatter_of_buffer buff in visit_entries fmt [g_entry] []; diff --git a/matita/components/grafite_parser/print_grammar.mli b/matita/components/grafite_parser/print_grammar.mli index 584a79b3e..28becba1d 100644 --- a/matita/components/grafite_parser/print_grammar.mli +++ b/matita/components/grafite_parser/print_grammar.mli @@ -25,4 +25,4 @@ (* $Id: print_grammar.ml 6977 2006-10-25 12:41:21Z sacerdot $ *) -val ebnf_of_term: unit -> string +val ebnf_of_term: #GrafiteParser.status -> string diff --git a/matita/components/lexicon/.depend b/matita/components/lexicon/.depend index 16c114516..fbe5dd026 100644 --- a/matita/components/lexicon/.depend +++ b/matita/components/lexicon/.depend @@ -15,7 +15,5 @@ lexiconEngine.cmo: lexiconMarshal.cmi lexiconAstPp.cmi lexiconAst.cmo \ cicNotation.cmi lexiconEngine.cmi lexiconEngine.cmx: lexiconMarshal.cmx lexiconAstPp.cmx lexiconAst.cmx \ cicNotation.cmx lexiconEngine.cmi -lexiconSync.cmo: lexiconEngine.cmi lexiconAst.cmo cicNotation.cmi \ 
- lexiconSync.cmi -lexiconSync.cmx: lexiconEngine.cmx lexiconAst.cmx cicNotation.cmx \ - lexiconSync.cmi +lexiconSync.cmo: lexiconEngine.cmi lexiconAst.cmo lexiconSync.cmi +lexiconSync.cmx: lexiconEngine.cmx lexiconAst.cmx lexiconSync.cmi diff --git a/matita/components/lexicon/cicNotation.ml b/matita/components/lexicon/cicNotation.ml index 93d8a5570..1934c8526 100644 --- a/matita/components/lexicon/cicNotation.ml +++ b/matita/components/lexicon/cicNotation.ml @@ -27,39 +27,23 @@ open LexiconAst -type notation_id = - | RuleId of CicNotationParser.rule_id - | InterpretationId of Interpretations.interpretation_id - | PrettyPrinterId of TermContentPres.pretty_printer_id - class type g_status = object ('self) inherit Interpretations.g_status inherit TermContentPres.g_status + inherit CicNotationParser.g_status end class status = object (self) inherit Interpretations.status inherit TermContentPres.status + inherit CicNotationParser.status method set_notation_status : 'status. #g_status as 'status -> 'self - = fun o -> (self#set_interp_status o)#set_content_pres_status o + = fun o -> ((self#set_interp_status o)#set_content_pres_status o)#set_notation_parser_status o end -let compare_notation_id x y = - match x,y with - | RuleId i1, RuleId i2 -> CicNotationParser.compare_rule_id i1 i2 - | RuleId _, _ -> ~-1 - | _, RuleId _ -> 1 - | x,y -> Pervasives.compare x y - -let initial_parser_ref_counter () = RefCounter.create () -let initial_rule_ids_to_items ()= Hashtbl.create 113 - -let parser_ref_counter = ref (initial_parser_ref_counter ());; -let rule_ids_to_items = ref (initial_rule_ids_to_items ());; - let process_notation status st = match st with | Notation (loc, dir, l1, associativity, precedence, l2) -> @@ -67,62 +51,23 @@ let process_notation status st = CicNotationParser.check_l1_pattern l1 (dir = Some `RightToLeft) precedence associativity in - let item = (l1, precedence, associativity, l2) in - let rule_id = ref [] in - let _ = + let status = if dir <> Some `RightToLeft then - let create_cb (l1, precedence, associativity, l2) = - let id = - CicNotationParser.extend l1 - (fun env loc -> - NotationPt.AttributedTerm - (`Loc loc,TermContentPres.instantiate_level2 env l2)) in - rule_id := [ RuleId id ]; - Hashtbl.add !rule_ids_to_items id item - in - RefCounter.incr ~create_cb !parser_ref_counter item + CicNotationParser.extend status l1 + (fun env loc -> + NotationPt.AttributedTerm + (`Loc loc,TermContentPres.instantiate_level2 env l2)) + else status in - let status,pp_id = + let status = if dir <> Some `LeftToRight then - let status,id = TermContentPres.add_pretty_printer status l2 l1 in - status,[ PrettyPrinterId id ] + let status = TermContentPres.add_pretty_printer status l2 l1 in + status else - status,[] + status in - status,!rule_id @ pp_id - | Interpretation (loc, dsc, l2, l3) -> - let status,interp_id = - Interpretations.add_interpretation status dsc l2 l3 - in - status,[InterpretationId interp_id] - | st -> status,[] - -let remove_notation = function - | RuleId id -> - let item = - try - Hashtbl.find !rule_ids_to_items id - with Not_found -> assert false in - RefCounter.decr ~delete_cb:(fun _ -> CicNotationParser.delete id) - !parser_ref_counter item - | PrettyPrinterId id -> () - | InterpretationId id -> () - -let history = ref [];; - -let push () = - history := (!parser_ref_counter,!rule_ids_to_items) :: !history; - parser_ref_counter := initial_parser_ref_counter (); - rule_ids_to_items := initial_rule_ids_to_items (); - CicNotationParser.push () -;; + status + | Interpretation 
(loc, dsc, l2, l3) -> + Interpretations.add_interpretation status dsc l2 l3 + | st -> status -let pop () = - CicNotationParser.pop (); - match !history with - | [] -> assert false - | (prc,riti) :: tail -> - parser_ref_counter := prc; - rule_ids_to_items := riti; - history := tail; -;; diff --git a/matita/components/lexicon/cicNotation.mli b/matita/components/lexicon/cicNotation.mli index aa500f371..16422729b 100644 --- a/matita/components/lexicon/cicNotation.mli +++ b/matita/components/lexicon/cicNotation.mli @@ -23,27 +23,21 @@ * http://helm.cs.unibo.it/ *) -type notation_id - class type g_status = object ('self) inherit Interpretations.g_status inherit TermContentPres.g_status + inherit CicNotationParser.g_status end class status : object ('self) inherit Interpretations.status inherit TermContentPres.status + inherit CicNotationParser.status method set_notation_status: #g_status -> 'self end -val compare_notation_id : notation_id -> notation_id -> int - val process_notation: - #status as 'status -> LexiconAst.command -> 'status * notation_id list - -val remove_notation: notation_id -> unit + #status as 'status -> LexiconAst.command -> 'status -val push: unit -> unit -val pop: unit -> unit diff --git a/matita/components/lexicon/lexiconEngine.ml b/matita/components/lexicon/lexiconEngine.ml index 7f16fe883..d92b74091 100644 --- a/matita/components/lexicon/lexiconEngine.ml +++ b/matita/components/lexicon/lexiconEngine.ml @@ -37,14 +37,12 @@ type lexicon_status = { aliases: L.alias_spec DisambiguateTypes.Environment.t; multi_aliases: L.alias_spec list DisambiguateTypes.Environment.t; lexicon_content_rev: LexiconMarshal.lexicon; - notation_ids: CicNotation.notation_id list; (** in-scope notation ids *) } let initial_status = { aliases = DisambiguateTypes.Environment.empty; multi_aliases = DisambiguateTypes.Environment.empty; lexicon_content_rev = []; - notation_ids = []; } class type g_status = @@ -153,9 +151,7 @@ let rec eval_command ?(mode=L.WithPreferences) sstatus cmd = (loc, dsc, (symbol, args), disambiguate cic_appl_pattern) | _-> cmd in - let sstatus,notation_ids' = CicNotation.process_notation sstatus cmd in - let status = - { status with notation_ids = notation_ids' @ status.notation_ids } in + let sstatus = CicNotation.process_notation sstatus cmd in let sstatus = sstatus#set_lstatus status in match cmd with | L.Include (loc, baseuri, mode, fullpath) -> diff --git a/matita/components/lexicon/lexiconEngine.mli b/matita/components/lexicon/lexiconEngine.mli index 7e87a22e0..ad1ce3f86 100644 --- a/matita/components/lexicon/lexiconEngine.mli +++ b/matita/components/lexicon/lexiconEngine.mli @@ -29,7 +29,6 @@ type lexicon_status = { aliases: LexiconAst.alias_spec DisambiguateTypes.Environment.t; multi_aliases: LexiconAst.alias_spec list DisambiguateTypes.Environment.t; lexicon_content_rev: LexiconMarshal.lexicon; - notation_ids: CicNotation.notation_id list; (** in-scope notation ids *) } class type g_status = diff --git a/matita/components/lexicon/lexiconSync.ml b/matita/components/lexicon/lexiconSync.ml index 1971ac33e..ce9f56482 100644 --- a/matita/components/lexicon/lexiconSync.ml +++ b/matita/components/lexicon/lexiconSync.ml @@ -59,29 +59,3 @@ let add_aliases_for_objs status = in LexiconEngine.set_proof_aliases status new_env ) status - -module OrderedId = -struct - type t = CicNotation.notation_id - let compare = CicNotation.compare_notation_id -end - -module IdSet = Set.Make (OrderedId) - - (** @return l2 \ l1 *) -let id_list_diff l2 l1 = - let module S = IdSet in - let s1 = 
List.fold_left (fun set uri -> S.add uri set) S.empty l1 in - let s2 = List.fold_left (fun set uri -> S.add uri set) S.empty l2 in - let diff = S.diff s2 s1 in - S.fold (fun uri uris -> uri :: uris) diff [] - -let time_travel ~present ~past = - let notation_to_remove = - id_list_diff present#lstatus.LexiconEngine.notation_ids - past#lstatus.LexiconEngine.notation_ids - in - List.iter CicNotation.remove_notation notation_to_remove - -let push () = CicNotation.push ();; -let pop () = CicNotation.pop ();; diff --git a/matita/components/lexicon/lexiconSync.mli b/matita/components/lexicon/lexiconSync.mli index acdf7fd66..a081e53f1 100644 --- a/matita/components/lexicon/lexiconSync.mli +++ b/matita/components/lexicon/lexiconSync.mli @@ -26,9 +26,6 @@ val add_aliases_for_objs: #LexiconEngine.status as 'status -> NUri.uri list -> 'status -val time_travel: - present:#LexiconEngine.status -> past:#LexiconEngine.status -> unit - (** perform a diff between the aliases contained in two statuses, assuming * that the second one can only have more aliases than the first one * @return the list of aliases that should be added to aliases of from in @@ -37,5 +34,3 @@ val alias_diff: from:#LexiconEngine.status -> #LexiconEngine.status -> (DisambiguateTypes.domain_item * LexiconAst.alias_spec) list -val push: unit -> unit -val pop: unit -> unit diff --git a/matita/components/ng_refiner/.depend b/matita/components/ng_refiner/.depend index da0ab80fc..2633e1aba 100644 --- a/matita/components/ng_refiner/.depend +++ b/matita/components/ng_refiner/.depend @@ -2,10 +2,9 @@ nDiscriminationTree.cmi: nCicMetaSubst.cmi: nCicUnifHint.cmi: nCicCoercion.cmi: nCicUnifHint.cmi -nRstatus.cmi: nCicCoercion.cmi nCicRefineUtil.cmi: -nCicUnification.cmi: nRstatus.cmi -nCicRefiner.cmi: nRstatus.cmi +nCicUnification.cmi: nCicCoercion.cmi +nCicRefiner.cmi: nCicCoercion.cmi nDiscriminationTree.cmo: nDiscriminationTree.cmi nDiscriminationTree.cmx: nDiscriminationTree.cmi nCicMetaSubst.cmo: nCicMetaSubst.cmi @@ -16,8 +15,6 @@ nCicCoercion.cmo: nDiscriminationTree.cmi nCicUnifHint.cmi nCicMetaSubst.cmi \ nCicCoercion.cmi nCicCoercion.cmx: nDiscriminationTree.cmx nCicUnifHint.cmx nCicMetaSubst.cmx \ nCicCoercion.cmi -nRstatus.cmo: nCicCoercion.cmi nRstatus.cmi -nRstatus.cmx: nCicCoercion.cmx nRstatus.cmi nCicRefineUtil.cmo: nCicMetaSubst.cmi nCicRefineUtil.cmi nCicRefineUtil.cmx: nCicMetaSubst.cmx nCicRefineUtil.cmi nCicUnification.cmo: nCicUnifHint.cmi nCicMetaSubst.cmi nCicUnification.cmi diff --git a/matita/components/ng_tactics/.depend b/matita/components/ng_tactics/.depend index a90df82fa..47f203f0a 100644 --- a/matita/components/ng_tactics/.depend +++ b/matita/components/ng_tactics/.depend @@ -2,7 +2,7 @@ continuationals.cmi: nCicTacReduction.cmi: nTacStatus.cmi: continuationals.cmi nCicElim.cmi: -nTactics.cmi: nTacStatus.cmi continuationals.cmi +nTactics.cmi: nTacStatus.cmi nnAuto.cmi: nTacStatus.cmi nDestructTac.cmi: nTacStatus.cmi nInversion.cmi: nTacStatus.cmi diff --git a/matita/components/statuses.txt b/matita/components/statuses.txt index 64f41167b..03b17e31d 100644 --- a/matita/components/statuses.txt +++ b/matita/components/statuses.txt @@ -2,7 +2,7 @@ grafitetypes -> tac -> auto + eq + (grafitedisambiguate = lexicon+nciccoercion) |--> dumpable | | |--> nciclibrary | unif_hint | - interpretation + termcontentpres = cicnotation + interpretation + termcontentpres + notation_parser= cicnotation