| NumValue of string
| OptValue of value option
| ListValue of value list
- | LocValue of Stdpp.location
+ | DisambiguationValue of (Stdpp.location * string option * string option)
type value_type =
| TermType of int (* the level of the expected term *)
| NumValue of string
| OptValue of value option
| ListValue of value list
- | LocValue of Stdpp.location
+ | DisambiguationValue of (Stdpp.location * string option * string option)
type value_type =
| TermType of int (* the level of the expected term *)
| Env.OptValue (Some v) -> "Some " ^ pp_value status v
| Env.OptValue None -> "None"
| Env.ListValue l -> sprintf "[%s]" (String.concat "; " (List.map (pp_value status) l))
- | Env.LocValue l -> sprintf "#"
+ | Env.DisambiguationValue _ -> sprintf "#"
let rec pp_value_type =
function
("(" number (',' number)* ")")? (* reference spec *)
let regexp qstring = '"' [^ '"']* '"'
-let regexp hreftag = "<A href=\"" uri "\">"
+let regexp hreftag = "<A"
+let regexp href = "href=\"" uri "\""
+let regexp hreftitle = "title=" qstring
let regexp hrefclose = "</A>"
let regexp tex_token = '\\' ident
let begin_cnum, end_cnum = Ulexing.loc lexbuf in
raise (Error (begin_cnum, end_cnum, msg))
+let loc_of_buf lexbuf =
+ HExtlib.floc_of_loc (Ulexing.loc lexbuf)
+
let return_with_loc token begin_cnum end_cnum =
let flocation = HExtlib.floc_of_loc (begin_cnum,end_cnum) in
token, flocation
return lexbuf (name, s)
;;
-let get_uri buf =
- Ulexing.utf8_sub_lexeme buf 9 (Ulexing.lexeme_length buf - 11)
-;;
-
let rec level2_meta_token =
lexer
| utf8_blank+ -> level2_meta_token lexbuf
- | hreftag -> prerr_endline ("****** HREF : " ^ (Ulexing.utf8_lexeme lexbuf));return lexbuf ("HREF", get_uri lexbuf)
- | hrefclose -> return lexbuf ("HREFEND","")
+ | hreftag -> return lexbuf ("ATAG","")
+ | hrefclose -> return lexbuf ("ATAGEND","")
| ident ->
handle_keywords lexbuf (fun x -> List.mem x level2_meta_keywords) "IDENT"
| variable_ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf)
Ulexing.rollback lexbuf;
k lexbuf
-let rec level2_ast_token status =
- lexer
- | let_rec -> return lexbuf ("LETREC","")
- | let_corec -> return lexbuf ("LETCOREC","")
- | we_proved -> return lexbuf ("WEPROVED","")
- | we_have -> return lexbuf ("WEHAVE","")
- | utf8_blank+ -> ligatures_token (level2_ast_token status) lexbuf
- | meta ->
- let s = Ulexing.utf8_lexeme lexbuf in
- return lexbuf ("META", String.sub s 1 (String.length s - 1))
- | implicit -> return lexbuf ("IMPLICIT", "")
- | placeholder -> return lexbuf ("PLACEHOLDER", "")
- | hreftag -> prerr_endline ("****** HREF : " ^ (Ulexing.utf8_lexeme lexbuf));return lexbuf ("HREF", get_uri lexbuf)
- | hrefclose -> return lexbuf ("HREFEND","")
- | ident -> handle_keywords lexbuf (fun x -> StringSet.mem x status) "IDENT"
- | variable_ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf)
- | pident -> handle_keywords lexbuf (fun x -> StringSet.mem x status) "PIDENT"
- | number -> return lexbuf ("NUMBER", Ulexing.utf8_lexeme lexbuf)
- | tex_token -> return lexbuf (expand_macro lexbuf)
- | nreference -> return lexbuf ("NREF", Ulexing.utf8_lexeme lexbuf)
- | uri -> return lexbuf ("URI", Ulexing.utf8_lexeme lexbuf)
- | qstring ->
- return lexbuf ("QSTRING", remove_quotes (Ulexing.utf8_lexeme lexbuf))
- | csymbol ->
- return lexbuf ("CSYMBOL", remove_left_quote (Ulexing.utf8_lexeme lexbuf))
- | "${" -> read_unparsed_group "UNPARSED_META" lexbuf
- | "@{" -> read_unparsed_group "UNPARSED_AST" lexbuf
- | '(' -> return lexbuf ("LPAREN", "")
- | ')' -> return lexbuf ("RPAREN", "")
- | meta_ident ->
- return lexbuf ("UNPARSED_META",
- remove_left_quote (Ulexing.utf8_lexeme lexbuf))
- | meta_anonymous -> return lexbuf ("UNPARSED_META", "anonymous")
- | beginnote ->
- let _comment = comment_token (Ulexing.utf8_lexeme lexbuf) 0 lexbuf in
-(* let comment =
- Ulexing.utf8_sub_lexeme lexbuf 2 (Ulexing.lexeme_length lexbuf - 4)
- in
- return lexbuf ("NOTE", comment) *)
- ligatures_token (level2_ast_token status) lexbuf
- | begincomment -> return lexbuf ("BEGINCOMMENT","")
- | endcomment -> return lexbuf ("ENDCOMMENT","")
- | eof -> return_eoi lexbuf
- | _ -> return_symbol lexbuf (Ulexing.utf8_lexeme lexbuf)
+module LocalizeOD =
+ struct
+ type t = Stdpp.location
+ let compare = Pervasives.compare
+ end
+
+module LocalizeEnv = Map.Make (LocalizeOD)
+
+let so_pp = function
+ | None -> "()"
+ | Some s -> "*" ^ s
+;;
+
+let update_table loc desc href loctable =
+ if desc <> None || href <> None
+ then
+ (let s,e = HExtlib.loc_of_floc loc in
+ prerr_endline (Printf.sprintf "*** [%d,%d] \"%s\",\"%s\""
+ s e (so_pp href) (so_pp desc));
+ LocalizeEnv.add loc (href,desc) loctable)
+ else loctable
+;;
-and level1_pattern_token =
+let level2_ast_token loctable status =
+ let rec aux desc href =
+ lexer
+ | let_rec -> return lexbuf ("LETREC","")
+ | let_corec -> return lexbuf ("LETCOREC","")
+ | we_proved -> return lexbuf ("WEPROVED","")
+ | we_have -> return lexbuf ("WEHAVE","")
+ | utf8_blank+ -> ligatures_token (aux desc href) lexbuf
+ | meta ->
+ let s = Ulexing.utf8_lexeme lexbuf in
+ return lexbuf ("META", String.sub s 1 (String.length s - 1))
+ | implicit -> return lexbuf ("IMPLICIT", "")
+ | placeholder -> return lexbuf ("PLACEHOLDER", "")
+ | hreftag -> aux_in_tag None None lexbuf
+ | hrefclose -> aux None None lexbuf
+ | ident -> loctable :=
+ update_table (loc_of_buf lexbuf) desc href !loctable;
+ handle_keywords lexbuf (fun x -> StringSet.mem x status) "IDENT"
+ | variable_ident ->
+ return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf)
+ | pident -> loctable :=
+ update_table (loc_of_buf lexbuf) desc href !loctable;
+ handle_keywords lexbuf (fun x -> StringSet.mem x status) "PIDENT"
+ | number -> loctable :=
+ update_table (loc_of_buf lexbuf) desc href !loctable;
+ return lexbuf ("NUMBER", Ulexing.utf8_lexeme lexbuf)
+ | tex_token -> loctable :=
+ update_table (loc_of_buf lexbuf) desc href !loctable;
+ return lexbuf (expand_macro lexbuf)
+ | nreference -> return lexbuf ("NREF", Ulexing.utf8_lexeme lexbuf)
+ | uri -> return lexbuf ("URI", Ulexing.utf8_lexeme lexbuf)
+ | qstring ->
+ return lexbuf ("QSTRING", remove_quotes (Ulexing.utf8_lexeme lexbuf))
+ | csymbol ->
+ return lexbuf ("CSYMBOL", remove_left_quote (Ulexing.utf8_lexeme lexbuf))
+ | "${" -> read_unparsed_group "UNPARSED_META" lexbuf
+ | "@{" -> read_unparsed_group "UNPARSED_AST" lexbuf
+ | '(' -> return lexbuf ("LPAREN", "")
+ | ')' -> return lexbuf ("RPAREN", "")
+ | meta_ident ->
+ return lexbuf ("UNPARSED_META",
+ remove_left_quote (Ulexing.utf8_lexeme lexbuf))
+ | meta_anonymous -> return lexbuf ("UNPARSED_META", "anonymous")
+ | beginnote ->
+ let _comment = comment_token (Ulexing.utf8_lexeme lexbuf) 0 lexbuf in
+ (* let comment =
+ Ulexing.utf8_sub_lexeme lexbuf 2 (Ulexing.lexeme_length lexbuf - 4)
+ in
+ return lexbuf ("NOTE", comment) *)
+ ligatures_token (aux desc href) lexbuf
+ | begincomment -> return lexbuf ("BEGINCOMMENT","")
+ | endcomment -> return lexbuf ("ENDCOMMENT","")
+ | eof -> return_eoi lexbuf
+ | _ -> loctable :=
+ update_table (loc_of_buf lexbuf) desc href !loctable;
+ return_symbol lexbuf (Ulexing.utf8_lexeme lexbuf)
+
+ and aux_in_tag desc href = lexer
+ | utf8_blank+ -> ligatures_token (aux_in_tag desc href) lexbuf
+ | href ->
+ aux_in_tag desc
+ (Some (Ulexing.utf8_sub_lexeme lexbuf 6 (Ulexing.lexeme_length lexbuf - 7)))
+ lexbuf
+ | hreftitle ->
+ aux_in_tag
+ (Some (Ulexing.utf8_sub_lexeme lexbuf 7 (Ulexing.lexeme_length lexbuf - 8)))
+ href lexbuf
+ | ">" -> aux desc href lexbuf
+ | _ -> aux None None lexbuf
+ in aux None None
+
+let rec level1_pattern_token =
lexer
| utf8_blank+ -> ligatures_token level1_pattern_token lexbuf
| number -> return lexbuf ("NUMBER", Ulexing.utf8_lexeme lexbuf)
- | hreftag -> prerr_endline ("****** HREF : " ^ (Ulexing.utf8_lexeme lexbuf));return lexbuf ("HREF", get_uri lexbuf)
- | hrefclose -> return lexbuf ("HREFEND","")
+ | hreftag -> return lexbuf ("ATAG", "")
+ | hrefclose -> return lexbuf ("ATAGEND","")
| ident ->handle_keywords lexbuf (fun x -> List.mem x level1_keywords) "IDENT"
| variable_ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf)
| pident->handle_keywords lexbuf (fun x->List.mem x level1_keywords) "PIDENT"
| _ -> return_symbol lexbuf (Ulexing.utf8_lexeme lexbuf)
let level1_pattern_token = ligatures_token level1_pattern_token
-let level2_ast_token status = ligatures_token (level2_ast_token status)
+let level2_ast_token loctable status =
+ ligatures_token (level2_ast_token loctable status)
(* API implementation *)
type lexers = {
level2_meta_lexer : (string * string) Token.glexer
}
-let mk_lexers keywords =
+let mk_lexers loctable keywords =
let initial_keywords =
[ "CProp"; "Prop"; "Type"; "Set"; "let"; "match";
"with"; "in"; "and"; "to"; "as"; "on"; "return"; "done" ]
in
{
level1_pattern_lexer = mk_lexer level1_pattern_token;
- level2_ast_lexer = mk_lexer (level2_ast_token status);
+ level2_ast_lexer = mk_lexer (level2_ast_token loctable status);
level2_meta_lexer = mk_lexer level2_meta_token
}
;;
* error message *)
exception Error of int * int * string
+module LocalizeEnv :
+sig
+ include Map.S with type key = Stdpp.location
+end
+
type lexers = {
- level1_pattern_lexer : (string * string) Token.glexer;
- level2_ast_lexer : (string * string) Token.glexer;
- level2_meta_lexer : (string * string) Token.glexer
+ level1_pattern_lexer : (string * string) Token.glexer;
+ level2_ast_lexer : (string * string) Token.glexer;
+ level2_meta_lexer : (string * string) Token.glexer
}
-val mk_lexers : string list -> lexers
+val mk_lexers :
+ (string option * string option) LocalizeEnv.t ref -> string list -> lexers
level2_ast_grammar : Grammar.g;
term: 'b Grammar.Entry.e;
ident: 'e Grammar.Entry.e;
+ sym_attributes: (string option * string option) Grammar.Entry.e;
+ sym_table: (string * Stdpp.location Grammar.Entry.e) list;
let_defs: 'c Grammar.Entry.e;
- located: Stdpp.location Grammar.Entry.e;
protected_binder_vars: 'd Grammar.Entry.e;
level2_meta: 'b Grammar.Entry.e;
}
Ast.term Ast.capture_variable * Ast.term * int) list,
Ast.term list * Ast.term option, Env.ident_or_var) grammars;
keywords: string list;
- items: (string * Ast.term * (NotationEnv.t -> Ast.location -> Ast.term)) list
+ items: (string * Ast.term * (NotationEnv.t -> Ast.location -> Ast.term)) list;
+ loctable: (string option * string option) CicNotationLexer.LocalizeEnv.t ref
}
let int_of_string s =
raise (Level_not_found precedence);
string_of_int precedence
-let gram_symbol s =
- Gramext.srules
- [ [ Gramext.Stoken ("SYMBOL",s) ], Gramext.action (fun _ loc -> loc) ]
+let add_symbol_to_grammar_explicit level2_ast_grammar
+ sym_attributes sym_table s =
+ try
+ let _ = List.assoc s sym_table
+ in sym_table
+ with Not_found ->
+ let entry = Grammar.Entry.create level2_ast_grammar ("sym" ^ s) in
+ Grammar.extend
+ [ Grammar.Entry.obj entry,
+ None,
+ [ None,
+ Some (*Gramext.NonA*) Gramext.NonA,
+ [ [Gramext.Stoken ("SYMBOL",s)], (* concrete l1 syntax *)
+ (Gramext.action (fun _ loc -> None, loc))
+ ; [Gramext.Stoken ("ATAG","")
+ ;Gramext.Snterm (Grammar.Entry.obj sym_attributes)
+ ;Gramext.Stoken ("SYMBOL",">")
+ ;Gramext.Stoken ("SYMBOL",s)
+ ;Gramext.Stoken ("ATAGEND","")],
+ (Gramext.action (fun _ uridesc _ _ _ loc -> (Some uridesc),loc))
+ ]]];
+ prerr_endline ("adding to grammar symbol " ^ s);
+ (s,entry)::sym_table
+
+let add_symbol_to_grammar status s =
+ let sym_attributes = status#notation_parser_db.grammars.sym_attributes in
+ let sym_table = status#notation_parser_db.grammars.sym_table in
+ let level2_ast_grammar =
+ status#notation_parser_db.grammars.level2_ast_grammar
+ in
+ let sym_table =
+ add_symbol_to_grammar_explicit level2_ast_grammar sym_attributes sym_table s
+ in
+ let grammars =
+ { status#notation_parser_db.grammars with sym_table = sym_table }
+ in
+ let notation_parser_db =
+ { status#notation_parser_db with grammars = grammars } in
+ status#set_notation_parser_db notation_parser_db
+
+let gram_symbol status s =
+ let sym_table = status#notation_parser_db.grammars.sym_table in
+ let entry =
+ try List.assoc s sym_table
+ with Not_found ->
+ (let syms = List.map fst (status#notation_parser_db.grammars.sym_table) in
+ let syms = List.map (fun x -> "\"" ^ x ^ "\"") syms in
+ prerr_endline ("new symbol non-terminals: " ^ (String.concat ", " syms));
+ prerr_endline ("unable to find symbol \"" ^ s ^ "\""); assert false)
+ in
+ Gramext.Snterm (Grammar.Entry.obj entry)
+
let gram_ident status =
Gramext.Snterm (Grammar.Entry.obj
(status#notation_parser_db.grammars.ident : 'a Grammar.Entry.e))
level_of precedence)
;;
-let gram_of_literal =
+let gram_of_literal status =
function
- | `Symbol s -> gram_symbol s
+ | `Symbol s -> gram_symbol status s
| `Keyword s -> gram_keyword s
| `Number s -> gram_number s
-let make_action action bindings =
+let make_action status action bindings =
let rec aux (vl : NotationEnv.t) =
function
[] -> Gramext.action (fun (loc: Ast.location) -> action vl loc)
- | NoBinding :: tl -> Gramext.action
- (fun (loc: Ast.location) ->
- aux (("",(Env.NoType,Env.LocValue loc))::vl) tl)
+ | NoBinding :: tl ->
+ Gramext.action
+ (fun (_,(loc: Ast.location)) ->
+ let uri,desc =
+ try
+ CicNotationLexer.LocalizeEnv.find loc
+ !(status#notation_parser_db.loctable)
+ with Not_found -> None, None
+ in aux (("",(Env.NoType,
+ Env.DisambiguationValue (loc,uri,desc)))::vl) tl)
(* LUCA: DEFCON 3 BEGIN *)
| Binding (name, Env.TermType l) :: tl ->
Gramext.action
in
aux []
+(* given a level 1 pattern, adds productions for symbols when needed *)
+let update_sym_grammar status pattern =
+ let rec aux status = function
+ | Ast.AttributedTerm (_, t) -> aux status t
+ | Ast.Literal l -> aux_literal status l
+ | Ast.Layout l -> aux_layout status l
+ | Ast.Magic m -> aux_magic status m
+ | Ast.Variable v -> aux_variable status v
+ | t ->
+ prerr_endline (NotationPp.pp_term status t);
+ assert false
+ and aux_literal status =
+ function
+ | `Symbol s -> add_symbol_to_grammar status s
+ | `Keyword s -> status
+ | `Number s -> status
+ and aux_layout status = function
+ | Ast.Sub (p1, p2) -> aux (aux status p1) p2
+ | Ast.Sup (p1, p2) -> aux (aux status p1) p2
+ | Ast.Below (p1, p2) -> aux (aux status p1) p2
+ | Ast.Above (p1, p2) -> aux (aux status p1) p2
+ | Ast.Frac (p1, p2) -> aux (aux status p1) p2
+ | Ast.InfRule (p1, p2, p3) -> aux (aux (aux status p1) p2) p3
+ | Ast.Atop (p1, p2) -> aux (aux status p1) p2
+ | Ast.Over (p1, p2) -> aux (aux status p1) p2
+ | Ast.Root (p1, p2) -> aux (aux status p1) p2
+ | Ast.Sqrt p -> aux status p
+ | Ast.Break -> status
+ | Ast.Box (_, pl) -> List.fold_left aux status pl
+ | Ast.Group pl -> List.fold_left aux status pl
+ | Ast.Mstyle (_,pl) -> List.fold_left aux status pl
+ | Ast.Mpadded (_,pl) -> List.fold_left aux status pl
+ | Ast.Maction l -> List.fold_left aux status l
+ and aux_magic status magic =
+ match magic with
+ | Ast.Opt p -> aux status p
+ | Ast.List0 (p, s)
+ | Ast.List1 (p, s) ->
+ let status =
+ match s with None -> status | Some s' -> aux_literal status s'
+ in
+ aux status p
+ | _ -> assert false
+ and aux_variable status _ = status
+ in
+ aux status pattern
+
(* given a level 1 pattern computes the new RHS of "term" grammar entry *)
let extract_term_production status pattern =
let rec aux = function
assert false
and aux_literal =
function
- | `Symbol s -> [NoBinding, gram_symbol s]
+ | `Symbol s -> [NoBinding, gram_symbol status s]
| `Keyword s ->
(* assumption: s will be registered as a keyword with the lexer *)
[NoBinding, gram_keyword s]
| `Number s -> [NoBinding, gram_number s]
and aux_layout = function
- | Ast.Sub (p1, p2) -> aux p1 @ [NoBinding, gram_symbol "\\sub "] @ aux p2
- | Ast.Sup (p1, p2) -> aux p1 @ [NoBinding, gram_symbol "\\sup "] @ aux p2
- | Ast.Below (p1, p2) -> aux p1 @ [NoBinding, gram_symbol "\\below "] @ aux p2
- | Ast.Above (p1, p2) -> aux p1 @ [NoBinding, gram_symbol "\\above "] @ aux p2
- | Ast.Frac (p1, p2) -> aux p1 @ [NoBinding, gram_symbol "\\frac "] @ aux p2
- | Ast.InfRule (p1, p2, p3) -> [NoBinding, gram_symbol "\\infrule "] @ aux p1 @ aux p2 @ aux p3
- | Ast.Atop (p1, p2) -> aux p1 @ [NoBinding, gram_symbol "\\atop "] @ aux p2
- | Ast.Over (p1, p2) -> aux p1 @ [NoBinding, gram_symbol "\\over "] @ aux p2
+ | Ast.Sub (p1, p2) -> aux p1 @ [NoBinding, gram_symbol status "\\sub "] @ aux p2
+ | Ast.Sup (p1, p2) -> aux p1 @ [NoBinding, gram_symbol status "\\sup "] @ aux p2
+ | Ast.Below (p1, p2) -> aux p1 @ [NoBinding, gram_symbol status "\\below "] @ aux p2
+ | Ast.Above (p1, p2) -> aux p1 @ [NoBinding, gram_symbol status "\\above "] @ aux p2
+ | Ast.Frac (p1, p2) -> aux p1 @ [NoBinding, gram_symbol status "\\frac "] @ aux p2
+ | Ast.InfRule (p1, p2, p3) -> [NoBinding, gram_symbol status "\\infrule "] @ aux p1 @ aux p2 @ aux p3
+ | Ast.Atop (p1, p2) -> aux p1 @ [NoBinding, gram_symbol status "\\atop "] @ aux p2
+ | Ast.Over (p1, p2) -> aux p1 @ [NoBinding, gram_symbol status "\\over "] @ aux p2
| Ast.Root (p1, p2) ->
- [NoBinding, gram_symbol "\\root "] @ aux p2
- @ [NoBinding, gram_symbol "\\of "] @ aux p1
- | Ast.Sqrt p -> [NoBinding, gram_symbol "\\sqrt "] @ aux p
+ [NoBinding, gram_symbol status "\\root "] @ aux p2
+ @ [NoBinding, gram_symbol status "\\of "] @ aux p1
+ | Ast.Sqrt p -> [NoBinding, gram_symbol status "\\sqrt "] @ aux p
| Ast.Break -> []
| Ast.Box (_, pl) -> List.flatten (List.map aux pl)
| Ast.Group pl -> List.flatten (List.map aux pl)
match magic with
| Ast.List0 (_, None) -> Gramext.Slist0 s
| Ast.List1 (_, None) -> Gramext.Slist1 s
- | Ast.List0 (_, Some l) -> Gramext.Slist0sep (s, gram_of_literal l)
- | Ast.List1 (_, Some l) -> Gramext.Slist1sep (s, gram_of_literal l)
+ | Ast.List0 (_, Some l) ->
+ Gramext.Slist0sep (s, gram_of_literal status l)
+ | Ast.List1 (_, Some l) ->
+ Gramext.Slist1sep (s, gram_of_literal status l)
| _ -> assert false
in
[ Env (List.map Env.list_declaration p_names),
let p_bindings, p_atoms = List.split (aux p) in
let p_names = flatten_opt p_bindings in
let action =
- make_action (fun (env : NotationEnv.t) (loc : Ast.location) -> env)
- p_bindings
+ make_action status
+ (fun (env : NotationEnv.t) (loc : Ast.location) -> env) p_bindings
in
p_bindings, p_atoms, p_names, action
in
let parse_level2_ast grammars lexbuf =
exc_located_wrapper
- (fun () -> Grammar.Entry.parse grammars.level2_ast (Obj.magic lexbuf))
+ (fun () ->
+ Grammar.Entry.parse grammars.level2_ast (Obj.magic lexbuf))
let parse_level2_meta grammars lexbuf =
exc_located_wrapper
(fun () -> Grammar.Entry.parse grammars.level2_meta (Obj.magic lexbuf))
(* create empty precedence level for "term" *)
-let initialize_grammars grammars =
+let initialize_grammars loctable grammars =
let dummy_action =
Gramext.action (fun _ ->
failwith "internal error, lexer generated a dummy token")
begin
let level2_ast = grammars.level2_ast in
let term = grammars.term in
+ let atag_attributes = grammars.sym_attributes in
let let_defs = grammars.let_defs in
let ident = grammars.ident in
let protected_binder_vars = grammars.protected_binder_vars in
EXTEND
- GLOBAL: level2_ast term let_defs protected_binder_vars ident;
+ GLOBAL: level2_ast term let_defs protected_binder_vars ident atag_attributes;
level2_ast: [ [ p = term -> p ] ];
sort: [
[ "Prop" -> `Prop
| SYMBOL <:unicode<lambda>> (* λ *) -> `Lambda
]
];
- tident: [
- [ uri = HREF;
- id = IDENT;
- HREFEND -> (id, `Uri uri) ]];
gident: [
- [ fullident = tident -> fullident;
- | id = IDENT -> (id, `Ambiguous) ]];
+ [ id = IDENT ->
+ try
+ let uri,_ = CicNotationLexer.LocalizeEnv.find loc
+ !loctable in
+ match uri with
+ | Some u -> id, `Uri u
+ | None -> id, `Ambiguous
+ with
+ | Not_found -> id, `Ambiguous ]];
arg: [
[ LPAREN; names = LIST1 gident SEP SYMBOL ",";
SYMBOL ":"; ty = term; RPAREN ->
| _ -> failwith ("Invalid index name: " ^ blob)
]
];
- located: [
- [ s = SYMBOL -> loc ]
- ];
let_defs: [
[ defs = LIST1 [
name = single_arg;
grammars
;;
-let initial_grammars keywords =
- let lexers = CicNotationLexer.mk_lexers keywords in
+let initial_grammars loctable keywords =
+ let lexers = CicNotationLexer.mk_lexers loctable keywords in
let level1_pattern_grammar =
Grammar.gcreate lexers.CicNotationLexer.level1_pattern_lexer in
let level2_ast_grammar =
let level2_ast = Grammar.Entry.create level2_ast_grammar "level2_ast" in
let term = Grammar.Entry.create level2_ast_grammar "term" in
let ident = Grammar.Entry.create level2_ast_grammar "ident" in
- let located = Grammar.Entry.create level2_ast_grammar "located" in
+ (* unexpanded TeX macros terminated by a space (see comment in
+ * CicNotationLexer) *)
+ let initial_symbols =
+ ["\\sub ";"\\sup ";"\\below ";"\\above ";"\\frac "
+ ;"\\infrule ";"\\atop ";"\\over ";"\\root ";"\\of ";"\\sqrt "] in
+ let sym_attributes =
+ Grammar.Entry.create level2_ast_grammar "atag_attributes" in
+ let sym_table =
+ List.fold_left
+ (add_symbol_to_grammar_explicit level2_ast_grammar sym_attributes)
+ [] initial_symbols
+ in
let let_defs = Grammar.Entry.create level2_ast_grammar "let_defs" in
let protected_binder_vars =
Grammar.Entry.create level2_ast_grammar "protected_binder_vars" in
let level2_meta = Grammar.Entry.create level2_meta_grammar "level2_meta" in
- initialize_grammars { level1_pattern=level1_pattern;
+ initialize_grammars loctable
+ { level1_pattern=level1_pattern;
level2_ast=level2_ast;
term=term;
ident=ident;
+ sym_table=sym_table;
+ sym_attributes=sym_attributes;
let_defs=let_defs;
- located=located;
protected_binder_vars=protected_binder_vars;
level2_meta=level2_meta;
level2_ast_grammar=level2_ast_grammar;
class status0 ~keywords:kwds =
object
- val db = { grammars = initial_grammars kwds; keywords = kwds; items = [] }
+ val db =
+ let lt = ref CicNotationLexer.LocalizeEnv.empty in
+ { grammars = initial_grammars lt kwds; keywords = kwds;
+ items = []; loctable = lt }
method notation_parser_db = db
method set_notation_parser_db v = {< db = v >}
method set_notation_parser_status
: 'status. #g_status as 'status -> 'self
= fun o -> {< db = o#notation_parser_db >}
+ method reset_loctable () =
+ db.loctable := CicNotationLexer.LocalizeEnv.empty
end
class virtual status ~keywords:kwds =
let extend (status : #status) (CL1P (level1_pattern,precedence)) action =
(* move inside constructor XXX *)
let add1item status (level, level1_pattern, action) =
+ let status = update_sym_grammar status level1_pattern in
let p_bindings, p_atoms =
- List.split (extract_term_production status level1_pattern) in
+ List.split (extract_term_production status level1_pattern)
+ in
Grammar.extend
[ Grammar.Entry.obj
(status#notation_parser_db.grammars.term : 'a Grammar.Entry.e),
[ None,
Some (*Gramext.NonA*) Gramext.NonA,
[ p_atoms, (* concrete l1 syntax *)
- (make_action
+ (make_action status
(fun (env: NotationEnv.t) (loc: Ast.location) ->
(action env loc))
p_bindings) ]]];
inherit g_status
method set_notation_parser_db: db -> 'self
method set_notation_parser_status: 'status. #g_status as 'status -> 'self
+ method reset_loctable: unit -> unit
end
type checked_l1_pattern = private CL1P of NotationPt.term * int
--- /dev/null
+(* Copyright (C) 2011, HELM Team.
+ *
+ * This file is part of HELM, an Hypertextual, Electronic
+ * Library of Mathematics, developed at the Computer Science
+ * Department, University of Bologna, Italy.
+ *
+ * HELM is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * HELM is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with HELM; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place - Suite 330, Boston,
+ * MA 02111-1307, USA.
+ *
+ * For details, see the HELM World-Wide-Web page,
+ * http://helm.cs.unibo.it/
+ *)
+
+module Domain =
+ struct
+ type t = Stdpp.location * string (* location, name *)
+ let compare = Pervasives.compare
+ end
+
+module Table = Map.Make (Domain)
+
+module Ast = NotationPt
+
+let populate =
+ let rec aux lastloc acc = function
+ | Ast.AttributedTerm (`Loc l,u) -> aux l acc u
+ | Ast.Appl tl ->
+ List.fold_left (aux lastloc) acc tl
+ | Ast.Binder (k,ty,body) ->
+ let acc' = aux_var lastloc acc ty in
+ aux lastloc acc' body
+ | Ast.Case (t,ity,oty,pl) ->
+ (* FIXME: ity contains interpretation info
+ * but we don't have the location for it
+ * this must be rethought
+ *)
+ let acc = aux lastloc acc t in
+ let acc = match oty with
+ | None -> acc
+ | Some oty' -> aux lastloc acc oty'
+ in
+ List.fold_left (aux_pattern lastloc) acc pl
+ | Ast.Cast (u,v) ->
+ aux lastloc (aux lastloc acc v) u
+ | Ast.LetIn (ty,u,v) ->
+ let acc = aux_var lastloc acc ty in
+ let acc = aux lastloc acc u in
+ aux lastloc acc v
+ | Ast.LetRec (ik,l,t) ->
+ let acc = aux lastloc acc t in
+ List.fold_left (aux_letrec lastloc) acc l
+ | Ast.Ident (id,`Uri u) -> Table.add (lastloc,id) u acc
+ | _ -> acc
+ and aux_pattern lastloc acc = function
+ | Ast.Pattern (_,_,tys),t ->
+ (* FIXME: patterns may contain disambiguation info
+ * for constructors *)
+ let acc = aux lastloc acc t in
+ List.fold_left (aux_var lastloc) acc tys
+ | _,t -> aux lastloc acc t
+ and aux_letrec lastloc acc (args,ty,body,_) =
+ let acc = aux lastloc acc body in
+ let acc = List.fold_left (aux_var lastloc) acc args
+ in
+ aux_var lastloc acc ty
+ and aux_var lastloc acc (_,ty) =
+ match ty with
+ | None -> acc
+ | Some ty' -> aux lastloc acc ty'
+
+ in aux Stdpp.dummy_loc (Table.empty)
+;;
--- /dev/null
+module Table :
+sig
+ include Map.S with type key = (Stdpp.location * string)
+end
+
+val populate : NotationPt.term -> string Table.t
--- /dev/null
+(* Copyright (C) 2005, HELM Team.
+ *
+ * This file is part of HELM, an Hypertextual, Electronic
+ * Library of Mathematics, developed at the Computer Science
+ * Department, University of Bologna, Italy.
+ *
+ * HELM is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * HELM is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with HELM; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place - Suite 330, Boston,
+ * MA 02111-1307, USA.
+ *
+ * For details, see the HELM World-Wide-Web page,
+ * http://helm.cs.unibo.it/
+ *)
+
+(* $Id: cicNotationLexer.ml 11231 2011-03-30 11:52:27Z ricciott $ *)
+
+open Printf
+
+exception Error of int * int * string
+
+module StringSet = Set.Make(String)
+
+(* Lexer *)
+let regexp number = xml_digit+
+let regexp utf8_blank = " " | "\r\n" | "\n" | "\t" | [160] (* this is a nbsp *)
+let regexp percentage =
+ ('-' | "") [ '0' - '9' ] + '%'
+let regexp floatwithunit =
+ ('-' | "") [ '0' - '9' ] + ["."] [ '0' - '9' ] + ([ 'a' - 'z' ] + | "" )
+let regexp color = "#" [ '0' - '9' 'a' - 'f' 'A' - 'F' ] [ '0' - '9' 'a' - 'f'
+'A' - 'F' ] [ '0' - '9' 'a' - 'f' 'A' - 'F' ] [ '0' - '9' 'a' - 'f' 'A' - 'F' ]
+[ '0' - '9' 'a' - 'f' 'A' - 'F' ] [ '0' - '9' 'a' - 'f' 'A' - 'F' ]
+
+ (* ZACK: breaks unicode's binder followed by an ascii letter without blank *)
+(* let regexp ident_letter = xml_letter *)
+
+let regexp ident_letter = [ 'a' - 'z' 'A' - 'Z' ]
+
+ (* must be in sync with "is_ligature_char" below *)
+let regexp ligature_char = [ "'`~!?@*()[]<>-+=|:;.,/\"" ]
+let regexp ligature = ligature_char ligature_char+
+
+let regexp we_proved = "we" utf8_blank+ "proved"
+let regexp we_have = "we" utf8_blank+ "have"
+let regexp let_rec = "let" utf8_blank+ "rec"
+let regexp let_corec = "let" utf8_blank+ "corec"
+let regexp ident_decoration = '\'' | '?' | '`'
+let regexp ident_cont = ident_letter | xml_digit | '_'
+let regexp ident_start = ident_letter
+let regexp ident = ident_letter ident_cont* ident_decoration*
+let regexp variable_ident = '_' '_' number
+let regexp pident = '_' ident
+
+let regexp uri_step = [ 'a' - 'z' 'A' - 'Z' '0' - '9' '_' '-' ''' ]+
+
+let regexp uri =
+ ("cic:/" | "theory:/") (* schema *)
+(* ident ('/' ident)* |+ path +| *)
+ uri_step ('/' uri_step)* (* path *)
+ ('.' ident)+ (* ext *)
+(* ("#xpointer(" number ('/' number)+ ")")? (* xpointer *) *)
+ ("(" number (',' number)* ")")? (* reference spec *)
+
+let regexp qstring = '"' [^ '"']* '"'
+let regexp hreftag = "<A"
+let regexp href = "href=\"" uri "\""
+let regexp hreftitle = "title=" qstring
+let regexp hrefclose = "</A>"
+
+let regexp tex_token = '\\' ident
+
+let regexp delim_begin = "\\["
+let regexp delim_end = "\\]"
+
+let regexp qkeyword = "'" ( ident | pident ) "'"
+
+let regexp implicit = '?'
+let regexp placeholder = '%'
+let regexp meta = implicit number
+
+let regexp csymbol = '\'' ident
+
+let regexp begin_group = "@{" | "${"
+let regexp end_group = '}'
+let regexp wildcard = "$_"
+let regexp ast_ident = "@" ident
+let regexp ast_csymbol = "@" csymbol
+let regexp meta_ident = "$" ident
+let regexp meta_anonymous = "$_"
+
+let regexp begincomment = "(**" utf8_blank
+let regexp beginnote = "(*"
+let regexp endcomment = "*)"
+(* let regexp comment_char = [^'*'] | '*'[^')']
+let regexp note = "|+" ([^'*'] | "**") comment_char* "+|" *)
+
+let level1_layouts =
+ [ "sub"; "sup";
+ "below"; "above";
+ "over"; "atop"; "frac";
+ "sqrt"; "root"; "mstyle" ; "mpadded"; "maction"
+
+ ]
+
+let level1_keywords =
+ [ "hbox"; "hvbox"; "hovbox"; "vbox";
+ "break";
+ "list0"; "list1"; "sep";
+ "opt";
+ "term"; "ident"; "number";
+ ] @ level1_layouts
+
+let level2_meta_keywords =
+  [ "if"; "then"; "else";
+ "fold"; "left"; "right"; "rec";
+ "fail";
+ "default";
+ "anonymous"; "ident"; "number"; "term"; "fresh"
+ ]
+
+ (* (string, int) Hashtbl.t, with multiple bindings.
+ * int is the unicode codepoint *)
+let ligatures = Hashtbl.create 23
+
+let _ =
+ List.iter
+ (fun (ligature, symbol) -> Hashtbl.add ligatures ligature symbol)
+ [ ("->", <:unicode<to>>); ("=>", <:unicode<Rightarrow>>);
+ (":=", <:unicode<def>>);
+ ]
+
+let regexp nreference =
+ "cic:/" (* schema *)
+ uri_step ('/' uri_step)* (* path *)
+ '.'
+ ( "dec"
+ | "def" "(" number ")"
+ | "fix" "(" number "," number "," number ")"
+ | "cfx" "(" number ")"
+ | "ind" "(" number "," number "," number ")"
+ | "con" "(" number "," number "," number ")") (* ext + reference *)
+
+let error lexbuf msg =
+ let begin_cnum, end_cnum = Ulexing.loc lexbuf in
+ raise (Error (begin_cnum, end_cnum, msg))
+let error_at_end lexbuf msg =
+ let begin_cnum, end_cnum = Ulexing.loc lexbuf in
+ raise (Error (begin_cnum, end_cnum, msg))
+
+let loc_of_buf lexbuf =
+ HExtlib.floc_of_loc (Ulexing.loc lexbuf)
+
+let return_with_loc token begin_cnum end_cnum =
+ let flocation = HExtlib.floc_of_loc (begin_cnum,end_cnum) in
+ token, flocation
+
+let return lexbuf token =
+ let begin_cnum, end_cnum = Ulexing.loc lexbuf in
+ return_with_loc token begin_cnum end_cnum
+
+let return_lexeme lexbuf name = return lexbuf (name, Ulexing.utf8_lexeme lexbuf)
+
+let return_symbol lexbuf s = return lexbuf ("SYMBOL", s)
+let return_eoi lexbuf = return lexbuf ("EOI", "")
+
+let remove_quotes s = String.sub s 1 (String.length s - 2)
+
+(* Wrap a ulex token function into a camlp5 [Token.glexer].
+ * See the XXX note below for why the incoming stream is cast with
+ * Obj.magic; ulex errors are converted into our [Error] exception. *)
+let mk_lexer token =
+ let tok_func stream =
+(* let lexbuf = Ulexing.from_utf8_stream stream in *)
+(** XXX Obj.magic rationale.
+ * The problem.
+ * camlp5 constraints the tok_func field of Token.glexer to have type:
+ * Stream.t char -> (Stream.t 'te * flocation_function)
+ * In order to use ulex we have (in theory) to instantiate a new lexbuf each
+ * time a char Stream.t is passed, destroying the previous lexbuf which may
+ * have consumed a character from the old stream which is lost forever :-(
+ * The "solution".
+ * Instead of passing to camlp5 a char Stream.t we pass a lexbuf, casting it to
+ * char Stream.t with Obj.magic where needed.
+ *)
+ let lexbuf = Obj.magic stream in
+ Token.make_stream_and_location
+ (fun () ->
+ try
+ token lexbuf
+ with
+ | Ulexing.Error -> error_at_end lexbuf "Unexpected character"
+ | Ulexing.InvalidCodepoint p ->
+ error_at_end lexbuf (sprintf "Invalid code point: %d" p))
+ in
+ {
+ Token.tok_func = tok_func;
+ Token.tok_using = (fun _ -> ());
+ Token.tok_removing = (fun _ -> ());
+ Token.tok_match = Token.default_match;
+ Token.tok_text = Token.lexer_text;
+ Token.tok_comm = None;
+ }
+
+(* Expand a TeX-like macro lexeme ("\name") into a SYMBOL token carrying
+ * its Unicode expansion; unknown macros are kept verbatim with a
+ * trailing space (see FG's note). *)
+let expand_macro lexbuf =
+ let macro =
+ Ulexing.utf8_sub_lexeme lexbuf 1 (Ulexing.lexeme_length lexbuf - 1)
+ in
+ try
+ ("SYMBOL", Utf8Macro.expand macro)
+ with Utf8Macro.Macro_not_found _ ->
+(* FG: unexpanded TeX macros are terminated by a space for rendering *)
+ "SYMBOL", (Ulexing.utf8_lexeme lexbuf ^ " ")
+
+(* NOTE(review): this [remove_quotes] shadows the identical definition
+ * earlier in the file; harmless, but one copy could go. *)
+let remove_quotes s = String.sub s 1 (String.length s - 2)
+(* Drop only the leading quote, e.g. "'foo" -> "foo". *)
+let remove_left_quote s = String.sub s 1 (String.length s - 1)
+
+(* Copy characters into [buffer] until the matching [end_group] of the
+ * group we are inside (nesting tracked by [counter]); returns the end
+ * character position of the group. *)
+let rec level2_pattern_token_group counter buffer =
+ lexer
+ | end_group ->
+ if (counter > 0) then
+ Buffer.add_string buffer (Ulexing.utf8_lexeme lexbuf) ;
+ snd (Ulexing.loc lexbuf)
+ | begin_group ->
+ Buffer.add_string buffer (Ulexing.utf8_lexeme lexbuf) ;
+ ignore (level2_pattern_token_group (counter + 1) buffer lexbuf) ;
+ level2_pattern_token_group counter buffer lexbuf
+ | _ ->
+ Buffer.add_string buffer (Ulexing.utf8_lexeme lexbuf) ;
+ level2_pattern_token_group counter buffer lexbuf
+
+(* Read a whole balanced group verbatim and return it as a single token
+ * named [token_name], located over the full group span. *)
+let read_unparsed_group token_name lexbuf =
+ let buffer = Buffer.create 16 in
+ let begin_cnum, _ = Ulexing.loc lexbuf in
+ let end_cnum = level2_pattern_token_group 0 buffer lexbuf in
+ return_with_loc (token_name, Buffer.contents buffer) begin_cnum end_cnum
+
+(* Classify the current lexeme: keyword (per predicate [k], token name "")
+ * or plain token [name]. *)
+let handle_keywords lexbuf k name =
+ let s = Ulexing.utf8_lexeme lexbuf in
+ if k s then
+ return lexbuf ("", s)
+ else
+ return lexbuf (name, s)
+;;
+
+(* Tokenizer for level-2 meta patterns: skips blanks, turns <A ...> open/
+ * close tags into ATAG/ATAGEND, classifies identifiers against
+ * [level2_meta_keywords], and passes @{...} groups through unparsed. *)
+let rec level2_meta_token =
+ lexer
+ | utf8_blank+ -> level2_meta_token lexbuf
+ | hreftag -> return lexbuf ("ATAG","")
+ | hrefclose -> return lexbuf ("ATAGEND","")
+ | ident ->
+ handle_keywords lexbuf (fun x -> List.mem x level2_meta_keywords) "IDENT"
+ | variable_ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf)
+ | pident ->
+ handle_keywords lexbuf (fun x -> List.mem x level2_meta_keywords) "PIDENT"
+ | "@{" -> read_unparsed_group "UNPARSED_AST" lexbuf
+ | ast_ident ->
+ return lexbuf ("UNPARSED_AST",
+ remove_left_quote (Ulexing.utf8_lexeme lexbuf))
+ | ast_csymbol ->
+ return lexbuf ("UNPARSED_AST",
+ remove_left_quote (Ulexing.utf8_lexeme lexbuf))
+ | eof -> return_eoi lexbuf
+
+(* Accumulate the text of a (possibly nested) comment into [acc];
+ * [depth] counts currently open inner comments. Returns the full
+ * comment text including delimiters. *)
+let rec comment_token acc depth =
+ lexer
+ | beginnote ->
+ let acc = acc ^ Ulexing.utf8_lexeme lexbuf in
+ comment_token acc (depth + 1) lexbuf
+ | endcomment ->
+ let acc = acc ^ Ulexing.utf8_lexeme lexbuf in
+ if depth = 0
+ then acc
+ else comment_token acc (depth - 1) lexbuf
+ | _ ->
+ let acc = acc ^ Ulexing.utf8_lexeme lexbuf in
+ comment_token acc depth lexbuf
+
+ (** @param k continuation to be invoked when no ligature has been found *)
+let ligatures_token k =
+ lexer
+ | ligature ->
+ let lexeme = Ulexing.utf8_lexeme lexbuf in
+ (match List.rev (Hashtbl.find_all ligatures lexeme) with
+ | [] -> (* ligature not found, rollback and try default lexer *)
+ Ulexing.rollback lexbuf;
+ k lexbuf
+ | default_lig :: _ -> (* ligatures found, use the default one *)
+ return_symbol lexbuf default_lig)
+ | eof -> return_eoi lexbuf
+ | _ -> (* not a ligature, rollback and try default lexer *)
+ Ulexing.rollback lexbuf;
+ k lexbuf
+
+(* Ordered-type module keyed on source locations, for location maps. *)
+module LocalizeOD =
+ struct
+ type t = Stdpp.location
+ let compare = Pervasives.compare
+ end
+
+(* Record (href, desc) under [loc] in [loctable], but only when at least
+ * one of the two attributes is present; otherwise return the table
+ * unchanged. [so_pp] is defined elsewhere in this file.
+ * NOTE(review): the prerr_endline below looks like leftover debug
+ * tracing on stderr — consider removing once stable. *)
+let update_table loc desc href loctable =
+ if desc <> None || href <> None
+ then
+ (let s,e = HExtlib.loc_of_floc loc in
+ prerr_endline (Printf.sprintf "*** [%d,%d] \"%s\",\"%s\""
+ s e (so_pp href) (so_pp desc));
+ LocalizeEnv.add loc (href,desc) loctable)
+ else loctable
+;;
+
+(* Scan the buffer for <A href=... title=...>...</A> regions and return,
+ * for each, (location of opening tag, location of closing tag, href,
+ * title). [aux] carries the opening-tag span and the attributes parsed
+ * so far; [aux_in_tag] consumes attributes until the closing '>'. *)
+let get_hot_spots =
+ let rec aux loc1 desc href =
+ lexer
+ | hreftag -> aux_in_tag (Ulexing.loc lexbuf) None None lexbuf
+ | hrefclose ->
+ (* parenthesized so the following lexer arms are not mistaken for
+ * extra exception handlers of this try *)
+ (try
+ let loc1 = HExtlib.floc_of_loc (HExtlib.unopt loc1) in
+ let loc2 = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in
+ (loc1,loc2,href,desc) :: aux None None None lexbuf
+ with Failure _ -> aux None None None lexbuf)
+ | beginnote -> (* FIXME commenti come in smallLexer *)
+ let _comment = comment_token (Ulexing.utf8_lexeme lexbuf) 0 lexbuf in
+ (* BUGFIX: keep the accumulated state; the previous code passed
+ * (aux desc href), dropping loc1 and shifting desc/href by one
+ * argument (a type error: loc1 is a position option). *)
+ ligatures_token (aux loc1 desc href) lexbuf
+ (* BUGFIX: comment delimiters cannot yield (token, loc) pairs here —
+ * every other arm produces a hot-spot list — so just keep scanning. *)
+ | begincomment -> aux loc1 desc href lexbuf
+ | endcomment -> aux loc1 desc href lexbuf
+ | eof -> []
+ | _ -> aux loc1 desc href lexbuf
+ and aux_in_tag loc1 desc href = lexer
+ | utf8_blank+ -> aux_in_tag loc1 desc href lexbuf
+ | href ->
+ aux_in_tag loc1 desc
+ (* strip the leading 'href="' (6 chars) and the trailing '"' *)
+ (Some (Ulexing.utf8_sub_lexeme lexbuf 6 (Ulexing.lexeme_length lexbuf - 7)))
+ lexbuf
+ | hreftitle ->
+ aux_in_tag loc1
+ (* strip the leading 'title="' (7 chars) and the trailing '"' *)
+ (Some (Ulexing.utf8_sub_lexeme lexbuf 7 (Ulexing.lexeme_length lexbuf - 8)))
+ href lexbuf
+ | ">" ->
+ (* tag finished: span runs from tag start to this '>' *)
+ let merge (a,_) (_,d) = (a,d) in
+ aux (Some (merge loc1 (Ulexing.loc lexbuf))) desc href lexbuf
+ | _ -> (* malformed tag: reset state and resume plain scanning *)
+ aux None None None lexbuf
+ in aux None None None
+
--- /dev/null
+(* Copyright (C) 2005, HELM Team.
+ *
+ * This file is part of HELM, an Hypertextual, Electronic
+ * Library of Mathematics, developed at the Computer Science
+ * Department, University of Bologna, Italy.
+ *
+ * HELM is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * HELM is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with HELM; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place - Suite 330, Boston,
+ * MA 02111-1307, USA.
+ *
+ * For details, see the HELM World-Wide-Web page,
+ * http://helm.cs.unibo.it/
+ *)
+
+(* $Id: cicNotationLexer.ml 11028 2010-11-02 17:08:43Z tassi $ *)
+
+open Printf
+
+exception Error of int * int * string
+
+module StringSet = Set.Make(String)
+
+
+
+(* Lexer *)
+let regexp number = xml_digit+
+
+let regexp ident_letter = [ 'a' - 'z' 'A' - 'Z' ]
+
+ (* must be in sync with "is_ligature_char" below *)
+let regexp ligature_char = [ "'`~!?@*()[]<>-+=|:;.,/\"" ]
+let regexp ligature = ligature_char ligature_char+
+
+let regexp ident_decoration = '\'' | '?' | '`'
+let regexp ident_cont = ident_letter | xml_digit | '_'
+let regexp ident_start = ident_letter
+let regexp ident = ident_letter ident_cont* ident_decoration*
+let regexp variable_ident = '_' '_' number
+let regexp pident = '_' ident
+
+let regexp tex_token = '\\' ident
+
+let regexp csymbol = '\'' ident
+
+let regexp begincom = "(*"
+let regexp endcom = "*)"
+let regexp qstring = '"' [^ '"']* '"'
+
+
+
+
+ (* (string, int) Hashtbl.t, with multiple bindings.
+ * int is the unicode codepoint *)
+let ligatures = Hashtbl.create 23
+
+(* Populate the ligature table (same data as the lexer twin). *)
+let _ =
+ List.iter
+ (fun (ligature, symbol) -> Hashtbl.add ligatures ligature symbol)
+ [ ("->", <:unicode<to>>); ("=>", <:unicode<Rightarrow>>);
+ (":=", <:unicode<def>>);
+ ]
+
+(* Raise [Error] carrying the span of the current lexeme. *)
+let error lexbuf msg =
+ let begin_cnum, end_cnum = Ulexing.loc lexbuf in
+ raise (Error (begin_cnum, end_cnum, msg))
+(* Same as [error]; distinct name kept for end-of-input call sites. *)
+let error_at_end lexbuf msg =
+ let begin_cnum, end_cnum = Ulexing.loc lexbuf in
+ raise (Error (begin_cnum, end_cnum, msg))
+
+
+(*
+let return_symbol lexbuf s = return lexbuf ("SYMBOL", s)
+let return_eoi lexbuf = return lexbuf ("EOI", "")
+*)
+
+(* Drop one delimiter character from each end. *)
+let remove_quotes s = String.sub s 1 (String.length s - 2)
+
+(* Wrap a ulex token function into a camlp5 [Token.glexer]; identical to
+ * the lexer twin except for [tok_match] (see note below). *)
+let mk_lexer token =
+ let tok_func stream =
+(* let lexbuf = Ulexing.from_utf8_stream stream in *)
+(** XXX Obj.magic rationale.
+ * The problem.
+ * camlp5 constraints the tok_func field of Token.glexer to have type:
+ * Stream.t char -> (Stream.t 'te * flocation_function)
+ * In order to use ulex we have (in theory) to instantiate a new lexbuf each
+ * time a char Stream.t is passed, destroying the previous lexbuf which may
+ * have consumed a character from the old stream which is lost forever :-(
+ * The "solution".
+ * Instead of passing to camlp5 a char Stream.t we pass a lexbuf, casting it to
+ * char Stream.t with Obj.magic where needed.
+ *)
+ let lexbuf = Obj.magic stream in
+ Token.make_stream_and_location
+ (fun () ->
+ try
+ token lexbuf
+ with
+ | Ulexing.Error -> error_at_end lexbuf "Unexpected character"
+ | Ulexing.InvalidCodepoint p ->
+ error_at_end lexbuf (sprintf "Invalid code point: %d" p))
+ in
+ {
+ Token.tok_func = tok_func;
+ Token.tok_using = (fun _ -> ());
+ Token.tok_removing = (fun _ -> ());
+ (* tokens in this file carry no payload ([return lexbuf ()]), so
+ * matching just yields the pattern's first component *)
+ Token.tok_match = (* Token.default_match *) (fun (a,b) () -> a);
+ Token.tok_text = Token.lexer_text;
+ Token.tok_comm = None;
+ }
+
+(* Expand a TeX-like macro lexeme ("\name") into a "SYMBOL"-prefixed
+ * string. NOTE(review): the lexer twin returns the pair
+ * ("SYMBOL", expansion); here the two are concatenated — this function
+ * appears unused in this file, but confirm the divergence is intended. *)
+let expand_macro lexbuf =
+ let macro =
+ Ulexing.utf8_sub_lexeme lexbuf 1 (Ulexing.lexeme_length lexbuf - 1)
+ in
+ try
+ ("SYMBOL" ^ (Utf8Macro.expand macro))
+ with Utf8Macro.Macro_not_found _ ->
+(* FG: unexpanded TeX macros are terminated by a space for rendering *)
+ "SYMBOL" ^ (Ulexing.utf8_lexeme lexbuf ^ " ")
+
+(* NOTE(review): shadows the identical definition earlier in this file. *)
+let remove_quotes s = String.sub s 1 (String.length s - 2)
+let remove_left_quote s = String.sub s 1 (String.length s - 1)
+
+(* Channel printf, used to emit annotated output. *)
+let print = Printf.fprintf
+
+(* Emit an identifier wrapped in an <A href=...> anchor. *)
+let handle_full_id ch id uri =
+ print ch "<A href=\"%s\">%s</A>" uri id
+
+(* Emit a symbol wrapped in an <A title=... href=...> anchor; a missing
+ * uri falls back to the fake placeholder uri. *)
+let handle_full_sym ch s uri desc =
+ let uri = try HExtlib.unopt uri
+ with Failure _ -> "cic:/fakeuri.def(1)"
+ in print ch "<A title=\"%s\" href=\"%s\">%s</A>" desc uri s
+
+(* Same as [handle_full_sym] but for numeric literals. *)
+let handle_full_num ch num uri desc =
+ let uri = try HExtlib.unopt uri
+ with Failure _ -> "cic:/fakeuri.def(1)"
+ in print ch "<A title=\"%s\" href=\"%s\">%s</A>" desc uri num
+
+(* TeX tokens are copied through verbatim. *)
+let handle_tex ch s =
+ (* print "TEX:%s" (remove_left_quote (Ulexing.utf8_lexeme lexbuf)) *)
+ print ch "%s" s
+
+(* Pair a (unit) token with an explicit character span. *)
+let return_with_loc token begin_cnum end_cnum =
+ let flocation = HExtlib.floc_of_loc (begin_cnum,end_cnum) in
+ token, flocation
+
+(* Pair a (unit) token with the span of the current lexeme. *)
+let return lexbuf token =
+ let begin_cnum, end_cnum = Ulexing.loc lexbuf in
+ return_with_loc token begin_cnum end_cnum
+
+ (** @param k continuation to be invoked when no ligature has been found *)
+(* In this printer ligatures are passed through unchanged, so the
+ * ligature arm is disabled and everything defers to [k]. *)
+let ligatures_token k =
+ lexer
+(* | ligature ->
+ let lexeme = Ulexing.utf8_lexeme lexbuf in
+ (match List.rev (Hashtbl.find_all ligatures lexeme) with
+ | [] -> (* ligature not found, rollback and try default lexer *)
+ Ulexing.rollback lexbuf;
+ k lexbuf
+ | default_lig :: _ -> (* ligatures found, use the default one *)
+ return_symbol lexbuf default_lig) *)
+ | eof -> return lexbuf ()
+ | _ -> (* not a ligature, rollback and try default lexer *)
+ Ulexing.rollback lexbuf;
+ k lexbuf
+
+(* Emit [literal] to [outch]: if [interp] has an alias recorded at [loc],
+ * wrap the literal in the corresponding <A> anchor (identifier, symbol
+ * or number alias); otherwise copy it verbatim. *)
+let handle_interpr loc literal interp outch =
+ try
+ (match DisambiguateTypes.InterprEnv.find loc interp with
+ | GrafiteAst.Ident_alias (_,uri) ->
+ handle_full_id outch literal uri
+ | GrafiteAst.Symbol_alias (_,uri,desc) ->
+ handle_full_sym outch literal uri desc
+ | GrafiteAst.Number_alias (uri,desc) ->
+ handle_full_num outch literal uri desc)
+ with Not_found -> print outch "%s" literal
+;;
+
+(* Copy the whole input to [outch], annotating every identifier, number,
+ * TeX token and csymbol via [handle_interpr]. [aux] tracks (*...*)
+ * comment nesting and copies comment text verbatim; outside comments it
+ * rolls back and defers to [basic_lexer] for token classification. *)
+let level2_ast_token interp outch =
+ let rec aux nesting =
+ lexer
+ | begincom -> (print outch "%s" (Ulexing.utf8_lexeme lexbuf);
+ aux (nesting+1) lexbuf)
+ | endcom -> (print outch "%s" (Ulexing.utf8_lexeme lexbuf);
+ aux (max 0 (nesting-1)) lexbuf)
+ | eof -> return lexbuf ()
+ | _ -> if nesting > 0
+ then (print outch "%s" (Ulexing.utf8_lexeme lexbuf);
+ aux nesting lexbuf)
+ else (Ulexing.rollback lexbuf;
+ basic_lexer lexbuf)
+ (* classify one token, annotate it, then return to comment tracking *)
+ and basic_lexer =
+ lexer
+ | ident -> let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in
+ let id = Ulexing.utf8_lexeme lexbuf in
+ handle_interpr loc id interp outch;
+ aux 0 lexbuf
+ | variable_ident ->
+ let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in
+ let id = Ulexing.utf8_lexeme lexbuf in
+ handle_interpr loc id interp outch;
+ aux 0 lexbuf
+ | pident -> let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in
+ let id = Ulexing.utf8_lexeme lexbuf in
+ handle_interpr loc id interp outch;
+ aux 0 lexbuf
+ | number ->
+ let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in
+ let num = Ulexing.utf8_lexeme lexbuf in
+ handle_interpr loc num interp outch;
+ aux 0 lexbuf
+ | tex_token ->
+ let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in
+ let s = Ulexing.utf8_lexeme lexbuf in
+ handle_interpr loc s interp outch;
+ aux 0 lexbuf
+ | qstring -> (print outch "%s" (Ulexing.utf8_lexeme lexbuf); aux 0 lexbuf)
+ | csymbol ->
+ let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in
+ let s = Ulexing.utf8_lexeme lexbuf in
+ handle_interpr loc s interp outch;
+ aux 0 lexbuf
+ | eof -> return lexbuf ()
+ | _ ->
+ let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in
+ let s = Ulexing.utf8_lexeme lexbuf in
+ handle_interpr loc s interp outch;
+ aux 0 lexbuf
+ in aux 0
+
+
+(* API implementation *)
+
+(*let mk_small_lexer interpr keywords =
+ let initial_keywords =
+ [ "CProp"; "Prop"; "Type"; "Set"; "let"; "match";
+ "with"; "in"; "and"; "to"; "as"; "on"; "return"; "done";
+ "rec"; "corec" ]
+ in
+ (*let status =
+ List.fold_right StringSet.add (initial_keywords @ keywords) StringSet.empty
+ in*)
+ mk_lexer (level2_ast_token outch interpr)
+;;*)
+
+(* Public entry point: build the annotating copier for interpretation
+ * [interpr] writing to [outch]. The keyword set below is currently
+ * unused (the fold computing [status] is commented out). *)
+let mk_small_printer interpr outch =
+ let initial_keywords =
+ [ "CProp"; "Prop"; "Type"; "Set"; "let"; "match";
+ "with"; "in"; "and"; "to"; "as"; "on"; "return"; "done";
+ "rec"; "corec" ]
+ in
+ (* let status =
+ List.fold_right StringSet.add initial_keywords StringSet.empty
+ in *)
+ level2_ast_token interpr outch
+;;
+
+(* let mk_small_lexer interpr = mk_small_lexer interpr [];;*)
--- /dev/null
+(* val mk_small_lexer : GrafiteAst.alias_spec DisambiguateTypes.InterprEnv.t ->
+ * unit Token.glexer*)
+(** [mk_small_printer interp outch buf] copies [buf] to [outch], wrapping
+ tokens that have an alias at their location in [interp] with
+ <A href/title> anchors; returns the unit token and final location. *)
+val mk_small_printer :
+ GrafiteAst.alias_spec DisambiguateTypes.InterprEnv.t ->
+ out_channel -> Ulexing.lexbuf -> unit * Stdpp.location
NotationEnv.OptValue (Some (pp_value v))
| NotationEnv.ListValue vl ->
NotationEnv.ListValue (List.map pp_value vl)
- | NotationEnv.LocValue _ as v -> v
+ | NotationEnv.DisambiguationValue _ as v -> v
in
let ast_env_of_env env =
List.map (fun (var, (ty, value)) -> (var, (ty, pp_value value))) env
in
aux [] env
-let instantiate_level2 status env loc term =
+let instantiate_level2 status env (loc,uri,desc) term =
(* prerr_endline ("istanzio: " ^ NotationPp.pp_term term); *)
let fresh_env = ref [] in
let lookup_fresh_name n =
| Ast.Num _
| Ast.Sort _
| Ast.UserInput -> term
- | Ast.Symbol _ -> Ast.AttributedTerm (`Loc loc, term)
+ | Ast.Symbol (s,_) -> aux_symbol s loc (uri,desc)
| Ast.Magic magic -> aux_magic env magic
| Ast.Variable var -> aux_variable env var
| Ast.Cast (t, ty) -> Ast.Cast (aux env t, aux env ty)
| _ -> assert false
+ and aux_symbol s loc = function
+ | _, None -> Ast.AttributedTerm (`Loc loc, Ast.Symbol (s, None))
+ | uri, Some desc ->
+ Ast.AttributedTerm (`Loc loc, Ast.Symbol (s, Some (uri,desc)))
and aux_opt env = function
| Some term -> Some (aux env term)
| None -> None
(** fills a term pattern instantiating variable magics *)
val instantiate_level2:
- #NCic.status -> NotationEnv.t -> Stdpp.location -> NotationPt.term -> NotationPt.term
+ #NCic.status -> NotationEnv.t ->
+ Stdpp.location * string option * string option ->
+ NotationPt.term -> NotationPt.term
let basic_eval_input_notation (l1,l2) status =
GrafiteParser.extend status l1
(fun env loc ->
- let rec inner_loc default = function
- | [] -> default
- | (_,(NotationEnv.NoType,NotationEnv.LocValue l))::_ -> l
- | _::tl -> inner_loc default tl
+ let rec get_disamb = function
+ | [] -> Stdpp.dummy_loc,None,None
+ | (_,(NotationEnv.NoType,NotationEnv.DisambiguationValue dv))::_ -> dv
+ | _::tl -> get_disamb tl
in
- let inner_loc = inner_loc loc env in
- let l2' = TermContentPres.instantiate_level2 status env inner_loc l2 in
+ let l2' = TermContentPres.instantiate_level2 status env (get_disamb env) l2 in
prerr_endline ("new l2 ast : " ^ (NotationPp.pp_term status l2'));
NotationPt.AttributedTerm (`Loc loc,l2'))
;;
;;
-let parse_statement status =
+let parse_statement status =
+ status#reset_loctable ();
parse_statement status#parser_db
(* vim:set foldmethod=marker: *)
#OCAML_PROF=p -p a
#OCAMLOPT_DEBUG_FLAGS = -p
OCAMLC_FLAGS = $(OCAML_FLAGS) $(OCAML_THREADS_FLAGS)
-OCAMLC = $(OCAMLFIND) ocamlc$(OCAML_PROF) $(OCAMLC_FLAGS) $(OCAML_DEBUG_FLAGS)
+OCAMLC = $(OCAMLFIND) ocamlc$(OCAML_PROF) $(OCAMLC_FLAGS) $(OCAML_DEBUG_FLAGS) $(SYNTAXOPTIONS)
OCAMLOPT = $(OCAMLFIND) opt $(OCAMLC_FLAGS) $(OCAMLOPT_DEBUG_FLAGS)
OCAMLDEP = $(OCAMLFIND) ocamldep $(OCAMLDEP_FLAGS)
INSTALL_PROGRAMS= matita matitac
endif
MLI = \
+ matitaScriptLexer.mli \
lablGraphviz.mli \
matitaTypes.mli \
matitaMisc.mli \
MAINCML = $(MAINCMLI:%.mli=%.ml)
PROGRAMS_BYTE = \
- matita matitac matitaclean
+ matita matitac matitadaemon matitaclean
PROGRAMS = $(PROGRAMS_BYTE)
PROGRAMS_OPT = $(patsubst %,%.opt,$(PROGRAMS_BYTE))
NOINST_PROGRAMS =
.PHONY: all
all: $(PROGRAMS) $(NOINST_PROGRAMS)
+UTF8DIR := $(shell $(OCAMLFIND) query helm-syntax_extensions)
+ULEXDIR := $(shell $(OCAMLFIND) query ulex08)
+
+matitaScriptLexer.cmo: SYNTAXOPTIONS = -pp "camlp5o -I $(UTF8DIR) -I $(ULEXDIR) pa_extend.cmo pa_ulex.cma pa_unicode_macro.cma -loc loc"
+
CMOS = $(ML:%.ml=%.cmo)
CCMOS = $(CML:%.ml=%.cmo)
MAINCMOS = $(MAINCML:%.ml=%.cmo)
$(H)$(OCAMLC) $(PKGS) -linkpkg -o matita $(CMOS) $(OCAML_DEBUG_FLAGS) matita.ml
$(H)echo " OCAMLC matitac.ml"
$(H)$(OCAMLC) $(CPKGS) -linkpkg -o matitac $(CCMOS) $(MAINCMOS) $(OCAML_DEBUG_FLAGS) matitac.ml
+ $(H)echo " OCAMLC matitadaemon.ml"
+ $(H)$(OCAMLC) $(CPKGS) -linkpkg -o matitadaemon $(CCMOS) $(MAINCMOS) $(OCAML_DEBUG_FLAGS) matitadaemon.ml
+
.PHONY: linkonly
matita: matita.ml $(LIB_DEPS) $(CMOS)
$(H)echo " OCAMLC $<"
$(H)echo " OCAMLOPT $<"
$(H)$(OCAMLOPT) $(CPKGS) -linkpkg -o $@ $(CCMXS) $(MAINCMXS) matitac.ml
+matitadaemon: matitadaemon.ml $(CLIB_DEPS) $(CMOS) $(MAINCMOS)
+ $(H)echo " OCAMLC $<"
+ $(H)$(OCAMLC) $(PKGS) -linkpkg -o $@ $(CMOS) $(MAINCMOS) matitadaemon.ml
+matitadaemon.opt: matitadaemon.ml $(CLIBX_DEPS) $(CMXS) $(MAINCMXS)
+ $(H)echo " OCAMLOPT $<"
+ $(H)$(OCAMLOPT) $(PKGS) -linkpkg -o $@ $(CMXS) $(MAINCMXS) matitadaemon.ml
+
rottener: rottener.ml $(CLIB_DEPS) $(CCMOS) $(MAINCMOS)
$(H)echo " OCAMLC $<"
$(H)$(OCAMLC) $(CPKGS) -package lablgtk2 -linkpkg -o $@ $(CCMOS) $(MAINCMOS) rottener.ml
notation "hvbox(∃ _ break . p)"
with precedence 20
-for @{'exists $p }.
+for @{'exists $p }.
notation < "hvbox(\exists ident i : ty break . p)"
right associative with precedence 20
V_______________________________________________________________ *)
include "basics/pts.ma".
-include "hints_declaration.ma".
+(*include "hints_declaration.ma".*)
(* propositional equality *)
inductive eq (A:Type[1]) (x:A) : A → Prop ≝
refl: eq A x x.
-
-interpretation "leibnitz's equality" 'eq t x y = (eq t x y).
+interpretation "leibnitz's equality" 'eq t x y = (eq t x y).
lemma eq_rect_r:
- ∀A.∀a,x.∀p:eq ? x a.∀P: ∀x:A. x = a → Type[2]. P a (refl A a) → P x p.
+ ∀A.∀a,x.∀p:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> ? x a.∀P:
+ ∀x:A. x <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> a → Type[2]. P a (refl A a) → P x p.
#A #a #x #p (cases p) // qed.
lemma eq_ind_r :
- ∀A.∀a.∀P: ∀x:A. x = a → Prop. P a (refl A a) →
- ∀x.∀p:eq ? x a.P x p.
- #A #a #P #p #x0 #p0; @(eq_rect_r ? ? ? p0) //; qed.
+ ∀A.∀a.∀P: ∀x:A. x <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> a → Prop. P a (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> A a) →
+ ∀x.∀p:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> ? x a.P x p.
+ #A #a #P #p #x0 #p0; @(<A href="cic:/matita/basics/logic/eq_rect_r.def(1)">eq_rect_r</A> ? ? ? p0) //; qed.
lemma eq_rect_Type2_r:
- ∀A.∀a.∀P: ∀x:A. eq ? x a → Type[2]. P a (refl A a) →
- ∀x.∀p:eq ? x a.P x p.
+ ∀A.∀a.∀P: ∀x:A. <A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> ? x a → Type[2]. P a (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> A a) →
+ ∀x.∀p:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> ? x a.P x p.
#A #a #P #H #x #p (generalize in match H) (generalize in match P)
cases p; //; qed.
-theorem rewrite_l: ∀A:Type[1].∀x.∀P:A → Type[1]. P x → ∀y. x = y → P y.
+theorem rewrite_l: ∀A:Type[1].∀x.∀P:A → Type[1]. P x → ∀y. x <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> y → P y.
#A #x #P #Hx #y #Heq (cases Heq); //; qed.
-theorem sym_eq: ∀A.∀x,y:A. x = y → y = x.
-#A #x #y #Heq @(rewrite_l A x (λz.z=x)); //; qed.
+theorem sym_eq: ∀A.∀x,y:A. x <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> y → y <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> x.
+#A #x #y #Heq @(<A href="cic:/matita/basics/logic/rewrite_l.def(1)">rewrite_l</A> A x (λz.z<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>x)); //; qed.
-theorem rewrite_r: ∀A:Type[1].∀x.∀P:A → Type[1]. P x → ∀y. y = x → P y.
-#A #x #P #Hx #y #Heq (cases (sym_eq ? ? ? Heq)); //; qed.
+theorem rewrite_r: ∀A:Type[1].∀x.∀P:A → Type[1]. P x → ∀y. y <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> x → P y.
+#A #x #P #Hx #y #Heq (cases (<A href="cic:/matita/basics/logic/sym_eq.def(2)">sym_eq</A> ? ? ? Heq)); //; qed.
-theorem eq_coerc: ∀A,B:Type[0].A→(A=B)→B.
+theorem eq_coerc: ∀A,B:Type[0].A→(A<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>B)→B.
#A #B #Ha #Heq (elim Heq); //; qed.
-theorem trans_eq : ∀A.∀x,y,z:A. x = y → y = z → x = z.
+theorem trans_eq : ∀A.∀x,y,z:A. x <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> y → y <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> z → x <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> z.
#A #x #y #z #H1 #H2 >H1; //; qed.
-theorem eq_f: ∀A,B.∀f:A→B.∀x,y:A. x=y → f x = f y.
+theorem eq_f: ∀A,B.∀f:A→B.∀x,y:A. x<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>y → f x <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> f y.
#A #B #f #x #y #H >H; //; qed.
(* deleterio per auto? *)
theorem eq_f2: ∀A,B,C.∀f:A→B→C.
-∀x1,x2:A.∀y1,y2:B. x1=x2 → y1=y2 → f x1 y1 = f x2 y2.
+∀x1,x2:A.∀y1,y2:B. x1<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>x2 → y1<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>y2 → f x1 y1 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> f x2 y2.
#A #B #C #f #x1 #x2 #y1 #y2 #E1 #E2 >E1; >E2; //; qed.
(* hint to genereric equality
λA. A → False. *)
inductive Not (A:Prop): Prop ≝
-nmk: (A → False) → Not A.
+nmk: (A → <A href="cic:/matita/basics/logic/False.ind(1,0,0)">False</A>) → Not A.
+
interpretation "logical not" 'not x = (Not x).
-theorem absurd : ∀A:Prop. A → ¬A → False.
+theorem absurd : ∀A:Prop. A → <A title="logical not" href="cic:/fakeuri.def(1)">¬</A>A → <A href="cic:/matita/basics/logic/False.ind(1,0,0)">False</A>.
#A #H #Hn (elim Hn); /2/; qed.
(*
#A; #C; #H; #Hn; nelim (Hn H).
nqed. *)
-theorem not_to_not : ∀A,B:Prop. (A → B) → ¬B →¬A.
+theorem not_to_not : ∀A,B:Prop. (A → B) → <A title="logical not" href="cic:/fakeuri.def(1)">¬</A>B →<A title="logical not" href="cic:/fakeuri.def(1)">¬</A>A.
/4/; qed.
(* inequality *)
interpretation "leibnitz's non-equality" 'neq t x y = (Not (eq t x y)).
-theorem sym_not_eq: ∀A.∀x,y:A. x ≠ y → y ≠ x.
+theorem sym_not_eq: ∀A.∀x,y:A. x <A title="leibnitz's non-equality" href="cic:/fakeuri.def(1)">≠</A> y → y <A title="leibnitz's non-equality" href="cic:/fakeuri.def(1)">≠</A> x.
/3/; qed.
(* and *)
interpretation "logical and" 'and x y = (And x y).
-theorem proj1: ∀A,B:Prop. A ∧ B → A.
+theorem proj1: ∀A,B:Prop. A <A title="logical and" href="cic:/fakeuri.def(1)">∧</A> B → A.
#A #B #AB (elim AB) //; qed.
-theorem proj2: ∀ A,B:Prop. A ∧ B → B.
+theorem proj2: ∀ A,B:Prop. A <A title="logical and" href="cic:/fakeuri.def(1)">∧</A> B → B.
#A #B #AB (elim AB) //; qed.
(* or *)
interpretation "logical or" 'or x y = (Or x y).
definition decidable : Prop → Prop ≝
-λ A:Prop. A ∨ ¬ A.
+λ A:Prop. A <A title="logical or" href="cic:/fakeuri.def(1)">∨</A> <A title="logical not" href="cic:/fakeuri.def(1)">¬</A> A.
(* exists *)
inductive ex (A:Type[0]) (P:A → Prop) : Prop ≝
(* iff *)
definition iff :=
- λ A,B. (A → B) ∧ (B → A).
+ λ A,B. (A → B) <A title="logical and" href="cic:/fakeuri.def(1)">∧</A> (B → A).
interpretation "iff" 'iff a b = (iff a b).
definition R0 ≝ λT:Type[0].λt:T.t.
-definition R1 ≝ eq_rect_Type0.
+definition R1 ≝ <A href="cic:/matita/basics/logic/eq_rect_Type0.fix(0,5,1)">eq_rect_Type0</A>.
(* useless stuff *)
definition R2 :
∀T0:Type[0].
∀a0:T0.
- ∀T1:∀x0:T0. a0=x0 → Type[0].
- ∀a1:T1 a0 (refl ? a0).
- ∀T2:∀x0:T0. ∀p0:a0=x0. ∀x1:T1 x0 p0. R1 ?? T1 a1 ? p0 = x1 → Type[0].
- ∀a2:T2 a0 (refl ? a0) a1 (refl ? a1).
+ ∀T1:∀x0:T0. a0<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>x0 → Type[0].
+ ∀a1:T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? a0).
+ ∀T2:∀x0:T0. ∀p0:a0<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>x0. ∀x1:T1 x0 p0. <A href="cic:/matita/basics/logic/R1.def(2)">R1</A> ?? T1 a1 ? p0 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> x1 → Type[0].
+ ∀a2:T2 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? a1).
∀b0:T0.
- ∀e0:a0 = b0.
+ ∀e0:a0 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> b0.
∀b1: T1 b0 e0.
- ∀e1:R1 ?? T1 a1 ? e0 = b1.
+ ∀e1:<A href="cic:/matita/basics/logic/R1.def(2)">R1</A> ?? T1 a1 ? e0 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> b1.
T2 b0 e0 b1 e1.
#T0 #a0 #T1 #a1 #T2 #a2 #b0 #e0 #b1 #e1
-@(eq_rect_Type0 ????? e1)
-@(R1 ?? ? ?? e0)
+@(<A href="cic:/matita/basics/logic/eq_rect_Type0.fix(0,5,1)">eq_rect_Type0</A> ????? e1)
+@(<A href="cic:/matita/basics/logic/R1.def(2)">R1</A> ?? ? ?? e0)
@a2
qed.
definition R3 :
∀T0:Type[0].
∀a0:T0.
- ∀T1:∀x0:T0. a0=x0 → Type[0].
- ∀a1:T1 a0 (refl ? a0).
- ∀T2:∀x0:T0. ∀p0:a0=x0. ∀x1:T1 x0 p0. R1 ?? T1 a1 ? p0 = x1 → Type[0].
- ∀a2:T2 a0 (refl ? a0) a1 (refl ? a1).
- ∀T3:∀x0:T0. ∀p0:a0=x0. ∀x1:T1 x0 p0.∀p1:R1 ?? T1 a1 ? p0 = x1.
- ∀x2:T2 x0 p0 x1 p1.R2 ???? T2 a2 x0 p0 ? p1 = x2 → Type[0].
- ∀a3:T3 a0 (refl ? a0) a1 (refl ? a1) a2 (refl ? a2).
+ ∀T1:∀x0:T0. a0<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>x0 → Type[0].
+ ∀a1:T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? a0).
+ ∀T2:∀x0:T0. ∀p0:a0<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>x0. ∀x1:T1 x0 p0. <A href="cic:/matita/basics/logic/R1.def(2)">R1</A> ?? T1 a1 ? p0 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> x1 → Type[0].
+ ∀a2:T2 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? a1).
+ ∀T3:∀x0:T0. ∀p0:a0<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>x0. ∀x1:T1 x0 p0.∀p1:<A href="cic:/matita/basics/logic/R1.def(2)">R1</A> ?? T1 a1 ? p0 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> x1.
+ ∀x2:T2 x0 p0 x1 p1.<A href="cic:/matita/basics/logic/R2.def(3)">R2</A> ???? T2 a2 x0 p0 ? p1 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> x2 → Type[0].
+ ∀a3:T3 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? a1) a2 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? a2).
∀b0:T0.
- ∀e0:a0 = b0.
+ ∀e0:a0 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> b0.
∀b1: T1 b0 e0.
- ∀e1:R1 ?? T1 a1 ? e0 = b1.
+ ∀e1:<A href="cic:/matita/basics/logic/R1.def(2)">R1</A> ?? T1 a1 ? e0 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> b1.
∀b2: T2 b0 e0 b1 e1.
- ∀e2:R2 ???? T2 a2 b0 e0 ? e1 = b2.
+ ∀e2:<A href="cic:/matita/basics/logic/R2.def(3)">R2</A> ???? T2 a2 b0 e0 ? e1 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> b2.
T3 b0 e0 b1 e1 b2 e2.
#T0 #a0 #T1 #a1 #T2 #a2 #T3 #a3 #b0 #e0 #b1 #e1 #b2 #e2
-@(eq_rect_Type0 ????? e2)
-@(R2 ?? ? ???? e0 ? e1)
+@(<A href="cic:/matita/basics/logic/eq_rect_Type0.fix(0,5,1)">eq_rect_Type0</A> ????? e2)
+@(<A href="cic:/matita/basics/logic/R2.def(3)">R2</A> ?? ? ???? e0 ? e1)
@a3
qed.
definition R4 :
∀T0:Type[0].
∀a0:T0.
- ∀T1:∀x0:T0. eq T0 a0 x0 → Type[0].
- ∀a1:T1 a0 (refl T0 a0).
- ∀T2:∀x0:T0. ∀p0:eq (T0 …) a0 x0. ∀x1:T1 x0 p0.eq (T1 …) (R1 T0 a0 T1 a1 x0 p0) x1 → Type[0].
- ∀a2:T2 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1).
- ∀T3:∀x0:T0. ∀p0:eq (T0 …) a0 x0. ∀x1:T1 x0 p0.∀p1:eq (T1 …) (R1 T0 a0 T1 a1 x0 p0) x1.
- ∀x2:T2 x0 p0 x1 p1.eq (T2 …) (R2 T0 a0 T1 a1 T2 a2 x0 p0 x1 p1) x2 → Type[0].
- ∀a3:T3 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1)
- a2 (refl (T2 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1)) a2).
- ∀T4:∀x0:T0. ∀p0:eq (T0 …) a0 x0. ∀x1:T1 x0 p0.∀p1:eq (T1 …) (R1 T0 a0 T1 a1 x0 p0) x1.
- ∀x2:T2 x0 p0 x1 p1.∀p2:eq (T2 …) (R2 T0 a0 T1 a1 T2 a2 x0 p0 x1 p1) x2.
- ∀x3:T3 x0 p0 x1 p1 x2 p2.∀p3:eq (T3 …) (R3 T0 a0 T1 a1 T2 a2 T3 a3 x0 p0 x1 p1 x2 p2) x3.
+ ∀T1:∀x0:T0. <A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> T0 a0 x0 → Type[0].
+ ∀a1:T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0).
+ ∀T2:∀x0:T0. ∀p0:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T0 …) a0 x0. ∀x1:T1 x0 p0.<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T1 …) (<A href="cic:/matita/basics/logic/R1.def(2)">R1</A> T0 a0 T1 a1 x0 p0) x1 → Type[0].
+ ∀a2:T2 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0)) a1).
+ ∀T3:∀x0:T0. ∀p0:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T0 …) a0 x0. ∀x1:T1 x0 p0.∀p1:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T1 …) (<A href="cic:/matita/basics/logic/R1.def(2)">R1</A> T0 a0 T1 a1 x0 p0) x1.
+ ∀x2:T2 x0 p0 x1 p1.<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T2 …) (<A href="cic:/matita/basics/logic/R2.def(3)">R2</A> T0 a0 T1 a1 T2 a2 x0 p0 x1 p1) x2 → Type[0].
+ ∀a3:T3 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0)) a1)
+ a2 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T2 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0)) a1)) a2).
+ ∀T4:∀x0:T0. ∀p0:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T0 …) a0 x0. ∀x1:T1 x0 p0.∀p1:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T1 …) (<A href="cic:/matita/basics/logic/R1.def(2)">R1</A> T0 a0 T1 a1 x0 p0) x1.
+ ∀x2:T2 x0 p0 x1 p1.∀p2:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T2 …) (<A href="cic:/matita/basics/logic/R2.def(3)">R2</A> T0 a0 T1 a1 T2 a2 x0 p0 x1 p1) x2.
+ ∀x3:T3 x0 p0 x1 p1 x2 p2.∀p3:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T3 …) (<A href="cic:/matita/basics/logic/R3.def(4)">R3</A> T0 a0 T1 a1 T2 a2 T3 a3 x0 p0 x1 p1 x2 p2) x3.
Type[0].
- ∀a4:T4 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1)
- a2 (refl (T2 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1)) a2)
- a3 (refl (T3 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1)
- a2 (refl (T2 a0 (refl T0 a0) a1 (refl (T1 a0 (refl T0 a0)) a1)) a2))
+ ∀a4:T4 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0)) a1)
+ a2 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T2 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0)) a1)) a2)
+ a3 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T3 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0)) a1)
+ a2 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T2 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0)) a1)) a2))
a3).
∀b0:T0.
- ∀e0:eq (T0 …) a0 b0.
+ ∀e0:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T0 …) a0 b0.
∀b1: T1 b0 e0.
- ∀e1:eq (T1 …) (R1 T0 a0 T1 a1 b0 e0) b1.
+ ∀e1:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T1 …) (<A href="cic:/matita/basics/logic/R1.def(2)">R1</A> T0 a0 T1 a1 b0 e0) b1.
∀b2: T2 b0 e0 b1 e1.
- ∀e2:eq (T2 …) (R2 T0 a0 T1 a1 T2 a2 b0 e0 b1 e1) b2.
+ ∀e2:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T2 …) (<A href="cic:/matita/basics/logic/R2.def(3)">R2</A> T0 a0 T1 a1 T2 a2 b0 e0 b1 e1) b2.
∀b3: T3 b0 e0 b1 e1 b2 e2.
- ∀e3:eq (T3 …) (R3 T0 a0 T1 a1 T2 a2 T3 a3 b0 e0 b1 e1 b2 e2) b3.
+ ∀e3:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T3 …) (<A href="cic:/matita/basics/logic/R3.def(4)">R3</A> T0 a0 T1 a1 T2 a2 T3 a3 b0 e0 b1 e1 b2 e2) b3.
T4 b0 e0 b1 e1 b2 e2 b3 e3.
#T0 #a0 #T1 #a1 #T2 #a2 #T3 #a3 #T4 #a4 #b0 #e0 #b1 #e1 #b2 #e2 #b3 #e3
-@(eq_rect_Type0 ????? e3)
-@(R3 ????????? e0 ? e1 ? e2)
+@(<A href="cic:/matita/basics/logic/eq_rect_Type0.fix(0,5,1)">eq_rect_Type0</A> ????? e3)
+@(<A href="cic:/matita/basics/logic/R3.def(4)">R3</A> ????????? e0 ? e1 ? e2)
@a4
qed.
(* TODO concrete definition by means of proof irrelevance *)
-axiom streicherK : ∀T:Type[1].∀t:T.∀P:t = t → Type[2].P (refl ? t) → ∀p.P p.
+axiom streicherK : ∀T:Type[1].∀t:T.∀P:t <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> t → Type[2].P (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? t) → ∀p.P p.
\ No newline at end of file
;;
-let eval_ast ~include_paths ?do_heavy_checks status (text,prefix_len,ast) =
+let eval_ast ~include_paths ?do_heavy_checks status (text,prefix_len,ast as ast') =
let baseuri = status#baseuri in
- (*
- let new_aliases,new_status =
- GrafiteDisambiguate.eval_with_new_aliases status
- (fun status ->
- GrafiteEngine.eval_ast ~include_paths ?do_heavy_checks status
- (text,prefix_len,ast)) in
- *)
- let new_status =
- GrafiteEngine.eval_ast ~include_paths ?do_heavy_checks status
- (text,prefix_len,ast) in
- (*
- let _,intermediate_states =
- List.fold_left
- (fun (status,acc) (k,value) ->
- let v = GrafiteAst.description_of_alias value in
- let b =
- try
- let NReference.Ref (uri,_) = NReference.reference_of_string v in
- NUri.baseuri_of_uri uri = baseuri
- with
- NReference.IllFormedReference _ ->
- false (* v is a description, not a URI *)
- in
- if b then
- status,acc
- else
- let status =
- GrafiteDisambiguate.set_proof_aliases status ~implicit_aliases:false
- GrafiteAst.WithPreferences [k,value]
- in
- status, (status ,Some (k,value))::acc
- ) (status,[]) new_aliases (* WARNING: this must be the old status! *)
- in
- (new_status,None)::intermediate_states
- *)
- [new_status,None]
+ GrafiteEngine.eval_ast ~include_paths ?do_heavy_checks status ast'
;;
let baseuri_of_script ~include_paths fname =
| None -> asserted, true, status
| Some (asserted,ast) ->
cb status ast;
- let new_statuses =
- eval_ast ~include_paths ?do_heavy_checks status ("",0,ast) in
let status =
- match new_statuses with
- [s,None] -> s
- | _::(_,Some (_,value))::_ ->
- raise (TryingToAdd (lazy (GrafiteAstPp.pp_alias value)))
- | _ -> assert false
+ eval_ast ~include_paths ?do_heavy_checks status ("",0,ast)
in
asserted, false, status
with exn when not matita_debug ->
in
loop asserted status
-and compile ~compiling ~asserted ~include_paths fname =
+and compile ~compiling ~asserted ~include_paths ~outch fname =
if List.mem fname compiling then raise (CircularDependency fname);
let compiling = fname::compiling in
let matita_debug = Helm_registry.get_bool "matita.debug" in
pp_times fname true big_bang big_bang_u big_bang_s;
(* XXX: update script -- currently to stdout *)
let origbuf = Ulexing.from_utf8_channel (open_in fname) in
- let outfile = open_out (fname ^ ".mad") in
let interpr = GrafiteDisambiguate.get_interpr status#disambiguate_db in
- ignore (SmallLexer.mk_small_printer interpr outfile origbuf);
- close_out outfile;
+ ignore (SmallLexer.mk_small_printer interpr outch origbuf);
asserted
(* MATITA 1.0: debbo fare time_travel sulla ng_library?
LexiconSync.time_travel
pp_times fname false big_bang big_bang_u big_bang_s;
clean_exit baseuri exn
-and assert_ng ~already_included ~compiling ~asserted ~include_paths mapath =
+and assert_ng ~already_included ~compiling ~asserted ~include_paths ?outch mapath =
let _,baseuri,fullmapath,_ = Librarian.baseuri_of_script ~include_paths mapath in
if List.mem fullmapath asserted then asserted,false
else
(* maybe recompiling it I would get the same... *)
raise (AlreadyLoaded (lazy mapath))
else
- let asserted = compile ~compiling ~asserted ~include_paths fullmapath in
+ let outch, is_file_ch =
+ match outch with
+ | None -> open_out (fullmapath ^ ".mad"), true
+ | Some c -> c, false
+ in
+ let asserted = compile ~compiling ~asserted ~include_paths ~outch fullmapath in
+ if is_file_ch then close_out outch;
fullmapath::asserted,true
end
;;
-let assert_ng ~include_paths mapath =
+let assert_ng ~include_paths ?outch mapath =
snd (assert_ng ~include_paths ~already_included:[] ~compiling:[] ~asserted:[]
- mapath)
+ ?outch mapath)
let get_ast status ~include_paths strm =
snd (get_ast status ~compiling:[] ~asserted:[] ~include_paths strm)
GrafiteTypes.status ->
string * int *
GrafiteAst.statement ->
- (GrafiteTypes.status *
- (DisambiguateTypes.domain_item * GrafiteAst.alias_spec) option) list
+ GrafiteTypes.status
-val assert_ng: include_paths:string list -> string -> bool
+val assert_ng: include_paths:string list -> ?outch:out_channel -> string -> bool
String.sub s 0 nl_pos
with Not_found -> s
+(* WR: skipped_txt, nonskipped_txt servono ancora?
+ * (ora non abbiamo più gli alias e il codice sembra più complicato
+ * del necessario)
+ * (rif. nota 11/05/11) *)
let eval_with_engine include_paths status skipped_txt nonskipped_txt st
=
let parsed_text_length =
- String.length skipped_txt + String.length nonskipped_txt
+ String.length skipped_txt + String.length nonskipped_txt
in
let text = skipped_txt ^ nonskipped_txt in
let prefix_len = MatitaGtkMisc.utf8_string_length skipped_txt in
- let enriched_history_fragment =
+ let new_status =
MatitaEngine.eval_ast ~include_paths ~do_heavy_checks:(Helm_registry.get_bool
"matita.do_heavy_checks")
status (text,prefix_len,st)
in
- let enriched_history_fragment = List.rev enriched_history_fragment in
- (* really fragile *)
- let res,_ =
- List.fold_left
- (fun (acc, to_prepend) (status,alias) ->
- match alias with
- | None -> (status,to_prepend ^ nonskipped_txt)::acc,""
- | Some (k,value) ->
- let newtxt = GrafiteAstPp.pp_alias value in
- (status,to_prepend ^ newtxt ^ "\n")::acc, "")
- ([],skipped_txt) enriched_history_fragment
- in
+ let res = new_status,skipped_txt ^ nonskipped_txt in
res,"",parsed_text_length
;;
List.filter (fun x,_ -> List.mem x selected) menv
in
CicMathView.screenshot status sequents menv subst name;
- [status, parsed_text], "", parsed_text_length
+ (status, parsed_text), "", parsed_text_length
| TA.NCheck (_,t) ->
let status = script#status in
let _,_,menv,subst,_ = status#obj in
(* XXX use the metasenv, if possible *)
in
MatitaMathView.cicBrowser (Some (`NCic (t,ctx,m,s)));
- [status, parsed_text], "", parsed_text_length
+ (status, parsed_text), "", parsed_text_length
| TA.NIntroGuess _loc ->
let names_ref = ref [] in
let s = NTactics.intros_tac ~names_ref [] script#status in
let rex = Pcre.regexp ~flags:[`MULTILINE] "\\A([\\n\\t\\r ]*).*\\Z" in
let nl = Pcre.replace ~rex ~templ:"$1" parsed_text in
- [s, nl ^ "#" ^ String.concat " " !names_ref ^ ";"], "", parsed_text_length
+ (s, nl ^ "#" ^ String.concat " " !names_ref ^ ";"), "", parsed_text_length
| TA.NAutoInteractive (_loc, (None,a)) ->
let trace_ref = ref [] in
let s = NnAuto.auto_tac ~params:(None,a) ~trace_ref script#status in
in
let rex = Pcre.regexp ~flags:[`MULTILINE] "\\A([\\n\\t\\r ]*).*\\Z" in
let nl = Pcre.replace ~rex ~templ:"$1" parsed_text in
- [s, nl ^ trace ^ thms ^ ";"], "", parsed_text_length
+ (s, nl ^ trace ^ thms ^ ";"), "", parsed_text_length
| TA.NAutoInteractive (_, (Some _,_)) -> assert false
let rec eval_executable include_paths (buffer : GText.buffer)
eval_with_engine include_paths status skipped_txt nonskipped_txt
(TA.Executable (loc, ex))
with
- MatitaTypes.Cancel -> [], "", 0
+ MatitaTypes.Cancel -> (status,skipped_txt), "", 0
| GrafiteEngine.NMacro (_loc,macro) ->
eval_nmacro include_paths buffer status
unparsed_text (skipped_txt ^ nonskipped_txt) script macro
(offset+parsed_text_length, errorll))
in
assert (text=""); (* no macros inside comments, please! *)
- (match s with
- | (statuses,text)::tl ->
- (statuses,parsed_text ^ text)::tl,"",parsed_text_length + len
- | [] -> [], "", 0)
+ let st,text = s in
+ (st,parsed_text ^ text),"",parsed_text_length + len
let fresh_script_id =
let i = ref 0 in
~callback:(fun () -> self#clean_dirty_lock));
ignore (GMain.Timeout.add ~ms:300000
~callback:(fun _ -> self#_saveToBackupFile ();true));
+ ignore (buffer#connect#changed
+ (fun _ ->
+ let curpos = buffer#cursor_position in
+ HLog.debug ("cursor at " ^ (string_of_int curpos))));
ignore (buffer#connect#modified_changed
(fun _ -> self#set_star buffer#modified));
(* clean_locked is set to true only "during" a PRIMARY paste
in
let time2 = Unix.gettimeofday () in
HLog.debug ("... done in " ^ string_of_float (time2 -. time1) ^ "s");
- let new_statuses, new_statements =
- let statuses, texts = List.split entries in
- statuses, texts
- in
- history <- new_statuses @ history;
- statements <- new_statements @ statements;
+ let new_statuses, new_statements = entries in
+ history <- new_statuses :: history;
+ statements <- new_statements :: statements;
let start = buffer#get_iter_at_mark (`MARK locked_mark) in
- let new_text = String.concat "" (List.rev new_statements) in
if statement <> None then
- buffer#insert ~iter:start new_text
+ buffer#insert ~iter:start new_statements
else begin
let parsed_text = String.sub s 0 parsed_len in
- if new_text <> parsed_text then begin
+ if new_statements <> parsed_text then begin
let stop = start#copy#forward_chars (Glib.Utf8.length parsed_text) in
buffer#delete ~start ~stop;
- buffer#insert ~iter:start new_text;
+ buffer#insert ~iter:start new_statements;
end;
end;
- self#moveMark (Glib.Utf8.length new_text);
- buffer#insert ~iter:(buffer#get_iter_at_mark (`MARK locked_mark)) newtext
+ self#moveMark (Glib.Utf8.length new_statements);
+ buffer#insert ~iter:(buffer#get_iter_at_mark (`MARK locked_mark))
+ newtext
method private _retract offset status new_statements new_history =
NCicLibrary.time_travel status;
self#notify
method loadFromFile f =
- buffer#set_text (HExtlib.input_file f);
+ let script = HExtlib.input_file f in
+ let hs = MatitaScriptLexer.get_hot_spots script in
+ let hstext =
+ String.concat "\n" (List.map (fun (l1,l2,uri,desc) ->
+ let a,b = HExtlib.loc_of_floc l1 in
+ let c,d = HExtlib.loc_of_floc l2 in
+ let uri = match uri with None -> "()" | Some s -> s in
+ let desc = match desc with None -> "()" | Some s -> s in
+ Printf.sprintf "[%d,%d] [%d,%d] '%s' '%s'" a b c d uri desc) hs)
+ in
+ HLog.debug hstext;
+ buffer#set_text script;
+
+ (* this is the default tag for the whole script (mainly used for
+ * setting the default behaviour when needed) *)
+ let all_tag = buffer#create_tag [] in
+ buffer#apply_tag all_tag ~start:(buffer#get_iter `START)
+ ~stop:(buffer#get_iter `END);
+ ignore(all_tag#connect#event
+ ~callback:(fun ~origin event pos ->
+ match GdkEvent.get_type event with
+ | `MOTION_NOTIFY ->
+ Gdk.Window.set_cursor
+ (match source_view#get_window `TEXT with None -> assert false | Some x -> x)
+ (Gdk.Cursor.create `ARROW);
+ (* code for removing message from statusbar *)
+ false
+ | _ -> false));
+
+ let invisible_tag = buffer#create_tag [ `INVISIBLE true ] in
+ let apply_tag (l1,l2,uri,desc) =
+ let hyperlink_tag =
+ buffer#create_tag [ `FOREGROUND "blue" ; `UNDERLINE `SINGLE] in
+ (* hyperlink_tag#connect#after#changed ~callback:(fun _ -> HLog.debug "***"); *)
+ let a,b = HExtlib.loc_of_floc l1 in
+ let c,d = HExtlib.loc_of_floc l2 in
+ (* setting invisibility for <A...> and </A> *)
+ buffer#apply_tag invisible_tag
+ ~start:(buffer#get_iter_at_char a)
+ ~stop:(buffer#get_iter_at_char b);
+ buffer#apply_tag invisible_tag
+ ~start:(buffer#get_iter_at_char c)
+ ~stop:(buffer#get_iter_at_char d);
+ (* setting hyperlink inside <A...> and </A> *)
+ (* XXX: I'm not sure why I'm required to put those -1 and +1
+ * otherwise I get too small a clickable area, especially for
+ * single characters hyperlinks. *)
+ buffer#apply_tag hyperlink_tag
+ ~start:(buffer#get_iter_at_char (b-1))
+ ~stop:(buffer#get_iter_at_char (c+1));
+ match uri with
+ | None -> ()
+ | Some uri ->
+ ignore(hyperlink_tag#connect#event
+ ~callback:(fun ~origin event pos ->
+ match GdkEvent.get_type event with
+ `BUTTON_PRESS ->
+ let uri' = NReference.reference_of_string uri in
+ MatitaMathView.cicBrowser (Some (`NRef uri'));
+ true
+ | `MOTION_NOTIFY ->
+ Gdk.Window.set_cursor
+ (match source_view#get_window `TEXT with None -> assert false | Some x -> x)
+ (Gdk.Cursor.create `HAND1);
+ (* let ctxt = (MatitaMisc.get_gui ())#main#statusBar#new_context ~name:"href" in
+ let msg = ctxt#push uri in
+ (* href_statusbar_msg <- Some (ctxt, msg);*) *)
+ false
+ | _ -> false))
+ in
self#reset_buffer;
+ List.iter apply_tag hs;
buffer#set_modified false
method assignFileName file =