-requires="str"
+requires="str netstring"
version="0.0.1"
archive(byte)="utf8_macros.cma"
archive(native)="utf8_macros.cmxa"
("(" number (',' number)* ")")? (* reference spec *)
let regexp qstring = '"' [^ '"']* '"'
-let regexp hreftag = "<" [ 'A' 'a' ]
+let regexp xmarkup = '\005'
+let regexp ymarkup = '\006'
+
+let regexp hreftag = xmarkup [ 'A' 'a' ]
let regexp href = "href=\"" uri "\""
let regexp hreftitle = "title=" qstring
-let regexp hrefclose = "</" [ 'A' 'a' ] ">"
+let regexp hrefclose = xmarkup "/" [ 'A' 'a' ] ymarkup
+
+let regexp tag_cont = ident_letter | xml_digit | "_" | "-"
+let regexp tagname = ident_letter tag_cont*
+let regexp opentag = xmarkup tagname
+let regexp closetag = xmarkup "/" tagname ymarkup
+let regexp attribute = tagname "=" qstring
+let regexp generictag =
+ opentag (utf8_blank+ attribute)* ymarkup
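With these definitions the literal "<"/">" delimiters of the old <A ...> anchors are
replaced by the control characters \005 (xmarkup) and \006 (ymarkup), so markup can no
longer collide with Matita's own "<" and ">" symbols. A minimal sketch, not part of the
patch, of how the same hyperlink looks in the two encodings these regexps handle:

  (* sketch only: old HTML-style anchor vs. new control-character markup *)
  let _html_form   = "<A href=\"cic:/fakeuri.def(1)\">=</A>"
  let _marked_form = "\005A href=\"cic:/fakeuri.def(1)\"\006=\005/A\006"
  (* _marked_form decomposes as opentag "\005A", one attribute
     href="cic:/fakeuri.def(1)", ymarkup "\006", the lexeme "=",
     and closetag "\005/A\006" *)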
let regexp tex_token = '\\' ident
| utf8_blank+ -> level2_meta_token lexbuf
| hreftag -> return lexbuf ("ATAG","")
| hrefclose -> return lexbuf ("ATAGEND","")
+ | generictag
+ | closetag -> level2_meta_token lexbuf
| ident ->
handle_keywords lexbuf (fun x -> List.mem x level2_meta_keywords) "IDENT"
| variable_ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf)
| placeholder -> return lexbuf ("PLACEHOLDER", "")
| hreftag -> aux_in_tag None None lexbuf
| hrefclose -> aux None None lexbuf
+ | generictag
+ | closetag -> ligatures_token (aux desc href) lexbuf
| ident -> loctable :=
update_table (loc_of_buf lexbuf) desc href !loctable;
handle_keywords lexbuf (fun x -> StringSet.mem x status) "IDENT"
| number -> return lexbuf ("NUMBER", Ulexing.utf8_lexeme lexbuf)
| hreftag -> return lexbuf ("ATAG", "")
| hrefclose -> return lexbuf ("ATAGEND","")
+ | generictag
+ | closetag -> ligatures_token level1_pattern_token lexbuf
| ident ->handle_keywords lexbuf (fun x -> List.mem x level1_keywords) "IDENT"
| variable_ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf)
| pident->handle_keywords lexbuf (fun x->List.mem x level1_keywords) "PIDENT"
(Gramext.action (fun _ loc -> None, loc))
; [Gramext.Stoken ("ATAG","")
;Gramext.Snterm (Grammar.Entry.obj sym_attributes)
- ;Gramext.Stoken ("SYMBOL",">")
+ ;Gramext.Stoken ("SYMBOL","\006")
;Gramext.Stoken ("SYMBOL",s)
;Gramext.Stoken ("ATAGEND","")],
(Gramext.action (fun _ uridesc _ _ _ loc -> (Some uridesc),loc))
let remove_quotes s = String.sub s 1 (String.length s - 2)
let remove_left_quote s = String.sub s 1 (String.length s - 1)
-let print = Printf.fprintf
+let print s t = s := (!s ^ t) (* string-buffer replacement for Printf.fprintf *)
+
+let xmarkup = "\005"
+let ymarkup = "\006"
let handle_full_id ch id uri =
- print ch "<A href=\"%s\">%s</A>" uri id
+ let id' = Printf.sprintf "\005A href=\"%s\"\006%s\005/A\006" uri id
+ in
+ print ch id'
let handle_full_sym ch s uri desc =
let uri = try HExtlib.unopt uri
- with Failure _ -> "cic:/fakeuri.def(1)"
- in print ch "<A title=\"%s\" href=\"%s\">%s</A>" desc uri s
+ with Failure _ -> "cic:/fakeuri.def(1)" in
+ let s' = Printf.sprintf "\005A title=\"%s\" href=\"%s\"\006%s\005/A\006" desc uri s
+ in
+ print ch s'
let handle_full_num ch num uri desc =
let uri = try HExtlib.unopt uri
with Failure _ -> "cic:/fakeuri.def(1)"
- in print ch "<A title=\"%s\" href=\"%s\">%s</A>" desc uri num
+ in
+ let num' = Printf.sprintf "\005A title=\"%s\" href=\"%s\"\006%s\005/A\006" desc uri num
+ in
+ print ch num'
let handle_tex ch s =
(* print "TEX:%s" (remove_left_quote (Ulexing.utf8_lexeme lexbuf)) *)
- print ch "%s" s
+ print ch s
let return_with_loc token begin_cnum end_cnum =
let flocation = HExtlib.floc_of_loc (begin_cnum,end_cnum) in
Ulexing.rollback lexbuf;
k lexbuf
-let handle_interpr loc literal interp outch =
+let handle_interpr loc literal interp outstr =
try
(match DisambiguateTypes.InterprEnv.find loc interp with
| GrafiteAst.Ident_alias (_,uri) ->
- handle_full_id outch literal uri
+ handle_full_id outstr literal uri
| GrafiteAst.Symbol_alias (_,uri,desc) ->
- handle_full_sym outch literal uri desc
+ handle_full_sym outstr literal uri desc
| GrafiteAst.Number_alias (uri,desc) ->
- handle_full_num outch literal uri desc)
- with Not_found -> print outch "%s" literal
+ handle_full_num outstr literal uri desc)
+ with Not_found -> print outstr literal
;;
-let level2_ast_token interp outch =
+let level2_ast_token interp outstr =
let rec aux nesting =
lexer
- | begincom -> (print outch "%s" (Ulexing.utf8_lexeme lexbuf);
+ | begincom -> (print outstr (Ulexing.utf8_lexeme lexbuf);
aux (nesting+1) lexbuf)
- | endcom -> (print outch "%s" (Ulexing.utf8_lexeme lexbuf);
+ | endcom -> (print outstr (Ulexing.utf8_lexeme lexbuf);
aux (max 0 (nesting-1)) lexbuf)
| eof -> return lexbuf ()
| _ -> if nesting > 0
- then (print outch "%s" (Ulexing.utf8_lexeme lexbuf);
+ then (print outstr (Ulexing.utf8_lexeme lexbuf);
aux nesting lexbuf)
else (Ulexing.rollback lexbuf;
basic_lexer lexbuf)
lexer
| ident -> let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in
let id = Ulexing.utf8_lexeme lexbuf in
- handle_interpr loc id interp outch;
+ handle_interpr loc id interp outstr;
aux 0 lexbuf
| variable_ident ->
let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in
let id = Ulexing.utf8_lexeme lexbuf in
- handle_interpr loc id interp outch;
+ handle_interpr loc id interp outstr;
aux 0 lexbuf
| pident -> let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in
let id = Ulexing.utf8_lexeme lexbuf in
- handle_interpr loc id interp outch;
+ handle_interpr loc id interp outstr;
aux 0 lexbuf
| number ->
let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in
let num = Ulexing.utf8_lexeme lexbuf in
- handle_interpr loc num interp outch;
+ handle_interpr loc num interp outstr;
aux 0 lexbuf
| tex_token ->
let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in
let s = Ulexing.utf8_lexeme lexbuf in
- handle_interpr loc s interp outch;
+ handle_interpr loc s interp outstr;
aux 0 lexbuf
- | qstring -> (print outch "%s" (Ulexing.utf8_lexeme lexbuf); aux 0 lexbuf)
+ | qstring -> (print outstr (Ulexing.utf8_lexeme lexbuf); aux 0 lexbuf)
| csymbol ->
let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in
let s = Ulexing.utf8_lexeme lexbuf in
- handle_interpr loc s interp outch;
+ handle_interpr loc s interp outstr;
aux 0 lexbuf
| eof -> return lexbuf ()
| _ ->
let loc = HExtlib.floc_of_loc (Ulexing.loc lexbuf) in
let s = Ulexing.utf8_lexeme lexbuf in
- handle_interpr loc s interp outch;
+ handle_interpr loc s interp outstr;
aux 0 lexbuf
in aux 0
(*let status =
List.fold_right StringSet.add (initial_keywords @ keywords) StringSet.empty
in*)
- mk_lexer (level2_ast_token outch interpr)
+ mk_lexer (level2_ast_token outstr interpr)
;;*)
-let mk_small_printer interpr outch =
+let mk_small_printer interpr outstr =
let initial_keywords =
[ "CProp"; "Prop"; "Type"; "Set"; "let"; "match";
"with"; "in"; "and"; "to"; "as"; "on"; "return"; "done";
(* let status =
List.fold_right StringSet.add initial_keywords StringSet.empty
in *)
- level2_ast_token interpr outch
+ level2_ast_token interpr outstr
;;
(* let mk_small_lexer interpr = mk_small_lexer interpr [];;*)
* unit Token.glexer*)
val mk_small_printer :
GrafiteAst.alias_spec DisambiguateTypes.InterprEnv.t ->
- out_channel -> Ulexing.lexbuf -> unit * Stdpp.location
+ string ref -> Ulexing.lexbuf -> unit * Stdpp.location
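Callers of mk_small_printer therefore switch from passing an out_channel to passing a
string ref and reading the accumulated text afterwards, as the later hunks of this patch
do. Roughly (sketch, assuming an interpretation environment interpr and a Ulexing lexbuf
in scope):

  let outstr = ref "" in
  ignore (SmallLexer.mk_small_printer interpr outstr lexbuf);
  prerr_endline !outstr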
if dir <> Some `RightToLeft then eval_input_notation status (l1,l2)
else status
in
+ let status =
if dir <> Some `LeftToRight then eval_output_notation status (l1,l2)
else status
+ in prerr_endline ("new grammar:\n" ^ (Print_grammar.ebnf_of_term status));
+ status
| GrafiteAst.Alias (loc, spec) ->
let diff =
(*CSC: Warning: this code should be factorized with the corresponding
val disambiguate_db = initial_status
method disambiguate_db = disambiguate_db
method set_disambiguate_db v = {< disambiguate_db = v >}
+ method reset_disambiguate_db () =
+ {< disambiguate_db = { self#disambiguate_db with interpr =
+ DisambiguateTypes.InterprEnv.empty } >}
method set_disambiguate_status
: 'status. #g_status as 'status -> 'self
= fun o -> ((self#set_interp_status o)#set_disambiguate_db o#disambiguate_db)
inherit g_status
inherit Interpretations.status
method set_disambiguate_db: db -> 'self
+ method reset_disambiguate_db: unit -> 'self
method set_disambiguate_status: #g_status -> 'self
end
PACKAGE = utf8_macros
PREDICATES =
-MAKE_TABLE_PACKAGES = helm-xml
+MAKE_TABLE_PACKAGES = "netstring helm-xml"
# modules which have both a .ml and a .mli
INTERFACE_FILES = utf8Macro.mli
$(H)$(OCAMLFIND) ocamlc -rectypes -package $(MAKE_TABLE_PACKAGES) -linkpkg -o $@ $^
utf8MacroTable.ml:
- ./make_table $@ $@.txt
+ ./make_table $@ $@.txt $(@:%.ml=%.js)
utf8MacroTable.cmo: utf8MacroTable.ml
@echo " OCAMLC $<"
$(H)@$(OCAMLFIND) ocamlc -c $<
let main () =
let oc = open_out Sys.argv.(1) in
let oc_doc = open_out Sys.argv.(2) in
+ let oc_js = open_out Sys.argv.(3) in
output_string oc "(* GENERATED by make_table: DO NOT EDIT! *)\n";
output_string oc_doc "(* GENERATED by make_table: DO NOT EDIT! *)\n";
+ output_string oc_js "/* GENERATED by make_table: DO NOT EDIT! */\n";
output_string oc "let macro2utf8 = Hashtbl.create 2000\n";
output_string oc "let utf82macro = Hashtbl.create 2000\n";
output_string oc "let data = [\n";
+ output_string oc_js "var macro2utf8 = new Object();\n";
+ output_string oc_js "var utf82macro = new Object();\n";
let macro2utf8 = parse_from_xml () in
Hashtbl.iter
(fun macro utf8 ->
+ let hescape s =
+ String.escaped (Netencoding.Html.encode ~in_enc:`Enc_utf8 () s) in
fprintf oc " \"%s\", \"%s\";\n" macro (String.escaped utf8);
- fprintf oc_doc "\\%s %s\n" macro utf8)
+ fprintf oc_doc "\\%s %s\n" macro utf8;
+ fprintf oc_js "macro2utf8[\"%s\"] = \"%s\";\n"
+ macro (hescape utf8);
+ fprintf oc_js "utf82macro[\"%s\"] = \"%s\";\n"
+ (hescape utf8) macro)
macro2utf8;
output_string oc " ];;\n";
output_string oc "let _ =\n";
List.iter
(fun (macro, utf8) ->
Hashtbl.replace macro2utf8 macro utf8;
- Hashtbl.add utf82macro utf8 macro)
+ Hashtbl.replace utf82macro utf8 macro)
data;;
matitaInit.mli \
$(NULL)
WMLI = \
+ matitaScriptLexer.mli \
matitaTypes.mli \
matitaMisc.mli \
applyTransformation.mli \
interpretation "leibnitz's equality" 'eq t x y = (eq t x y).
lemma eq_rect_r:
- ∀A.∀a,x.∀p:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> ? x a.∀P:
- ∀x:A. x <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> a → Type[2]. P a (refl A a) → P x p.
+ ∀A.∀a,x.∀p:\ 5A href="cic:/matita/basics/logic/eq.ind(1,0,2)"\ 6eq\ 5/A\ 6 ? x a.∀P:
+ ∀x:A. x \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 a → Type[2]. P a (refl A a) → P x p.
#A #a #x #p (cases p) // qed.
lemma eq_ind_r :
- ∀A.∀a.∀P: ∀x:A. x <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> a → Prop. P a (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> A a) →
- ∀x.∀p:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> ? x a.P x p.
- #A #a #P #p #x0 #p0; @(<A href="cic:/matita/basics/logic/eq_rect_r.def(1)">eq_rect_r</A> ? ? ? p0) //; qed.
+ ∀A.∀a.∀P: ∀x:A. x \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 a → Prop. P a (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 A a) →
+ ∀x.∀p:\ 5A href="cic:/matita/basics/logic/eq.ind(1,0,2)"\ 6eq\ 5/A\ 6 ? x a.P x p.
+ #A #a #P #p #x0 #p0; @(\ 5A href="cic:/matita/basics/logic/eq_rect_r.def(1)"\ 6eq_rect_r\ 5/A\ 6 ? ? ? p0) //; qed.
lemma eq_rect_Type2_r:
- ∀A.∀a.∀P: ∀x:A. <A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> ? x a → Type[2]. P a (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> A a) →
- ∀x.∀p:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> ? x a.P x p.
+ ∀A.∀a.∀P: ∀x:A. \ 5A href="cic:/matita/basics/logic/eq.ind(1,0,2)"\ 6eq\ 5/A\ 6 ? x a → Type[2]. P a (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 A a) →
+ ∀x.∀p:\ 5A href="cic:/matita/basics/logic/eq.ind(1,0,2)"\ 6eq\ 5/A\ 6 ? x a.P x p.
#A #a #P #H #x #p (generalize in match H) (generalize in match P)
cases p; //; qed.
-theorem rewrite_l: ∀A:Type[1].∀x.∀P:A → Type[1]. P x → ∀y. x <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> y → P y.
+theorem rewrite_l: ∀A:Type[1].∀x.∀P:A → Type[1]. P x → ∀y. x \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 y → P y.
#A #x #P #Hx #y #Heq (cases Heq); //; qed.
-theorem sym_eq: ∀A.∀x,y:A. x <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> y → y <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> x.
-#A #x #y #Heq @(<A href="cic:/matita/basics/logic/rewrite_l.def(1)">rewrite_l</A> A x (λz.z<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>x)); //; qed.
+theorem sym_eq: ∀A.∀x,y:A. x \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 y → y \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 x.
+#A #x #y #Heq @(\ 5A href="cic:/matita/basics/logic/rewrite_l.def(1)"\ 6rewrite_l\ 5/A\ 6 A x (λz.z\ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6x)); //; qed.
-theorem rewrite_r: ∀A:Type[1].∀x.∀P:A → Type[1]. P x → ∀y. y <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> x → P y.
-#A #x #P #Hx #y #Heq (cases (<A href="cic:/matita/basics/logic/sym_eq.def(2)">sym_eq</A> ? ? ? Heq)); //; qed.
+theorem rewrite_r: ∀A:Type[1].∀x.∀P:A → Type[1]. P x → ∀y. y \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 x → P y.
+#A #x #P #Hx #y #Heq (cases (\ 5A href="cic:/matita/basics/logic/sym_eq.def(2)"\ 6sym_eq\ 5/A\ 6 ? ? ? Heq)); //; qed.
-theorem eq_coerc: ∀A,B:Type[0].A→(A<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>B)→B.
+theorem eq_coerc: ∀A,B:Type[0].A→(A\ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6B)→B.
#A #B #Ha #Heq (elim Heq); //; qed.
-theorem trans_eq : ∀A.∀x,y,z:A. x <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> y → y <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> z → x <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> z.
+theorem trans_eq : ∀A.∀x,y,z:A. x \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 y → y \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 z → x \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 z.
#A #x #y #z #H1 #H2 >H1; //; qed.
-theorem eq_f: ∀A,B.∀f:A→B.∀x,y:A. x<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>y → f x <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> f y.
+theorem eq_f: ∀A,B.∀f:A→B.∀x,y:A. x\ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6y → f x \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 f y.
#A #B #f #x #y #H >H; //; qed.
(* deleterio per auto? *)
theorem eq_f2: ∀A,B,C.∀f:A→B→C.
-∀x1,x2:A.∀y1,y2:B. x1<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>x2 → y1<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>y2 → f x1 y1 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> f x2 y2.
-#A #B #C #f #x1 #x2 #y1 #y2 #E1 #E2 >E1; >E2; //; qed.
+∀x1,x2:A.∀y1,y2:B. x1\ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6x2 → y1\ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6y2 → f x1 y1 \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 f x2 y2.
+#A #B #C #f #x1 #x2 #y1 #y2 #E1 #E2 >E1; >E2; //; qed.
(* hint to genereric equality
definition eq_equality: equality ≝
λA. A → False. *)
inductive Not (A:Prop): Prop ≝
-nmk: (A → <A href="cic:/matita/basics/logic/False.ind(1,0,0)">False</A>) → Not A.
+nmk: (A → \ 5A href="cic:/matita/basics/logic/False.ind(1,0,0)"\ 6False\ 5/A\ 6) → Not A.
interpretation "logical not" 'not x = (Not x).
-theorem absurd : ∀A:Prop. A → <A title="logical not" href="cic:/fakeuri.def(1)">¬</A>A → <A href="cic:/matita/basics/logic/False.ind(1,0,0)">False</A>.
+theorem absurd : ∀A:Prop. A → \ 5A title="logical not" href="cic:/fakeuri.def(1)"\ 6¬\ 5/A\ 6A → \ 5A href="cic:/matita/basics/logic/False.ind(1,0,0)"\ 6False\ 5/A\ 6.
#A #H #Hn (elim Hn); /2/; qed.
(*
#A; #C; #H; #Hn; nelim (Hn H).
nqed. *)
-theorem not_to_not : ∀A,B:Prop. (A → B) → <A title="logical not" href="cic:/fakeuri.def(1)">¬</A>B →<A title="logical not" href="cic:/fakeuri.def(1)">¬</A>A.
+theorem not_to_not : ∀A,B:Prop. (A → B) → \ 5A title="logical not" href="cic:/fakeuri.def(1)"\ 6¬\ 5/A\ 6B →\ 5A title="logical not" href="cic:/fakeuri.def(1)"\ 6¬\ 5/A\ 6A.
/4/; qed.
(* inequality *)
interpretation "leibnitz's non-equality" 'neq t x y = (Not (eq t x y)).
-theorem sym_not_eq: ∀A.∀x,y:A. x <A title="leibnitz's non-equality" href="cic:/fakeuri.def(1)">≠</A> y → y <A title="leibnitz's non-equality" href="cic:/fakeuri.def(1)">≠</A> x.
+theorem sym_not_eq: ∀A.∀x,y:A. x \ 5A title="leibnitz's non-equality" href="cic:/fakeuri.def(1)"\ 6≠\ 5/A\ 6 y → y \ 5A title="leibnitz's non-equality" href="cic:/fakeuri.def(1)"\ 6≠\ 5/A\ 6 x.
/3/; qed.
(* and *)
interpretation "logical and" 'and x y = (And x y).
-theorem proj1: ∀A,B:Prop. A <A title="logical and" href="cic:/fakeuri.def(1)">∧</A> B → A.
+theorem proj1: ∀A,B:Prop. A \ 5A title="logical and" href="cic:/fakeuri.def(1)"\ 6∧\ 5/A\ 6 B → A.
#A #B #AB (elim AB) //; qed.
-theorem proj2: ∀ A,B:Prop. A <A title="logical and" href="cic:/fakeuri.def(1)">∧</A> B → B.
+theorem proj2: ∀ A,B:Prop. A \ 5A title="logical and" href="cic:/fakeuri.def(1)"\ 6∧\ 5/A\ 6 B → B.
#A #B #AB (elim AB) //; qed.
(* or *)
interpretation "logical or" 'or x y = (Or x y).
definition decidable : Prop → Prop ≝
-λ A:Prop. A <A title="logical or" href="cic:/fakeuri.def(1)">∨</A> <A title="logical not" href="cic:/fakeuri.def(1)">¬</A> A.
+λ A:Prop. A \ 5A title="logical or" href="cic:/fakeuri.def(1)"\ 6∨\ 5/A\ 6 \ 5A title="logical not" href="cic:/fakeuri.def(1)"\ 6¬\ 5/A\ 6 A.
(* exists *)
inductive ex (A:Type[0]) (P:A → Prop) : Prop ≝
(* iff *)
definition iff :=
- λ A,B. (A → B) <A title="logical and" href="cic:/fakeuri.def(1)">∧</A> (B → A).
+ λ A,B. (A → B) \ 5A title="logical and" href="cic:/fakeuri.def(1)"\ 6∧\ 5/A\ 6 (B → A).
interpretation "iff" 'iff a b = (iff a b).
definition R0 ≝ λT:Type[0].λt:T.t.
-definition R1 ≝ <A href="cic:/matita/basics/logic/eq_rect_Type0.fix(0,5,1)">eq_rect_Type0</A>.
+definition R1 ≝ \ 5A href="cic:/matita/basics/logic/eq_rect_Type0.fix(0,5,1)"\ 6eq_rect_Type0\ 5/A\ 6.
(* useless stuff *)
definition R2 :
∀T0:Type[0].
∀a0:T0.
- ∀T1:∀x0:T0. a0<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>x0 → Type[0].
- ∀a1:T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? a0).
- ∀T2:∀x0:T0. ∀p0:a0<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>x0. ∀x1:T1 x0 p0. <A href="cic:/matita/basics/logic/R1.def(2)">R1</A> ?? T1 a1 ? p0 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> x1 → Type[0].
- ∀a2:T2 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? a1).
+ ∀T1:∀x0:T0. a0\ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6x0 → Type[0].
+ ∀a1:T1 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 ? a0).
+ ∀T2:∀x0:T0. ∀p0:a0\ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6x0. ∀x1:T1 x0 p0. \ 5A href="cic:/matita/basics/logic/R1.def(2)"\ 6R1\ 5/A\ 6 ?? T1 a1 ? p0 \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 x1 → Type[0].
+ ∀a2:T2 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 ? a0) a1 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 ? a1).
∀b0:T0.
- ∀e0:a0 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> b0.
+ ∀e0:a0 \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 b0.
∀b1: T1 b0 e0.
- ∀e1:<A href="cic:/matita/basics/logic/R1.def(2)">R1</A> ?? T1 a1 ? e0 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> b1.
+ ∀e1:\ 5A href="cic:/matita/basics/logic/R1.def(2)"\ 6R1\ 5/A\ 6 ?? T1 a1 ? e0 \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 b1.
T2 b0 e0 b1 e1.
#T0 #a0 #T1 #a1 #T2 #a2 #b0 #e0 #b1 #e1
-@(<A href="cic:/matita/basics/logic/eq_rect_Type0.fix(0,5,1)">eq_rect_Type0</A> ????? e1)
-@(<A href="cic:/matita/basics/logic/R1.def(2)">R1</A> ?? ? ?? e0)
+@(\ 5A href="cic:/matita/basics/logic/eq_rect_Type0.fix(0,5,1)"\ 6eq_rect_Type0\ 5/A\ 6 ????? e1)
+@(\ 5A href="cic:/matita/basics/logic/R1.def(2)"\ 6R1\ 5/A\ 6 ?? ? ?? e0)
@a2
qed.
definition R3 :
∀T0:Type[0].
∀a0:T0.
- ∀T1:∀x0:T0. a0<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>x0 → Type[0].
- ∀a1:T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? a0).
- ∀T2:∀x0:T0. ∀p0:a0<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>x0. ∀x1:T1 x0 p0. <A href="cic:/matita/basics/logic/R1.def(2)">R1</A> ?? T1 a1 ? p0 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> x1 → Type[0].
- ∀a2:T2 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? a1).
- ∀T3:∀x0:T0. ∀p0:a0<A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A>x0. ∀x1:T1 x0 p0.∀p1:<A href="cic:/matita/basics/logic/R1.def(2)">R1</A> ?? T1 a1 ? p0 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> x1.
- ∀x2:T2 x0 p0 x1 p1.<A href="cic:/matita/basics/logic/R2.def(3)">R2</A> ???? T2 a2 x0 p0 ? p1 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> x2 → Type[0].
- ∀a3:T3 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? a1) a2 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? a2).
+ ∀T1:∀x0:T0. a0\ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6x0 → Type[0].
+ ∀a1:T1 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 ? a0).
+ ∀T2:∀x0:T0. ∀p0:a0\ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6x0. ∀x1:T1 x0 p0. \ 5A href="cic:/matita/basics/logic/R1.def(2)"\ 6R1\ 5/A\ 6 ?? T1 a1 ? p0 \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 x1 → Type[0].
+ ∀a2:T2 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 ? a0) a1 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 ? a1).
+ ∀T3:∀x0:T0. ∀p0:a0\ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6x0. ∀x1:T1 x0 p0.∀p1:\ 5A href="cic:/matita/basics/logic/R1.def(2)"\ 6R1\ 5/A\ 6 ?? T1 a1 ? p0 \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 x1.
+ ∀x2:T2 x0 p0 x1 p1.\ 5A href="cic:/matita/basics/logic/R2.def(3)"\ 6R2\ 5/A\ 6 ???? T2 a2 x0 p0 ? p1 \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 x2 → Type[0].
+ ∀a3:T3 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 ? a0) a1 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 ? a1) a2 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 ? a2).
∀b0:T0.
- ∀e0:a0 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> b0.
+ ∀e0:a0 \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 b0.
∀b1: T1 b0 e0.
- ∀e1:<A href="cic:/matita/basics/logic/R1.def(2)">R1</A> ?? T1 a1 ? e0 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> b1.
+ ∀e1:\ 5A href="cic:/matita/basics/logic/R1.def(2)"\ 6R1\ 5/A\ 6 ?? T1 a1 ? e0 \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 b1.
∀b2: T2 b0 e0 b1 e1.
- ∀e2:<A href="cic:/matita/basics/logic/R2.def(3)">R2</A> ???? T2 a2 b0 e0 ? e1 <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> b2.
+ ∀e2:\ 5A href="cic:/matita/basics/logic/R2.def(3)"\ 6R2\ 5/A\ 6 ???? T2 a2 b0 e0 ? e1 \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 b2.
T3 b0 e0 b1 e1 b2 e2.
#T0 #a0 #T1 #a1 #T2 #a2 #T3 #a3 #b0 #e0 #b1 #e1 #b2 #e2
-@(<A href="cic:/matita/basics/logic/eq_rect_Type0.fix(0,5,1)">eq_rect_Type0</A> ????? e2)
-@(<A href="cic:/matita/basics/logic/R2.def(3)">R2</A> ?? ? ???? e0 ? e1)
+@(\ 5A href="cic:/matita/basics/logic/eq_rect_Type0.fix(0,5,1)"\ 6eq_rect_Type0\ 5/A\ 6 ????? e2)
+@(\ 5A href="cic:/matita/basics/logic/R2.def(3)"\ 6R2\ 5/A\ 6 ?? ? ???? e0 ? e1)
@a3
qed.
definition R4 :
∀T0:Type[0].
∀a0:T0.
- ∀T1:∀x0:T0. <A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> T0 a0 x0 → Type[0].
- ∀a1:T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0).
- ∀T2:∀x0:T0. ∀p0:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T0 …) a0 x0. ∀x1:T1 x0 p0.<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T1 …) (<A href="cic:/matita/basics/logic/R1.def(2)">R1</A> T0 a0 T1 a1 x0 p0) x1 → Type[0].
- ∀a2:T2 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0)) a1).
- ∀T3:∀x0:T0. ∀p0:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T0 …) a0 x0. ∀x1:T1 x0 p0.∀p1:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T1 …) (<A href="cic:/matita/basics/logic/R1.def(2)">R1</A> T0 a0 T1 a1 x0 p0) x1.
- ∀x2:T2 x0 p0 x1 p1.<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T2 …) (<A href="cic:/matita/basics/logic/R2.def(3)">R2</A> T0 a0 T1 a1 T2 a2 x0 p0 x1 p1) x2 → Type[0].
- ∀a3:T3 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0)) a1)
- a2 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T2 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0)) a1)) a2).
- ∀T4:∀x0:T0. ∀p0:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T0 …) a0 x0. ∀x1:T1 x0 p0.∀p1:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T1 …) (<A href="cic:/matita/basics/logic/R1.def(2)">R1</A> T0 a0 T1 a1 x0 p0) x1.
- ∀x2:T2 x0 p0 x1 p1.∀p2:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T2 …) (<A href="cic:/matita/basics/logic/R2.def(3)">R2</A> T0 a0 T1 a1 T2 a2 x0 p0 x1 p1) x2.
- ∀x3:T3 x0 p0 x1 p1 x2 p2.∀p3:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T3 …) (<A href="cic:/matita/basics/logic/R3.def(4)">R3</A> T0 a0 T1 a1 T2 a2 T3 a3 x0 p0 x1 p1 x2 p2) x3.
+ ∀T1:∀x0:T0. \ 5A href="cic:/matita/basics/logic/eq.ind(1,0,2)"\ 6eq\ 5/A\ 6 T0 a0 x0 → Type[0].
+ ∀a1:T1 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 T0 a0).
+ ∀T2:∀x0:T0. ∀p0:\ 5A href="cic:/matita/basics/logic/eq.ind(1,0,2)"\ 6eq\ 5/A\ 6 (T0 …) a0 x0. ∀x1:T1 x0 p0.\ 5A href="cic:/matita/basics/logic/eq.ind(1,0,2)"\ 6eq\ 5/A\ 6 (T1 …) (\ 5A href="cic:/matita/basics/logic/R1.def(2)"\ 6R1\ 5/A\ 6 T0 a0 T1 a1 x0 p0) x1 → Type[0].
+ ∀a2:T2 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 T0 a0) a1 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 (T1 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 T0 a0)) a1).
+ ∀T3:∀x0:T0. ∀p0:\ 5A href="cic:/matita/basics/logic/eq.ind(1,0,2)"\ 6eq\ 5/A\ 6 (T0 …) a0 x0. ∀x1:T1 x0 p0.∀p1:\ 5A href="cic:/matita/basics/logic/eq.ind(1,0,2)"\ 6eq\ 5/A\ 6 (T1 …) (\ 5A href="cic:/matita/basics/logic/R1.def(2)"\ 6R1\ 5/A\ 6 T0 a0 T1 a1 x0 p0) x1.
+ ∀x2:T2 x0 p0 x1 p1.\ 5A href="cic:/matita/basics/logic/eq.ind(1,0,2)"\ 6eq\ 5/A\ 6 (T2 …) (\ 5A href="cic:/matita/basics/logic/R2.def(3)"\ 6R2\ 5/A\ 6 T0 a0 T1 a1 T2 a2 x0 p0 x1 p1) x2 → Type[0].
+ ∀a3:T3 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 T0 a0) a1 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 (T1 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 T0 a0)) a1)
+ a2 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 (T2 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 T0 a0) a1 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 (T1 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 T0 a0)) a1)) a2).
+ ∀T4:∀x0:T0. ∀p0:\ 5A href="cic:/matita/basics/logic/eq.ind(1,0,2)"\ 6eq\ 5/A\ 6 (T0 …) a0 x0. ∀x1:T1 x0 p0.∀p1:\ 5A href="cic:/matita/basics/logic/eq.ind(1,0,2)"\ 6eq\ 5/A\ 6 (T1 …) (\ 5A href="cic:/matita/basics/logic/R1.def(2)"\ 6R1\ 5/A\ 6 T0 a0 T1 a1 x0 p0) x1.
+ ∀x2:T2 x0 p0 x1 p1.∀p2:\ 5A href="cic:/matita/basics/logic/eq.ind(1,0,2)"\ 6eq\ 5/A\ 6 (T2 …) (\ 5A href="cic:/matita/basics/logic/R2.def(3)"\ 6R2\ 5/A\ 6 T0 a0 T1 a1 T2 a2 x0 p0 x1 p1) x2.
+ ∀x3:T3 x0 p0 x1 p1 x2 p2.∀p3:\ 5A href="cic:/matita/basics/logic/eq.ind(1,0,2)"\ 6eq\ 5/A\ 6 (T3 …) (\ 5A href="cic:/matita/basics/logic/R3.def(4)"\ 6R3\ 5/A\ 6 T0 a0 T1 a1 T2 a2 T3 a3 x0 p0 x1 p1 x2 p2) x3.
Type[0].
- ∀a4:T4 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0)) a1)
- a2 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T2 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0)) a1)) a2)
- a3 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T3 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0)) a1)
- a2 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T2 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0) a1 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> (T1 a0 (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> T0 a0)) a1)) a2))
+ ∀a4:T4 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 T0 a0) a1 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 (T1 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 T0 a0)) a1)
+ a2 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 (T2 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 T0 a0) a1 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 (T1 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 T0 a0)) a1)) a2)
+ a3 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 (T3 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 T0 a0) a1 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 (T1 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 T0 a0)) a1)
+ a2 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 (T2 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 T0 a0) a1 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 (T1 a0 (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 T0 a0)) a1)) a2))
a3).
∀b0:T0.
- ∀e0:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T0 …) a0 b0.
+ ∀e0:\ 5A href="cic:/matita/basics/logic/eq.ind(1,0,2)"\ 6eq\ 5/A\ 6 (T0 …) a0 b0.
∀b1: T1 b0 e0.
- ∀e1:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T1 …) (<A href="cic:/matita/basics/logic/R1.def(2)">R1</A> T0 a0 T1 a1 b0 e0) b1.
+ ∀e1:\ 5A href="cic:/matita/basics/logic/eq.ind(1,0,2)"\ 6eq\ 5/A\ 6 (T1 …) (\ 5A href="cic:/matita/basics/logic/R1.def(2)"\ 6R1\ 5/A\ 6 T0 a0 T1 a1 b0 e0) b1.
∀b2: T2 b0 e0 b1 e1.
- ∀e2:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T2 …) (<A href="cic:/matita/basics/logic/R2.def(3)">R2</A> T0 a0 T1 a1 T2 a2 b0 e0 b1 e1) b2.
+ ∀e2:\ 5A href="cic:/matita/basics/logic/eq.ind(1,0,2)"\ 6eq\ 5/A\ 6 (T2 …) (\ 5A href="cic:/matita/basics/logic/R2.def(3)"\ 6R2\ 5/A\ 6 T0 a0 T1 a1 T2 a2 b0 e0 b1 e1) b2.
∀b3: T3 b0 e0 b1 e1 b2 e2.
- ∀e3:<A href="cic:/matita/basics/logic/eq.ind(1,0,2)">eq</A> (T3 …) (<A href="cic:/matita/basics/logic/R3.def(4)">R3</A> T0 a0 T1 a1 T2 a2 T3 a3 b0 e0 b1 e1 b2 e2) b3.
+ ∀e3:\ 5A href="cic:/matita/basics/logic/eq.ind(1,0,2)"\ 6eq\ 5/A\ 6 (T3 …) (\ 5A href="cic:/matita/basics/logic/R3.def(4)"\ 6R3\ 5/A\ 6 T0 a0 T1 a1 T2 a2 T3 a3 b0 e0 b1 e1 b2 e2) b3.
T4 b0 e0 b1 e1 b2 e2 b3 e3.
#T0 #a0 #T1 #a1 #T2 #a2 #T3 #a3 #T4 #a4 #b0 #e0 #b1 #e1 #b2 #e2 #b3 #e3
-@(<A href="cic:/matita/basics/logic/eq_rect_Type0.fix(0,5,1)">eq_rect_Type0</A> ????? e3)
-@(<A href="cic:/matita/basics/logic/R3.def(4)">R3</A> ????????? e0 ? e1 ? e2)
+@(\ 5A href="cic:/matita/basics/logic/eq_rect_Type0.fix(0,5,1)"\ 6eq_rect_Type0\ 5/A\ 6 ????? e3)
+@(\ 5A href="cic:/matita/basics/logic/R3.def(4)"\ 6R3\ 5/A\ 6 ????????? e0 ? e1 ? e2)
@a4
qed.
(* TODO concrete definition by means of proof irrelevance *)
-axiom streicherK : ∀T:Type[1].∀t:T.∀P:t <A title="leibnitz's equality" href="cic:/fakeuri.def(1)">=</A> t → Type[2].P (<A href="cic:/matita/basics/logic/eq.con(0,1,2)">refl</A> ? t) → ∀p.P p.
\ No newline at end of file
+axiom streicherK : ∀T:Type[1].∀t:T.∀P:t \ 5A title="leibnitz's equality" href="cic:/fakeuri.def(1)"\ 6=\ 5/A\ 6 t → Type[2].P (\ 5A href="cic:/matita/basics/logic/eq.con(0,1,2)"\ 6refl\ 5/A\ 6 ? t) → ∀p.P p.
(* XXX: update script -- currently to stdout *)
let origbuf = Ulexing.from_utf8_channel (open_in fname) in
let interpr = GrafiteDisambiguate.get_interpr status#disambiguate_db in
- ignore (SmallLexer.mk_small_printer interpr outch origbuf);
+ let outstr = ref "" in
+ ignore (SmallLexer.mk_small_printer interpr outstr origbuf);
+ Printf.fprintf outch "%s" !outstr;
asserted
(* MATITA 1.0: debbo fare time_travel sulla ng_library?
LexiconSync.time_travel
in aux 0 (aux_basic None None None)
let get_hot_spots s = get_hot_spots (Ulexing.from_utf8_string s)
+
+(*let xmarkup = "\005"
+let ymarkup = "\006"
+let regexp tag_cont = ident_letter | xml_digit | "_" | "-"
+let regexp tagname = ident_letter tag_cont*
+let regexp opentag = xmarkup tagname
+let regexp closetag = xmarkup "/" tagname ymarkup
+let regexp attribute = tagname "=" qstring
+
+let rec to_xy =
+ lexer
+ | closetag -> xmarkup ^ ymarkup ^ xmarkup ^ to_xy lexbuf
+ | opentag ->
+ let tag =
+ Ulexing.utf8_sub_lexeme lexbuf 1
+ (Ulexing.lexeme_length lexbuf - 1) in
+ xmarkup ^ ymarkup ^ tag ^ to_xy_inner lexbuf
+ | eof -> ""
+ | _ -> let lexeme = Ulexing.utf8_lexeme lexbuf in
+ prerr_endline ("matched " ^ lexeme); lexeme ^ to_xy lexbuf
+
+and to_xy_inner =
+ lexer
+ | utf8_blank+ -> to_xy_inner lexbuf
+ | attribute -> let lexeme = Ulexing.utf8_lexeme lexbuf in
+ ymarkup ^ lexeme ^ to_xy_inner lexbuf
+ | ">" -> xmarkup ^ to_xy lexbuf
+ | eof -> ""
+ | _ -> to_xy lexbuf
+;;
+
+let to_xy s = to_xy (Ulexing.from_utf8_string s)*)
val get_hot_spots : string ->
(Stdpp.location * Stdpp.location * string option * string option) list
-
(status, parsed_text, unparsed_txt'),"",(*parsed_text_len*)
utf8_length parsed_text
+(*let save_moo status =
+ let script = MatitaScript.current () in
+ let baseuri = status#baseuri in
+ match script#bos, script#eos with
+ | true, _ -> ()
+ | _, true ->
+ GrafiteTypes.Serializer.serialize ~baseuri:(NUri.uri_of_string baseuri)
+ status
+ | _ -> clean_current_baseuri status
+;;*)
+
let sequent_size = ref 40;;
let include_paths = ref [];;
String.concat "\n\n" txt
;; *)
+let html_of_matita s =
+ prerr_endline ("input: " ^ s);
+ let patt1 = Str.regexp "\005" in
+ let patt2 = Str.regexp "\006" in
+ let patt3 = Str.regexp "<" in
+ let patt4 = Str.regexp ">" in
+ let res = Str.global_replace patt4 "&gt;" s in
+ prerr_endline ("output: " ^ res);
+ let res = Str.global_replace patt3 "&lt;" res in
+ prerr_endline ("output: " ^ res);
+ let res = Str.global_replace patt2 ">" res in
+ prerr_endline ("output: " ^ res);
+ let res = Str.global_replace patt1 "<" res in
+ prerr_endline ("output: " ^ res);
+ res
+;;
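html_of_matita is the server-side counterpart of the html_to_matita/matita_to_html
JavaScript functions later in this patch: literal angle brackets in the script text are
escaped to entities, while the \005/\006 markup becomes real <A> tags. A minimal sketch
of the expected behaviour (not part of the patch; "u" stands for a real cic: URI):

  let _input  = "a \005A href=\"u\"\006<\005/A\006 b"
  let _output = "a <A href=\"u\">&lt;</A> b"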
+
let heading_nl_RE = Pcre.regexp "^\\s*\n\\s*";;
let first_line s =
let filename = libdir uid ^ "/" ^ (cgi # argument_value "file") in
prerr_endline ("reading file " ^ filename);
let body =
- Netencoding.Html.encode ~in_enc:`Enc_utf8 ~prefer_name:false ()
- (read_file filename) in
+ (* Netencoding.Html.encode ~in_enc:`Enc_utf8 ~prefer_name:false ()
+ (html_of_matita (read_file filename)) in
+ *)
+ html_of_matita (read_file filename) in
prerr_endline ("sending:\nBEGIN\n" ^ body ^ "\nEND");
let body = "<file>" ^ body ^ "</file>" in
let baseuri, incpaths =
let advance0 sid text =
let status = MatitaAuthentication.get_status sid in
let history = MatitaAuthentication.get_history sid in
+ let status = status#reset_disambiguate_db () in
let (st,new_statements,new_unparsed),(* newtext TODO *) _,parsed_len =
- (* try *)
+ try
eval_statement !include_paths (*buffer*) status (`Raw text)
- (* with End_of_file -> raise Margin *)
+ with
+ | HExtlib.Localized (_,e) -> raise e
+ (*| End_of_file -> raise Margin *)
in
+ let stringbuf = Ulexing.from_utf8_string new_statements in
+ let interpr = GrafiteDisambiguate.get_interpr st#disambiguate_db in
+ let outstr = ref "" in
+ ignore (SmallLexer.mk_small_printer interpr outstr stringbuf);
+ prerr_endline ("parser output: " ^ !outstr);
MatitaAuthentication.set_status sid st;
MatitaAuthentication.set_history sid (st::history);
- parsed_len, new_unparsed, st
+ parsed_len,
+ Netencoding.Html.encode ~in_enc:`Enc_utf8 ~prefer_name:false
+ () (html_of_matita !outstr), new_unparsed, st
let register (cgi : Netcgi1_compat.Netcgi_types.cgi_activation) =
let cgi = Netcgi1_compat.Netcgi_types.of_compat_activation cgi in
();
let text = cgi#argument_value "body" in
prerr_endline ("body =\n" ^ text);
- let parsed_len, new_unparsed, new_status = advance0 sid text in
+ let parsed_len, new_parsed, new_unparsed, new_status = advance0 sid text in
let txt = output_status new_status in
let body =
- "<response><parsed length=\"" ^ (string_of_int parsed_len) ^ "\" />" ^ txt
+ "<response><parsed length=\"" ^ (string_of_int parsed_len) ^ "\">" ^
+ new_parsed ^ "</parsed>" ^ txt
^ "</response>"
in
prerr_endline ("sending advance response:\n" ^ body);
let sid = Uuidm.of_string (Netcgi.Cookie.value (env#cookie "session")) in
let sid = HExtlib.unopt sid in
- let rec aux parsed_len text =
+ let rec aux parsed_len parsed_txt text =
try
prerr_endline ("evaluating: " ^ first_line text);
- let plen,new_unparsed,_new_status = advance0 sid text in
- aux (parsed_len+plen) new_unparsed
+ let plen,new_parsed,new_unparsed,_new_status = advance0 sid text in
+ aux (parsed_len+plen) (parsed_txt ^ new_parsed) new_unparsed
with
- | _ -> parsed_len
+ | End_of_file ->
+ let status = MatitaAuthentication.get_status sid in
+ GrafiteTypes.Serializer.serialize
+ ~baseuri:(NUri.uri_of_string status#baseuri) status;
+ parsed_len, parsed_txt
+ | _ -> parsed_len, parsed_txt
in
cgi # set_header
~cache:`No_cache
();
let text = cgi#argument_value "body" in
prerr_endline ("body =\n" ^ text);
- let parsed_len = aux 0 text in
+ let parsed_len, new_parsed = aux 0 "" text in
let status = MatitaAuthentication.get_status sid in
let txt = output_status status in
let body =
- "<response><parsed length=\"" ^ (string_of_int parsed_len) ^ "\" />" ^ txt
+ "<response><parsed length=\"" ^ (string_of_int parsed_len) ^ "\">" ^
+ new_parsed ^ "</parsed>" ^ txt
^ "</response>"
in
+ (*let body =
+ "<response><parsed length=\"" ^ (string_of_int parsed_len) ^ "\" />" ^ txt
+ ^ "</response>"
+ in*)
prerr_endline ("sending goto bottom response:\n" ^ body);
cgi#out_channel#output_string body
with Not_found -> cgi#set_header ~status:`Internal_server_error
return (result);
}
-String.prototype.unescapeHTML = function()
+String.prototype.html_to_matita = function()
{
var patt1 = /<br(\/|)>/gi;
- var patt2 = /&lt;/gi;
- var patt3 = /&gt;/gi;
+ var patt2 = /</gi;
+ var patt3 = />/gi;
+ var patt4 = /&lt;/gi;
+ var patt5 = /&gt;/gi;
var result = this;
result = result.replace(patt1,"\n");
- result = result.replace(patt2,"<");
- result = result.replace(patt3,">");
+ result = result.replace(patt2,"\005");
+ result = result.replace(patt3,"\006");
+ result = result.replace(patt4,"<");
+ result = result.replace(patt5,">");
+ return (unescape(result));
+}
+
+String.prototype.matita_to_html = function()
+{
+ var patt1 = /</gi;
+ var patt2 = />/gi;
+ var patt3 = /\005/gi;
+ var patt4 = /\006/gi;
+ var result = this;
+ result = result.replace(patt1,"&lt;");
+ result = result.replace(patt2,"&gt;");
+ result = result.replace(patt3,"<");
+ result = result.replace(patt4,">");
return (unescape(result));
}
processor = function(xml) {
if (is_defined(xml)) {
// debug("advance: received response\nBEGIN\n" + req.responseText + "\nEND");
- len = parseInt(xml.getElementsByTagName("parsed")[0].getAttribute("length"));
+ parsed = xml.getElementsByTagName("parsed")[0];
+ len = parseInt(parsed.getAttribute("length"));
len0 = unlocked.innerHTML.length;
- unescaped = unlocked.innerHTML.unescapeHTML();
- parsedtxt = unescaped.substr(0,len);
+ unescaped = unlocked.innerHTML.html_to_matita();
+ parsedtxt = parsed.childNodes[0].nodeValue;
+ //parsedtxt = unescaped.substr(0,len);
unparsedtxt = unescaped.substr(len);
- locked.innerHTML = locked.innerHTML + parsedtxt;
- unlocked.innerHTML = unparsedtxt;
+ locked.innerHTML = locked.innerHTML + parsedtxt; //.matita_to_html();
+ unlocked.innerHTML = unparsedtxt.matita_to_html();
len1 = unlocked.innerHTML.length;
len2 = len0 - len1;
metasenv = xml.getElementsByTagName("meta");
resume();
};
pause();
- callServer("advance",processor,"body=" + (unlocked.innerHTML.unescapeHTML()).sescape());
+ callServer("advance",processor,"body=" + (unlocked.innerHTML.html_to_matita()).sescape());
}
processor = function(xml) {
if (is_defined(xml)) {
// debug("goto bottom: received response\nBEGIN\n" + req.responseText + "\nEND");
- len = parseInt(xml.getElementsByTagName("parsed")[0].getAttribute("length"));
+ parsed = xml.getElementsByTagName("parsed")[0];
+ len = parseInt(parsed.getAttribute("length"));
len0 = unlocked.innerHTML.length;
- unescaped = unlocked.innerHTML.unescapeHTML();
- parsedtxt = unescaped.substr(0,len);
+ unescaped = unlocked.innerHTML.html_to_matita();
+ parsedtxt = parsed.childNodes[0].nodeValue;
+ //parsedtxt = unescaped.substr(0,len);
unparsedtxt = unescaped.substr(len);
- locked.innerHTML = locked.innerHTML + parsedtxt;
- unlocked.innerHTML = unparsedtxt;
+ locked.innerHTML = locked.innerHTML + parsedtxt; //.matita_to_html();
+ unlocked.innerHTML = unparsedtxt.matita_to_html();
len1 = unlocked.innerHTML.length;
len2 = len0 - len1;
metasenv = xml.getElementsByTagName("meta");
resume();
};
pause();
- callServer("bottom",processor,"body=" + (unlocked.innerHTML.unescapeHTML()).sescape());
+ callServer("bottom",processor,"body=" + (unlocked.innerHTML.html_to_matita()).sescape());
}
}
processor = function(xml) {
if (is_defined(xml)) {
- // debug("goto pos: received response\nBEGIN\n" + req.responseText + "\nEND");
- len = parseInt(xml.getElementsByTagName("parsed")[0].getAttribute("length"));
+ parsed = xml.getElementsByTagName("parsed")[0];
+ len = parseInt(parsed.getAttribute("length"));
len0 = unlocked.innerHTML.length;
- unescaped = unlocked.innerHTML.unescapeHTML();
- parsedtxt = unescaped.substr(0,len);
+ unescaped = unlocked.innerHTML.html_to_matita();
+ parsedtxt = parsed.childNodes[0].nodeValue;
+ //parsedtxt = unescaped.substr(0,len);
unparsedtxt = unescaped.substr(len);
- locked.innerHTML = locked.innerHTML + parsedtxt;
- unlocked.innerHTML = unparsedtxt;
+ locked.innerHTML = locked.innerHTML + parsedtxt; //.matita_to_html();
+ unlocked.innerHTML = unparsedtxt.matita_to_html();
len1 = unlocked.innerHTML.length;
len2 = len0 - len1;
metasenv = xml.getElementsByTagName("meta");
}
}
pause();
- callServer("advance",processor,"body=" + (unlocked.innerHTML.unescapeHTML()).sescape());
+ callServer("advance",processor,"body=" + (unlocked.innerHTML.html_to_matita()).sescape());
}
function retract()
{
if (is_defined(xml)) {
locked.innerHTML = "";
+ debug(xml.documentElement.textContent);
unlocked.innerHTML = xml.documentElement.textContent;
} else {
debug("file open failed");