-requires="str unix camlp5.gramlib"
+requires="str unix camlp5.gramlib netstring"
version="0.0.1"
archive(byte)="extlib.cma"
archive(native)="extlib.cmxa"
let regexp hrefclose = xmarkup "/" [ 'A' 'a' ] ymarkup
let regexp tag_cont = ident_letter | xml_digit | "_" | "-"
-let regexp tagname = ident_letter tag_cont*
-let regexp opentag = xmarkup tagname
-let regexp closetag = xmarkup "/" tagname ymarkup
-let regexp attribute = tagname "=" qstring
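+(* generic tag names: a single ASCII letter other than A/a, or a name of at
+   least two characters; this keeps generic markup distinct from the <A>
+   hyperlink tags, while attribute names may still be arbitrary identifiers *)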
+let regexp gtagname = [ 'B' - 'Z' 'b' - 'z' ] | ident_letter tag_cont+
+let regexp attrname = ident_letter tag_cont*
+let regexp gopentag = xmarkup gtagname
+let regexp closetag = xmarkup "/" gtagname ymarkup
+let regexp attribute = attrname "=" qstring
let regexp generictag =
- opentag (utf8_blank+ attribute)* ymarkup
+ gopentag (utf8_blank+ attribute)* ymarkup
let regexp tex_token = '\\' ident
if desc <> None || href <> None
then
(let s,e = HExtlib.loc_of_floc loc in
+
prerr_endline (Printf.sprintf "*** [%d,%d] \"%s\",\"%s\""
s e (so_pp href) (so_pp desc));
LocalizeEnv.add loc (href,desc) loctable)
else loctable
;;
let level2_ast_token loctable status =
- let rec aux desc href =
+ let rec aux desc href in_tag =
lexer
| let_rec -> return lexbuf ("LETREC","")
| let_corec -> return lexbuf ("LETCOREC","")
| we_proved -> return lexbuf ("WEPROVED","")
| we_have -> return lexbuf ("WEHAVE","")
- | utf8_blank+ -> ligatures_token (aux desc href) lexbuf
+ | utf8_blank+ -> ligatures_token (aux desc href in_tag) lexbuf
| meta ->
let s = Ulexing.utf8_lexeme lexbuf in
return lexbuf ("META", String.sub s 1 (String.length s - 1))
| implicit -> return lexbuf ("IMPLICIT", "")
| placeholder -> return lexbuf ("PLACEHOLDER", "")
- | hreftag -> aux_in_tag None None lexbuf
- | hrefclose -> aux None None lexbuf
+ | hreftag -> aux_attr None None lexbuf
+ | hrefclose -> return lexbuf ("LEXING_ERROR", Ulexing.utf8_lexeme lexbuf)
+ (* ignore other tags *)
| generictag
- | closetag -> ligatures_token (aux desc href) lexbuf
- | ident -> loctable :=
- update_table (loc_of_buf lexbuf) desc href !loctable;
- handle_keywords lexbuf (fun x -> StringSet.mem x status) "IDENT"
+ | closetag -> ligatures_token (aux desc href in_tag) lexbuf
+ | ident -> if in_tag then
+ aux_close_tag desc href ("IDENT", Ulexing.utf8_lexeme lexbuf) lexbuf
+ else handle_keywords lexbuf (fun x -> StringSet.mem x status) "IDENT"
| variable_ident ->
return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf)
- | pident -> loctable :=
- update_table (loc_of_buf lexbuf) desc href !loctable;
- handle_keywords lexbuf (fun x -> StringSet.mem x status) "PIDENT"
- | number -> loctable :=
- update_table (loc_of_buf lexbuf) desc href !loctable;
- return lexbuf ("NUMBER", Ulexing.utf8_lexeme lexbuf)
- | tex_token -> loctable :=
- update_table (loc_of_buf lexbuf) desc href !loctable;
- return lexbuf (expand_macro lexbuf)
+ | pident -> if in_tag then
+ aux_close_tag desc href ("PIDENT", Ulexing.utf8_lexeme lexbuf) lexbuf
+ else handle_keywords lexbuf (fun x -> StringSet.mem x status) "PIDENT"
+  | number ->
+      let token = "NUMBER", Ulexing.utf8_lexeme lexbuf in
+      if in_tag then
+        aux_close_tag desc href token lexbuf
+      else return lexbuf token
+  | tex_token ->
+      let token = expand_macro lexbuf in
+      if in_tag then
+        aux_close_tag desc href token lexbuf
+      else return lexbuf token
| nreference -> return lexbuf ("NREF", Ulexing.utf8_lexeme lexbuf)
| uri -> return lexbuf ("URI", Ulexing.utf8_lexeme lexbuf)
| qstring ->
Ulexing.utf8_sub_lexeme lexbuf 2 (Ulexing.lexeme_length lexbuf - 4)
in
return lexbuf ("NOTE", comment) *)
- ligatures_token (aux desc href) lexbuf
+ ligatures_token (aux desc href in_tag) lexbuf
| begincomment -> return lexbuf ("BEGINCOMMENT","")
| endcomment -> return lexbuf ("ENDCOMMENT","")
| eof -> return_eoi lexbuf
- | _ -> loctable :=
- update_table (loc_of_buf lexbuf) desc href !loctable;
- return_symbol lexbuf (Ulexing.utf8_lexeme lexbuf)
-
- and aux_in_tag desc href = lexer
- | utf8_blank+ -> ligatures_token (aux_in_tag desc href) lexbuf
+  | _ ->
+      let token = "SYMBOL", Ulexing.utf8_lexeme lexbuf in
+      if in_tag then
+        aux_close_tag desc href token lexbuf
+      else return lexbuf token
+
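+  (* inside an <A ...> tag: collect the href and title attributes; at the
+     closing '>' resume normal lexing with in_tag = true *)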
+ and aux_attr desc href = lexer
+ | utf8_blank+ -> ligatures_token (aux_attr desc href) lexbuf
| href ->
- aux_in_tag desc
+ aux_attr desc
(Some (Ulexing.utf8_sub_lexeme lexbuf 6 (Ulexing.lexeme_length lexbuf - 7)))
lexbuf
| hreftitle ->
- aux_in_tag
+ aux_attr
(Some (Ulexing.utf8_sub_lexeme lexbuf 7 (Ulexing.lexeme_length lexbuf - 8)))
href lexbuf
- | ">" -> aux desc href lexbuf
- | _ -> aux None None lexbuf
- in aux None None
+ | ymarkup -> aux desc href true lexbuf
+ | _ -> return lexbuf ("LEXING_ERROR", Ulexing.utf8_lexeme lexbuf)
+
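+  (* the token inside an <A ...> element has been read: require the closing
+     </A> and record the href/title pair for this location in loctable *)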
+ and aux_close_tag desc href token = lexer
+ | hrefclose -> loctable := update_table (loc_of_buf lexbuf) desc href !loctable;
+ return lexbuf token
+ | _ -> return lexbuf ("LEXING_ERROR", Ulexing.utf8_lexeme lexbuf)
+ in aux None None false
let rec level1_pattern_token =
lexer
| hrefclose -> return lexbuf ("ATAGEND","")
| generictag
| closetag -> ligatures_token level1_pattern_token lexbuf
- | ident ->handle_keywords lexbuf (fun x -> List.mem x level1_keywords) "IDENT"
+ | ident -> handle_keywords lexbuf (fun x -> List.mem x level1_keywords) "IDENT"
| variable_ident -> return lexbuf ("IDENT", Ulexing.utf8_lexeme lexbuf)
| pident->handle_keywords lexbuf (fun x->List.mem x level1_keywords) "PIDENT"
| color -> return lexbuf ("COLOR", Ulexing.utf8_lexeme lexbuf)
let uri,_ = CicNotationLexer.LocalizeEnv.find loc
!loctable in
match uri with
- | Some u -> id, `Uri u
- | None -> id, `Ambiguous
+ | Some u ->
+             prerr_endline ("found interpretation for " ^ id ^ ": " ^ u);
+ id, `Uri u
+ | None ->
+             prerr_endline ("ambiguous identifier: " ^ id);
+ id, `Ambiguous
with
- | Not_found -> id, `Ambiguous ]];
+ | Not_found ->
+             prerr_endline ("identifier not found: " ^ id);
+ id, `Ambiguous ]];
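+  (* a numeric literal wrapped in an <A> tag carrying a title gets that title
+     as its interpretation, paired with a fake URI; other numbers get none *)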
+ gnum: [
+ [ n = NUMBER ->
+ try
+ match CicNotationLexer.LocalizeEnv.find loc !loctable with
+ | _uri, Some interpr -> n, Some (Some "cic:/fakeuri.def(1)",interpr)
+ | _ -> n,None
+ with
+ | Not_found -> n,None ]];
arg: [
[ LPAREN; names = LIST1 gident SEP SYMBOL ",";
SYMBOL ":"; ty = term; RPAREN ->
| u = URI -> return_term loc (Ast.Ident
(NUri.name_of_uri (NUri.uri_of_string u), `Uri u))
| r = NREF -> return_term loc (Ast.NRef (NReference.reference_of_string r))
- | n = NUMBER -> return_term loc (Ast.Num (n, None))
+ | (n,interpr) = gnum -> return_term loc (Ast.Num (n, interpr))
| IMPLICIT -> return_term loc (Ast.Implicit `JustOne)
| SYMBOL <:unicode<ldots>> -> return_term loc (Ast.Implicit `Vector)
| PLACEHOLDER -> return_term loc Ast.UserInput
| _, [] -> assert false
;;
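+(* utf8_parsed_text s floc: the substring of s spanned by the location floc
+   (extracted UTF-8 aware), together with its length in bytes *)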
+let utf8_parsed_text s floc =
+ let start, stop = loc_of_floc floc in
+ let len = stop - start in
+ let res = Netconversion.ustring_sub `Enc_utf8 start len s in
+ res, String.length res
+
val touch: string -> unit
val profiling_enabled: bool ref
+
+val utf8_parsed_text : string -> Stdpp.location -> string * int
module NRef = NReference
let debug_print s = prerr_endline (Lazy.force s);;
-let debug_print _ = ();;
+(* let debug_print _ = ();; *)
let cic_name_of_name = function
| Ast.Ident (n,_) -> n
matitaInit.mli \
$(NULL)
WMLI = \
- matitaScriptLexer.mli \
matitaTypes.mli \
matitaMisc.mli \
applyTransformation.mli \
title="Execute the script until the current position of the cursor."></A>
<A href="javascript:gotoBottom()"><IMG src="icons/bottom.png"
id="bottom" alt="Bottom" title="Execute the whole script."></A>
- <INPUT type="BUTTON" value="Browse library" id="showdialog" ONCLICK="showLibrary()"></p>
+ <INPUT type="BUTTON" value="Browse library" id="showdialog" ONCLICK="showLibrary()">
+ <INPUT type="BUTTON" value="Save" id="savebutton" ONCLICK="saveFile()">
+      <INPUT type="BUTTON" value="Commit" id="commitbutton" ONCLICK="commitAll()"></p>
<!-- <INPUT type="BUTTON" value="show sequent" id="showseq" ONCLICK="showSequent()">
<INPUT type="BUTTON" value="hide sequent" id="hideseq" ONCLICK="hideSequent()">
<INPUT type="BUTTON" value="selection test" id="hideseq" ONCLICK="test()"> -->
end
;;
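+(* eos status s: true iff s (the part of the script that has not been executed
+   yet) contains only comments and whitespace; parse errors are conservatively
+   reported as false *)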
+let eos status s =
+ let only_dust_RE = Pcre.regexp "^(\\s|\n|%%[^\n]*\n)*$" in
+ let rec is_there_only_comments status s =
+ if Pcre.pmatch ~rex:only_dust_RE s then raise End_of_file;
+ let strm =
+ GrafiteParser.parsable_statement status
+ (Ulexing.from_utf8_string s) in
+ match GrafiteParser.parse_statement status strm with
+ | GrafiteAst.Comment (loc,_) ->
+ let _,parsed_text_length = HExtlib.utf8_parsed_text s loc in
+ (* CSC: why +1 in the following lines ???? *)
+ let parsed_text_length = parsed_text_length + 1 in
+ let remain_len = String.length s - parsed_text_length in
+ let next = String.sub s parsed_text_length remain_len in
+ is_there_only_comments status next
+ | GrafiteAst.Executable _ -> false
+ in
+ try is_there_only_comments status s
+ with
+ | NCicLibrary.IncludedFileNotCompiled _
+ | HExtlib.Localized _
+ | CicNotationParser.Parse_error _ -> false
+ | End_of_file -> true
+ | Invalid_argument "Array.make" -> false
+;;
+
+
let assert_ng ~include_paths ?outch mapath =
snd (assert_ng ~include_paths ~already_included:[] ~compiling:[] ~asserted:[]
?outch mapath)
GrafiteTypes.status
val assert_ng: include_paths:string list -> ?outch:out_channel -> string -> bool
+
+val eos : GrafiteTypes.status -> string -> bool
exception SvnError of string;;
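+(* queue of the users whose working copy still has to be committed to svn;
+ * it is updated concurrently by the web threads, hence the mutex *)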
+let mutex = Mutex.create ();;
+
+let to_be_committed = ref [];;
+
let exec_process cmd =
let (stdout, stdin, stderr) as chs = Unix.open_process_full cmd [||] in
let outlines = ref [] in
errno, output ^ "\n\n" ^ errors
| _ -> assert false))
+(* this should only be executed for a freshly created user,
+ * so no critical section (CS) is needed *)
let checkout user =
let rt_dir = Helm_registry.get "matita.rt_base_dir" in
let repo = Helm_registry.get "matita.weblib" in
let to_be_removed = (Helm_registry.get "matita.rt_base_dir") ^ "/users/*" in
ignore (Sys.command ("rm -rf " ^ to_be_removed))
;;
+
+(* adds a user to the commit queue; concurrent instances possible, so we
+ * enclose the update in a CS
+ *)
+let add_user uid =
+ Mutex.lock mutex;
+ to_be_committed := uid::List.filter (fun x -> x <> uid) !to_be_committed;
+ Mutex.unlock mutex;
+;;
+
+(* this function and the next one should only be called by the server itself
+ * (or the admin) at a scheduled time, so there are no concurrent instances and
+ * no CS is needed; also, svn itself should already be safe as far as
+ * concurrency is concerned *)
+let commit user =
+ let rt_dir = Helm_registry.get "matita.rt_base_dir" in
+ let repo = Helm_registry.get "matita.weblib" in
+
+ let errno, outstr = exec_process
+ ("svn ci --message \"commit by user " ^ user ^ "\" " ^ rt_dir ^ "/users/" ^ user ^ "/")
+ in
+ if errno = 0 then ()
+ else raise (SvnError outstr)
+
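+(* commits the working copy of every queued user and returns the list of users
+ * whose commit failed *)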
+let do_global_commit () =
+ prerr_endline ("to be committed: " ^ String.concat " " !to_be_committed);
+ List.fold_left
+ (fun acc u ->
+ try
+ commit u;
+ acc
+ with
+ | SvnError outstr ->
+ prerr_endline outstr;
+ u::acc)
+ [] (List.rev !to_be_committed)
+;;
val html_of_library : string -> string
val reset_lib : unit -> unit
+
+(* val commit : string -> unit *)
+
+val add_user : string -> unit
+
+val do_global_commit : unit -> string list
| `Ast (st, text) -> st, text
in
let text_of_loc floc =
- let nonskipped_txt,_ = MatitaGtkMisc.utf8_parsed_text unparsed_text floc in
+ let nonskipped_txt,_ = HExtlib.utf8_parsed_text unparsed_text floc in
let start, stop = HExtlib.loc_of_floc floc in
let floc = HExtlib.floc_of_loc (0, start) in
- let skipped_txt,_ = MatitaGtkMisc.utf8_parsed_text unparsed_text floc in
+ let skipped_txt,_ = HExtlib.utf8_parsed_text unparsed_text floc in
let floc = HExtlib.floc_of_loc (0, stop) in
- let txt,len = MatitaGtkMisc.utf8_parsed_text unparsed_text floc in
+ let txt,len = HExtlib.utf8_parsed_text unparsed_text floc in
txt,nonskipped_txt,skipped_txt,len
in
match st with
| _::[] -> true
| _ -> false
- method eos =
- let rec is_there_only_comments status s =
- if Pcre.pmatch ~rex:only_dust_RE s then raise Margin;
- let strm =
- GrafiteParser.parsable_statement status
- (Ulexing.from_utf8_string s)in
- match GrafiteParser.parse_statement status strm with
- | GrafiteAst.Comment (loc,_) ->
- let _,parsed_text_length = MatitaGtkMisc.utf8_parsed_text s loc in
- (* CSC: why +1 in the following lines ???? *)
- let parsed_text_length = parsed_text_length + 1 in
- let remain_len = String.length s - parsed_text_length in
- let next = String.sub s parsed_text_length remain_len in
- is_there_only_comments status next
- | GrafiteAst.Executable _ -> false
- in
- try is_there_only_comments self#status self#getFuture
- with
- | NCicLibrary.IncludedFileNotCompiled _
- | HExtlib.Localized _
- | CicNotationParser.Parse_error _ -> false
- | Margin | End_of_file -> true
- | Invalid_argument "Array.make" -> false
+ method eos = MatitaEngine.eos self#status self#getFuture
(* debug *)
method dump () =
let utf8_length = Netconversion.ustring_length `Enc_utf8
-let utf8_parsed_text s floc =
- let start, stop = HExtlib.loc_of_floc floc in
- let len = stop - start in
- let res = Netconversion.ustring_sub `Enc_utf8 start len s in
- res, String.length res
-
(*** from matitaScript.ml ***)
(* let only_dust_RE = Pcre.regexp "^(\\s|\n|%%[^\n]*\n)*$" *)
let _,lend = HExtlib.loc_of_floc floc in
let parsed_text, _parsed_text_len =
- utf8_parsed_text unparsed_text (HExtlib.floc_of_loc (0,lend)) in
+ HExtlib.utf8_parsed_text unparsed_text (HExtlib.floc_of_loc (0,lend)) in
let byte_parsed_text_len = String.length parsed_text in
let unparsed_txt' =
String.sub unparsed_text byte_parsed_text_len
let sid = Uuidm.of_string (Netcgi.Cookie.value (env#cookie "session")) in
let sid = HExtlib.unopt sid in
let uid = MatitaAuthentication.user_of_session sid in
+ (*
cgi # set_header
~cache:`No_cache
~content_type:"text/xml; charset=\"utf-8\""
();
+ *)
let filename = libdir uid ^ "/" ^ (cgi # argument_value "file") in
prerr_endline ("reading file " ^ filename);
let body =
include_paths := incpaths;
let status = MatitaAuthentication.get_status sid in
MatitaAuthentication.set_status sid (status#set_baseuri baseuri);
+ cgi # set_header
+ ~cache:`No_cache
+ ~content_type:"text/xml; charset=\"utf-8\""
+ ();
cgi#out_channel#output_string body;
with
| Not_found _ ->
cgi # set_header
~status:`Internal_server_error
~cache:`No_cache
- ~content_type:"text/xml; charset=\"utf-8\""
+ ~content_type:"text/html; charset=\"utf-8\""
());
cgi#out_channel#commit_work()
;;
let advance0 sid text =
let status = MatitaAuthentication.get_status sid in
- let history = MatitaAuthentication.get_history sid in
let status = status#reset_disambiguate_db () in
let (st,new_statements,new_unparsed),(* newtext TODO *) _,parsed_len =
try
ignore (SmallLexer.mk_small_printer interpr outstr stringbuf);
prerr_endline ("parser output: " ^ !outstr);
MatitaAuthentication.set_status sid st;
- MatitaAuthentication.set_history sid (st::history);
parsed_len,
Netencoding.Html.encode ~in_enc:`Enc_utf8 ~prefer_name:false
() (html_of_matita !outstr), new_unparsed, st
let text = read_file (rt_path () ^ "/logout.html") in
cgi#out_channel#output_string text
with
+ | Not_found _ ->
+ cgi # set_header
+ ~status:`Internal_server_error
+ ~cache:`No_cache
+ ~content_type:"text/html; charset=\"utf-8\""
+ ());
+ cgi#out_channel#commit_work()
+;;
+
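+(* "save" service: writes the current script (locked ^ unlocked) to the user's
+ * file; if the whole script has been executed, the proof objects are
+ * serialized and the user is queued for the next svn commit *)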
+let save (cgi : Netcgi1_compat.Netcgi_types.cgi_activation) =
+ let cgi = Netcgi1_compat.Netcgi_types.of_compat_activation cgi in
+ let env = cgi#environment in
+ (try
+ let sid = Uuidm.of_string (Netcgi.Cookie.value (env#cookie "session")) in
+ let sid = HExtlib.unopt sid in
+ let status = MatitaAuthentication.get_status sid in
+ let uid = MatitaAuthentication.user_of_session sid in
+ assert (cgi#arguments <> []);
+ let locked = cgi#argument_value "locked" in
+ let unlocked = cgi#argument_value "unlocked" in
+ let filename = libdir uid ^ "/" ^ (cgi # argument_value "file") in
+ prerr_endline ("Matita will save the file for user " ^ uid);
+ let oc = open_out filename in
+ output_string oc (locked ^ unlocked);
+ close_out oc;
+ if MatitaEngine.eos status unlocked then
+ begin
+ prerr_endline ("serializing proof objects...");
+ GrafiteTypes.Serializer.serialize
+ ~baseuri:(NUri.uri_of_string status#baseuri) status;
+ prerr_endline ("adding to the commit queue...");
+ MatitaFilesystem.add_user uid;
+ prerr_endline ("done.");
+ end;
+ cgi # set_header
+ ~cache:`No_cache
+ ~content_type:"text/xml; charset=\"utf-8\""
+ ();
+ cgi#out_channel#output_string "<response>ok</response>"
+ with
+ | Not_found _ ->
+ cgi # set_header
+ ~status:`Internal_server_error
+ ~cache:`No_cache
+ ~content_type:"text/xml; charset=\"utf-8\""
+ ());
+ cgi#out_channel#commit_work()
+;;
+
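+(* "commit" service: performs a global svn commit of all queued users; the
+ * users whose commit failed are only logged on the server side *)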
+let initiate_commit (cgi : Netcgi1_compat.Netcgi_types.cgi_activation) =
+ let cgi = Netcgi1_compat.Netcgi_types.of_compat_activation cgi in
+ let env = cgi#environment in
+ (try
+ let errors = MatitaFilesystem.do_global_commit () in
+ prerr_endline ("commit errors: " ^ (String.concat " " errors));
+ cgi # set_header
+ ~cache:`No_cache
+ ~content_type:"text/xml; charset=\"utf-8\""
+ ();
+ cgi#out_channel#output_string "<response>ok</response>"
+ with
| Not_found _ ->
cgi # set_header
~status:`Internal_server_error
(try
let sid = Uuidm.of_string (Netcgi.Cookie.value (env#cookie "session")) in
let sid = HExtlib.unopt sid in
+ (*
cgi # set_header
~cache:`No_cache
~content_type:"text/xml; charset=\"utf-8\""
();
+ *)
let text = cgi#argument_value "body" in
prerr_endline ("body =\n" ^ text);
+ let history = MatitaAuthentication.get_history sid in
let parsed_len, new_parsed, new_unparsed, new_status = advance0 sid text in
+ MatitaAuthentication.set_history sid (new_status::history);
let txt = output_status new_status in
let body =
"<response><parsed length=\"" ^ (string_of_int parsed_len) ^ "\">" ^
^ "</response>"
in
prerr_endline ("sending advance response:\n" ^ body);
+ cgi # set_header
+ ~cache:`No_cache
+ ~content_type:"text/xml; charset=\"utf-8\""
+ ();
cgi#out_channel#output_string body
with
| Not_found _ ->
(try
let sid = Uuidm.of_string (Netcgi.Cookie.value (env#cookie "session")) in
let sid = HExtlib.unopt sid in
+ let history = MatitaAuthentication.get_history sid in
let rec aux parsed_len parsed_txt text =
try
let status = MatitaAuthentication.get_status sid in
GrafiteTypes.Serializer.serialize
~baseuri:(NUri.uri_of_string status#baseuri) status;
+ if parsed_len > 0 then
+ MatitaAuthentication.set_history sid (status::history);
parsed_len, parsed_txt
| _ -> parsed_len, parsed_txt
- in
+ in
+ (*
cgi # set_header
~cache:`No_cache
~content_type:"text/xml; charset=\"utf-8\""
();
+ *)
let text = cgi#argument_value "body" in
prerr_endline ("body =\n" ^ text);
let parsed_len, new_parsed = aux 0 "" text in
^ "</response>"
in*)
prerr_endline ("sending goto bottom response:\n" ^ body);
+ cgi # set_header
+ ~cache:`No_cache
+ ~content_type:"text/xml; charset=\"utf-8\""
+ ();
cgi#out_channel#output_string body
with Not_found -> cgi#set_header ~status:`Internal_server_error
~cache:`No_cache
let retract (cgi : Netcgi1_compat.Netcgi_types.cgi_activation) =
let cgi = Netcgi1_compat.Netcgi_types.of_compat_activation cgi in
let env = cgi#environment in
+ prerr_endline "executing retract";
(try
let sid = Uuidm.of_string (Netcgi.Cookie.value (env#cookie "session")) in
let sid = HExtlib.unopt sid in
+ (*
cgi # set_header
~cache:`No_cache
~content_type:"text/xml; charset=\"utf-8\""
();
+ *)
let history = MatitaAuthentication.get_history sid in
let new_history,new_status =
match history with
history, status
| [_] -> (prerr_endline "singleton";failwith "retract")
| _ -> (prerr_endline "nil"; assert false) in
+ prerr_endline "before time_travel";
NCicLibrary.time_travel new_status;
+ prerr_endline "after time travel";
MatitaAuthentication.set_history sid new_history;
MatitaAuthentication.set_status sid new_status;
prerr_endline ("after retract history.length = " ^
string_of_int (List.length new_history));
let body = output_status new_status in
+ cgi # set_header
+ ~cache:`No_cache
+ ~content_type:"text/xml; charset=\"utf-8\""
+ ();
cgi#out_channel#output_string body
with _ -> cgi#set_header ~status:`Internal_server_error
~cache:`No_cache
let sid = Uuidm.of_string (Netcgi.Cookie.value (env#cookie "session")) in
let sid = HExtlib.unopt sid in
+ (*
cgi # set_header
~cache:`No_cache
~content_type:"text/html; charset=\"utf-8\""
();
+ *)
let uid = MatitaAuthentication.user_of_session sid in
let html = MatitaFilesystem.html_of_library uid in
+ cgi # set_header
+ ~cache:`No_cache
+ ~content_type:"text/html; charset=\"utf-8\""
+ ();
cgi#out_channel#output_string
((*
"<html><head>\n" ^
dyn_translator = (fun _ -> ""); (* not needed *)
dyn_accept_all_conditionals = false;
} in
+ let do_save =
+ { Nethttpd_services.dyn_handler = (fun _ -> save);
+ dyn_activation = Nethttpd_services.std_activation `Std_activation_buffered;
+ dyn_uri = None; (* not needed *)
+ dyn_translator = (fun _ -> ""); (* not needed *)
+ dyn_accept_all_conditionals = false;
+ } in
+ let do_commit =
+ { Nethttpd_services.dyn_handler = (fun _ -> initiate_commit);
+ dyn_activation = Nethttpd_services.std_activation `Std_activation_buffered;
+ dyn_uri = None; (* not needed *)
+ dyn_translator = (fun _ -> ""); (* not needed *)
+ dyn_accept_all_conditionals = false;
+ } in
let nethttpd_factory =
Nethttpd_plex.nethttpd_factory
; "login", do_login
; "logout", do_logout
; "reset", do_resetlib
- ; "viewlib", do_viewlib]
+ ; "viewlib", do_viewlib
+ ; "save", do_save
+ ; "commit", do_commit]
() in
MatitaInit.initialize_all ();
MatitaAuthentication.deserialize ();
var dialogTitle;
var dialogContent;
var metasenv = "";
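+// plain-text copy of the contents of the locked (already executed) region;
+// the locked area is always re-rendered from this value rather than read back
+// from locked.innerHTML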
+var lockedbackup = "";
+var current_fname; // file currently being edited (set by openFile, tested by saveFile)
function initialize()
{
}
}
+// sescape escapes only the characters that would break the POST body; using
+// the standard escape() would also encode unicode characters in a form that
+// the server is unable to unescape
String.prototype.sescape = function() {
var patt1 = /%/gi;
var patt2 = /=/gi;
// debug("advance: received response\nBEGIN\n" + req.responseText + "\nEND");
parsed = xml.getElementsByTagName("parsed")[0];
len = parseInt(parsed.getAttribute("length"));
- len0 = unlocked.innerHTML.length;
+ // len0 = unlocked.innerHTML.length;
unescaped = unlocked.innerHTML.html_to_matita();
parsedtxt = parsed.childNodes[0].nodeValue;
//parsedtxt = unescaped.substr(0,len);
unparsedtxt = unescaped.substr(len);
- locked.innerHTML = locked.innerHTML + parsedtxt; //.matita_to_html();
+ lockedbackup += parsedtxt;
+ locked.innerHTML = lockedbackup;
unlocked.innerHTML = unparsedtxt.matita_to_html();
- len1 = unlocked.innerHTML.length;
- len2 = len0 - len1;
+ // len1 = unlocked.innerHTML.length;
+ // len2 = len0 - len1;
+ len2 = parsedtxt.length;
metasenv = xml.getElementsByTagName("meta");
populate_goalarray(metasenv);
statements = listcons(len2,statements);
// debug("goto bottom: received response\nBEGIN\n" + req.responseText + "\nEND");
parsed = xml.getElementsByTagName("parsed")[0];
len = parseInt(parsed.getAttribute("length"));
- len0 = unlocked.innerHTML.length;
- unescaped = unlocked.innerHTML.html_to_matita();
- parsedtxt = parsed.childNodes[0].nodeValue;
- //parsedtxt = unescaped.substr(0,len);
- unparsedtxt = unescaped.substr(len);
- locked.innerHTML = locked.innerHTML + parsedtxt; //.matita_to_html();
- unlocked.innerHTML = unparsedtxt.matita_to_html();
- len1 = unlocked.innerHTML.length;
- len2 = len0 - len1;
- metasenv = xml.getElementsByTagName("meta");
- populate_goalarray(metasenv);
- statements = listcons(len2,statements);
- unlocked.scrollIntoView(true);
+ if (len > 0) {
+ // len0 = unlocked.innerHTML.length;
+ unescaped = unlocked.innerHTML.html_to_matita();
+ parsedtxt = parsed.childNodes[0].nodeValue;
+ //parsedtxt = unescaped.substr(0,len);
+ unparsedtxt = unescaped.substr(len);
+ lockedbackup += parsedtxt;
+ locked.innerHTML = lockedbackup; //.matita_to_html();
+ unlocked.innerHTML = unparsedtxt.matita_to_html();
+ // len1 = unlocked.innerHTML.length;
+ len2 = parsedtxt.length;
+ metasenv = xml.getElementsByTagName("meta");
+ populate_goalarray(metasenv);
+ if (len2 > 0)
+ statements = listcons(len2,statements);
+ unlocked.scrollIntoView(true);
+ }
} else {
debug("goto bottom failed");
}
if (is_defined(xml)) {
parsed = xml.getElementsByTagName("parsed")[0];
len = parseInt(parsed.getAttribute("length"));
- len0 = unlocked.innerHTML.length;
+ // len0 = unlocked.innerHTML.length;
unescaped = unlocked.innerHTML.html_to_matita();
parsedtxt = parsed.childNodes[0].nodeValue;
//parsedtxt = unescaped.substr(0,len);
unparsedtxt = unescaped.substr(len);
- locked.innerHTML = locked.innerHTML + parsedtxt; //.matita_to_html();
+ lockedbackup += parsedtxt;
+ locked.innerHTML = lockedbackup; //.matita_to_html();
unlocked.innerHTML = unparsedtxt.matita_to_html();
- len1 = unlocked.innerHTML.length;
- len2 = len0 - len1;
+ // len1 = unlocked.innerHTML.length;
+ len2 = parsedtxt.length;
metasenv = xml.getElementsByTagName("meta");
// populate_goalarray(metasenv);
statements = listcons(len2,statements);
// debug("advance: received response\nBEGIN\n" + req.responseText + "\nEND");
statementlen = parseInt(listhd(statements));
statements = listtl(statements);
+ /*
lockedlen = locked.innerHTML.length - statementlen;
statement = locked.innerHTML.substr(lockedlen, statementlen);
locked.innerHTML = locked.innerHTML.substr(0,lockedlen);
unlocked.innerHTML = statement + unlocked.innerHTML;
+ */
+ lockedlen = lockedbackup.length - statementlen;
+ statement = lockedbackup.substr(lockedlen, statementlen);
+ lockedbackup = lockedbackup.substr(0,lockedlen);
+ locked.innerHTML = lockedbackup;
+ unlocked.innerHTML = statement + unlocked.innerHTML;
metasenv = xml.getElementsByTagName("meta");
populate_goalarray(metasenv);
unlocked.scrollIntoView(true);
processor = function(xml)
{
if (is_defined(xml)) {
- locked.innerHTML = "";
+ lockedbackup = "";
+ locked.innerHTML = lockedbackup;
unlocked.innerHTML = xml.documentElement.textContent;
} else {
debug("file open failed");
processor = function(xml)
{
if (is_defined(xml)) {
- locked.innerHTML = "";
+       lockedbackup = "";
+ locked.innerHTML = lockedbackup;
debug(xml.getElementsByTagName("file")[0].childNodes[0].nodeValue);
unlocked.innerHTML = xml.getElementsByTagName("file")[0].childNodes[0].nodeValue;
}
};
dialogBox.style.display = "none";
+ current_fname = thefile;
callServer("open",processor,"file=" + escape(thefile));
}
}
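+// saveFile: sends the locked and unlocked parts of the file currently being
+// edited to the server's "save" service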
+function saveFile()
+{
+ processor = function(xml) {
+ if (is_defined(xml)) {
+ debug("file saved!");
+ } else {
+ debug("save file failed");
+ }
+ resume();
+ };
+ if (is_defined(current_fname)) {
+ pause();
+ callServer("save",processor,"file=" + escape(current_fname) +
+ "&locked=" + (locked.innerHTML.html_to_matita()).sescape() +
+ "&unlocked=" + (unlocked.innerHTML.html_to_matita()).sescape());
+ }
+ else { debug("no file selected"); }
+}
+
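+// commitAll: asks the server to svn-commit the saved work of all users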
+function commitAll()
+{
+ processor = function(xml) {
+ if (is_defined(xml)) {
+ debug("commit succeeded(?)");
+ } else {
+ debug("commit failed!");
+ }
+ resume();
+ };
+ pause();
+ callServer("commit",processor);
+}
+
var goalcell;
function hideSequent() {
handler = "reset";
}
};
+ uri {
+ path = "/save";
+ service {
+ type = "dynamic";
+ handler = "save";
+ }
+ };
+ uri {
+ path = "/commit";
+ service {
+ type = "dynamic";
+ handler = "commit";
+ }
+ };
};
};
workload_manager {