(*
- * Copyright (C) 2003:
+ * Copyright (C) 2003-2004:
* Stefano Zacchiroli <zack@cs.unibo.it>
* for the HELM Team http://helm.cs.unibo.it/
*
* http://helm.cs.unibo.it/
*)
-open Http_getter_common;;
-open Http_getter_misc;;
-open Http_getter_types;;
-open Http_getter_debugger;;
-open Printf;;
+open Printf
- (* constants *)
+open Http_getter_common
+open Http_getter_misc
+open Http_getter_debugger
+open Http_getter_types
-let common_headers = [
- "Cache-Control", "no-cache";
- "Pragma", "no-cache";
- "Expires", "0"
-]
-
- (* HTTP queries argument parsing *)
-
- (* parse encoding ("format" parameter), default is Enc_normal *)
-let parse_enc (req: Http_types.request) =
- try
- (match req#param "format" with
- | "normal" -> Enc_normal
- | "gz" -> Enc_gzipped
- | s -> raise (Http_getter_bad_request ("Invalid format: " ^ s)))
- with Http_types.Param_not_found _ -> Enc_normal
-;;
- (* parse "patch_dtd" parameter, default is true *)
-let parse_patch (req: Http_types.request) =
- try
- (match req#param "patch_dtd" with
- | s when String.lowercase s = "yes" -> true
- | s when String.lowercase s = "no" -> false
- | s -> raise (Http_getter_bad_request ("Invalid patch_dtd value: " ^ s)))
- with Http_types.Param_not_found _ -> true
-;;
- (* parse output format ("format" parameter), no default value *)
-let parse_output_format (req: Http_types.request) =
- match req#param "format" with
- | s when String.lowercase s = "txt" -> Fmt_text
- | s when String.lowercase s = "xml" -> Fmt_xml
- | s -> raise (Http_getter_bad_request ("Invalid /ls format: " ^ s))
-;;
- (* parse "baseuri" format for /ls method, no default value *)
-let parse_ls_uri =
- let parse_ls_RE = Pcre.regexp "^(\\w+):(.*)$" in
- let trailing_slash_RE = Pcre.regexp "/+$" in
- let wrong_uri uri =
- raise (Http_getter_bad_request ("Invalid /ls baseuri: " ^ uri))
- in
- fun (req: Http_types.request) ->
- let baseuri = req#param "baseuri" in
- try
- let subs =
- Pcre.extract ~rex:parse_ls_RE
- (Pcre.replace ~rex:trailing_slash_RE baseuri)
- in
- (match (subs.(1), subs.(2)) with
- | "cic", uri -> Cic uri
- | "theory", uri -> Theory uri
- | _ -> wrong_uri baseuri)
- with Not_found -> wrong_uri baseuri
-;;
+let (index_line_sep_RE, index_sep_RE, trailing_types_RE,
+ heading_cic_RE, heading_theory_RE, heading_nuprl_RE,
+ heading_rdf_cic_RE, heading_rdf_theory_RE) =
+ (Pcre.regexp "[ \t]+", Pcre.regexp "\r\n|\r|\n",
+ Pcre.regexp "\\.types$",
+ Pcre.regexp "^cic:", Pcre.regexp "^theory:", Pcre.regexp "^nuprl:",
+ Pcre.regexp "^helm:rdf.*//cic:", Pcre.regexp "^helm:rdf.*//theory:")
(* global maps, shared by all threads *)
-let cic_map = new Http_getter_map.map Http_getter_env.cic_dbm in
-let nuprl_map = new Http_getter_map.map Http_getter_env.nuprl_dbm in
-let rdf_map = new Http_getter_map.map Http_getter_env.rdf_dbm in
-let xsl_map = new Http_getter_map.map Http_getter_env.xsl_dbm in
+let cic_map = new Http_getter_map.map Http_getter_env.cic_dbm
+let nuprl_map = new Http_getter_map.map Http_getter_env.nuprl_dbm
+let rdf_map = new Http_getter_map.map Http_getter_env.rdf_dbm
+let xsl_map = new Http_getter_map.map Http_getter_env.xsl_dbm
-let maps = [ cic_map; nuprl_map; rdf_map; xsl_map ] in
-let close_maps () = List.iter (fun m -> m#close) maps in
-let clear_maps () = List.iter (fun m -> m#clear) maps in
-let sync_maps () = List.iter (fun m -> m#sync) maps in
+let maps = [ cic_map; nuprl_map; rdf_map; xsl_map ]
+let close_maps () = List.iter (fun m -> m#close) maps
+let clear_maps () = List.iter (fun m -> m#clear) maps
+let sync_maps () = List.iter (fun m -> m#sync) maps
let map_of_uri = function
| uri when is_cic_uri uri -> cic_map
| uri when is_nuprl_uri uri -> nuprl_map
| uri when is_rdf_uri uri -> rdf_map
| uri when is_xsl_uri uri -> xsl_map
- | uri -> raise (Http_getter_unresolvable_URI uri)
-in
-let resolve uri =
- try
- (map_of_uri uri)#resolve uri
- with Http_getter_map.Key_not_found _ ->
- raise (Http_getter_unresolvable_URI uri)
-in
-let register uri =
- (* Warning: this fail if uri is already registered *)
- (map_of_uri uri)#add uri
-in
-let return_all_foo_uris map doctype filter outchan =
- (** return all URIs contained in 'map' which satisfy predicate 'filter'; URIs
- are written in an XMLish format ('doctype' is the XML doctype) onto 'outchan'
- *)
- Http_daemon.send_basic_headers ~code:200 outchan;
- Http_daemon.send_header "Content-Type" "text/xml" outchan;
- Http_daemon.send_headers common_headers outchan;
- Http_daemon.send_CRLF outchan;
- output_string
- outchan
- (sprintf
-"<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>
-<!DOCTYPE %s SYSTEM \"%s/getdtd?uri=%s.dtd\">
-
-<%s>
-"
- doctype
- Http_getter_env.my_own_url
- doctype
- doctype);
- map#iter
- (fun uri _ ->
- if filter uri then
- output_string outchan (sprintf "\t<uri value=\"%s\" />\n" uri));
- output_string outchan (sprintf "</%s>\n" doctype)
-in
-let return_all_xml_uris = return_all_foo_uris cic_map "alluris" in
-let return_all_rdf_uris = return_all_foo_uris rdf_map "allrdfuris" in
-let return_ls =
- let (++) (oldann, oldtypes, oldbody, oldtree)
- (newann, newtypes, newbody, newtree) =
- ((if newann > oldann then newann else oldann),
- (if newtypes > oldtypes then newtypes else oldtypes),
- (if newbody > oldbody then newbody else oldbody),
- (if newtree > oldtree then newtree else oldtree))
- in
- let basepart_RE =
- Pcre.regexp
- "^([^.]*\\.[^.]*)((\\.body)|(\\.proof_tree)|(\\.types))?(\\.ann)?$"
- in
- let (types_RE, types_ann_RE, body_RE, body_ann_RE,
- proof_tree_RE, proof_tree_ann_RE) =
- (Pcre.regexp "\\.types$", Pcre.regexp "\\.types\\.ann$",
- Pcre.regexp "\\.body$", Pcre.regexp "\\.body\\.ann$",
- Pcre.regexp "\\.proof_tree$", Pcre.regexp "\\.proof_tree\\.ann$")
- in
- let (slash_RE, til_slash_RE, no_slashes_RE) =
- (Pcre.regexp "/", Pcre.regexp "^.*/", Pcre.regexp "^[^/]*$")
- in
- fun lsuri fmt outchan ->
- let pat =
- "^" ^
- (match lsuri with Cic p -> ("cic:" ^ p) | Theory p -> ("theory:" ^ p))
- in
- let (dir_RE, obj_RE) =
- (Pcre.regexp (pat ^ "/"), Pcre.regexp (pat ^ "(\\.|$)"))
- in
- let dirs = ref StringSet.empty in
- let objs = Hashtbl.create 17 in
- let store_dir d =
- dirs := StringSet.add (List.hd (Pcre.split ~rex:slash_RE d)) !dirs
- in
- let store_obj o =
- let basepart = Pcre.replace ~rex:basepart_RE ~templ:"$1" o in
- let no_flags = false, No, No, No in
- let oldflags =
- try
- Hashtbl.find objs basepart
- with Not_found -> (* no ann, no types, no body, no proof tree *)
- no_flags
- in
- let newflags =
- match o with
- | s when Pcre.pmatch ~rex:types_RE s -> (false, Yes, No, No)
- | s when Pcre.pmatch ~rex:types_ann_RE s -> (true, Ann, No, No)
- | s when Pcre.pmatch ~rex:body_RE s -> (false, No, Yes, No)
- | s when Pcre.pmatch ~rex:body_ann_RE s -> (true, No, Ann, No)
- | s when Pcre.pmatch ~rex:proof_tree_RE s -> (false, No, No, Yes)
- | s when Pcre.pmatch ~rex:proof_tree_ann_RE s -> (true, No, No, Ann)
- | s -> no_flags
- in
- Hashtbl.replace objs basepart (oldflags ++ newflags)
- in
- cic_map#iter (* BLEARGH Dbm module lacks support for fold-like functions *)
- (fun key _ ->
- match key with
- | uri when Pcre.pmatch ~rex:dir_RE uri -> (* directory hit *)
- let localpart = Pcre.replace ~rex:dir_RE uri in
- if Pcre.pmatch ~rex:no_slashes_RE localpart then
- store_obj localpart
- else
- store_dir localpart
- | uri when Pcre.pmatch ~rex:obj_RE uri -> (* file hit *)
- store_obj (Pcre.replace ~rex:til_slash_RE uri)
- | uri -> () (* miss *));
- match fmt with
- | Fmt_text ->
- let body =
- (List.fold_left
- (fun s d -> sprintf "%sdir, %s\n" s d) ""
- (StringSet.elements !dirs)) ^
- (Http_getter_misc.hashtbl_sorted_fold
- (fun uri (annflag, typesflag, bodyflag, treeflag) cont ->
- sprintf "%sobject, %s, <%s,%s,%s,%s>\n"
- cont uri (if annflag then "YES" else "NO")
- (string_of_ls_flag typesflag)
- (string_of_ls_flag bodyflag)
- (string_of_ls_flag treeflag))
- objs "")
- in
- Http_daemon.respond
- ~headers:(("Content-Type", "text/plain") :: common_headers)
- ~body outchan
- | Fmt_xml ->
- let body =
- sprintf
-"<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>
-<!DOCTYPE ls SYSTEM \"%s/getdtd?uri=ls.dtd\">
+ | uri -> raise (Unresolvable_URI uri)
-<ls>
-%s
-</ls>
-"
- Http_getter_env.my_own_url
- ("\n" ^
- (String.concat
- "\n"
- (List.map
- (fun d -> "<section>" ^ d ^ "</section>")
- (StringSet.elements !dirs))) ^ "\n" ^
- (Http_getter_misc.hashtbl_sorted_fold
- (fun uri (annflag, typesflag, bodyflag, treeflag) cont ->
- sprintf
-"%s<object name=\"%s\">
-\t<ann value=\"%s\" />
-\t<types value=\"%s\" />
-\t<body value=\"%s\" />
-\t<proof_tree value=\"%s\" />
-</object>
-"
- cont uri (if annflag then "YES" else "NO")
- (string_of_ls_flag typesflag)
- (string_of_ls_flag bodyflag)
- (string_of_ls_flag treeflag))
- objs ""))
- in
- Http_daemon.respond
- ~headers:(("Content-Type", "text/xml") :: common_headers)
- ~body outchan
-in
-let (index_line_sep_RE, index_sep_RE, trailing_types_RE,
- heading_cic_RE, heading_theory_RE, heading_nuprl_RE,
- heading_rdf_cic_RE, heading_rdf_theory_RE) =
- (Pcre.regexp "[ \t]+", Pcre.regexp "\r\n|\r|\n",
- Pcre.regexp "\\.types$",
- Pcre.regexp "^cic:", Pcre.regexp "^theory:", Pcre.regexp "^nuprl:",
- Pcre.regexp "^helm:rdf.*//cic:", Pcre.regexp "^helm:rdf.*//theory:")
-in
let update_from_server logmsg server_url = (* use global maps *)
debug_print ("Updating information from " ^ server_url);
let xml_url_of_uri = function
Pcre.replace ~rex:heading_theory_RE ~templ:server_url uri
| uri when (Pcre.pmatch ~rex:heading_nuprl_RE uri) ->
Pcre.replace ~rex:heading_nuprl_RE ~templ:server_url uri
- | uri -> raise (Http_getter_invalid_URI uri)
+ | uri -> raise (Invalid_URI uri)
in
let rdf_url_of_uri = function (* TODO as above *)
| uri when (Pcre.pmatch ~rex:heading_rdf_cic_RE uri) ->
Pcre.replace ~rex:heading_rdf_cic_RE ~templ:server_url uri
| uri when (Pcre.pmatch ~rex:heading_rdf_theory_RE uri) ->
Pcre.replace ~rex:heading_rdf_theory_RE ~templ:server_url uri
- | uri -> raise (Http_getter_invalid_URI uri)
+ | uri -> raise (Invalid_URI uri)
in
- let log = ref (logmsg ^ "Processing server: " ^ server_url ^ "<br />\n") in
+ let log = ref (`T ("Processing server: " ^ server_url) :: logmsg) in
let (xml_index, rdf_index, xsl_index) =
(* TODO keeps index in memory, is better to keep them on temp files? *)
(http_get (server_url ^ "/" ^ Http_getter_env.xml_index),
debug_print (sprintf "Warning: useless server %s" server_url);
(match xml_index with
| Some xml_index ->
- (log := !log ^ "Updating XML db ...<br />\n";
+      (log := `T "Updating XML db ..." :: !log;
List.iter
(function
| l when is_blank_line l -> () (* skip blank and commented lines *)
| l ->
- try
+ (try
(match Pcre.split ~rex:index_line_sep_RE l with
| [uri; "gz"] ->
assert (is_cic_uri uri || is_nuprl_uri uri) ;
assert (is_cic_uri uri || is_nuprl_uri uri) ;
(map_of_uri uri)#replace
uri ((xml_url_of_uri uri) ^ ".xml")
- | _ ->
- log := !log ^ "Ignoring invalid line: '" ^ l ^ "'<br />\n")
- with Http_getter_invalid_URI uri ->
- log := !log ^ "Ignoring invalid XML URI: '" ^ uri ^ "'<br />\n")
- (Pcre.split ~rex:index_sep_RE xml_index)) (* xml_index lines *)
+              | _ -> log := `T ("Ignoring invalid line: '" ^ l ^ "'") :: !log)
+ with Invalid_URI uri ->
+              log := `T ("Ignoring invalid XML URI: '" ^ uri ^ "'") :: !log))
+ (Pcre.split ~rex:index_sep_RE xml_index); (* xml_index lines *)
+ log := `T "All done" :: !log)
| None -> ());
(match rdf_index with
| Some rdf_index ->
- (log := !log ^ "Updating RDF db ...<br />\n";
+ (log := `T "Updating RDF db ..." :: !log;
List.iter
(fun l ->
try
(match Pcre.split ~rex:index_line_sep_RE l with
| [uri; "gz"] ->
- rdf_map#replace uri ((rdf_url_of_uri uri) ^ ".xml.gz")
- | [uri] -> rdf_map#replace uri ((rdf_url_of_uri uri) ^ ".xml")
- | _ -> log := !log ^ "Ignoring invalid line: " ^ l ^ "<br />\n")
- with Http_getter_invalid_URI uri ->
- log := !log ^ "Ignoring invalid RDF URI: " ^ uri ^ "<br />\n")
- (Pcre.split ~rex:index_sep_RE rdf_index)) (* rdf_index lines *)
+ rdf_map#replace uri
+ ((rdf_url_of_uri uri) ^ ".xml.gz")
+ | [uri] ->
+ rdf_map#replace uri ((rdf_url_of_uri uri) ^ ".xml")
+            | _ -> log := `T ("Ignoring invalid line: '" ^ l ^ "'") :: !log)
+ with Invalid_URI uri ->
+            log := `T ("Ignoring invalid RDF URI: '" ^ uri ^ "'") :: !log)
+ (Pcre.split ~rex:index_sep_RE rdf_index); (* rdf_index lines *)
+ log := `T "All done" :: !log)
| None -> ());
(match xsl_index with
| Some xsl_index ->
- (log := !log ^ "Updating XSLT db ...<br />\n";
+ (log := `T "Updating XSLT db ..." :: !log;
List.iter
(fun l -> xsl_map#replace l (server_url ^ "/" ^ l))
(Pcre.split ~rex:index_sep_RE xsl_index);
- log := !log ^ "All done!<br />\n")
+ log := `T "All done" :: !log)
| None -> ());
debug_print "done with this server";
!log
-in
+
let update_from_all_servers () = (* use global maps *)
clear_maps ();
let log =
List.fold_left
update_from_server
- "" (* initial logmsg: empty *)
+ [] (* initial logmsg: empty *)
(* reverse order: 1st server is the most important one *)
- (List.rev !Http_getter_env.servers)
+ (List.map snd (List.rev (Http_getter_env.servers ())))
in
sync_maps ();
- log
-in
+ `Msg (`L (List.rev log))
- (* thread action *)
+let update_from_one_server server_url =
+ let log = update_from_server [] server_url in
+ `Msg (`L (List.rev log))
-let callback (req: Http_types.request) outchan =
- try
- debug_print ("Connection from " ^ req#clientAddr);
- debug_print ("Received request: " ^ req#path);
- (match req#path with
- | "/help" ->
- return_html_raw
- (Http_getter_const.usage_string (Http_getter_env.env_to_string ()))
- outchan
- | "/getxml" | "/getxslt" | "/getdtd" | "/resolve" | "/register" ->
- (let uri = req#param "uri" in (* common parameter *)
- match req#path with
- | "/getxml" ->
- let enc = parse_enc req in
- let patch = parse_patch req in
- Http_getter_cache.respond_xml
- ~url:(resolve uri) ~uri ~enc ~patch outchan
- | "/getxslt" ->
- let patch = parse_patch req in
- Http_getter_cache.respond_xsl ~url:(resolve uri) ~patch outchan
- | "/getdtd" ->
- let patch = parse_patch req in
- Http_getter_cache.respond_dtd
- ~patch ~url:(Http_getter_env.dtd_dir ^ "/" ^ uri) outchan
- | "/resolve" ->
- (try
- return_xml_raw
- (sprintf "<url value=\"%s\" />\n" (resolve uri))
- outchan
- with Http_getter_unresolvable_URI uri ->
- return_xml_raw "<unresolved />\n" outchan)
- | "/register" ->
- let url = req#param "url" in
- register uri url;
- return_html_msg "Register done" outchan
- | _ -> assert false)
- | "/clean_cache" ->
- Http_getter_cache.clean ();
- return_html_msg "Done." outchan
- | "/update" ->
- Http_getter_env.reload (); (* reload servers list from servers file *)
- let log = update_from_all_servers () in
- return_html_msg log outchan
- | "/list_servers" ->
- return_html_raw
- (sprintf "<html><body><table>\n%s\n</table></body></html>"
- (String.concat "\n"
- (List.map
- (let i = ref ~-1 in
- fun s -> incr i; sprintf "<tr><td>%d</td><td>%s</td></tr>" !i s)
- !Http_getter_env.servers)))
- outchan
- | "/add_server" ->
- let name = req#param "url" in
- (try
- let position =
- try
- let res = int_of_string (req#param "position") in
- if res < 0 then
- raise (Failure "int_of_string");
- res
- with Failure "int_of_string" ->
- raise (Http_getter_bad_request
- (sprintf "position must be a non negative integer (%s given)"
- (req#param "position")))
- in
- if position = 0 then (* fallback to default value *)
- raise (Http_types.Param_not_found "foo")
- else if position > 0 then begin (* add server and update all *)
- Http_getter_env.add_server ~position name;
- let log = update_from_all_servers () in
- return_html_msg
- (sprintf "Added server %s in position %d)<br />\n%s"
- name position log)
- outchan
- end else (* position < 0 *) (* error! *)
- assert false (* already checked above *)
- with Http_types.Param_not_found _ -> (* add as 1st server by default *)
- Http_getter_env.add_server ~position:0 name;
- let log = update_from_server (* quick update (new server only) *)
- (sprintf "Added server %s in head position<br />\n" name) name
- in
- return_html_msg log outchan)
- | "/remove_server" ->
- let position =
- try
- let res = int_of_string (req#param "position") in
- if res < 0 then
- raise (Failure "int_of_string");
- res
- with Failure "int_of_string" ->
- raise (Http_getter_bad_request
- (sprintf "position must be a non negative integer (%s given)"
- (req#param "position")))
- in
- let server_name =
- try
- List.nth !Http_getter_env.servers position
- with Failure "nth" ->
- raise (Http_getter_bad_request
- (sprintf "no server with position %d" position))
- in
- Http_getter_env.remove_server position;
- let log = update_from_all_servers () in
- return_html_msg
- (sprintf "Removed server %s (position %d)<br />\n%s"
- server_name position log)
- outchan
- | "/getalluris" ->
- return_all_xml_uris
- (fun uri ->
- (Pcre.pmatch ~rex:heading_cic_RE uri) &&
- not (Pcre.pmatch ~rex:trailing_types_RE uri))
- outchan
- | "/getallrdfuris" ->
- (let classs = req#param "class" in
- try
- let filter =
- let base = "^helm:rdf:www\\.cs\\.unibo\\.it/helm/rdf/" in
- match classs with
- | ("forward" as c) | ("backward" as c) ->
- (fun uri -> Pcre.pmatch ~pat:(base ^ c) uri)
- | c -> raise (Http_getter_invalid_RDF_class c)
- in
- return_all_rdf_uris filter outchan
- with Http_getter_invalid_RDF_class c ->
- raise (Http_getter_bad_request ("Invalid RDF class: " ^ c)))
- | "/ls" -> return_ls (parse_ls_uri req) (parse_output_format req) outchan
- | "/getempty" ->
- Http_daemon.respond ~body:Http_getter_const.empty_xml outchan
- | invalid_request ->
- Http_daemon.respond_error ~status:(`Client_error `Bad_request) outchan);
- debug_print "Done!\n"
- with
- | Http_types.Param_not_found attr_name ->
- return_400 (sprintf "Parameter '%s' is missing" attr_name) outchan
- | Http_getter_bad_request msg -> return_html_error msg outchan
- | Http_getter_internal_error msg -> return_html_internal_error msg outchan
- | Shell.Subprocess_error l ->
- return_html_internal_error
- (String.concat "<br />\n"
- (List.map
- (fun (cmd, code) ->
- sprintf "Command '%s' returned %s"
- cmd (string_of_proc_status code))
- l))
- outchan
- | exc ->
- return_html_error
- ("Uncaught exception: " ^ (Printexc.to_string exc))
- outchan
-in
+let temp_file_of_uri uri =
+ let flat_string s s' c =
+ let cs = String.copy s in
+ for i = 0 to (String.length s) - 1 do
+ if String.contains s' s.[i] then cs.[i] <- c
+ done;
+ cs
+ in
+ let user = try Unix.getlogin () with _ -> "" in
+ Filename.open_temp_file (user ^ flat_string uri ".-=:;!?/&" '_') ""
+
+(* API *)
- (* daemon initialization *)
+let help () = Http_getter_const.usage_string (Http_getter_env.env_to_string ())
-let main () =
- print_string (Http_getter_env.env_to_string ());
- flush stdout;
- Unix.putenv "http_proxy" "";
- at_exit close_maps;
- Sys.catch_break true;
+let resolve uri =
try
- Http_daemon.start'
- ~timeout:(Some 600) ~port:Http_getter_env.port ~mode:`Thread callback
- with Sys.Break -> () (* 'close_maps' already registered with 'at_exit' *)
-in
+ (map_of_uri uri)#resolve uri
+ with Http_getter_map.Key_not_found _ -> raise (Unresolvable_URI uri)
+
+  (* Warning: this fails if uri is already registered *)
+let register ~uri ~url = (map_of_uri uri)#add uri url
+
+let update () = update_from_all_servers ()
+
+let getxml ?(format = Enc_normal) ?(patch_dtd = true) uri =
+ let url = resolve uri in
+ let (fname, outchan) = temp_file_of_uri uri in
+ Http_getter_cache.respond_xml ~uri ~url ~enc:format ~patch:patch_dtd outchan;
+ close_out outchan;
+ fname
+
+let getxslt ?(patch_dtd = true) uri =
+ let url = resolve uri in
+ let (fname, outchan) = temp_file_of_uri uri in
+ Http_getter_cache.respond_xsl ~url ~patch:patch_dtd outchan;
+ close_out outchan;
+ fname
+
+let getdtd ?(patch_dtd = true) uri =
+ let url = Http_getter_env.dtd_dir ^ "/" ^ uri in
+ let (fname, outchan) = temp_file_of_uri uri in
+ Http_getter_cache.respond_dtd ~url ~patch:patch_dtd outchan;
+ close_out outchan;
+ fname
+
+let clean_cache () = Http_getter_cache.clean ()
+
+let list_servers () = Http_getter_env.servers ()
+
+let add_server ?(position = 0) name =
+ if position = 0 then begin
+ Http_getter_env.add_server ~position:0 name;
+ update_from_one_server name (* quick update (new server only) *)
+ end else if position > 0 then begin
+ Http_getter_env.add_server ~position name;
+ update ()
+  end else (* already checked by parse_position *)
+ assert false
+
+let remove_server position =
+ let server_name =
+ try
+ List.assoc position (Http_getter_env.servers ())
+ with Not_found ->
+ raise (Invalid_argument (sprintf "no server with position %d" position))
+ in
+ Http_getter_env.remove_server position;
+ update ()
+
+let return_uris map filter =
+ let uris = ref [] in
+ map#iter (fun uri _ -> if filter uri then uris := uri :: !uris);
+ List.rev !uris
+
+let getalluris () =
+ let filter uri =
+ (Pcre.pmatch ~rex:heading_cic_RE uri) &&
+ not (Pcre.pmatch ~rex:trailing_types_RE uri)
+ in
+ return_uris cic_map filter
-main ()
+let getallrdfuris classs =
+ let filter =
+ let base = "^helm:rdf:www\\.cs\\.unibo\\.it/helm/rdf/" in
+ match classs with
+ | `Forward -> (fun uri -> Pcre.pmatch ~pat:(base ^ "forward") uri)
+ | `Backward -> (fun uri -> Pcre.pmatch ~pat:(base ^ "backward") uri)
+ in
+ return_uris rdf_map filter
+
+let ls =
+ let (++) (oldann, oldtypes, oldbody, oldtree)
+ (newann, newtypes, newbody, newtree) =
+ ((if newann > oldann then newann else oldann),
+ (if newtypes > oldtypes then newtypes else oldtypes),
+ (if newbody > oldbody then newbody else oldbody),
+ (if newtree > oldtree then newtree else oldtree))
+ in
+ let basepart_RE =
+ Pcre.regexp
+ "^([^.]*\\.[^.]*)((\\.body)|(\\.proof_tree)|(\\.types))?(\\.ann)?$"
+ in
+ let (types_RE, types_ann_RE, body_RE, body_ann_RE,
+ proof_tree_RE, proof_tree_ann_RE) =
+ (Pcre.regexp "\\.types$", Pcre.regexp "\\.types\\.ann$",
+ Pcre.regexp "\\.body$", Pcre.regexp "\\.body\\.ann$",
+ Pcre.regexp "\\.proof_tree$", Pcre.regexp "\\.proof_tree\\.ann$")
+ in
+ let (slash_RE, til_slash_RE, no_slashes_RE) =
+ (Pcre.regexp "/", Pcre.regexp "^.*/", Pcre.regexp "^[^/]*$")
+ in
+ fun lsuri ->
+ let pat =
+ "^" ^
+ (match lsuri with Cic p -> ("cic:" ^ p) | Theory p -> ("theory:" ^ p))
+ in
+ let (dir_RE, obj_RE) =
+ (Pcre.regexp (pat ^ "/"), Pcre.regexp (pat ^ "(\\.|$)"))
+ in
+ let dirs = ref StringSet.empty in
+ let objs = Hashtbl.create 17 in
+ let store_dir d =
+ dirs := StringSet.add (List.hd (Pcre.split ~rex:slash_RE d)) !dirs
+ in
+ let store_obj o =
+ let basepart = Pcre.replace ~rex:basepart_RE ~templ:"$1" o in
+ let no_flags = false, No, No, No in
+ let oldflags =
+ try
+ Hashtbl.find objs basepart
+ with Not_found -> (* no ann, no types, no body, no proof tree *)
+ no_flags
+ in
+ let newflags =
+ match o with
+ | s when Pcre.pmatch ~rex:types_RE s -> (false, Yes, No, No)
+ | s when Pcre.pmatch ~rex:types_ann_RE s -> (true, Ann, No, No)
+ | s when Pcre.pmatch ~rex:body_RE s -> (false, No, Yes, No)
+ | s when Pcre.pmatch ~rex:body_ann_RE s -> (true, No, Ann, No)
+ | s when Pcre.pmatch ~rex:proof_tree_RE s -> (false, No, No, Yes)
+ | s when Pcre.pmatch ~rex:proof_tree_ann_RE s -> (true, No, No, Ann)
+ | s -> no_flags
+ in
+ Hashtbl.replace objs basepart (oldflags ++ newflags)
+ in
+ cic_map#iter
+ (* BLEARGH Dbm module lacks support for fold-like functions *)
+ (fun key _ ->
+ match key with
+ | uri when Pcre.pmatch ~rex:dir_RE uri -> (* directory hit *)
+ let localpart = Pcre.replace ~rex:dir_RE uri in
+ if Pcre.pmatch ~rex:no_slashes_RE localpart then
+ store_obj localpart
+ else
+ store_dir localpart
+ | uri when Pcre.pmatch ~rex:obj_RE uri -> (* file hit *)
+ store_obj (Pcre.replace ~rex:til_slash_RE uri)
+ | uri -> () (* miss *));
+ let ls_items = ref [] in
+ StringSet.iter (fun dir -> ls_items := Ls_section dir :: !ls_items) !dirs;
+ Http_getter_misc.hashtbl_sorted_iter
+ (fun uri (annflag, typesflag, bodyflag, treeflag) ->
+ ls_items :=
+ Ls_object {
+ uri = uri; ann = annflag;
+            types = typesflag; body = bodyflag; proof_tree = treeflag
+ } :: !ls_items)
+ objs;
+ List.rev !ls_items