(*
 * Copyright (C) 2003-2004:
 *    Stefano Zacchiroli
 *    for the HELM Team http://helm.cs.unibo.it/
 *
 * This file is part of HELM, an Hypertextual, Electronic
 * Library of Mathematics, developed at the Computer Science
 * Department, University of Bologna, Italy.
 *
 * HELM is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * HELM is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HELM; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston,
 * MA  02111-1307, USA.
 *
 * For details, see the HELM World-Wide-Web page,
 * http://helm.cs.unibo.it/
 *)

open Printf

open Http_getter_common
open Http_getter_misc
open Http_getter_types

exception Not_implemented of string
exception UnexpectedGetterOutput

(* resolve_result is needed because it is not possible to raise *)
(* an exception in a pxp event-processing callback. Too bad.    *)
type resolve_result =
  | Unknown
  | Exception of exn
  | Resolved of string

type logger_callback = HelmLogger.html_tag -> unit

let stdout_logger tag = print_string (HelmLogger.string_of_html_tag tag)

let not_implemented s = raise (Not_implemented ("Http_getter." ^ s))

let (index_line_sep_RE, index_sep_RE, trailing_types_RE,
     heading_cic_RE, heading_theory_RE, heading_nuprl_RE,
     heading_rdf_cic_RE, heading_rdf_theory_RE) =
  (Pcre.regexp "[ \t]+", Pcre.regexp "\r\n|\r|\n",
   Pcre.regexp "\\.types$",
   Pcre.regexp "^cic:", Pcre.regexp "^theory:", Pcre.regexp "^nuprl:",
   Pcre.regexp "^helm:rdf.*//cic:", Pcre.regexp "^helm:rdf.*//theory:")

(* global maps, shared by all threads *)

let cic_map =
  lazy (new Http_getter_map.map (Lazy.force Http_getter_env.cic_dbm))
let nuprl_map =
  lazy (new Http_getter_map.map (Lazy.force Http_getter_env.nuprl_dbm))
let rdf_map =
  lazy (new Http_getter_map.map (Lazy.force Http_getter_env.rdf_dbm))
let xsl_map =
  lazy (new Http_getter_map.map (Lazy.force Http_getter_env.xsl_dbm))

let uri_tree = ref None

let deref_if_some r =
  match !r with
  | None -> assert false
  | Some x -> x

let is_prefetch_on () =
  match !uri_tree with None -> false | Some _ -> true

let dump_tree () =
  let path = Lazy.force Http_getter_env.dump_file in
  Tree.save_to_disk path (deref_if_some uri_tree);
  Http_getter_md5.create_hash
    [ (Lazy.force Http_getter_env.cic_dbm_real); path ]

let load_tree () =
  if not (Http_getter_md5.check_hash ()) then
    assert false
  else
    uri_tree :=
      Some (Tree.load_from_disk (Lazy.force Http_getter_env.dump_file))

let sync_with_map () =
  if not (Http_getter_md5.check_hash ()) then begin
    let tree = ref (Some Tree.empty_tree) in
    Http_getter_logger.log "Updating cic map dump...";
    let t = Unix.time () in
    (Lazy.force cic_map)#iter
      (fun k _ -> tree := Some (Tree.add_uri k (deref_if_some tree)));
    uri_tree := !tree;
    Http_getter_logger.log (sprintf "done in %.0f sec" (Unix.time () -. t));
    dump_tree ()
  end else begin
    Http_getter_logger.log "Cic map dump is up to date!";
    load_tree () (* XXX TASSI: race condition here *)
  end
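(* Illustrative sketch only: the prefetch tree built by sync_with_map above
 * is obtained by folding Tree.add_uri (same argument order as used above)
 * over every key of the cic map.  The URIs in this example are made up.
 *
 *   let example_tree =
 *     List.fold_left
 *       (fun t uri -> Tree.add_uri uri t)
 *       Tree.empty_tree
 *       [ "cic:/Test/Logic/True.ind"; "cic:/Test/Logic/False.ind" ]
 *)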
let maps = [ cic_map; nuprl_map; rdf_map; xsl_map ]
let close_maps () = List.iter (fun m -> (Lazy.force m) # close) maps
let clear_maps () = List.iter (fun m -> (Lazy.force m) # clear) maps
let sync_maps () =
  List.iter (fun m -> (Lazy.force m) # sync) maps;
  sync_with_map ()

let map_of_uri = function
  | uri when is_cic_uri uri -> Lazy.force cic_map
  | uri when is_nuprl_uri uri -> Lazy.force nuprl_map
  | uri when is_rdf_uri uri -> Lazy.force rdf_map
  | uri when is_xsl_uri uri -> Lazy.force xsl_map
  | uri -> raise (Unresolvable_URI uri)

let update_from_server logger server_url = (* use global maps *)
  Http_getter_logger.log ("Updating information from " ^ server_url);
  let xml_url_of_uri = function
    (* TODO: missing sanity checks on server_url, e.g. it may contain $1 *)
    | uri when (Pcre.pmatch ~rex:heading_cic_RE uri) ->
        Pcre.replace ~rex:heading_cic_RE ~templ:server_url uri
    | uri when (Pcre.pmatch ~rex:heading_theory_RE uri) ->
        Pcre.replace ~rex:heading_theory_RE ~templ:server_url uri
    | uri when (Pcre.pmatch ~rex:heading_nuprl_RE uri) ->
        Pcre.replace ~rex:heading_nuprl_RE ~templ:server_url uri
    | uri -> raise (Invalid_URI uri)
  in
  let rdf_url_of_uri = function (* TODO: as above *)
    | uri when (Pcre.pmatch ~rex:heading_rdf_cic_RE uri) ->
        Pcre.replace ~rex:heading_rdf_cic_RE ~templ:server_url uri
    | uri when (Pcre.pmatch ~rex:heading_rdf_theory_RE uri) ->
        Pcre.replace ~rex:heading_rdf_theory_RE ~templ:server_url uri
    | uri -> raise (Invalid_URI uri)
  in
  logger (`T ("Processing server: " ^ server_url));
  logger `BR;
  let (xml_index, rdf_index, xsl_index) =
    (* TODO: this keeps the indexes in memory; would it be better to keep
     * them in temporary files? *)
    (http_get (server_url ^ "/" ^ (Lazy.force Http_getter_env.xml_index)),
     http_get (server_url ^ "/" ^ (Lazy.force Http_getter_env.rdf_index)),
     http_get (server_url ^ "/" ^ (Lazy.force Http_getter_env.xsl_index)))
  in
  if (xml_index = None && rdf_index = None && xsl_index = None) then
    Http_getter_logger.log (sprintf "Warning: useless server %s" server_url);
  (match xml_index with
   | Some xml_index ->
       logger (`T "- Updating XML db ..."); (* logger `BR; *)
       List.iter
         (function
           | l when is_blank_line l -> () (* skip blank and commented lines *)
           | l ->
               (try
                 (match Pcre.split ~rex:index_line_sep_RE l with
                  | [uri; "gz"] ->
                      assert (is_cic_uri uri || is_nuprl_uri uri);
                      (map_of_uri uri)#replace uri
                        ((xml_url_of_uri uri) ^ ".xml.gz")
                  | [uri] ->
                      assert (is_cic_uri uri || is_nuprl_uri uri);
                      (map_of_uri uri)#replace uri
                        ((xml_url_of_uri uri) ^ ".xml")
                  | _ ->
                      logger (`T ("Ignoring invalid line: '" ^ l ^ "'"));
                      logger `BR)
                with Invalid_URI uri ->
                  logger (`T ("Ignoring invalid XML URI: '" ^ l ^ "'"));
                  logger `BR))
         (Pcre.split ~rex:index_sep_RE xml_index); (* xml_index lines *)
       logger (`T "All done"); logger `BR
   | None -> ());
  (match rdf_index with
   | Some rdf_index ->
       logger (`T "- Updating RDF db ..."); (* logger `BR; *)
       List.iter
         (fun l ->
           try
             (match Pcre.split ~rex:index_line_sep_RE l with
              | [uri; "gz"] ->
                  (Lazy.force rdf_map) # replace uri
                    ((rdf_url_of_uri uri) ^ ".xml.gz")
              | [uri] ->
                  (Lazy.force rdf_map) # replace uri
                    ((rdf_url_of_uri uri) ^ ".xml")
              | _ ->
                  logger (`T ("Ignoring invalid line: '" ^ l ^ "'"));
                  logger `BR)
           with Invalid_URI uri ->
             logger (`T ("Ignoring invalid RDF URI: '" ^ l ^ "'"));
             logger `BR)
         (Pcre.split ~rex:index_sep_RE rdf_index); (* rdf_index lines *)
       logger (`T "All done"); logger `BR
   | None -> ());
  (match xsl_index with
   | Some xsl_index ->
       logger (`T "- Updating XSLT db ..."); (* logger `BR; *)
       List.iter
         (fun l -> (Lazy.force xsl_map) # replace l (server_url ^ "/" ^ l))
         (Pcre.split ~rex:index_sep_RE xsl_index);
       logger (`T "All done"); logger `BR
   | None -> ());
  Http_getter_logger.log "done with this server"
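(* The index files fetched by update_from_server are expected to be plain
 * text, one entry per line: a URI, optionally followed by "gz" when the
 * remote file is gzipped.  For example (illustrative URIs):
 *
 *   cic:/Test/Logic/True.ind gz
 *   cic:/Test/Logic/eq.ind
 *
 * With the code above, the first entry is registered as
 * <server_url>/Test/Logic/True.ind.xml.gz and the second as
 * <server_url>/Test/Logic/eq.ind.xml. *)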
let update_from_all_servers logger () = (* use global maps *)
  clear_maps ();
  List.iter (update_from_server logger)
    (* reverse order: 1st server is the most important one *)
    (List.map snd (List.rev (Http_getter_env.servers ())));
  sync_maps ()

let update_from_one_server ?(logger = fun _ -> ()) server_url =
  update_from_server logger server_url

let temp_file_of_uri uri =
  let flat_string s s' c =
    let cs = String.copy s in
    for i = 0 to (String.length s) - 1 do
      if String.contains s' s.[i] then cs.[i] <- c
    done;
    cs
  in
  let user = try Unix.getlogin () with _ -> "" in
  Filename.open_temp_file (user ^ flat_string uri ".-=:;!?/&" '_') ""

(* should we use a remote getter or not *)
let remote () =
  try
    Helm_registry.get "getter.mode" = "remote"
  with Helm_registry.Key_not_found _ -> false

let getter_url () = Helm_registry.get "getter.url"

(* Remote interface: getter methods implemented using a remote getter *)

(* *)
let getxslt_remote ~patch_dtd uri = not_implemented "getxslt_remote"
let getdtd_remote ~patch_dtd uri = not_implemented "getdtd_remote"
let clean_cache_remote () = not_implemented "clean_cache_remote"
let list_servers_remote () = not_implemented "list_servers_remote"
let add_server_remote ~logger ~position name =
  not_implemented "add_server_remote"
let remove_server_remote ~logger position =
  not_implemented "remove_server_remote"
let getalluris_remote () = not_implemented "getalluris_remote"
let getallrdfuris_remote () = not_implemented "getallrdfuris_remote"
let ls_remote lsuri = not_implemented "ls_remote"
(* *)

let resolve_remote uri =
  (* deliver resolve request to http_getter *)
  let doc = ClientHTTP.get (sprintf "%sresolve?uri=%s" (getter_url ()) uri) in
  let res = ref Unknown in
  Pxp_ev_parser.process_entity
    PxpHelmConf.pxp_config (`Entry_content [])
    (Pxp_ev_parser.create_entity_manager ~is_document:true
      PxpHelmConf.pxp_config (Pxp_yacc.from_string doc))
    (function
      | Pxp_types.E_start_tag ("url", ["value", url], _, _) ->
          res := Resolved url
      | Pxp_types.E_start_tag ("unresolvable", [], _, _) ->
          res := Exception (Unresolvable_URI uri)
      | Pxp_types.E_start_tag ("not_found", [], _, _) ->
          res := Exception (Key_not_found uri)
      | Pxp_types.E_start_tag (x, _, _, _) ->
          res := Exception UnexpectedGetterOutput
      | _ -> ());
  match !res with
  | Unknown -> raise UnexpectedGetterOutput
  | Exception e -> raise e
  | Resolved url -> url

let register_remote ~uri ~url =
  ClientHTTP.send (sprintf "%sregister?uri=%s&url=%s" (getter_url ()) uri url)

let unregister_remote uri =
  ClientHTTP.send (sprintf "%sunregister?uri=%s" (getter_url ()) uri)

let update_remote logger () =
  let answer = ClientHTTP.get (getter_url () ^ "update") in
  logger (`T answer);
  logger `BR

let getxml_remote ~format ~patch_dtd uri =
  ClientHTTP.get_and_save_to_tmp
    (sprintf "%sgetxml?uri=%s&format=%s&patch_dtd=%s"
      (getter_url ()) uri
      (match format with `Normal -> "normal" | `Gzipped -> "gzipped")
      (match patch_dtd with true -> "yes" | false -> "no"))

(* API *)

let help () = Http_getter_const.usage_string (Http_getter_env.env_to_string ())

let resolve uri =
  if remote () then
    resolve_remote uri
  else
    (map_of_uri uri)#resolve uri

let register ~uri ~url =
  if remote () then
    register_remote ~uri ~url
  else begin
    (map_of_uri uri)#add uri url;
    if is_prefetch_on () then
      uri_tree := Some (Tree.add_uri uri (deref_if_some uri_tree))
  end
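(* Illustrative local-mode usage of register and resolve above (the URI and
 * URL are made up): after a register, resolve returns the registered URL.
 *
 *   register ~uri:"cic:/Test/t.con"
 *     ~url:"http://example.com/xml/Test/t.con.xml";
 *   assert
 *     (resolve "cic:/Test/t.con" = "http://example.com/xml/Test/t.con.xml")
 *)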
let unregister uri =
  if remote () then
    unregister_remote uri
  else
    try
      begin
        (map_of_uri uri)#remove uri;
        if is_prefetch_on () then
          uri_tree := Some (Tree.remove_uri uri (deref_if_some uri_tree))
      end
    with Key_not_found _ -> ()

let update ?(logger = fun _ -> ()) () =
  if remote () then update_remote logger ()
  else update_from_all_servers logger ()

let getxml ?(format = `Normal) ?(patch_dtd = true) uri =
  if remote () then
    getxml_remote ~format ~patch_dtd uri
  else begin
    Http_getter_logger.log ~level:2 ("getxml: " ^ uri);
    let url = resolve uri in
    Http_getter_logger.log ~level:2 ("resolved_uri: " ^ url);
    let (fname, outchan) = temp_file_of_uri uri in
    Http_getter_cache.respond_xml
      ~via_http:false ~enc:format ~patch:patch_dtd ~uri ~url outchan;
    close_out outchan;
    fname
  end

let getxslt ?(patch_dtd = true) uri =
  if remote () then
    getxslt_remote ~patch_dtd uri
  else begin
    let url = resolve uri in
    let (fname, outchan) = temp_file_of_uri uri in
    Http_getter_cache.respond_xsl
      ~via_http:false ~url ~patch:patch_dtd outchan;
    close_out outchan;
    fname
  end

let getdtd ?(patch_dtd = true) uri =
  if remote () then
    getdtd_remote ~patch_dtd uri
  else begin
    let url = Lazy.force Http_getter_env.dtd_dir ^ "/" ^ uri in
    let (fname, outchan) = temp_file_of_uri uri in
    Http_getter_cache.respond_dtd
      ~via_http:false ~url ~patch:patch_dtd outchan;
    close_out outchan;
    fname
  end

let clean_cache () =
  if remote () then clean_cache_remote () else Http_getter_cache.clean ()

let list_servers () =
  if remote () then list_servers_remote () else Http_getter_env.servers ()

let add_server ?(logger = fun _ -> ()) ?(position = 0) name =
  if remote () then
    add_server_remote ~logger ~position name
  else begin
    if position = 0 then begin
      Http_getter_env.add_server ~position:0 name;
      update_from_one_server ~logger name (* quick update (new server only) *)
    end else if position > 0 then begin
      Http_getter_env.add_server ~position name;
      update ~logger ()
    end else
      (* already checked by parse_position *)
      assert false
  end

let has_server position = List.mem_assoc position (Http_getter_env.servers ())

let remove_server ?(logger = fun _ -> ()) position =
  if remote () then
    remove_server_remote ~logger ()
  else begin
    let server_name =
      try
        List.assoc position (Http_getter_env.servers ())
      with Not_found ->
        raise
          (Invalid_argument (sprintf "no server with position %d" position))
    in
    Http_getter_env.remove_server position;
    update ~logger ()
  end

let return_uris map filter =
  let uris = ref [] in
  map#iter (fun uri _ -> if filter uri then uris := uri :: !uris);
  List.rev !uris

let getalluris () =
  if remote () then
    getalluris_remote ()
  else
    let filter uri =
      (Pcre.pmatch ~rex:heading_cic_RE uri)
      (* && not (Pcre.pmatch ~rex:trailing_types_RE uri) *)
    in
    return_uris (Lazy.force cic_map) filter

let getallrdfuris classs =
  if remote () then
    getallrdfuris_remote ()
  else
    let filter =
      let base = "^helm:rdf:www\\.cs\\.unibo\\.it/helm/rdf/" in
      match classs with
      | `Forward -> (fun uri -> Pcre.pmatch ~pat:(base ^ "forward") uri)
      | `Backward -> (fun uri -> Pcre.pmatch ~pat:(base ^ "backward") uri)
    in
    return_uris (Lazy.force rdf_map) filter

let (++) (oldann, oldtypes, oldbody, oldtree)
         (newann, newtypes, newbody, newtree) =
  ((if newann > oldann then newann else oldann),
   (if newtypes > oldtypes then newtypes else oldtypes),
   (if newbody > oldbody then newbody else oldbody),
   (if newtree > oldtree then newtree else oldtree))
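(* Illustrative sketch of the flag-merging operator (++) above, assuming the
 * constructor order No < Yes < Ann of the ls flags defined in
 * Http_getter_types; the concrete tuples are made up.  Merging the flags of
 * "obj.types" with those of "obj.body.ann":
 *
 *   (false, Yes, No, No) ++ (true, No, Ann, No) = (true, Yes, Ann, No)
 *
 * i.e. each component keeps the strongest information seen so far. *)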
let (types_RE, types_ann_RE, body_RE, body_ann_RE,
     proof_tree_RE, proof_tree_ann_RE, trailing_slash_RE, theory_RE) =
  (Pcre.regexp "\\.types$", Pcre.regexp "\\.types\\.ann$",
   Pcre.regexp "\\.body$", Pcre.regexp "\\.body\\.ann$",
   Pcre.regexp "\\.proof_tree$", Pcre.regexp "\\.proof_tree\\.ann$",
   Pcre.regexp "/$", Pcre.regexp "\\.theory$")

let basepart_RE =
  Pcre.regexp
    "^([^.]*\\.[^.]*)((\\.body)|(\\.proof_tree)|(\\.types))?(\\.ann)?$"

let (slash_RE, til_slash_RE, no_slashes_RE) =
  (Pcre.regexp "/", Pcre.regexp "^.*/", Pcre.regexp "^[^/]*$")
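(* Worked example (with a made-up URI) of how ls below classifies map entries
 * using basepart_RE and the *_RE patterns above:
 *
 *   "cic:/Test/nat.ind.types.ann"
 *     --(basepart_RE, $1)-->  "cic:/Test/nat.ind"        (key in objs)
 *     --(types_ann_RE)----->  flags (true, Ann, No, No)  (annotated types)
 *)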
let ls regexp =
  if remote () then
    ls_remote regexp
  else begin
    let looking_for_dir = Pcre.pmatch ~rex:trailing_slash_RE regexp in
    let pat = Pcre.replace ~rex:trailing_slash_RE ("^" ^ regexp) in
    let (dir_RE, dir_local_RE, obj_RE, first_comp_RE) =
      Pcre.regexp (pat ^ "/"), Pcre.regexp "[^/]+/[^/]*",
      Pcre.regexp (pat ^ "(\\.|$)"), Pcre.regexp "/.*"
    in
    let toplevel_theory =
      match List.rev (Pcre.split ~rex:slash_RE pat) with
      | dir :: _ -> Some (dir ^ ".theory")
      | _ -> None
    in
    let dirs = ref StringSet.empty in
    let objs = Hashtbl.create 17 in
    let store_dir d =
      dirs := StringSet.add (List.hd (Pcre.split ~rex:slash_RE d)) !dirs
    in
    let store_obj o =
      let basepart = Pcre.replace ~rex:basepart_RE ~templ:"$1" o in
      let no_flags = false, No, No, No in
      let oldflags =
        try
          Hashtbl.find objs basepart
        with Not_found ->
          (* no ann, no types, no body, no proof tree *)
          no_flags
      in
      let newflags =
        match o with
        | s when Pcre.pmatch ~rex:types_RE s -> (false, Yes, No, No)
        | s when Pcre.pmatch ~rex:types_ann_RE s -> (true, Ann, No, No)
        | s when Pcre.pmatch ~rex:body_RE s -> (false, No, Yes, No)
        | s when Pcre.pmatch ~rex:body_ann_RE s -> (true, No, Ann, No)
        | s when Pcre.pmatch ~rex:proof_tree_RE s -> (false, No, No, Yes)
        | s when Pcre.pmatch ~rex:proof_tree_ann_RE s -> (true, No, No, Ann)
        | s -> no_flags
      in
      Hashtbl.replace objs basepart (oldflags ++ newflags)
    in
    (* Variables used in backward compatibility code to map
       theory:/path/t.theory into theory:/path/t/index.theory
       when cic:/path/t/ exists *)
    let the_candidate_for_remapping =
      (* CSC: Here I am making a strong assumption: the pattern can only be
         of the form [^:]*:/path where path is NOT a regular expression *)
      "theory:" ^ Pcre.replace ~rex:(Pcre.regexp "[^:]*:") pat
    in
    let index_not_generated_yet = ref true in
    let valid_candidates = ref [] in
    let candidates_found = ref [] in
    (*(Lazy.force cic_map) # iter*)
    (* depending on prefetch *)
    let if_prefetch if_is if_not =
      if is_prefetch_on () then if_is else if_not
    in
    let iter_on_right_thing =
      if_prefetch
        (fun f ->
          List.iter (fun k -> f k "")
            (Tree.ls_path regexp (deref_if_some uri_tree)))
        (fun f -> (Lazy.force cic_map) # iter f)
    in
    let calculate_localpart =
      if_prefetch
        (fun uri -> uri)
        (fun uri -> Pcre.replace ~rex:dir_RE uri)
    in
    let check_if_x_RE =
      if_prefetch
        (fun x_RE uri -> true)
        (fun x_RE uri -> Pcre.pmatch ~rex:x_RE uri)
    in
    let check_if_dir_RE = check_if_x_RE dir_RE in
    let check_if_obj_RE = check_if_x_RE obj_RE in
    iter_on_right_thing
      (fun key _ ->
        (* We work in two ways:
         *  1. iteration on the whole map
         *  2. tree visit
         *
         * Since in the first case 'key' is a complete URI, while in the
         * second case it is only the subtree rooted in the query regex,
         * we must rely only on the localpath.
         *
         * example:
         *   query   ::= cic:/aaa/bbb/
         *
         *   answer1 ::= the whole map
         *
         *   answer2 ::= [ "ccc/"; "c1.something" ] where
         *     cic:/aaa/bbb/ccc/ and cic:/aaa/bbb/c1.something
         *     are the (partial) URIs that matched the query
         *
         * After the localpath extraction we have more URIs in the first
         * case, but at least they are all rooted in the same node.
         *
         * Tree.get_frontier may be changed to return the same stuff as the
         * map iteration + localpath extraction does, but I hope it is not
         * necessary. *)
        match key with
        | uri when looking_for_dir && check_if_dir_RE uri ->
            (* directory hit *)
            let localpart = calculate_localpart uri in
            if Pcre.pmatch ~rex:no_slashes_RE localpart then begin
              (* Backward compatibility code to map theory:/path/t.theory
                 into theory:/path/t/index.theory when cic:/path/t/ exists *)
              if Pcre.pmatch ~rex:theory_RE localpart then
                candidates_found := localpart :: !candidates_found
              else
                store_obj localpart
            end else begin
              store_dir localpart;
              if Pcre.pmatch localpart ~rex:dir_local_RE then begin
                let valid =
                  Pcre.replace ~rex:first_comp_RE localpart ^ ".theory"
                in
                if not (List.mem valid !valid_candidates) then
                  valid_candidates := valid :: !valid_candidates
              end
            end
        | uri when (not looking_for_dir) && check_if_obj_RE uri ->
            (* file hit *)
            store_obj (Pcre.replace ~rex:til_slash_RE uri)
        | uri -> ());
    (* (* miss *)
       if !index_not_generated_yet && Pcre.pmatch ~rex:orig_theory_RE uri then
         (index_not_generated_yet := false ; store_obj "index.theory")); *)
    store_obj "index.theory";
    List.iter
      (fun localpart ->
        if not (List.mem localpart !valid_candidates) then
          store_obj localpart)
      !candidates_found;
    let ls_items = ref [] in
    StringSet.iter
      (fun dir -> ls_items := Ls_section dir :: !ls_items)
      !dirs;
    Http_getter_misc.hashtbl_sorted_iter
      (fun uri (annflag, typesflag, bodyflag, treeflag) ->
        ls_items :=
          Ls_object
            { uri = uri; ann = annflag; types = typesflag;
              body = bodyflag; proof_tree = treeflag }
          :: !ls_items)
      objs;
    List.rev !ls_items
  end

(* Shorthands from now on *)

let getxml' uri = getxml (UriManager.string_of_uri uri)
let resolve' uri = resolve (UriManager.string_of_uri uri)
let register' uri url = register ~uri:(UriManager.string_of_uri uri) ~url
let unregister' uri = unregister (UriManager.string_of_uri uri)

let sync_dump_file () = if is_prefetch_on () then dump_tree ()

let init () =
  Http_getter_logger.set_log_level
    (Helm_registry.get_opt_default Helm_registry.get_int 1 "getter.log_level");
  Http_getter_logger.set_log_file
    (Helm_registry.get_opt Helm_registry.get_string "getter.log_file");
  Http_getter_env.reload ();
  let is_prefetch_set =
    Helm_registry.get_opt_default Helm_registry.get_bool false
      "getter.prefetch"
  in
  if is_prefetch_set then
    ignore (Thread.create sync_with_map ())
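(* Illustrative client-side usage in local mode; the URI is made up and must
 * already be resolvable (e.g. after update or register):
 *
 *   let () =
 *     init ();
 *     let tmp = getxml ~format:`Normal ~patch_dtd:true "cic:/Test/nat.ind" in
 *     Http_getter_logger.log ("fetched into " ^ tmp);
 *     Sys.remove tmp
 *)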