X-Git-Url: http://matita.cs.unibo.it/gitweb/?a=blobdiff_plain;ds=sidebyside;f=helm%2Fhttp_getter%2Fhttp_getter.ml;h=32c4882dfd09f58a5ec4120c8129a51937b282d7;hb=3f81a72dbdc7f2ffb17cc1ecd4e6a39270891c77;hp=982544bce69e606545660e839f7d539b471fb7ac;hpb=1e34c17c30ee6c992f3759bd0f0d5012e6803092;p=helm.git

diff --git a/helm/http_getter/http_getter.ml b/helm/http_getter/http_getter.ml
index 982544bce..32c4882df 100644
--- a/helm/http_getter/http_getter.ml
+++ b/helm/http_getter/http_getter.ml
@@ -1,5 +1,7 @@
 (*
- * Copyright (C) 2003, HELM Team.
+ * Copyright (C) 2003-2004:
+ *    Stefano Zacchiroli
+ *    for the HELM Team http://helm.cs.unibo.it/
  *
  * This file is part of HELM, an Hypertextual, Electronic
  * Library of Mathematics, developed at the Computer Science
@@ -21,219 +23,43 @@
  * MA 02111-1307, USA.
  *
  * For details, see the HELM World-Wide-Web page,
- * http://cs.unibo.it/helm/.
+ * http://helm.cs.unibo.it/
  *)
 
-open Http_getter_common;;
-open Http_getter_misc;;
-open Http_getter_types;;
-open Http_getter_debugger;;
-open Printf;;
+open Printf
 
-  (* constants *)
+open Http_getter_common
+open Http_getter_misc
+open Http_getter_debugger
+open Http_getter_types
 
-let common_headers = [
-  "Cache-Control", "no-cache";
-  "Pragma", "no-cache";
-  "Expires", "0"
-]
-
-  (* HTTP queries argument parsing *)
-
-let parse_enc (req: Http_types.request) =
-  try
-    (match req#param "format" with
-    | "normal" -> Enc_normal
-    | "gz" -> Enc_gzipped
-    | s -> raise (Http_getter_bad_request ("Invalid format: " ^ s)))
-  with Http_types.Param_not_found _ -> Enc_normal
-;;
-let parse_patch_dtd (req: Http_types.request) =
-  match req#param "patch_dtd" with
-  | s when String.lowercase s = "yes" -> true
-  | s when String.lowercase s = "no" -> false
-  | s -> raise (Http_getter_bad_request ("Invalid patch_dtd value: " ^ s))
-;;
-let parse_output_format (req: Http_types.request) =
-  match req#param "format" with
-  | s when String.lowercase s = "txt" -> Fmt_text
-  | s when String.lowercase s = "xml" -> Fmt_xml
-  | s -> raise (Http_getter_bad_request ("Invalid /ls format: " ^ s))
-;;
-let parse_ls_uri =
-  let parse_ls_RE = Pcre.regexp "^(\\w+):(.*)$" in
-  let trailing_slash_RE = Pcre.regexp "/+$" in
-  fun (req: Http_types.request) ->
-    let baseuri = req#param "baseuri" in
-    let subs =
-      Pcre.extract ~rex:parse_ls_RE
-        (Pcre.replace ~rex:trailing_slash_RE baseuri)
-    in
-    match (subs.(1), subs.(2)) with
-    | "cic", uri -> Cic uri
-    | "theory", uri -> Theory uri
-    | _ -> raise (Http_getter_bad_request ("Invalid /ls baseuri: " ^ baseuri))
-;;
+let (index_line_sep_RE, index_sep_RE, trailing_types_RE,
+     heading_cic_RE, heading_theory_RE, heading_nuprl_RE,
+     heading_rdf_cic_RE, heading_rdf_theory_RE) =
+  (Pcre.regexp "[ \t]+", Pcre.regexp "\r\n|\r|\n",
+   Pcre.regexp "\\.types$",
+   Pcre.regexp "^cic:", Pcre.regexp "^theory:", Pcre.regexp "^nuprl:",
+   Pcre.regexp "^helm:rdf.*//cic:", Pcre.regexp "^helm:rdf.*//theory:")
 
   (* global maps, shared by all threads *)
 
-let xml_map = new Http_getter_map.map Http_getter_env.xml_dbm in
-let rdf_map = new Http_getter_map.map Http_getter_env.rdf_dbm in
-let xsl_map = new Http_getter_map.map Http_getter_env.xsl_dbm in
+let cic_map = new Http_getter_map.map Http_getter_env.cic_dbm
+let nuprl_map = new Http_getter_map.map Http_getter_env.nuprl_dbm
+let rdf_map = new Http_getter_map.map Http_getter_env.rdf_dbm
+let xsl_map = new Http_getter_map.map Http_getter_env.xsl_dbm
+
+let maps = [ cic_map; nuprl_map; rdf_map; xsl_map ]
+let close_maps () = List.iter (fun m -> m#close) maps
+let clear_maps () = List.iter (fun m -> m#clear) maps
+let sync_maps () = List.iter (fun m -> m#sync) maps
 
-let save_maps () = xml_map#close; rdf_map#close; xsl_map#close in
 let map_of_uri = function
-  | uri when is_xml_uri uri -> xml_map
+  | uri when is_cic_uri uri -> cic_map
+  | uri when is_nuprl_uri uri -> nuprl_map
   | uri when is_rdf_uri uri -> rdf_map
   | uri when is_xsl_uri uri -> xsl_map
-  | uri -> raise (Http_getter_unresolvable_URI uri)
-in
-let resolve uri = (map_of_uri uri)#resolve uri in
-let register uri = (map_of_uri uri )#add uri in
-let return_all_foo_uris map doctype filter outchan =
-  (** return all URIs contained in 'map' which satisfy predicate 'filter'; URIs
-  are written in an XMLish format ('doctype' is the XML doctype) onto 'outchan'
-  *)
-  Http_daemon.send_basic_headers ~code:200 outchan;
-  Http_daemon.send_header "Content-Type" "text/xml" outchan;
-  Http_daemon.send_headers common_headers outchan;
-  Http_daemon.send_CRLF outchan;
-  output_string
-    outchan
-    (sprintf
-"<?xml version=\"1.0\"?>
-<!DOCTYPE %s SYSTEM \"%s/getdtd?uri=%s.dtd\">
-
-<%s>
-"
-      doctype
-      Http_getter_env.my_own_url
-      doctype
-      doctype);
-  map#iter
-    (fun uri _ ->
-      if filter uri then
-        output_string outchan (sprintf "\t<uri value=\"%s\" />\n" uri));
-  output_string outchan (sprintf "</%s>\n" doctype)
-in
-let return_all_xml_uris = return_all_foo_uris xml_map "alluris" in
-let return_all_rdf_uris = return_all_foo_uris rdf_map "allrdfuris" in
-let return_ls =
-  let (++) (oldann, oldtypes, oldbody) (newann, newtypes, newbody) =
-    ((if newann > oldann then newann else oldann),
-     (if newtypes > oldtypes then newtypes else oldtypes),
-     (if newbody > oldbody then newbody else oldbody))
-  in
-  let basepart_RE =
-    Pcre.regexp "^([^.]*\\.[^.]*)((\\.body)|(\\.types))?(\\.ann)?"
-  in
-  let (types_RE, types_ann_RE, body_RE, body_ann_RE) =
-    (Pcre.regexp "\\.types", Pcre.regexp "\\.types.ann",
-     Pcre.regexp "\\.body", Pcre.regexp "\\.body.ann")
-  in
-  let (slash_RE, til_slash_RE, no_slashes_RE) =
-    (Pcre.regexp "/", Pcre.regexp "^.*/", Pcre.regexp "^[^/]*$")
-  in
-  fun lsuri fmt outchan ->
-    let pat =
-      "^" ^
-      (match lsuri with Cic p -> ("cic:" ^ p) | Theory p -> ("theory:" ^ p))
-    in
-    let (dir_RE, obj_RE) =
-      (Pcre.regexp (pat ^ "/"), Pcre.regexp (pat ^ "(\\.|$)"))
-    in
-    let dirs = ref StringSet.empty in
-    let objs = Hashtbl.create 17 in
-    let store_dir d =
-      dirs := StringSet.add (List.hd (Pcre.split ~rex:slash_RE d)) !dirs
-    in
-    let store_obj o =
-      let basepart = Pcre.replace ~rex:basepart_RE ~templ:"$1" o in
-      let oldflags =
-        try
-          Hashtbl.find objs basepart
-        with Not_found -> (false, No, No) (* no ann, no types, no body *)
-      in
-      let newflags =
-        match o with
-        | s when Pcre.pmatch ~rex:types_RE s -> (false, Yes, No)
-        | s when Pcre.pmatch ~rex:types_ann_RE s -> (true, Ann, No)
-        | s when Pcre.pmatch ~rex:body_RE s -> (false, No, Yes)
-        | s when Pcre.pmatch ~rex:body_ann_RE s -> (true, No, Ann)
-        | s -> (false, No, No)
-      in
-      Hashtbl.replace objs basepart (oldflags ++ newflags)
-    in
-    xml_map#iter (* BLEARGH Dbm module lacks support for fold-like functions *)
-      (fun key _ ->
-        match key with
-        | uri when Pcre.pmatch ~rex:dir_RE uri -> (* directory hit *)
-            let localpart = Pcre.replace ~rex:dir_RE uri in
-            if Pcre.pmatch ~rex:no_slashes_RE localpart then
-              store_obj localpart
-            else
-              store_dir localpart
-        | uri when Pcre.pmatch ~rex:obj_RE uri -> (* file hit *)
-            store_obj (Pcre.replace ~rex:til_slash_RE uri)
-        | uri -> () (* miss *));
-    match fmt with
-    | Fmt_text ->
-        let body =
-          (List.fold_left
-            (fun s d -> sprintf "%sdir, %s\n" s d) ""
-            (StringSet.elements !dirs)) ^
-          (Http_getter_misc.hashtbl_sorted_fold
-            (fun uri (annflag, typesflag, bodyflag) cont ->
-              sprintf "%sobject, %s, <%s,%s,%s>\n"
-                cont uri (if annflag then "YES" else "NO")
-                (string_of_ls_flag typesflag) (string_of_ls_flag bodyflag))
-            objs "")
-        in
-        Http_daemon.respond
-          ~headers:(("Content-Type", "text/plain") :: common_headers)
-          ~body outchan
-    | Fmt_xml ->
-        let body =
-          sprintf
-"<?xml version=\"1.0\"?>
-<!DOCTYPE ls SYSTEM \"%s/getdtd?uri=ls.dtd\">
+  | uri -> raise (Unresolvable_URI uri)
 
-<ls>
-%s
-</ls>
-"
-            Http_getter_env.my_own_url
-            ("\n" ^
-             (String.concat
-               "\n"
-               (List.map
-                 (fun d -> "<section>" ^ d ^ "</section>")
                 (StringSet.elements !dirs))) ^ "\n" ^
-             (Http_getter_misc.hashtbl_sorted_fold
-               (fun uri (annflag, typesflag, bodyflag) cont ->
-                 sprintf
-"%s<object name=\"%s\">
-\t<ann value=\"%s\" />
-\t<types value=\"%s\" />
-\t<body value=\"%s\" />
-</object>
-"
-                   cont uri (if annflag then "YES" else "NO")
-                   (string_of_ls_flag typesflag)
-                   (string_of_ls_flag bodyflag))
-               objs ""))
-        in
-        Http_daemon.respond
-          ~headers:(("Content-Type", "text/xml") :: common_headers)
-          ~body outchan
-in
-let (index_line_sep_RE, index_sep_RE, trailing_types_RE,
-     heading_cic_RE, heading_theory_RE,
-     heading_rdf_cic_RE, heading_rdf_theory_RE) =
-  (Pcre.regexp "[ \t]+", Pcre.regexp "\n+", Pcre.regexp "\\.types$",
-   Pcre.regexp "^cic:", Pcre.regexp "^theory:",
-   Pcre.regexp "^helm:rdf.*//cic:", Pcre.regexp "^helm:rdf.*//theory:")
-in
 let update_from_server logmsg server_url = (* use global maps *)
   debug_print ("Updating information from " ^ server_url);
   let xml_url_of_uri = function
@@ -242,16 +68,18 @@ let update_from_server logmsg server_url = (* use global maps *)
       Pcre.replace ~rex:heading_cic_RE ~templ:server_url uri
   | uri when (Pcre.pmatch ~rex:heading_theory_RE uri) ->
       Pcre.replace ~rex:heading_theory_RE ~templ:server_url uri
-  | uri -> raise (Http_getter_invalid_URI uri)
+  | uri when (Pcre.pmatch ~rex:heading_nuprl_RE uri) ->
+      Pcre.replace ~rex:heading_nuprl_RE ~templ:server_url uri
+  | uri -> raise (Invalid_URI uri)
   in
   let rdf_url_of_uri = function (* TODO as above *)
   | uri when (Pcre.pmatch ~rex:heading_rdf_cic_RE uri) ->
      Pcre.replace ~rex:heading_rdf_cic_RE ~templ:server_url uri
  | uri when (Pcre.pmatch ~rex:heading_rdf_theory_RE uri) ->
      Pcre.replace ~rex:heading_rdf_theory_RE ~templ:server_url uri
-  | uri -> raise (Http_getter_invalid_URI uri)
+  | uri -> raise (Invalid_URI uri)
  in
-  let log = ref (logmsg ^ "Processing server: " ^ server_url ^ "<br />\n") in
+  let log = ref (`T ("Processing server: " ^ server_url) :: logmsg) in
   let (xml_index, rdf_index, xsl_index) =
     (* TODO keeps index in memory, is better to keep them on temp files? *)
     (http_get (server_url ^ "/" ^ Http_getter_env.xml_index),
@@ -262,153 +90,240 @@ let update_from_server logmsg server_url = (* use global maps *)
     debug_print (sprintf "Warning: useless server %s" server_url);
   (match xml_index with
   | Some xml_index ->
-      (log := !log ^ "Updating XML db ...<br />\n";
+      (log := `T "Updating XML db ..." :: !log;
       List.iter
-        (fun l ->
-          try
-            (match Pcre.split ~rex:index_line_sep_RE l with
-            | [uri; "gz"] -> xml_map#add uri ((xml_url_of_uri uri) ^ ".xml.gz")
-            | [uri] -> xml_map#add uri ((xml_url_of_uri uri) ^ ".xml")
-            | _ -> log := !log ^ "Ignoring invalid line: " ^ l ^ "<br />\n")
-          with Http_getter_invalid_URI uri ->
-            log := !log ^ "Ignoring invalid XML URI: " ^ uri ^ "<br />\n")
-        (Pcre.split ~rex:index_sep_RE xml_index)) (* xml_index lines *)
+        (function
+          | l when is_blank_line l -> () (* skip blank and commented lines *)
+          | l ->
+              (try
+                (match Pcre.split ~rex:index_line_sep_RE l with
+                | [uri; "gz"] ->
+                    assert (is_cic_uri uri || is_nuprl_uri uri) ;
+                    (map_of_uri uri)#replace
+                      uri ((xml_url_of_uri uri) ^ ".xml.gz")
+                | [uri] ->
+                    assert (is_cic_uri uri || is_nuprl_uri uri) ;
+                    (map_of_uri uri)#replace
+                      uri ((xml_url_of_uri uri) ^ ".xml")
+                | _ -> log := `T ("Ignoring invalid line: '" ^ l) :: !log)
+              with Invalid_URI uri ->
+                log := `T ("Ignoring invalid XML URI: '" ^ l) :: !log))
+        (Pcre.split ~rex:index_sep_RE xml_index); (* xml_index lines *)
+      log := `T "All done" :: !log)
   | None -> ());
   (match rdf_index with
   | Some rdf_index ->
-      (log := !log ^ "Updating RDF db ...<br />\n";
+      (log := `T "Updating RDF db ..." :: !log;
       List.iter
        (fun l ->
         try
          (match Pcre.split ~rex:index_line_sep_RE l with
-         | [uri; "gz"] -> rdf_map#add uri ((rdf_url_of_uri uri) ^ ".xml.gz")
-         | [uri] -> rdf_map#add uri ((rdf_url_of_uri uri) ^ ".xml")
-         | _ -> log := !log ^ "Ignoring invalid line: " ^ l ^ "<br />\n")
-        with Http_getter_invalid_URI uri ->
-         log := !log ^ "Ignoring invalid RDF URI: " ^ uri ^ "<br />\n")
-       (Pcre.split ~rex:index_sep_RE rdf_index)) (* rdf_index lines *)
+         | [uri; "gz"] ->
+             rdf_map#replace uri
+               ((rdf_url_of_uri uri) ^ ".xml.gz")
+         | [uri] ->
+             rdf_map#replace uri ((rdf_url_of_uri uri) ^ ".xml")
+         | _ -> log := `T ("Ignoring invalid line: '" ^ l) :: !log)
+        with Invalid_URI uri ->
+         log := `T ("Ignoring invalid RDF URI: '" ^ l) :: !log)
+       (Pcre.split ~rex:index_sep_RE rdf_index); (* rdf_index lines *)
+      log := `T "All done" :: !log)
   | None -> ());
   (match xsl_index with
   | Some xsl_index ->
-      (log := !log ^ "Updating XSLT db ...<br />\n";
+      (log := `T "Updating XSLT db ..." :: !log;
       List.iter
-        (fun l -> xsl_map#add l (server_url ^ "/" ^ l))
+        (fun l -> xsl_map#replace l (server_url ^ "/" ^ l))
        (Pcre.split ~rex:index_sep_RE xsl_index);
-      log := !log ^ "All done!<br />\n")
+      log := `T "All done" :: !log)
   | None -> ());
   debug_print "done with this server";
   !log
-in
 
-  (* thread action *)
+let update_from_all_servers () = (* use global maps *)
+  clear_maps ();
+  let log =
+    List.fold_left
+      update_from_server
+      [] (* initial logmsg: empty *)
+      (* reverse order: 1st server is the most important one *)
+      (List.map snd (List.rev (Http_getter_env.servers ())))
+  in
+  sync_maps ();
+  `Msg (`L (List.rev log))
 
-let callback (req: Http_types.request) outchan =
-  try
-    debug_print ("Connection from " ^ req#clientAddr);
-    debug_print ("Received request: " ^ req#path);
-    (match req#path with
-    | "/help" -> return_html_msg Http_getter_const.usage_string outchan
-    | "/getxml" | "/getxslt" | "/getdtd" | "/resolve" | "/register" ->
-        (let uri = req#param "uri" in (* common parameter *)
-        match req#path with
-        | "/getxml" ->
-            let enc = parse_enc req in
-            let patch_dtd =
-              try parse_patch_dtd req with Http_types.Param_not_found _ -> true
-            in
-            Http_getter_cache.respond_xml
-              ~url:(resolve uri) ~uri ~enc ~patch_dtd outchan
-        | "/getxslt" ->
-            let patch_dtd =
-              try parse_patch_dtd req with Http_types.Param_not_found _ -> true
-            in
-            Http_getter_cache.respond_xsl ~url:(resolve uri) ~patch_dtd outchan
-        | "/getdtd" ->
-            let patch_dtd =
-              try parse_patch_dtd req with Http_types.Param_not_found _ -> true
-            in
-            Http_getter_cache.respond_dtd
-              ~patch_dtd ~url:(Http_getter_env.dtd_dir ^ "/" ^ uri) outchan
-        | "/resolve" ->
-            (try
-              return_xml_msg
-                (sprintf "<url value=\"%s\" />\n" (resolve uri))
-                outchan
-            with Http_getter_unresolvable_URI uri ->
-              return_xml_msg "<unresolved />\n" outchan)
-        | "/register" ->
-            let url = req#param "url" in
-            register uri url;
-            return_html_msg "Register done" outchan
-        | _ -> assert false)
-    | "/update" ->
-        (xml_map#clear; rdf_map#clear; xsl_map#clear;
-        let log =
-          List.fold_left
-            update_from_server
-            "" (* initial logmsg: empty *)
-            (* reverse order: 1st server is the most important one *)
-            (List.rev Http_getter_env.servers)
-        in
-        xml_map#sync; rdf_map#sync; xsl_map#sync;
-        return_html_msg log outchan)
-    | "/getalluris" ->
-        return_all_xml_uris
-          (fun uri ->
-            (Pcre.pmatch ~rex:heading_cic_RE uri) &&
-            not (Pcre.pmatch ~rex:trailing_types_RE uri))
-          outchan
-    | "/getallrdfuris" ->
-        (let classs = req#param "class" in
-        try
-          let filter =
-            let base = "^helm:rdf:www\\.cs\\.unibo\\.it/helm/rdf/" in
-            match classs with
-            | ("forward" as c) | ("backward" as c) ->
-                (fun uri -> Pcre.pmatch ~pat:(base ^ c) uri)
-            | c -> raise (Http_getter_invalid_RDF_class c)
-          in
-          return_all_rdf_uris filter outchan
-        with Http_getter_invalid_RDF_class c ->
-          raise (Http_getter_bad_request ("Invalid RDF class: " ^ c)))
-    | "/ls" -> return_ls (parse_ls_uri req) (parse_output_format req) outchan
-    | "/getempty" ->
-        Http_daemon.respond ~body:Http_getter_const.empty_xml outchan
-    | invalid_request ->
-        Http_daemon.respond_error ~status:(`Client_error `Bad_request) outchan);
-    debug_print "Done!\n"
-  with
-  | Http_types.Param_not_found attr_name ->
-      return_400 (sprintf "Parameter '%s' is missing" attr_name) outchan
-  | Http_getter_bad_request msg -> return_html_error msg outchan
-  | Http_getter_internal_error msg -> return_html_internal_error msg outchan
-  | Shell.Subprocess_error l ->
-      return_html_internal_error
-        (String.concat "<br />\n"
-          (List.map
-            (fun (cmd, code) ->
-              sprintf "Command '%s' returned %s"
-                cmd (string_of_proc_status code))
-            l))
-        outchan
-  | exc ->
-      return_html_error
-        ("Uncaught exception: " ^ (Printexc.to_string exc))
-        outchan
-in
+let update_from_one_server server_url =
+  let log = update_from_server [] server_url in
+  `Msg (`L (List.rev log))
 
-  (* daemon initialization *)
+let temp_file_of_uri uri =
+  let flat_string s s' c =
+    let cs = String.copy s in
+    for i = 0 to (String.length s) - 1 do
+      if String.contains s' s.[i] then cs.[i] <- c
+    done;
+    cs
+  in
+  let user = try Unix.getlogin () with _ -> "" in
+  Filename.open_temp_file (user ^ flat_string uri ".-=:;!?/&" '_') ""
+
+(* API *)
+
+let help () = Http_getter_const.usage_string (Http_getter_env.env_to_string ())
 
-let main () =
-  Http_getter_env.dump_env ();
-  Unix.putenv "http_proxy" "";
-  at_exit save_maps;
-  Sys.catch_break true;
+let resolve uri =
   try
-    Http_daemon.start'
-      ~timeout:(Some 600) ~port:Http_getter_env.port ~mode:`Thread callback
-  with Sys.Break -> () (* 'save_maps' already registered with 'at_exit' *)
-in
+    (map_of_uri uri)#resolve uri
+  with Http_getter_map.Key_not_found _ -> raise (Unresolvable_URI uri)
+
+  (* Warning: this fails if uri is already registered *)
+let register ~uri ~url = (map_of_uri uri)#add uri url
+
+let update () = update_from_all_servers ()
+
+let getxml ?(format = Enc_normal) ?(patch_dtd = true) uri =
+  let url = resolve uri in
+  let (fname, outchan) = temp_file_of_uri uri in
+  Http_getter_cache.respond_xml ~uri ~url ~enc:format ~patch:patch_dtd outchan;
+  close_out outchan;
+  fname
+
+let getxslt ?(patch_dtd = true) uri =
+  let url = resolve uri in
+  let (fname, outchan) = temp_file_of_uri uri in
+  Http_getter_cache.respond_xsl ~url ~patch:patch_dtd outchan;
+  close_out outchan;
+  fname
+
+let getdtd ?(patch_dtd = true) uri =
+  let url = Http_getter_env.dtd_dir ^ "/" ^ uri in
+  let (fname, outchan) = temp_file_of_uri uri in
+  Http_getter_cache.respond_dtd ~url ~patch:patch_dtd outchan;
+  close_out outchan;
+  fname
+
+let clean_cache () = Http_getter_cache.clean ()
+
+let list_servers () = Http_getter_env.servers ()
+
+let add_server ?(position = 0) name =
+  if position = 0 then begin
+    Http_getter_env.add_server ~position:0 name;
+    update_from_one_server name (* quick update (new server only) *)
+  end else if position > 0 then begin
+    Http_getter_env.add_server ~position name;
+    update ()
+  end else (* already checked by parse_position *)
+    assert false
+
+let remove_server position =
+  let server_name =
+    try
+      List.assoc position (Http_getter_env.servers ())
+    with Not_found ->
+      raise (Invalid_argument (sprintf "no server with position %d" position))
+  in
+  Http_getter_env.remove_server position;
+  update ()
+
+let return_uris map filter =
+  let uris = ref [] in
+  map#iter (fun uri _ -> if filter uri then uris := uri :: !uris);
+  List.rev !uris
+
+let getalluris () =
+  let filter uri =
+    (Pcre.pmatch ~rex:heading_cic_RE uri) &&
+    not (Pcre.pmatch ~rex:trailing_types_RE uri)
+  in
+  return_uris cic_map filter
 
-main ()
+let getallrdfuris classs =
+  let filter =
+    let base = "^helm:rdf:www\\.cs\\.unibo\\.it/helm/rdf/" in
+    match classs with
+    | `Forward -> (fun uri -> Pcre.pmatch ~pat:(base ^ "forward") uri)
+    | `Backward -> (fun uri -> Pcre.pmatch ~pat:(base ^ "backward") uri)
+  in
+  return_uris rdf_map filter
+
+let ls =
+  let (++) (oldann, oldtypes, oldbody, oldtree)
+           (newann, newtypes, newbody, newtree) =
+    ((if newann > oldann then newann else oldann),
+     (if newtypes > oldtypes then newtypes else oldtypes),
+     (if newbody > oldbody then newbody else oldbody),
+     (if newtree > oldtree then newtree else oldtree))
+  in
+  let basepart_RE =
+    Pcre.regexp
+      "^([^.]*\\.[^.]*)((\\.body)|(\\.proof_tree)|(\\.types))?(\\.ann)?$"
+  in
+  let (types_RE, types_ann_RE, body_RE, body_ann_RE,
+       proof_tree_RE, proof_tree_ann_RE) =
+    (Pcre.regexp "\\.types$", Pcre.regexp "\\.types\\.ann$",
+     Pcre.regexp "\\.body$", Pcre.regexp "\\.body\\.ann$",
+     Pcre.regexp "\\.proof_tree$", Pcre.regexp "\\.proof_tree\\.ann$")
+  in
+  let (slash_RE, til_slash_RE, no_slashes_RE) =
+    (Pcre.regexp "/", Pcre.regexp "^.*/", Pcre.regexp "^[^/]*$")
+  in
+  fun lsuri ->
+    let pat =
+      "^" ^
+      (match lsuri with Cic p -> ("cic:" ^ p) | Theory p -> ("theory:" ^ p))
+    in
+    let (dir_RE, obj_RE) =
+      (Pcre.regexp (pat ^ "/"), Pcre.regexp (pat ^ "(\\.|$)"))
+    in
+    let dirs = ref StringSet.empty in
+    let objs = Hashtbl.create 17 in
+    let store_dir d =
+      dirs := StringSet.add (List.hd (Pcre.split ~rex:slash_RE d)) !dirs
+    in
+    let store_obj o =
+      let basepart = Pcre.replace ~rex:basepart_RE ~templ:"$1" o in
+      let no_flags = false, No, No, No in
+      let oldflags =
+        try
+          Hashtbl.find objs basepart
+        with Not_found -> (* no ann, no types, no body, no proof tree *)
+          no_flags
+      in
+      let newflags =
+        match o with
+        | s when Pcre.pmatch ~rex:types_RE s -> (false, Yes, No, No)
+        | s when Pcre.pmatch ~rex:types_ann_RE s -> (true, Ann, No, No)
+        | s when Pcre.pmatch ~rex:body_RE s -> (false, No, Yes, No)
+        | s when Pcre.pmatch ~rex:body_ann_RE s -> (true, No, Ann, No)
+        | s when Pcre.pmatch ~rex:proof_tree_RE s -> (false, No, No, Yes)
+        | s when Pcre.pmatch ~rex:proof_tree_ann_RE s -> (true, No, No, Ann)
+        | s -> no_flags
+      in
+      Hashtbl.replace objs basepart (oldflags ++ newflags)
+    in
+    cic_map#iter
+      (* BLEARGH Dbm module lacks support for fold-like functions *)
+      (fun key _ ->
+        match key with
+        | uri when Pcre.pmatch ~rex:dir_RE uri -> (* directory hit *)
+            let localpart = Pcre.replace ~rex:dir_RE uri in
+            if Pcre.pmatch ~rex:no_slashes_RE localpart then
+              store_obj localpart
+            else
+              store_dir localpart
+        | uri when Pcre.pmatch ~rex:obj_RE uri -> (* file hit *)
+            store_obj (Pcre.replace ~rex:til_slash_RE uri)
+        | uri -> () (* miss *));
+    let ls_items = ref [] in
+    StringSet.iter (fun dir -> ls_items := Ls_section dir :: !ls_items) !dirs;
+    Http_getter_misc.hashtbl_sorted_iter
+      (fun uri (annflag, typesflag, bodyflag, treeflag) ->
+        ls_items :=
+          Ls_object {
+            uri = uri; ann = annflag; types = typesflag; body = bodyflag;
+            proof_tree = treeflag
+          } :: !ls_items)
+      objs;
+    List.rev !ls_items
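
The hunks above replace the standalone HTTP daemon callback with a library-level API (resolve, register, getxml, getxslt, getdtd, update, ls, ...). A minimal sketch of client code driving that API, assuming the file is linked as the Http_getter module and that Cic, Ls_section, Ls_object, Enc_normal and Unresolvable_URI come from Http_getter_types as in the rest of this tree, might look like the following; the sample URIs are only placeholders.

(* Illustrative sketch only, not part of the patch. *)
open Http_getter_types

let () =
  (* refresh the maps from all configured servers *)
  ignore (Http_getter.update ());
  (* resolve a logical URI to the URL it will be fetched from *)
  (try
    Printf.printf "resolved to %s\n"
      (Http_getter.resolve "cic:/Coq/Init/Logic/True.ind")
  with Unresolvable_URI uri -> Printf.printf "cannot resolve %s\n" uri);
  (* getxml downloads the object into a temp file and returns its name *)
  let tmp =
    Http_getter.getxml ~format:Enc_normal "cic:/Coq/Init/Logic/True.ind"
  in
  Printf.printf "XML stored in %s\n" tmp;
  (* ls lists sections and objects below a base URI *)
  List.iter
    (function
      | Ls_section dir -> Printf.printf "dir    %s\n" dir
      | Ls_object obj -> Printf.printf "object %s\n" obj.uri)
    (Http_getter.ls (Cic "/Coq/Init/Logic"))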