X-Git-Url: http://matita.cs.unibo.it/gitweb/?a=blobdiff_plain;f=helm%2Fhttp_getter%2Fhttp_getter.ml;h=b5cfa47d9564561c09783dfb85b702ae280f7605;hb=85ccebb566c36671ca753debe09e6dd5c9dd0df7;hp=bb9c78e7efb738205a1f136c7b10ed4e52aab3bb;hpb=86e80bc65186bf4c2824dc94f5f4dd5966843f14;p=helm.git

diff --git a/helm/http_getter/http_getter.ml b/helm/http_getter/http_getter.ml
index bb9c78e7e..b5cfa47d9 100644
--- a/helm/http_getter/http_getter.ml
+++ b/helm/http_getter/http_getter.ml
@@ -1,5 +1,7 @@
 (*
- * Copyright (C) 2000, HELM Team.
+ * Copyright (C) 2003:
+ *    Stefano Zacchiroli
+ *    for the HELM Team http://helm.cs.unibo.it/
  *
  * This file is part of HELM, an Hypertextual, Electronic
  * Library of Mathematics, developed at the Computer Science
@@ -21,32 +23,27 @@
  * MA 02111-1307, USA.
  *
  * For details, see the HELM World-Wide-Web page,
- * http://cs.unibo.it/helm/.
+ * http://helm.cs.unibo.it/
  *)

-(* TODO optimization: precompile almost all regexp *)
-
 open Http_getter_common;;
+open Http_getter_misc;;
 open Http_getter_types;;
+open Http_getter_debugger;;
 open Printf;;

-  (* debugging settings *)
-let debug = true;;
-let debug_print s = if debug then prerr_endline ("[HTTP-Getter] " ^ s);;
-let http_debug = false;;
-Http_common.debug := http_debug;;
+  (* constants *)

-let http_get url =
-  debug_print ("Downloading URL: " ^ url);
-  try
-    Some (Http_client.Convenience.http_get url)
-  with Http_client.Http_error (code, _) ->
-    (debug_print
-      (sprintf "Failed to download %s, HTTP response was %d" url code);
-    None)
-;;
+let common_headers = [
+  "Cache-Control", "no-cache";
+  "Pragma", "no-cache";
+  "Expires", "0"
+]

-let parse_format (req: Http_types.request) =
+  (* HTTP queries argument parsing *)
+
+  (* parse encoding ("format" parameter), default is Enc_normal *)
+let parse_enc (req: Http_types.request) =
   try
     (match req#param "format" with
     | "normal" -> Enc_normal
@@ -54,7 +51,8 @@ let parse_format (req: Http_types.request) =
     | s -> raise (Http_getter_bad_request ("Invalid format: " ^ s)))
   with Http_types.Param_not_found _ -> Enc_normal
 ;;
-let parse_patch_dtd (req: Http_types.request) =
+  (* parse "patch_dtd" parameter, default is true *)
+let parse_patch (req: Http_types.request) =
   try
     (match req#param "patch_dtd" with
     | s when String.lowercase s = "yes" -> true
@@ -62,46 +60,65 @@ let parse_patch_dtd (req: Http_types.request) =
     | s -> raise (Http_getter_bad_request ("Invalid patch_dtd value: " ^ s)))
   with Http_types.Param_not_found _ -> true
 ;;
-let parse_ls_format (req: Http_types.request) =
+  (* parse output format ("format" parameter), no default value *)
+let parse_output_format (req: Http_types.request) =
   match req#param "format" with
   | s when String.lowercase s = "txt" -> Fmt_text
   | s when String.lowercase s = "xml" -> Fmt_xml
   | s -> raise (Http_getter_bad_request ("Invalid /ls format: " ^ s))
 ;;
-let parse_ls_uri (req: Http_types.request) =
-  let baseuri = req#param "baseuri" in
-  let subs =
-    Pcre.extract ~pat:"^(\\w+):(.*)$" (Pcre.replace ~pat:"/+$" baseuri)
-  in
-  match (subs.(1), subs.(2)) with
-  | "cic", uri -> Cic uri
-  | "theory", uri -> Theory uri
-  | _ -> raise (Http_getter_bad_request ("Invalid /ls baseuri: " ^ baseuri))
+  (* parse "baseuri" format for /ls method, no default value *)
+let parse_ls_uri =
+  let parse_ls_RE = Pcre.regexp "^(\\w+):(.*)$" in
+  let trailing_slash_RE = Pcre.regexp "/+$" in
+  fun (req: Http_types.request) ->
+    let baseuri = req#param "baseuri" in
+    let subs =
+      Pcre.extract ~rex:parse_ls_RE
+        (Pcre.replace ~rex:trailing_slash_RE baseuri)
+    in
+    match (subs.(1), subs.(2)) with
+    | "cic", uri -> Cic uri
+    | "theory", uri -> Theory uri
+    | _ -> raise (Http_getter_bad_request ("Invalid /ls baseuri: " ^ baseuri))
 ;;

-let xml_map = new Http_getter_map.map Http_getter_env.xml_dbm in
+  (* global maps, shared by all threads *)
+
+let cic_map = new Http_getter_map.map Http_getter_env.cic_dbm in
+let nuprl_map = new Http_getter_map.map Http_getter_env.nuprl_dbm in
 let rdf_map = new Http_getter_map.map Http_getter_env.rdf_dbm in
 let xsl_map = new Http_getter_map.map Http_getter_env.xsl_dbm in
-let resolve uri = (* use global maps *)
-  let map =
-    match uri with
-    | uri when is_xml_uri uri -> xml_map
-    | uri when is_rdf_uri uri -> rdf_map
-    | uri when is_xsl_uri uri -> xsl_map
-    | uri -> raise (Http_getter_unresolvable_URI uri)
-  in
-  map#resolve uri
+let save_maps () =
+  cic_map#close; nuprl_map#close; rdf_map#close; xsl_map#close in
+let map_of_uri = function
+  | uri when is_cic_uri uri -> cic_map
+  | uri when is_nuprl_uri uri -> nuprl_map
+  | uri when is_rdf_uri uri -> rdf_map
+  | uri when is_xsl_uri uri -> xsl_map
+  | uri -> raise (Http_getter_unresolvable_URI uri)
+in
+let resolve uri =
+  try
+    (map_of_uri uri)#resolve uri
+  with Http_getter_map.Key_not_found _ ->
+    raise (Http_getter_unresolvable_URI uri)
 in
+let register uri = (map_of_uri uri)#add uri in
 let return_all_foo_uris map doctype filter outchan =
-  Http_daemon.send_basic_headers outchan;
+  (** return all URIs contained in 'map' which satisfy predicate 'filter'; URIs
+  are written in an XMLish format ('doctype' is the XML doctype) onto 'outchan'
+  *)
+  Http_daemon.send_basic_headers ~code:200 outchan;
   Http_daemon.send_header "Content-Type" "text/xml" outchan;
+  Http_daemon.send_headers common_headers outchan;
   Http_daemon.send_CRLF outchan;
   output_string outchan (sprintf
 "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>
-<!DOCTYPE %s SYSTEM \"%s/getdtd?uri=%s.dtd\">;
+<!DOCTYPE %s SYSTEM \"%s/getdtd?uri=%s.dtd\">

 <%s>
 "
@@ -115,111 +132,145 @@ let return_all_foo_uris map doctype filter outchan =
   output_string outchan (sprintf "\t<uri value=\"%s\" />\n" uri));
   output_string outchan (sprintf "</%s>\n" doctype)
 in
-let return_all_uris = return_all_foo_uris xml_map "alluris" in
+let return_all_xml_uris = return_all_foo_uris cic_map "alluris" in
 let return_all_rdf_uris = return_all_foo_uris rdf_map "allrdfuris" in
 let return_ls =
   let (++) (oldann, oldtypes, oldbody) (newann, newtypes, newbody) =
     ((if newann > oldann then newann else oldann),
      (if newtypes > oldtypes then newtypes else oldtypes),
-     (if newbody > oldbody then newbody else oldbody))
-  in
-  let basepartRE =
+     (if newbody > oldbody then newbody else oldbody))
+  in
+  let basepart_RE =
     Pcre.regexp "^([^.]*\\.[^.]*)((\\.body)|(\\.types))?(\\.ann)?"
   in
-  fun lsuri format outchan ->
-    let pat =
-      "^" ^ (match lsuri with Cic p -> ("cic:" ^ p) | Theory p -> ("theory:" ^ p))
+  let (types_RE, types_ann_RE, body_RE, body_ann_RE) =
+    (Pcre.regexp "\\.types", Pcre.regexp "\\.types.ann",
+     Pcre.regexp "\\.body", Pcre.regexp "\\.body.ann")
   in
-  let dirs = ref [] in
-  let objs = Hashtbl.create 17 in
-  xml_map#iter (* BLEARGH Dbm module lacks support for fold-like functions *)
-    (fun _ -> function
-      | uri when Pcre.pmatch ~pat:(pat ^ "/") uri -> (* directory hit *)
-          let dir =
-            List.hd (Pcre.split ~pat:"/" (Pcre.replace ~pat:(pat ^ "/") uri))
-          in
-          dirs := dir :: !dirs
-      | uri when Pcre.pmatch ~pat:(pat ^ "(\\.|$)") uri -> (* object hit *)
-          let localpart = Pcre.replace ~pat:"^.*/" uri in
-          let basepart = Pcre.replace ~rex:basepartRE ~templ:"$1" localpart in
-          let oldflags =
-            try
-              Hashtbl.find objs basepart
-            with Not_found -> (false, No, No) (* no ann, no types no body *)
-          in
-          let newflags =
-            match localpart with
-            | s when Pcre.pmatch ~pat:"\\.types" s -> (false, Yes, No)
-            | s when Pcre.pmatch ~pat:"\\.types.ann" s -> (true, Ann, No)
-            | s when Pcre.pmatch ~pat:"\\.body" s -> (false, No, Yes)
-            | s when Pcre.pmatch ~pat:"\\.body.ann" s -> (true, No, Ann)
-            | s ->
-                raise
-                  (Http_getter_internal_error ("Invalid /ls localpart: " ^ s))
-          in
-          Hashtbl.replace objs basepart (oldflags ++ newflags)
-      | _ -> () (* miss *));
-    match format with
-    | Fmt_text ->
-        let body =
-          "dir, " ^ (String.concat "\ndir, " (List.sort compare !dirs)) ^ "\n" ^
-          (Http_getter_misc.hashtbl_sorted_fold
-            (fun uri (annflag, typesflag, bodyflag) cont ->
-              sprintf
-                "%sobject, %s, <%s,%s,%s>\n"
-                cont uri (if annflag then "YES" else "NO")
-                (string_of_ls_flag typesflag) (string_of_ls_flag bodyflag))
-            objs "")
-        in Http_daemon.respond ~headers:["Content-Type", "text/txt"] ~body outchan
-    | Fmt_xml ->
-        let body =
-          sprintf
+  let (slash_RE, til_slash_RE, no_slashes_RE) =
+    (Pcre.regexp "/", Pcre.regexp "^.*/", Pcre.regexp "^[^/]*$")
+  in
+  fun lsuri fmt outchan ->
+    let pat =
+      "^" ^
+      (match lsuri with Cic p -> ("cic:" ^ p) | Theory p -> ("theory:" ^ p))
+    in
+    let (dir_RE, obj_RE) =
+      (Pcre.regexp (pat ^ "/"), Pcre.regexp (pat ^ "(\\.|$)"))
+    in
+    let dirs = ref StringSet.empty in
+    let objs = Hashtbl.create 17 in
+    let store_dir d =
+      dirs := StringSet.add (List.hd (Pcre.split ~rex:slash_RE d)) !dirs
+    in
+    let store_obj o =
+      let basepart = Pcre.replace ~rex:basepart_RE ~templ:"$1" o in
+      let oldflags =
+        try
+          Hashtbl.find objs basepart
+        with Not_found -> (false, No, No) (* no ann, no types, no body *)
+      in
+      let newflags =
+        match o with
+        | s when Pcre.pmatch ~rex:types_RE s -> (false, Yes, No)
+        | s when Pcre.pmatch ~rex:types_ann_RE s -> (true, Ann, No)
+        | s when Pcre.pmatch ~rex:body_RE s -> (false, No, Yes)
+        | s when Pcre.pmatch ~rex:body_ann_RE s -> (true, No, Ann)
+        | s -> (false, No, No)
+      in
+      Hashtbl.replace objs basepart (oldflags ++ newflags)
+    in
+    cic_map#iter (* BLEARGH Dbm module lacks support for fold-like functions *)
+      (fun key _ ->
+        match key with
+        | uri when Pcre.pmatch ~rex:dir_RE uri -> (* directory hit *)
+            let localpart = Pcre.replace ~rex:dir_RE uri in
+            if Pcre.pmatch ~rex:no_slashes_RE localpart then
+              store_obj localpart
+            else
+              store_dir localpart
+        | uri when Pcre.pmatch ~rex:obj_RE uri -> (* file hit *)
+            store_obj (Pcre.replace ~rex:til_slash_RE uri)
+        | uri -> () (* miss *));
+    match fmt with
+    | Fmt_text ->
+        let body =
+          (List.fold_left
+            (fun s d -> sprintf "%sdir, %s\n" s d) ""
+            (StringSet.elements !dirs)) ^
+          (Http_getter_misc.hashtbl_sorted_fold
+            (fun uri (annflag, typesflag, bodyflag) cont ->
+              sprintf "%sobject, %s, <%s,%s,%s>\n"
+                cont uri (if annflag then "YES" else "NO")
+                (string_of_ls_flag typesflag) (string_of_ls_flag bodyflag))
+            objs "")
+        in
+        Http_daemon.respond
+          ~headers:(("Content-Type", "text/plain") :: common_headers)
+          ~body outchan
+    | Fmt_xml ->
+        let body =
+          sprintf
 "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>
 <!DOCTYPE ls SYSTEM \"%s/getdtd?uri=ls.dtd\">
-

 <ls>
 %s
 </ls>
 "
-          Http_getter_env.my_own_url
-          ("\n" ^
-          (String.concat
-            "\n"
-            (List.map
-              (fun d -> "<section>" ^ d ^ "</section>")
-              (List.sort compare !dirs))) ^ "\n" ^
-          (Http_getter_misc.hashtbl_sorted_fold
-            (fun uri (annflag, typesflag, bodyflag) cont ->
-              sprintf
+          Http_getter_env.my_own_url
+          ("\n" ^
+          (String.concat
+            "\n"
+            (List.map
+              (fun d -> "<section>" ^ d ^ "</section>")
+              (StringSet.elements !dirs))) ^ "\n" ^
+          (Http_getter_misc.hashtbl_sorted_fold
+            (fun uri (annflag, typesflag, bodyflag) cont ->
+              sprintf
 "%s<object name=\"%s\">
-\t<ann value=\"%s\" />
-\t<types value=\"%s\" />
-\t<body value=\"%s\" />
+\t<ann value=\"%s\" />
+\t<types value=\"%s\" />
+\t<body value=\"%s\" />
 </object>
 "
-                cont uri (if annflag then "YES" else "NO")
-                (string_of_ls_flag typesflag)
-                (string_of_ls_flag bodyflag))
-              objs ""))
-        in Http_daemon.respond ~headers:["Content-Type", "text/xml"] ~body outchan
+                cont uri (if annflag then "YES" else "NO")
+                (string_of_ls_flag typesflag)
+                (string_of_ls_flag bodyflag))
+              objs ""))
+        in
+        Http_daemon.respond
+          ~headers:(("Content-Type", "text/xml") :: common_headers)
+          ~body outchan
+in
+let (index_line_sep_RE, index_sep_RE, trailing_types_RE,
+     heading_cic_RE, heading_theory_RE, heading_nuprl_RE,
+     heading_rdf_cic_RE, heading_rdf_theory_RE) =
+  (Pcre.regexp "[ \t]+", Pcre.regexp "\r\n|\r|\n",
+   Pcre.regexp "\\.types$",
+   Pcre.regexp "^cic:", Pcre.regexp "^theory:", Pcre.regexp "^nuprl:",
+   Pcre.regexp "^helm:rdf.*//cic:", Pcre.regexp "^helm:rdf.*//theory:")
 in
 let update_from_server logmsg server_url = (* use global maps *)
+  debug_print ("Updating information from " ^ server_url);
   let xml_url_of_uri = function
     (* TODO missing sanity checks on server_url, e.g. it can contains $1 *)
-    | uri when (Pcre.pmatch ~pat:"^cic://" uri) ->
-        Pcre.replace ~pat:"^cic://" ~templ:server_url uri
-    | uri when (Pcre.pmatch ~pat:"^theory://" uri) ->
-        Pcre.replace ~pat:"^theory://" ~templ:server_url uri
+    | uri when (Pcre.pmatch ~rex:heading_cic_RE uri) ->
+        Pcre.replace ~rex:heading_cic_RE ~templ:server_url uri
+    | uri when (Pcre.pmatch ~rex:heading_theory_RE uri) ->
+        Pcre.replace ~rex:heading_theory_RE ~templ:server_url uri
+    | uri when (Pcre.pmatch ~rex:heading_nuprl_RE uri) ->
+        Pcre.replace ~rex:heading_nuprl_RE ~templ:server_url uri
     | uri -> raise (Http_getter_invalid_URI uri)
   in
   let rdf_url_of_uri = function (* TODO as above *)
-    | uri when (Pcre.pmatch ~pat:"^helm:rdf.*//cic:" uri) ->
-        Pcre.replace ~pat:"^helm:rdf.*//cic:" ~templ:server_url uri
-    | uri when (Pcre.pmatch ~pat:"^helm:rdf.*//theory:" uri) ->
-        Pcre.replace ~pat:"^helm:rdf.*//theory:" ~templ:server_url uri
+    | uri when (Pcre.pmatch ~rex:heading_rdf_cic_RE uri) ->
+        Pcre.replace ~rex:heading_rdf_cic_RE ~templ:server_url uri
+    | uri when (Pcre.pmatch ~rex:heading_rdf_theory_RE uri) ->
+        Pcre.replace ~rex:heading_rdf_theory_RE ~templ:server_url uri
     | uri -> raise (Http_getter_invalid_URI uri)
   in
-  let log = ref ("Processing server: " ^ server_url ^ "<br />\n") in
+  let log = ref (logmsg ^ "Processing server: " ^ server_url ^ "<br />\n") in
   let (xml_index, rdf_index, xsl_index) =
     (* TODO keeps index in memory, is better to keep them on temp files? *)
     (http_get (server_url ^ "/" ^ Http_getter_env.xml_index),
@@ -232,15 +283,22 @@ let update_from_server logmsg server_url = (* use global maps *)
   | Some xml_index ->
       (log := !log ^ "Updating XML db ...<br />\n";
       List.iter
-        (fun l ->
-          try
-            (match Pcre.split ~pat:"[ \\t]+" l with
-            | [uri; "gz"] -> xml_map#add uri ((xml_url_of_uri uri) ^ ".xml.gz")
-            | [uri] -> xml_map#add uri ((xml_url_of_uri uri) ^ ".xml")
-            | _ -> log := !log ^ "Ignoring invalid line: " ^ l ^ "<br />\n")
-          with Http_getter_invalid_URI uri ->
-            log := !log ^ "Ignoring invalid XML URI: " ^ uri)
-        (Pcre.split ~pat:"\n+" xml_index)) (* xml_index lines *)
+        (function
+        | l when is_blank_line l -> () (* skip blank and commented lines *)
+        | l ->
+            try
+              (match Pcre.split ~rex:index_line_sep_RE l with
+              | [uri; "gz"] ->
+                  assert (is_cic_uri uri || is_nuprl_uri uri) ;
+                  (map_of_uri uri)#add uri ((xml_url_of_uri uri) ^ ".xml.gz")
+              | [uri] ->
+                  assert (is_cic_uri uri || is_nuprl_uri uri) ;
+                  (map_of_uri uri)#add uri ((xml_url_of_uri uri) ^ ".xml")
+              | _ ->
+                  log := !log ^ "Ignoring invalid line: '" ^ l ^ "'<br />\n")
+            with Http_getter_invalid_URI uri ->
+              log := !log ^ "Ignoring invalid XML URI: '" ^ uri ^ "'<br />\n")
+        (Pcre.split ~rex:index_sep_RE xml_index)) (* xml_index lines *)
   | None -> ());
   (match rdf_index with
   | Some rdf_index ->
@@ -248,75 +306,78 @@ let update_from_server logmsg server_url = (* use global maps *)
       List.iter
         (fun l ->
           try
-            (match Pcre.split ~pat:"[ \\t]+" l with
+            (match Pcre.split ~rex:index_line_sep_RE l with
             | [uri; "gz"] -> rdf_map#add uri ((rdf_url_of_uri uri) ^ ".xml.gz")
             | [uri] -> rdf_map#add uri ((rdf_url_of_uri uri) ^ ".xml")
            | _ -> log := !log ^ "Ignoring invalid line: " ^ l ^ "<br />\n")
           with Http_getter_invalid_URI uri ->
-            log := !log ^ "Ignoring invalid RDF URI: " ^ uri)
-        (Pcre.split ~pat:"\n+" rdf_index)) (* rdf_index lines *)
+            log := !log ^ "Ignoring invalid RDF URI: " ^ uri ^ "<br />\n")
+        (Pcre.split ~rex:index_sep_RE rdf_index)) (* rdf_index lines *)
   | None -> ());
   (match xsl_index with
   | Some xsl_index ->
       (log := !log ^ "Updating XSLT db ...<br />\n";
       List.iter (fun l -> xsl_map#add l (server_url ^ "/" ^ l))
-        (Pcre.split ~pat:"\n+" xsl_index);
+        (Pcre.split ~rex:index_sep_RE xsl_index);
       log := !log ^ "All done!<br />\n")
   | None -> ());
+  debug_print "done with this server";
   !log
 in
   (* thread action *)
+
 let callback (req: Http_types.request) outchan =
   try
     debug_print ("Connection from " ^ req#clientAddr);
    debug_print ("Received request: " ^ req#path);
    (match req#path with
-    | "/help" -> return_html_msg Http_getter_const.usage_string outchan
+    | "/help" -> return_html_raw Http_getter_const.usage_string outchan
    | "/getxml" | "/getxslt" | "/getdtd" | "/resolve" | "/register" ->
        (let uri = req#param "uri" in (* common parameter *)
        match req#path with
        | "/getxml" ->
-            let enc = parse_format req in
-            let patch_dtd = parse_patch_dtd req in
+            let enc = parse_enc req in
+            let patch = parse_patch req in
            Http_getter_cache.respond_xml
-              ~url:(resolve uri) ~uri ~enc ~patch_dtd outchan
+              ~url:(resolve uri) ~uri ~enc ~patch outchan
        | "/getxslt" ->
-(* let patch_dtd = parse_patch_dtd req in *)
-            (* TODO add support and default value for patch_dtd *)
-            Http_getter_cache.respond_xsl ~url:(resolve uri) outchan
+            let patch = parse_patch req in
+            Http_getter_cache.respond_xsl ~url:(resolve uri) ~patch outchan
        | "/getdtd" ->
-            let patch_dtd = parse_patch_dtd req in
+            let patch = parse_patch req in
            Http_getter_cache.respond_dtd
-              ~patch_dtd ~url:(Http_getter_env.dtd_dir ^ "/" ^ uri) outchan
+              ~patch ~url:(Http_getter_env.dtd_dir ^ "/" ^ uri) outchan
        | "/resolve" ->
            (try
-              return_xml_msg
+              return_xml_raw
                (sprintf "<url value=\"%s\" />\n" (resolve uri))
                outchan
            with Http_getter_unresolvable_URI uri ->
-              return_xml_msg "<unresolved />\n" outchan)
+              return_xml_raw "<unresolved />\n" outchan)
        | "/register" ->
            let url = req#param "url" in
-            xml_map#add uri url;
+            register uri url;
            return_html_msg "Register done" outchan
        | _ -> assert false)
    | "/update" ->
-        (xml_map#clear; rdf_map#clear; xsl_map#clear;
+        (Http_getter_env.reload (); (* reload servers list from servers file *)
+        cic_map#clear; nuprl_map#clear; rdf_map#clear; xsl_map#clear;
        let log =
          List.fold_left
            update_from_server
            "" (* initial logmsg: empty *)
            (* reverse order: 1st server is the most important one *)
-            (List.rev Http_getter_env.servers)
+            (List.rev !Http_getter_env.servers)
        in
+        cic_map#sync; nuprl_map#sync; rdf_map#sync; xsl_map#sync;
        return_html_msg log outchan)
    | "/getalluris" ->
-        return_all_uris
+        return_all_xml_uris
          (fun uri ->
-            (Pcre.pmatch ~pat:"^cic:" uri) &&
-            not (Pcre.pmatch ~pat:"\\.types$" uri))
+            (Pcre.pmatch ~rex:heading_cic_RE uri) &&
+            not (Pcre.pmatch ~rex:trailing_types_RE uri))
          outchan
    | "/getallrdfuris" ->
        (let classs = req#param "class" in
@@ -331,26 +392,44 @@ let callback (req: Http_types.request) outchan =
          return_all_rdf_uris filter outchan
        with Http_getter_invalid_RDF_class c ->
          raise (Http_getter_bad_request ("Invalid RDF class: " ^ c)))
-    | "/ls" -> return_ls (parse_ls_uri req) (parse_ls_format req) outchan
+    | "/ls" -> return_ls (parse_ls_uri req) (parse_output_format req) outchan
    | "/getempty" ->
        Http_daemon.respond ~body:Http_getter_const.empty_xml outchan
    | invalid_request ->
-        Http_daemon.respond_error ~status:(`Client_error `Bad_request) outchan)
+        Http_daemon.respond_error ~status:(`Client_error `Bad_request) outchan);
+    debug_print "Done!\n"
  with
  | Http_types.Param_not_found attr_name ->
      return_400 (sprintf "Parameter '%s' is missing" attr_name) outchan
  | Http_getter_bad_request msg -> return_html_error msg outchan
  | Http_getter_internal_error msg -> return_html_internal_error msg outchan
+  | Shell.Subprocess_error l ->
+      return_html_internal_error
+        (String.concat "<br />\n"
+          (List.map
+            (fun (cmd, code) ->
+              sprintf "Command '%s' returned %s"
+                cmd (string_of_proc_status code))
+            l))
+        outchan
  | exc ->
      return_html_error ("Uncaught exception: " ^ (Printexc.to_string exc)) outchan
 in

-  (* daemon initialization *)
-Http_getter_env.dump_env ();
-flush stdout;
-Unix.putenv "http_proxy" "";
-Http_daemon.start'
-  ~timeout:None ~port:Http_getter_env.port ~mode:`Thread callback
+  (* daemon initialization *)
+
+let main () =
+  Http_getter_env.dump_env ();
+  Unix.putenv "http_proxy" "";
+  at_exit save_maps;
+  Sys.catch_break true;
+  try
+    Http_daemon.start'
+      ~timeout:(Some 600) ~port:Http_getter_env.port ~mode:`Thread callback
+  with Sys.Break -> () (* 'save_maps' already registered with 'at_exit' *)
+in
+
+main ()