X-Git-Url: http://matita.cs.unibo.it/gitweb/?a=blobdiff_plain;f=helm%2Fhttp_getter%2Fhttp_getter.ml;h=09a49a308ad50d7dcf80342ad9d29ff63f9987fe;hb=5d7d6bd5090f3f82279bef0b93b4b361a5b1d751;hp=7eb8deae4998047b59b6adb181a0488e143ed194;hpb=a96d06ae093236a3be8461912926903e81751578;p=helm.git

diff --git a/helm/http_getter/http_getter.ml b/helm/http_getter/http_getter.ml
index 7eb8deae4..09a49a308 100644
--- a/helm/http_getter/http_getter.ml
+++ b/helm/http_getter/http_getter.ml
@@ -1,5 +1,7 @@
 (*
- * Copyright (C) 2000, HELM Team.
+ * Copyright (C) 2003:
+ *    Stefano Zacchiroli
+ *    for the HELM Team http://helm.cs.unibo.it/
  *
  * This file is part of HELM, an Hypertextual, Electronic
  * Library of Mathematics, developed at the Computer Science
@@ -21,32 +23,24 @@
  * MA 02111-1307, USA.
  *
  * For details, see the HELM World-Wide-Web page,
- * http://cs.unibo.it/helm/.
+ * http://helm.cs.unibo.it/
  *)
 
-(* TODO optimization: precompile almost all regexp *)
-
 open Http_getter_common;;
+open Http_getter_misc;;
 open Http_getter_types;;
+open Http_getter_debugger;;
 open Printf;;
 
-  (* debugging settings *)
-let debug = true;;
-let debug_print s = if debug then prerr_endline ("[HTTP-Getter] " ^ s);;
-let http_debug = false;;
-Http_common.debug := http_debug;;
+  (* constants *)
 
-let http_get url =
-(* debug_print ("Downloading URL: " ^ url); *)
-  try
-    Some (Http_client.Convenience.http_get url)
-  with Http_client.Http_error (code, _) -> None
-(*
-    (debug_print
-      (sprintf "Failed to download %s, HTTP response was %d" url code);
-    None)
-*)
-;;
+let common_headers = [
+  "Cache-Control", "no-cache";
+  "Pragma", "no-cache";
+  "Expires", "0"
+]
+
+  (* HTTP queries argument parsing *)
 
 let parse_enc (req: Http_types.request) =
   try
@@ -56,7 +50,7 @@ let parse_enc (req: Http_types.request) =
     | s -> raise (Http_getter_bad_request ("Invalid format: " ^ s)))
   with Http_types.Param_not_found _ -> Enc_normal
 ;;
-let parse_patch_dtd (req: Http_types.request) =
+let parse_patch (req: Http_types.request) =
   match req#param "patch_dtd" with
   | s when String.lowercase s = "yes" -> true
   | s when String.lowercase s = "no" -> false
@@ -68,17 +62,23 @@ let parse_output_format (req: Http_types.request) =
   | s when String.lowercase s = "xml" -> Fmt_xml
   | s -> raise (Http_getter_bad_request ("Invalid /ls format: " ^ s))
 ;;
-let parse_ls_uri (req: Http_types.request) =
-  let baseuri = req#param "baseuri" in
-  let subs =
-    Pcre.extract ~pat:"^(\\w+):(.*)$" (Pcre.replace ~pat:"/+$" baseuri)
-  in
-  match (subs.(1), subs.(2)) with
-  | "cic", uri -> Cic uri
-  | "theory", uri -> Theory uri
-  | _ -> raise (Http_getter_bad_request ("Invalid /ls baseuri: " ^ baseuri))
+let parse_ls_uri =
+  let parse_ls_RE = Pcre.regexp "^(\\w+):(.*)$" in
+  let trailing_slash_RE = Pcre.regexp "/+$" in
+  fun (req: Http_types.request) ->
+    let baseuri = req#param "baseuri" in
+    let subs =
+      Pcre.extract ~rex:parse_ls_RE
+        (Pcre.replace ~rex:trailing_slash_RE baseuri)
+    in
+    match (subs.(1), subs.(2)) with
+    | "cic", uri -> Cic uri
+    | "theory", uri -> Theory uri
+    | _ -> raise (Http_getter_bad_request ("Invalid /ls baseuri: " ^ baseuri))
 ;;
 
+  (* global maps, shared by all threads *)
+
 let xml_map = new Http_getter_map.map Http_getter_env.xml_dbm in
 let rdf_map = new Http_getter_map.map Http_getter_env.rdf_dbm in
 let xsl_map = new Http_getter_map.map Http_getter_env.xsl_dbm in
@@ -90,11 +90,20 @@ let map_of_uri = function
   | uri when is_xsl_uri uri -> xsl_map
   | uri -> raise (Http_getter_unresolvable_URI uri)
 in
-let resolve uri = (map_of_uri uri)#resolve uri in
+let resolve uri =
+  try
+    (map_of_uri uri)#resolve uri
+  with Http_getter_map.Key_not_found _ ->
+    raise (Http_getter_unresolvable_URI uri)
+in
 let register uri = (map_of_uri uri )#add uri in
 let return_all_foo_uris map doctype filter outchan =
+  (** return all URIs contained in 'map' which satisfy predicate 'filter'; URIs
+  are written in an XMLish format ('doctype' is the XML doctype) onto 'outchan'
+  *)
   Http_daemon.send_basic_headers ~code:200 outchan;
   Http_daemon.send_header "Content-Type" "text/xml" outchan;
+  Http_daemon.send_headers common_headers outchan;
   Http_daemon.send_CRLF outchan;
   output_string outchan
@@ -188,7 +197,8 @@ let return_ls =
         objs "")
     in
     Http_daemon.respond
-      ~headers:["Content-Type", "text/plain"] ~body outchan
+      ~headers:(("Content-Type", "text/plain") :: common_headers)
+      ~body outchan
   | Fmt_xml ->
       let body =
        sprintf
@@ -221,23 +231,31 @@ let return_ls =
          objs ""))
     in
     Http_daemon.respond
-      ~headers:["Content-Type", "text/xml"] ~body outchan
+      ~headers:(("Content-Type", "text/xml") :: common_headers)
+      ~body outchan
+in
+let (index_line_sep_RE, index_sep_RE, trailing_types_RE,
+     heading_cic_RE, heading_theory_RE,
+     heading_rdf_cic_RE, heading_rdf_theory_RE) =
+  (Pcre.regexp "[ \t]+", Pcre.regexp "\n+", Pcre.regexp "\\.types$",
+   Pcre.regexp "^cic:", Pcre.regexp "^theory:",
+   Pcre.regexp "^helm:rdf.*//cic:", Pcre.regexp "^helm:rdf.*//theory:")
 in
 let update_from_server logmsg server_url = (* use global maps *)
   debug_print ("Updating information from " ^ server_url);
   let xml_url_of_uri = function
     (* TODO missing sanity checks on server_url, e.g. it can contains $1 *)
-    | uri when (Pcre.pmatch ~pat:"^cic:" uri) ->
-        Pcre.replace ~pat:"^cic:" ~templ:server_url uri
-    | uri when (Pcre.pmatch ~pat:"^theory:" uri) ->
-        Pcre.replace ~pat:"^theory:" ~templ:server_url uri
+    | uri when (Pcre.pmatch ~rex:heading_cic_RE uri) ->
+        Pcre.replace ~rex:heading_cic_RE ~templ:server_url uri
+    | uri when (Pcre.pmatch ~rex:heading_theory_RE uri) ->
+        Pcre.replace ~rex:heading_theory_RE ~templ:server_url uri
     | uri -> raise (Http_getter_invalid_URI uri)
   in
   let rdf_url_of_uri = function (* TODO as above *)
-    | uri when (Pcre.pmatch ~pat:"^helm:rdf.*//cic:" uri) ->
-        Pcre.replace ~pat:"^helm:rdf.*//cic:" ~templ:server_url uri
-    | uri when (Pcre.pmatch ~pat:"^helm:rdf.*//theory:" uri) ->
-        Pcre.replace ~pat:"^helm:rdf.*//theory:" ~templ:server_url uri
+    | uri when (Pcre.pmatch ~rex:heading_rdf_cic_RE uri) ->
+        Pcre.replace ~rex:heading_rdf_cic_RE ~templ:server_url uri
+    | uri when (Pcre.pmatch ~rex:heading_rdf_theory_RE uri) ->
+        Pcre.replace ~rex:heading_rdf_theory_RE ~templ:server_url uri
     | uri -> raise (Http_getter_invalid_URI uri)
   in
   let log = ref (logmsg ^ "Processing server: " ^ server_url ^ "<br>\n") in
@@ -255,13 +273,13 @@ let update_from_server logmsg server_url = (* use global maps *)
       List.iter
         (fun l ->
           try
-          (match Pcre.split ~pat:"[ \\t]+" l with
+          (match Pcre.split ~rex:index_line_sep_RE l with
           | [uri; "gz"] ->
               xml_map#add uri ((xml_url_of_uri uri) ^ ".xml.gz")
           | [uri] -> xml_map#add uri ((xml_url_of_uri uri) ^ ".xml")
          | _ -> log := !log ^ "Ignoring invalid line: " ^ l ^ "<br>\n")
         with Http_getter_invalid_URI uri ->
           log := !log ^ "Ignoring invalid XML URI: " ^ uri ^ "<br>\n")
-    (Pcre.split ~pat:"\n+" xml_index)) (* xml_index lines *)
+    (Pcre.split ~rex:index_sep_RE xml_index)) (* xml_index lines *)
   | None -> ());
   (match rdf_index with
   | Some rdf_index ->
@@ -269,26 +287,28 @@ let update_from_server logmsg server_url = (* use global maps *)
       List.iter
         (fun l ->
           try
-          (match Pcre.split ~pat:"[ \\t]+" l with
+          (match Pcre.split ~rex:index_line_sep_RE l with
           | [uri; "gz"] ->
               rdf_map#add uri ((rdf_url_of_uri uri) ^ ".xml.gz")
           | [uri] -> rdf_map#add uri ((rdf_url_of_uri uri) ^ ".xml")
          | _ -> log := !log ^ "Ignoring invalid line: " ^ l ^ "<br>\n")
         with Http_getter_invalid_URI uri ->
           log := !log ^ "Ignoring invalid RDF URI: " ^ uri ^ "<br>\n")
-    (Pcre.split ~pat:"\n+" rdf_index)) (* rdf_index lines *)
+    (Pcre.split ~rex:index_sep_RE rdf_index)) (* rdf_index lines *)
   | None -> ());
   (match xsl_index with
   | Some xsl_index ->
       (log := !log ^ "Updating XSLT db ...<br>\n";
       List.iter (fun l -> xsl_map#add l (server_url ^ "/" ^ l))
-        (Pcre.split ~pat:"\n+" xsl_index);
+        (Pcre.split ~rex:index_sep_RE xsl_index);
       log := !log ^ "All done!<br>\n")
   | None -> ());
+  debug_print "done with this server";
   !log
 in
   (* thread action *)
+
 let callback (req: Http_types.request) outchan =
   try
     debug_print ("Connection from " ^ req#clientAddr);
@@ -300,22 +320,22 @@ let callback (req: Http_types.request) outchan =
   match req#path with
   | "/getxml" ->
       let enc = parse_enc req in
-      let patch_dtd =
-        try parse_patch_dtd req with Http_types.Param_not_found _ -> true
+      let patch =
+        try parse_patch req with Http_types.Param_not_found _ -> true
       in
       Http_getter_cache.respond_xml
-        ~url:(resolve uri) ~uri ~enc ~patch_dtd outchan
+        ~url:(resolve uri) ~uri ~enc ~patch outchan
   | "/getxslt" ->
-      let patch_dtd =
-        try parse_patch_dtd req with Http_types.Param_not_found _ -> true
+      let patch =
+        try parse_patch req with Http_types.Param_not_found _ -> true
       in
-      Http_getter_cache.respond_xsl ~url:(resolve uri) ~patch_dtd outchan
+      Http_getter_cache.respond_xsl ~url:(resolve uri) ~patch outchan
   | "/getdtd" ->
-      let patch_dtd =
-        try parse_patch_dtd req with Http_types.Param_not_found _ -> true
+      let patch =
+        try parse_patch req with Http_types.Param_not_found _ -> true
       in
       Http_getter_cache.respond_dtd
-        ~patch_dtd ~url:(Http_getter_env.dtd_dir ^ "/" ^ uri) outchan
+        ~patch ~url:(Http_getter_env.dtd_dir ^ "/" ^ uri) outchan
   | "/resolve" ->
       (try
         return_xml_msg
@@ -329,21 +349,22 @@ let callback (req: Http_types.request) outchan =
         return_html_msg "Register done" outchan
       | _ -> assert false)
   | "/update" ->
-      (xml_map#clear; rdf_map#clear; xsl_map#clear;
+      (Http_getter_env.reload (); (* reload servers list from servers file *)
+      xml_map#clear; rdf_map#clear; xsl_map#clear;
       let log =
         List.fold_left
           update_from_server
          ""  (* initial logmsg: empty *)
          (* reverse order: 1st server is the most important one *)
-          (List.rev Http_getter_env.servers)
+          (List.rev !Http_getter_env.servers)
       in
       xml_map#sync; rdf_map#sync; xsl_map#sync;
       return_html_msg log outchan)
   | "/getalluris" ->
       return_all_xml_uris
         (fun uri ->
-          (Pcre.pmatch ~pat:"^cic:" uri) &&
-          not (Pcre.pmatch ~pat:"\\.types$" uri))
+          (Pcre.pmatch ~rex:heading_cic_RE uri) &&
+          not (Pcre.pmatch ~rex:trailing_types_RE uri))
         outchan
   | "/getallrdfuris" ->
       (let classs = req#param "class" in
@@ -363,26 +384,39 @@ let callback (req: Http_types.request) outchan =
          Http_daemon.respond ~body:Http_getter_const.empty_xml outchan
       | invalid_request ->
          Http_daemon.respond_error ~status:(`Client_error `Bad_request) outchan);
-      debug_print "Done!"
+      debug_print "Done!\n"
   with
   | Http_types.Param_not_found attr_name ->
      return_400 (sprintf "Parameter '%s' is missing" attr_name) outchan
   | Http_getter_bad_request msg -> return_html_error msg outchan
   | Http_getter_internal_error msg -> return_html_internal_error msg outchan
+  | Shell.Subprocess_error l ->
+      return_html_internal_error
+        (String.concat "<br>\n"
+          (List.map
+            (fun (cmd, code) ->
+              sprintf "Command '%s' returned %s"
+                cmd (string_of_proc_status code))
+            l))
+        outchan
   | exc ->
      return_html_error
        ("Uncaught exception: " ^ (Printexc.to_string exc))
        outchan
 in
-  (* daemon initialization *)
-Http_getter_env.dump_env ();
-flush stdout;
-Unix.putenv "http_proxy" "";
-Sys.catch_break true;
-try
-  Http_daemon.start'
-    ~timeout:None ~port:Http_getter_env.port ~mode:`Thread callback
-with Sys.Break ->
-  save_maps ()
+  (* daemon initialization *)
+
+let main () =
+  Http_getter_env.dump_env ();
+  Unix.putenv "http_proxy" "";
+  at_exit save_maps;
+  Sys.catch_break true;
+  try
+    Http_daemon.start'
+      ~timeout:(Some 600) ~port:Http_getter_env.port ~mode:`Thread callback
+  with Sys.Break -> () (* 'save_maps' already registered with 'at_exit' *)
+in
+
+main ()
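
The recurring rewrite in this patch, from ~pat:"..." string arguments to ~rex: arguments bound to precompiled values, is its main point: each regular expression is compiled once with Pcre.regexp and the compiled value is reused on every request, instead of handing a pattern string to each Pcre.pmatch/Pcre.split/Pcre.replace call (the old "TODO optimization: precompile almost all regexp" comment is dropped for that reason). The following OCaml sketch of the idiom is illustrative only and not part of the patch; the helper names are made up, and it assumes only the pcre-ocaml API already used above.

(* Sketch only -- not part of http_getter.ml. *)

(* compiled once, at module initialisation *)
let cic_prefix_RE = Pcre.regexp "^cic:"

(* reuse the compiled regexp on every call *)
let starts_with_cic uri = Pcre.pmatch ~rex:cic_prefix_RE uri

(* what the old code did: pass the pattern string on each call *)
let starts_with_cic_uncompiled uri = Pcre.pmatch ~pat:"^cic:" uri

let () =
  (* both predicates agree; only the compiled one avoids repeated pattern setup *)
  assert (starts_with_cic "cic:/Example/example.con");
  assert (starts_with_cic_uncompiled "cic:/Example/example.con")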