(*
 * Copyright (C) 2000, HELM Team.
 *
 * This file is part of HELM, an Hypertextual, Electronic
 * Library of Mathematics, developed at the Computer Science
 * Department, University of Bologna, Italy.
 *
 * HELM is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * HELM is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HELM; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston,
 * MA 02111-1307, USA.
 *
 * For details, see the HELM World-Wide-Web page,
 * http://cs.unibo.it/helm/.
 *)

(* TODO optimization: precompile almost all regexps *)

open Http_getter_common;;
open Http_getter_types;;
open Printf;;

(* debugging settings *)
let debug = true;;
let debug_print s = if debug then prerr_endline ("[HTTP-Getter] " ^ s);;
let http_debug = false;;
Http_common.debug := http_debug;;

(* wrap Http_client: return None instead of raising on HTTP errors *)
let http_get url =
  (* debug_print ("Downloading URL: " ^ url); *)
  try
    Some (Http_client.Convenience.http_get url)
  with Http_client.Http_error (code, _) ->
    None
    (* (debug_print
        (sprintf "Failed to download %s, HTTP response was %d" url code);
      None) *)
;;

(* parse the "format" parameter of a getxml request; defaults to Enc_normal *)
let parse_enc (req: Http_types.request) =
  try
    (match req#param "format" with
    | "normal" -> Enc_normal
    | "gz" -> Enc_gzipped
    | s -> raise (Http_getter_bad_request ("Invalid format: " ^ s)))
  with Http_types.Param_not_found _ -> Enc_normal
;;

let parse_patch_dtd (req: Http_types.request) =
  match req#param "patch_dtd" with
  | s when String.lowercase s = "yes" -> true
  | s when String.lowercase s = "no" -> false
  | s -> raise (Http_getter_bad_request ("Invalid patch_dtd value: " ^ s))
;;

let parse_output_format (req: Http_types.request) =
  match req#param "format" with
  | s when String.lowercase s = "txt" -> Fmt_text
  | s when String.lowercase s = "xml" -> Fmt_xml
  | s -> raise (Http_getter_bad_request ("Invalid /ls format: " ^ s))
;;

(* parse the "baseuri" parameter of a /ls request: either cic: or theory: *)
let parse_ls_uri (req: Http_types.request) =
  let baseuri = req#param "baseuri" in
  let subs =
    Pcre.extract ~pat:"^(\\w+):(.*)$" (Pcre.replace ~pat:"/+$" baseuri)
  in
  match (subs.(1), subs.(2)) with
  | "cic", uri -> Cic uri
  | "theory", uri -> Theory uri
  | _ -> raise (Http_getter_bad_request ("Invalid /ls baseuri: " ^ baseuri))
;;

(* URI -> URL maps, backed by the dbm databases configured in
   Http_getter_env *)
let xml_map = new Http_getter_map.map Http_getter_env.xml_dbm in
let rdf_map = new Http_getter_map.map Http_getter_env.rdf_dbm in
let xsl_map = new Http_getter_map.map Http_getter_env.xsl_dbm in
let save_maps () = xml_map#close; rdf_map#close; xsl_map#close in
let map_of_uri = function
  | uri when is_xml_uri uri -> xml_map
  | uri when is_rdf_uri uri -> rdf_map
  | uri when is_xsl_uri uri -> xsl_map
  | uri -> raise (Http_getter_unresolvable_URI uri)
in
let resolve uri = (map_of_uri uri)#resolve uri in
(* curried: [register uri url] adds the mapping uri -> url *)
let register uri = (map_of_uri uri)#add uri in

(* send to 'outchan' an XML document listing all the URIs of 'map' which
   satisfy 'filter'; 'doctype' is the name of the root element *)
let return_all_foo_uris map doctype filter outchan =
  Http_daemon.send_basic_headers ~code:200 outchan;
  Http_daemon.send_header "Content-Type" "text/xml" outchan;
  Http_daemon.send_CRLF outchan;
  output_string outchan
    (sprintf
"<?xml version=\"1.0\"?>
<!DOCTYPE %s SYSTEM \"%s/getdtd?uri=%s.dtd\">
<%s>
"
      doctype Http_getter_env.my_own_url doctype doctype);
  map#iter
    (fun uri _ ->
      if filter uri then
        output_string outchan (sprintf "\t<uri value=\"%s\" />\n" uri));
  output_string outchan (sprintf "</%s>\n" doctype)
in
let return_all_xml_uris = return_all_foo_uris xml_map "alluris" in
let return_all_rdf_uris = return_all_foo_uris rdf_map "allrdfuris" in

(* answer a /ls request: list directories and objects below a given baseuri *)
let return_ls =
  (* component-wise max of two (ann, types, body) flag triples *)
  let (++) (oldann, oldtypes, oldbody) (newann, newtypes, newbody) =
    ((if newann > oldann then newann else oldann),
     (if newtypes > oldtypes then newtypes else oldtypes),
     (if newbody > oldbody then newbody else oldbody))
  in
  let basepart_RE =
    Pcre.regexp "^([^.]*\\.[^.]*)((\\.body)|(\\.types))?(\\.ann)?" in
  let (types_RE, types_ann_RE, body_RE, body_ann_RE) =
    (Pcre.regexp "\\.types", Pcre.regexp "\\.types\\.ann",
     Pcre.regexp "\\.body", Pcre.regexp "\\.body\\.ann") in
  let (slash_RE, til_slash_RE, no_slashes_RE) =
    (Pcre.regexp "/", Pcre.regexp "^.*/", Pcre.regexp "^[^/]*$") in
  fun lsuri fmt outchan ->
    let pat =
      "^" ^
      (match lsuri with Cic p -> ("cic:" ^ p) | Theory p -> ("theory:" ^ p))
    in
    let (dir_RE, obj_RE) =
      (Pcre.regexp (pat ^ "/"), Pcre.regexp (pat ^ "(\\.|$)")) in
    let dirs = ref StringSet.empty in
    let objs = Hashtbl.create 17 in
    let store_dir d =
      dirs := StringSet.add (List.hd (Pcre.split ~rex:slash_RE d)) !dirs
    in
    let store_obj o =
      let basepart = Pcre.replace ~rex:basepart_RE ~templ:"$1" o in
      let oldflags =
        try
          Hashtbl.find objs basepart
        with Not_found -> (false, No, No) (* no ann, no types, no body *)
      in
      let newflags =
        (* check the ".ann" variants first: the bare .types/.body patterns
           match them as well *)
        match o with
        | s when Pcre.pmatch ~rex:types_ann_RE s -> (true, Ann, No)
        | s when Pcre.pmatch ~rex:body_ann_RE s -> (true, No, Ann)
        | s when Pcre.pmatch ~rex:types_RE s -> (false, Yes, No)
        | s when Pcre.pmatch ~rex:body_RE s -> (false, No, Yes)
        | s -> (false, No, No)
      in
      Hashtbl.replace objs basepart (oldflags ++ newflags)
    in
    xml_map#iter  (* BLEARGH Dbm module lacks support for fold-like functions *)
      (fun key _ ->
        match key with
        | uri when Pcre.pmatch ~rex:dir_RE uri ->  (* directory hit *)
            let localpart = Pcre.replace ~rex:dir_RE uri in
            if Pcre.pmatch ~rex:no_slashes_RE localpart then
              store_obj localpart
            else
              store_dir localpart
        | uri when Pcre.pmatch ~rex:obj_RE uri ->  (* file hit *)
            store_obj (Pcre.replace ~rex:til_slash_RE uri)
        | uri -> () (* miss *));
    match fmt with
    | Fmt_text ->
        let body =
          (List.fold_left (fun s d -> sprintf "%sdir, %s\n" s d) ""
            (StringSet.elements !dirs)) ^
          (Http_getter_misc.hashtbl_sorted_fold
            (fun uri (annflag, typesflag, bodyflag) cont ->
              sprintf "%sobject, %s, <%s,%s,%s>\n"
                cont uri (if annflag then "YES" else "NO")
                (string_of_ls_flag typesflag) (string_of_ls_flag bodyflag))
            objs "")
        in
        Http_daemon.respond
          ~headers:["Content-Type", "text/plain"] ~body outchan
    | Fmt_xml ->
        let body =
          sprintf
"<?xml version=\"1.0\"?>
<!DOCTYPE ls SYSTEM \"%s/getdtd?uri=ls.dtd\">
<ls>
%s
</ls>
"
            Http_getter_env.my_own_url
            ("\n" ^
            (String.concat "\n"
              (List.map
                (fun d -> "\t<section>" ^ d ^ "</section>")
                (StringSet.elements !dirs))) ^
            "\n" ^
            (* one <object> entry per object, carrying ann/types/body flags *)
            (Http_getter_misc.hashtbl_sorted_fold
              (fun uri (annflag, typesflag, bodyflag) cont ->
                sprintf
                  "%s\t<object name=\"%s\">\n\t\t<ann value=\"%s\" />\n\t\t<types value=\"%s\" />\n\t\t<body value=\"%s\" />\n\t</object>\n"
                  cont uri (if annflag then "YES" else "NO")
                  (string_of_ls_flag typesflag) (string_of_ls_flag bodyflag))
              objs ""))
        in
        Http_daemon.respond
          ~headers:["Content-Type", "text/xml"] ~body outchan
in

(* contact a remote server and update the global maps with the entries listed
   in its index files; return the accumulated HTML log *)
let update_from_server logmsg server_url =  (* use global maps *)
  debug_print ("Updating information from " ^ server_url);
  let xml_url_of_uri = function
    (* TODO missing sanity checks on server_url, e.g. it can contain $1 *)
    | uri when (Pcre.pmatch ~pat:"^cic:" uri) ->
        Pcre.replace ~pat:"^cic:" ~templ:server_url uri
    | uri when (Pcre.pmatch ~pat:"^theory:" uri) ->
        Pcre.replace ~pat:"^theory:" ~templ:server_url uri
    | uri -> raise (Http_getter_invalid_URI uri)
  in
  let rdf_url_of_uri = function (* TODO as above *)
    | uri when (Pcre.pmatch ~pat:"^helm:rdf.*//cic:" uri) ->
        Pcre.replace ~pat:"^helm:rdf.*//cic:" ~templ:server_url uri
    | uri when (Pcre.pmatch ~pat:"^helm:rdf.*//theory:" uri) ->
        Pcre.replace ~pat:"^helm:rdf.*//theory:" ~templ:server_url uri
    | uri -> raise (Http_getter_invalid_URI uri)
  in
  let log = ref (logmsg ^ "Processing server: " ^ server_url ^ "<br />\n") in
  let (xml_index, rdf_index, xsl_index) =
    (* TODO indexes are kept in memory; would it be better to keep them in
       temp files? *)
    (http_get (server_url ^ "/" ^ Http_getter_env.xml_index),
     http_get (server_url ^ "/" ^ Http_getter_env.rdf_index),
     http_get (server_url ^ "/" ^ Http_getter_env.xsl_index))
  in
  if (xml_index = None && rdf_index = None && xsl_index = None) then
    debug_print (sprintf "Warning: useless server %s" server_url);
  (match xml_index with
  | Some xml_index ->
      (log := !log ^ "Updating XML db ...<br />\n";
      List.iter
        (fun l ->
          try
            (match Pcre.split ~pat:"[ \\t]+" l with
            | [uri; "gz"] -> xml_map#add uri ((xml_url_of_uri uri) ^ ".xml.gz")
            | [uri] -> xml_map#add uri ((xml_url_of_uri uri) ^ ".xml")
            | _ -> log := !log ^ "Ignoring invalid line: " ^ l ^ "<br />\n")
          with Http_getter_invalid_URI uri ->
            log := !log ^ "Ignoring invalid XML URI: " ^ uri ^ "<br />\n")
        (Pcre.split ~pat:"\n+" xml_index))  (* xml_index lines *)
  | None -> ());
  (match rdf_index with
  | Some rdf_index ->
      (log := !log ^ "Updating RDF db ...<br />\n";
      List.iter
        (fun l ->
          try
            (match Pcre.split ~pat:"[ \\t]+" l with
            | [uri; "gz"] -> rdf_map#add uri ((rdf_url_of_uri uri) ^ ".xml.gz")
            | [uri] -> rdf_map#add uri ((rdf_url_of_uri uri) ^ ".xml")
            | _ -> log := !log ^ "Ignoring invalid line: " ^ l ^ "<br />\n")
          with Http_getter_invalid_URI uri ->
            log := !log ^ "Ignoring invalid RDF URI: " ^ uri ^ "<br />\n")
        (Pcre.split ~pat:"\n+" rdf_index))  (* rdf_index lines *)
  | None -> ());
  (match xsl_index with
  | Some xsl_index ->
      (log := !log ^ "Updating XSLT db ...<br />\n";
      List.iter
        (fun l -> xsl_map#add l (server_url ^ "/" ^ l))
        (Pcre.split ~pat:"\n+" xsl_index);
      log := !log ^ "All done!<br />\n")
  | None -> ());
  !log
in

(* thread action *)
let callback (req: Http_types.request) outchan =
  try
    debug_print ("Connection from " ^ req#clientAddr);
    debug_print ("Received request: " ^ req#path);
    (match req#path with
    | "/help" -> return_html_msg Http_getter_const.usage_string outchan
    | "/getxml" | "/getxslt" | "/getdtd" | "/resolve" | "/register" ->
        (let uri = req#param "uri" in  (* common parameter *)
        match req#path with
        | "/getxml" ->
            let enc = parse_enc req in
            let patch_dtd =
              try parse_patch_dtd req
              with Http_types.Param_not_found _ -> true
            in
            Http_getter_cache.respond_xml
              ~url:(resolve uri) ~uri ~enc ~patch_dtd outchan
        | "/getxslt" ->
            let patch_dtd =
              try parse_patch_dtd req
              with Http_types.Param_not_found _ -> true
            in
            Http_getter_cache.respond_xsl ~url:(resolve uri) ~patch_dtd outchan
        | "/getdtd" ->
            let patch_dtd =
              try parse_patch_dtd req
              with Http_types.Param_not_found _ -> true
            in
            Http_getter_cache.respond_dtd
              ~patch_dtd ~url:(Http_getter_env.dtd_dir ^ "/" ^ uri) outchan
        | "/resolve" ->
            (try
              return_xml_msg
                (sprintf "<url value=\"%s\" />\n" (resolve uri)) outchan
            with Http_getter_unresolvable_URI uri ->
              return_xml_msg "<unresolved />\n" outchan)
        | "/register" ->
            let url = req#param "url" in
            register uri url;
            return_html_msg "Register done" outchan
        | _ -> assert false)
    | "/update" ->
        (xml_map#clear; rdf_map#clear; xsl_map#clear;
        let log =
          List.fold_left
            update_from_server
            ""  (* initial logmsg: empty *)
            (* reverse order: 1st server is the most important one *)
            (List.rev Http_getter_env.servers)
        in
        xml_map#sync; rdf_map#sync; xsl_map#sync;
        return_html_msg log outchan)
    | "/getalluris" ->
        return_all_xml_uris
          (fun uri ->
            (Pcre.pmatch ~pat:"^cic:" uri) &&
            not (Pcre.pmatch ~pat:"\\.types$" uri))
          outchan
    | "/getallrdfuris" ->
        (let classs = req#param "class" in
        try
          let filter =
            let base = "^helm:rdf:www\\.cs\\.unibo\\.it/helm/rdf/" in
            match classs with
            | ("forward" as c) | ("backward" as c) ->
                (fun uri -> Pcre.pmatch ~pat:(base ^ c) uri)
            | c -> raise (Http_getter_invalid_RDF_class c)
          in
          return_all_rdf_uris filter outchan
        with Http_getter_invalid_RDF_class c ->
          raise (Http_getter_bad_request ("Invalid RDF class: " ^ c)))
    | "/ls" -> return_ls (parse_ls_uri req) (parse_output_format req) outchan
    | "/getempty" ->
        Http_daemon.respond ~body:Http_getter_const.empty_xml outchan
    | invalid_request ->
        Http_daemon.respond_error
          ~status:(`Client_error `Bad_request) outchan);
    debug_print "Done!"
  with
  | Http_types.Param_not_found attr_name ->
      return_400 (sprintf "Parameter '%s' is missing" attr_name) outchan
  | Http_getter_bad_request msg -> return_html_error msg outchan
  | Http_getter_internal_error msg -> return_html_internal_error msg outchan
  | exc ->
      return_html_error
        ("Uncaught exception: " ^ (Printexc.to_string exc)) outchan
in

(* daemon initialization *)
Http_getter_env.dump_env ();
flush stdout;
Unix.putenv "http_proxy" "";
Sys.catch_break true;
try
  Http_daemon.start'
    ~timeout:None ~port:Http_getter_env.port ~mode:`Thread callback
with Sys.Break -> save_maps ()
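
(* Quick reference of the HTTP API served above, with illustrative requests.
   The cic: URIs below are made up; only the paths and parameter names come
   from the dispatch code in 'callback':

     GET /help
     GET /getxml?uri=cic:/Foo/bar.con&format=gz&patch_dtd=no
     GET /getxslt?uri=mystyle.xsl&patch_dtd=yes
     GET /getdtd?uri=ls.dtd
     GET /resolve?uri=cic:/Foo/bar.con
     GET /register?uri=cic:/Foo/bar.con&url=http://remote.example/bar.xml
     GET /update
     GET /getalluris
     GET /getallrdfuris?class=forward
     GET /ls?baseuri=cic:/Foo&format=txt
     GET /getempty
*)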