+(*
+ * Copyright (C) 2000, HELM Team.
+ *
+ * This file is part of HELM, an Hypertextual, Electronic
+ * Library of Mathematics, developed at the Computer Science
+ * Department, University of Bologna, Italy.
+ *
+ * HELM is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * HELM is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with HELM; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place - Suite 330, Boston,
+ * MA 02111-1307, USA.
+ *
+ * For details, see the HELM World-Wide-Web page,
+ * http://cs.unibo.it/helm/.
+ *)
+
+(* TODO optimization: precompile almost all regexp *)
+
+open Http_getter_common;;
+open Http_getter_types;;
+open Printf;;
+
+ (* debugging settings *)
+(* Master switch for the getter's own debug output on stderr. *)
+let debug = true;;
+(* Print [s] on stderr with a "[HTTP-Getter]" tag when [debug] is set. *)
+let debug_print s = if debug then prerr_endline ("[HTTP-Getter] " ^ s);;
+(* Separate switch for the HTTP library's internal tracing. *)
+let http_debug = false;;
+Http_common.debug := http_debug;;
+
+let http_get url =
+  (* Download [url]; return [Some body] on success, [None] when the server
+     answers with an HTTP error (the response code is logged).  Other
+     exceptions (e.g. network failures) propagate to the caller. *)
+  debug_print ("Downloading URL: " ^ url);
+  try Some (Http_client.Convenience.http_get url)
+  with Http_client.Http_error (code, _) ->
+    debug_print
+      (sprintf "Failed to download %s, HTTP response was %d" url code);
+    None
+;;
+
+let parse_format (req: Http_types.request) =
+  (* Encoding requested through the "format" parameter.  A missing
+     parameter defaults to [Enc_normal]; an unknown value is a bad
+     request. *)
+  let fmt =
+    try Some (req#param "format") with Http_types.Param_not_found _ -> None
+  in
+  match fmt with
+  | None | Some "normal" -> Enc_normal
+  | Some "gz" -> Enc_gzipped
+  | Some s -> raise (Http_getter_bad_request ("Invalid format: " ^ s))
+;;
+let parse_patch_dtd (req: Http_types.request) =
+  (* Boolean "patch_dtd" parameter, compared case-insensitively against
+     "yes"/"no".  A missing parameter defaults to [true]. *)
+  let value =
+    try Some (req#param "patch_dtd") with Http_types.Param_not_found _ -> None
+  in
+  match value with
+  | None -> true
+  | Some s ->
+      (match String.lowercase s with
+       | "yes" -> true
+       | "no" -> false
+       | _ -> raise (Http_getter_bad_request ("Invalid patch_dtd value: " ^ s)))
+;;
+let parse_ls_format (req: Http_types.request) =
+  (* Output format for /ls ("format" parameter, case-insensitive).
+     Unlike [parse_format] the parameter is mandatory here: if it is
+     missing, Http_types.Param_not_found escapes to the caller. *)
+  let s = req#param "format" in
+  match String.lowercase s with
+  | "txt" -> Fmt_text
+  | "xml" -> Fmt_xml
+  | _ -> raise (Http_getter_bad_request ("Invalid /ls format: " ^ s))
+;;
+let parse_ls_uri (req: Http_types.request) =
+  (* Parse the mandatory "baseuri" parameter ("cic:..." or "theory:...")
+     into the corresponding variant, ignoring any trailing slashes. *)
+  let baseuri = req#param "baseuri" in
+  let stripped = Pcre.replace ~pat:"/+$" baseuri in
+  let pieces = Pcre.extract ~pat:"^(\\w+):(.*)$" stripped in
+  match (pieces.(1), pieces.(2)) with
+  | ("cic", uri) -> Cic uri
+  | ("theory", uri) -> Theory uri
+  | _ -> raise (Http_getter_bad_request ("Invalid /ls baseuri: " ^ baseuri))
+;;
+
+(* Global URI -> URL maps, one per document class, backed by the Dbm
+   databases configured in Http_getter_env. *)
+let xml_map = new Http_getter_map.map Http_getter_env.xml_dbm in
+let rdf_map = new Http_getter_map.map Http_getter_env.rdf_dbm in
+let xsl_map = new Http_getter_map.map Http_getter_env.xsl_dbm in
+
+let resolve uri = (* use global maps *)
+  (* Pick the map matching the URI's class and resolve [uri] through it;
+     URIs of no known class are unresolvable. *)
+  let map =
+    if is_xml_uri uri then xml_map
+    else if is_rdf_uri uri then rdf_map
+    else if is_xsl_uri uri then xsl_map
+    else raise (Http_getter_unresolvable_URI uri)
+  in
+  map#resolve uri
+in
+let return_all_foo_uris map doctype filter outchan =
+  (* Send on [outchan] an XML document whose root element is [doctype],
+     listing every URI in [map] accepted by [filter].
+     BUGFIX: a stray ';' used to follow the DOCTYPE declaration in the
+     emitted prolog, producing ill-formed XML. *)
+  Http_daemon.send_basic_headers outchan;
+  Http_daemon.send_header "Content-Type" "text/xml" outchan;
+  Http_daemon.send_CRLF outchan;
+  output_string
+    outchan
+    (sprintf
+"<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>
+<!DOCTYPE %s SYSTEM \"%s/getdtd?uri=%s.dtd\">
+
+<%s>
+"
+      doctype
+      Http_getter_env.my_own_url
+      doctype
+      doctype);
+  map#iter
+    (fun uri _ ->
+      if filter uri then
+        output_string outchan (sprintf "\t<uri value=\"%s\" />\n" uri));
+  output_string outchan (sprintf "</%s>\n" doctype)
+in
+(* Specializations of return_all_foo_uris for the XML and RDF maps. *)
+let return_all_uris = return_all_foo_uris xml_map "alluris" in
+let return_all_rdf_uris = return_all_foo_uris rdf_map "allrdfuris" in
+let return_ls =
+  (* Answer a /ls request: list, in text or XML [format], the
+     subdirectories and objects found in the global XML map under the
+     [lsuri] prefix, flagging for each object whether annotated, typed
+     and body variants exist. *)
+  let (++) (oldann, oldtypes, oldbody) (newann, newtypes, newbody) =
+    (* component-wise max of two (ann, types, body) flag triples *)
+    ((if newann > oldann then newann else oldann),
+     (if newtypes > oldtypes then newtypes else oldtypes),
+     (if newbody > oldbody then newbody else oldbody))
+  in
+  let basepartRE =
+    Pcre.regexp "^([^.]*\\.[^.]*)((\\.body)|(\\.types))?(\\.ann)?"
+  in
+  fun lsuri format outchan ->
+    let pat =
+      "^" ^ (match lsuri with Cic p -> ("cic:" ^ p) | Theory p -> ("theory:" ^ p))
+    in
+    let dirs = ref [] in
+    let objs = Hashtbl.create 17 in
+    xml_map#iter (* BLEARGH Dbm module lacks support for fold-like functions *)
+      (fun _ -> function
+        | uri when Pcre.pmatch ~pat:(pat ^ "/") uri -> (* directory hit *)
+            let dir =
+              List.hd (Pcre.split ~pat:"/" (Pcre.replace ~pat:(pat ^ "/") uri))
+            in
+            dirs := dir :: !dirs
+        | uri when Pcre.pmatch ~pat:(pat ^ "(\\.|$)") uri -> (* object hit *)
+            let localpart = Pcre.replace ~pat:"^.*/" uri in
+            let basepart = Pcre.replace ~rex:basepartRE ~templ:"$1" localpart in
+            let oldflags =
+              try
+                Hashtbl.find objs basepart
+              with Not_found -> (false, No, No) (* no ann, no types no body *)
+            in
+            let newflags =
+              (* BUGFIX: the ".ann" patterns must be tested first --
+                 Pcre.pmatch is an unanchored substring match, so
+                 "\\.types" also matched "foo.types.ann" and the .ann
+                 branches were unreachable.  The dots before "ann" are
+                 now escaped as well. *)
+              match localpart with
+              | s when Pcre.pmatch ~pat:"\\.types\\.ann" s -> (true, Ann, No)
+              | s when Pcre.pmatch ~pat:"\\.body\\.ann" s -> (true, No, Ann)
+              | s when Pcre.pmatch ~pat:"\\.types" s -> (false, Yes, No)
+              | s when Pcre.pmatch ~pat:"\\.body" s -> (false, No, Yes)
+              | s ->
+                  raise
+                    (Http_getter_internal_error ("Invalid /ls localpart: " ^ s))
+            in
+            Hashtbl.replace objs basepart (oldflags ++ newflags)
+        | _ -> () (* miss *));
+    match format with
+    | Fmt_text ->
+        let body =
+          "dir, " ^ (String.concat "\ndir, " (List.sort compare !dirs)) ^ "\n" ^
+          (Http_getter_misc.hashtbl_sorted_fold
+            (fun uri (annflag, typesflag, bodyflag) cont ->
+              sprintf
+                "%sobject, %s, <%s,%s,%s>\n"
+                cont uri (if annflag then "YES" else "NO")
+                (string_of_ls_flag typesflag) (string_of_ls_flag bodyflag))
+            objs "")
+        in Http_daemon.respond ~headers:["Content-Type", "text/txt"] ~body outchan
+    | Fmt_xml ->
+        let body =
+          sprintf
+(* BUGFIX: the DOCTYPE declaration was missing its closing '>' *)
+"<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>
+<!DOCTYPE ls SYSTEM \"%s/getdtd?uri=ls.dtd\">
+
+<ls>
+%s
+</ls>
+"
+            Http_getter_env.my_own_url
+            ("\n" ^
+             (String.concat
+               "\n"
+               (List.map
+                 (fun d -> "<section>" ^ d ^ "</section>")
+                 (List.sort compare !dirs))) ^ "\n" ^
+             (Http_getter_misc.hashtbl_sorted_fold
+               (fun uri (annflag, typesflag, bodyflag) cont ->
+                 sprintf
+"%s<object name=\"%s\">
+\t<ann value=\"%s\">
+\t<types value=\"%s\">
+\t<body value=\"%s\">
+</object>
+"
+                   cont uri (if annflag then "YES" else "NO")
+                   (string_of_ls_flag typesflag)
+                   (string_of_ls_flag bodyflag))
+               objs ""))
+        in Http_daemon.respond ~headers:["Content-Type", "text/xml"] ~body outchan
+in
+let update_from_server logmsg server_url = (* use global maps *)
+  (* Download the xml/rdf/xsl index files published by [server_url] and
+     add their entries to the corresponding global maps.  Returns
+     [logmsg] extended with an HTML report of what was done.
+     BUGFIX: [logmsg] -- the List.fold_left accumulator (see the /update
+     handler) -- used to be ignored, so the reports of every server but
+     the last were silently dropped. *)
+  let xml_url_of_uri = function
+    (* TODO missing sanity checks on server_url, e.g. it can contains $1 *)
+    | uri when (Pcre.pmatch ~pat:"^cic://" uri) ->
+        Pcre.replace ~pat:"^cic://" ~templ:server_url uri
+    | uri when (Pcre.pmatch ~pat:"^theory://" uri) ->
+        Pcre.replace ~pat:"^theory://" ~templ:server_url uri
+    | uri -> raise (Http_getter_invalid_URI uri)
+  in
+  let rdf_url_of_uri = function (* TODO as above *)
+    | uri when (Pcre.pmatch ~pat:"^helm:rdf.*//cic:" uri) ->
+        Pcre.replace ~pat:"^helm:rdf.*//cic:" ~templ:server_url uri
+    | uri when (Pcre.pmatch ~pat:"^helm:rdf.*//theory:" uri) ->
+        Pcre.replace ~pat:"^helm:rdf.*//theory:" ~templ:server_url uri
+    | uri -> raise (Http_getter_invalid_URI uri)
+  in
+  (* accumulate onto the incoming log instead of discarding it *)
+  let log = ref (logmsg ^ "Processing server: " ^ server_url ^ "<br />\n") in
+  let (xml_index, rdf_index, xsl_index) =
+    (* TODO keeps index in memory, is better to keep them on temp files? *)
+    (http_get (server_url ^ "/" ^ Http_getter_env.xml_index),
+     http_get (server_url ^ "/" ^ Http_getter_env.rdf_index),
+     http_get (server_url ^ "/" ^ Http_getter_env.xsl_index))
+  in
+  if (xml_index = None && rdf_index = None && xsl_index = None) then
+    debug_print (sprintf "Warning: useless server %s" server_url);
+  (* index line format: "<uri>" or "<uri> gz" (gzipped variant) *)
+  (match xml_index with
+   | Some xml_index ->
+       (log := !log ^ "Updating XML db ...<br />\n";
+        List.iter
+          (fun l ->
+            try
+              (match Pcre.split ~pat:"[ \\t]+" l with
+               | [uri; "gz"] -> xml_map#add uri ((xml_url_of_uri uri) ^ ".xml.gz")
+               | [uri] -> xml_map#add uri ((xml_url_of_uri uri) ^ ".xml")
+               | _ -> log := !log ^ "Ignoring invalid line: " ^ l ^ "<br />\n")
+            with Http_getter_invalid_URI uri ->
+              log := !log ^ "Ignoring invalid XML URI: " ^ uri)
+          (Pcre.split ~pat:"\n+" xml_index)) (* xml_index lines *)
+   | None -> ());
+  (match rdf_index with
+   | Some rdf_index ->
+       (log := !log ^ "Updating RDF db ...<br />\n";
+        List.iter
+          (fun l ->
+            try
+              (match Pcre.split ~pat:"[ \\t]+" l with
+               | [uri; "gz"] -> rdf_map#add uri ((rdf_url_of_uri uri) ^ ".xml.gz")
+               | [uri] -> rdf_map#add uri ((rdf_url_of_uri uri) ^ ".xml")
+               | _ -> log := !log ^ "Ignoring invalid line: " ^ l ^ "<br />\n")
+            with Http_getter_invalid_URI uri ->
+              log := !log ^ "Ignoring invalid RDF URI: " ^ uri)
+          (Pcre.split ~pat:"\n+" rdf_index)) (* rdf_index lines *)
+   | None -> ());
+  (match xsl_index with
+   | Some xsl_index ->
+       (log := !log ^ "Updating XSLT db ...<br />\n";
+        List.iter
+          (fun l -> xsl_map#add l (server_url ^ "/" ^ l))
+          (Pcre.split ~pat:"\n+" xsl_index);
+        log := !log ^ "All done!<br />\n")
+   | None -> ());
+  !log
+in
+
+ (* thread action *)
+(* Per-request handler (one thread per connection): dispatch on the
+   request path, write the answer on [outchan], and translate getter
+   exceptions into HTML/HTTP error responses. *)
+let callback (req: Http_types.request) outchan =
+ try
+ debug_print ("Connection from " ^ req#clientAddr);
+ debug_print ("Received request: " ^ req#path);
+ (match req#path with
+ | "/help" -> return_html_msg Http_getter_const.usage_string outchan
+ | "/getxml" | "/getxslt" | "/getdtd" | "/resolve" | "/register" ->
+ (let uri = req#param "uri" in (* common parameter *)
+ match req#path with
+ | "/getxml" ->
+ let enc = parse_format req in
+ let patch_dtd = parse_patch_dtd req in
+ Http_getter_cache.respond_xml
+ ~url:(resolve uri) ~uri ~enc ~patch_dtd outchan
+ | "/getxslt" ->
+(* let patch_dtd = parse_patch_dtd req in *)
+ (* TODO add support and default value for patch_dtd *)
+ Http_getter_cache.respond_xsl ~url:(resolve uri) outchan
+ | "/getdtd" ->
+ let patch_dtd = parse_patch_dtd req in
+ Http_getter_cache.respond_dtd
+ ~patch_dtd ~url:(Http_getter_env.dtd_dir ^ "/" ^ uri) outchan
+ | "/resolve" ->
+ (* answer <url .../> on success, <unresolved /> when unknown *)
+ (try
+ return_xml_msg
+ (sprintf "<url value=\"%s\" />\n" (resolve uri))
+ outchan
+ with Http_getter_unresolvable_URI uri ->
+ return_xml_msg "<unresolved />\n" outchan)
+ | "/register" ->
+ let url = req#param "url" in
+ xml_map#add uri url;
+ return_html_msg "Register done" outchan
+ | _ -> assert false)
+ | "/update" ->
+ (* wipe all maps, then repopulate them from every configured server *)
+ (xml_map#clear; rdf_map#clear; xsl_map#clear;
+ let log =
+ List.fold_left
+ update_from_server
+ "" (* initial logmsg: empty *)
+ (* reverse order: 1st server is the most important one *)
+ (List.rev Http_getter_env.servers)
+ in
+ return_html_msg log outchan)
+ | "/getalluris" ->
+ (* all cic: URIs except ".types" variants *)
+ return_all_uris
+ (fun uri ->
+ (Pcre.pmatch ~pat:"^cic:" uri) &&
+ not (Pcre.pmatch ~pat:"\\.types$" uri))
+ outchan
+ | "/getallrdfuris" ->
+ (let classs = req#param "class" in
+ try
+ let filter =
+ let base = "^helm:rdf:www\\.cs\\.unibo\\.it/helm/rdf/" in
+ match classs with
+ | ("forward" as c) | ("backward" as c) ->
+ (fun uri -> Pcre.pmatch ~pat:(base ^ c) uri)
+ | c -> raise (Http_getter_invalid_RDF_class c)
+ in
+ return_all_rdf_uris filter outchan
+ with Http_getter_invalid_RDF_class c ->
+ raise (Http_getter_bad_request ("Invalid RDF class: " ^ c)))
+ | "/ls" -> return_ls (parse_ls_uri req) (parse_ls_format req) outchan
+ | "/getempty" ->
+ Http_daemon.respond ~body:Http_getter_const.empty_xml outchan
+ | invalid_request ->
+ Http_daemon.respond_error ~status:(`Client_error `Bad_request) outchan)
+ with
+ (* map getter/HTTP exceptions to user-visible error pages *)
+ | Http_types.Param_not_found attr_name ->
+ return_400 (sprintf "Parameter '%s' is missing" attr_name) outchan
+ | Http_getter_bad_request msg -> return_html_error msg outchan
+ | Http_getter_internal_error msg -> return_html_internal_error msg outchan
+ | exc ->
+ return_html_error
+ ("Uncaught exception: " ^ (Printexc.to_string exc))
+ outchan
+in
+
+ (* daemon initialization *)
+Http_getter_env.dump_env ();
+flush stdout;
+(* clear http_proxy so Http_client connects directly, not via a proxy *)
+Unix.putenv "http_proxy" "";
+(* main loop: serve requests, one thread per connection *)
+Http_daemon.start'
+ ~timeout:None ~port:Http_getter_env.port ~mode:`Thread callback
+