(*
- * Copyright (C) 2000, HELM Team.
+ * Copyright (C) 2003:
+ * Stefano Zacchiroli <zack@cs.unibo.it>
+ * for the HELM Team http://helm.cs.unibo.it/
*
* This file is part of HELM, an Hypertextual, Electronic
* Library of Mathematics, developed at the Computer Science
* MA 02111-1307, USA.
*
* For details, see the HELM World-Wide-Web page,
- * http://cs.unibo.it/helm/.
+ * http://helm.cs.unibo.it/
*)
-(* TODO optimization: precompile almost all regexp *)
-
open Http_getter_common;;
+open Http_getter_misc;;
open Http_getter_types;;
+open Http_getter_debugger;;
open Printf;;
- (* debugging settings *)
-let debug = true;;
-let debug_print s = if debug then prerr_endline ("[HTTP-Getter] " ^ s);;
-let http_debug = false;;
-Http_common.debug := http_debug;;
+ (* constants *)
-let http_get url =
- debug_print ("Downloading URL: " ^ url);
- try
- Some (Http_client.Convenience.http_get url)
- with Http_client.Http_error (code, _) ->
- (debug_print
- (sprintf "Failed to download %s, HTTP response was %d" url code);
- None)
-;;
+let common_headers = [
+ "Cache-Control", "no-cache";
+ "Pragma", "no-cache";
+ "Expires", "0"
+]
-let parse_format (req: Http_types.request) =
+ (* HTTP queries argument parsing *)
+
+ (* parse encoding ("format" parameter), default is Enc_normal *)
+let parse_enc (req: Http_types.request) =
try
(match req#param "format" with
| "normal" -> Enc_normal
| s -> raise (Http_getter_bad_request ("Invalid format: " ^ s)))
with Http_types.Param_not_found _ -> Enc_normal
;;
-let parse_patch_dtd (req: Http_types.request) =
+ (* parse "patch_dtd" parameter, default is true *)
+let parse_patch (req: Http_types.request) =
try
(match req#param "patch_dtd" with
| s when String.lowercase s = "yes" -> true
| s -> raise (Http_getter_bad_request ("Invalid patch_dtd value: " ^ s)))
with Http_types.Param_not_found _ -> true
;;
-let parse_ls_format (req: Http_types.request) =
+ (* parse output format ("format" parameter), no default value *)
+let parse_output_format (req: Http_types.request) =
match req#param "format" with
| s when String.lowercase s = "txt" -> Fmt_text
| s when String.lowercase s = "xml" -> Fmt_xml
| s -> raise (Http_getter_bad_request ("Invalid /ls format: " ^ s))
;;
-let parse_ls_uri (req: Http_types.request) =
- let baseuri = req#param "baseuri" in
- let subs =
- Pcre.extract ~pat:"^(\\w+):(.*)$" (Pcre.replace ~pat:"/+$" baseuri)
+ (* parse "baseuri" format for /ls method, no default value *)
+let parse_ls_uri =
+ let parse_ls_RE = Pcre.regexp "^(\\w+):(.*)$" in
+ let trailing_slash_RE = Pcre.regexp "/+$" in
+ let wrong_uri uri =
+ raise (Http_getter_bad_request ("Invalid /ls baseuri: " ^ uri))
in
- match (subs.(1), subs.(2)) with
- | "cic", uri -> Cic uri
- | "theory", uri -> Theory uri
- | _ -> raise (Http_getter_bad_request ("Invalid /ls baseuri: " ^ baseuri))
+ fun (req: Http_types.request) ->
+ let baseuri = req#param "baseuri" in
+ try
+ let subs =
+ Pcre.extract ~rex:parse_ls_RE
+ (Pcre.replace ~rex:trailing_slash_RE baseuri)
+ in
+ (match (subs.(1), subs.(2)) with
+ | "cic", uri -> Cic uri
+ | "theory", uri -> Theory uri
+ | _ -> wrong_uri baseuri)
+ with Not_found -> wrong_uri baseuri
;;
-let xml_map = new Http_getter_map.map Http_getter_env.xml_dbm in
+ (* global maps, shared by all threads *)
+
+let cic_map = new Http_getter_map.map Http_getter_env.cic_dbm in
+let nuprl_map = new Http_getter_map.map Http_getter_env.nuprl_dbm in
let rdf_map = new Http_getter_map.map Http_getter_env.rdf_dbm in
let xsl_map = new Http_getter_map.map Http_getter_env.xsl_dbm in
-let resolve uri = (* use global maps *)
- let map =
- match uri with
- | uri when is_xml_uri uri -> xml_map
- | uri when is_rdf_uri uri -> rdf_map
- | uri when is_xsl_uri uri -> xsl_map
- | uri -> raise (Http_getter_unresolvable_URI uri)
- in
- map#resolve uri
+let save_maps () =
+ cic_map#close; nuprl_map#close; rdf_map#close; xsl_map#close in
+let map_of_uri = function
+ | uri when is_cic_uri uri -> cic_map
+ | uri when is_nuprl_uri uri -> nuprl_map
+ | uri when is_rdf_uri uri -> rdf_map
+ | uri when is_xsl_uri uri -> xsl_map
+ | uri -> raise (Http_getter_unresolvable_URI uri)
+in
+let resolve uri =
+ try
+ (map_of_uri uri)#resolve uri
+ with Http_getter_map.Key_not_found _ ->
+ raise (Http_getter_unresolvable_URI uri)
+in
+let register uri =
+  (* Warning: this fails if uri is already registered *)
+ (map_of_uri uri)#add uri
in
let return_all_foo_uris map doctype filter outchan =
- Http_daemon.send_basic_headers outchan;
+ (** return all URIs contained in 'map' which satisfy predicate 'filter'; URIs
+ are written in an XMLish format ('doctype' is the XML doctype) onto 'outchan'
+ *)
+ Http_daemon.send_basic_headers ~code:200 outchan;
Http_daemon.send_header "Content-Type" "text/xml" outchan;
+ Http_daemon.send_headers common_headers outchan;
Http_daemon.send_CRLF outchan;
output_string
outchan
(sprintf
"<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>
-<!DOCTYPE %s SYSTEM \"%s/getdtd?uri=%s.dtd\">;
+<!DOCTYPE %s SYSTEM \"%s/getdtd?uri=%s.dtd\">
<%s>
"
output_string outchan (sprintf "\t<uri value=\"%s\" />\n" uri));
output_string outchan (sprintf "</%s>\n" doctype)
in
-let return_all_uris = return_all_foo_uris xml_map "alluris" in
+let return_all_xml_uris = return_all_foo_uris cic_map "alluris" in
let return_all_rdf_uris = return_all_foo_uris rdf_map "allrdfuris" in
let return_ls =
let (++) (oldann, oldtypes, oldbody) (newann, newtypes, newbody) =
((if newann > oldann then newann else oldann),
(if newtypes > oldtypes then newtypes else oldtypes),
- (if newbody > oldbody then newbody else oldbody))
- in
- let basepartRE =
+ (if newbody > oldbody then newbody else oldbody))
+ in
+ let basepart_RE =
Pcre.regexp "^([^.]*\\.[^.]*)((\\.body)|(\\.types))?(\\.ann)?"
in
- fun lsuri format outchan ->
- let pat =
- "^" ^ (match lsuri with Cic p -> ("cic:" ^ p) | Theory p -> ("theory:" ^ p))
+ let (types_RE, types_ann_RE, body_RE, body_ann_RE) =
+ (Pcre.regexp "\\.types", Pcre.regexp "\\.types.ann",
+ Pcre.regexp "\\.body", Pcre.regexp "\\.body.ann")
in
- let dirs = ref [] in
- let objs = Hashtbl.create 17 in
- xml_map#iter (* BLEARGH Dbm module lacks support for fold-like functions *)
- (fun _ -> function
- | uri when Pcre.pmatch ~pat:(pat ^ "/") uri -> (* directory hit *)
- let dir =
- List.hd (Pcre.split ~pat:"/" (Pcre.replace ~pat:(pat ^ "/") uri))
- in
- dirs := dir :: !dirs
- | uri when Pcre.pmatch ~pat:(pat ^ "(\\.|$)") uri -> (* object hit *)
- let localpart = Pcre.replace ~pat:"^.*/" uri in
- let basepart = Pcre.replace ~rex:basepartRE ~templ:"$1" localpart in
- let oldflags =
- try
- Hashtbl.find objs basepart
- with Not_found -> (false, No, No) (* no ann, no types no body *)
- in
- let newflags =
- match localpart with
- | s when Pcre.pmatch ~pat:"\\.types" s -> (false, Yes, No)
- | s when Pcre.pmatch ~pat:"\\.types.ann" s -> (true, Ann, No)
- | s when Pcre.pmatch ~pat:"\\.body" s -> (false, No, Yes)
- | s when Pcre.pmatch ~pat:"\\.body.ann" s -> (true, No, Ann)
- | s ->
- raise
- (Http_getter_internal_error ("Invalid /ls localpart: " ^ s))
- in
- Hashtbl.replace objs basepart (oldflags ++ newflags)
- | _ -> () (* miss *));
- match format with
- | Fmt_text ->
- let body =
- "dir, " ^ (String.concat "\ndir, " (List.sort compare !dirs)) ^ "\n" ^
- (Http_getter_misc.hashtbl_sorted_fold
- (fun uri (annflag, typesflag, bodyflag) cont ->
- sprintf
- "%sobject, %s, <%s,%s,%s>\n"
- cont uri (if annflag then "YES" else "NO")
- (string_of_ls_flag typesflag) (string_of_ls_flag bodyflag))
- objs "")
- in Http_daemon.respond ~headers:["Content-Type", "text/txt"] ~body outchan
- | Fmt_xml ->
- let body =
- sprintf
+ let (slash_RE, til_slash_RE, no_slashes_RE) =
+ (Pcre.regexp "/", Pcre.regexp "^.*/", Pcre.regexp "^[^/]*$")
+ in
+ fun lsuri fmt outchan ->
+ let pat =
+ "^" ^
+ (match lsuri with Cic p -> ("cic:" ^ p) | Theory p -> ("theory:" ^ p))
+ in
+ let (dir_RE, obj_RE) =
+ (Pcre.regexp (pat ^ "/"), Pcre.regexp (pat ^ "(\\.|$)"))
+ in
+ let dirs = ref StringSet.empty in
+ let objs = Hashtbl.create 17 in
+ let store_dir d =
+ dirs := StringSet.add (List.hd (Pcre.split ~rex:slash_RE d)) !dirs
+ in
+ let store_obj o =
+ let basepart = Pcre.replace ~rex:basepart_RE ~templ:"$1" o in
+ let oldflags =
+ try
+ Hashtbl.find objs basepart
+ with Not_found -> (false, No, No) (* no ann, no types, no body *)
+ in
+ let newflags =
+ match o with
+ | s when Pcre.pmatch ~rex:types_RE s -> (false, Yes, No)
+ | s when Pcre.pmatch ~rex:types_ann_RE s -> (true, Ann, No)
+ | s when Pcre.pmatch ~rex:body_RE s -> (false, No, Yes)
+ | s when Pcre.pmatch ~rex:body_ann_RE s -> (true, No, Ann)
+ | s -> (false, No, No)
+ in
+ Hashtbl.replace objs basepart (oldflags ++ newflags)
+ in
+ cic_map#iter (* BLEARGH Dbm module lacks support for fold-like functions *)
+ (fun key _ ->
+ match key with
+ | uri when Pcre.pmatch ~rex:dir_RE uri -> (* directory hit *)
+ let localpart = Pcre.replace ~rex:dir_RE uri in
+ if Pcre.pmatch ~rex:no_slashes_RE localpart then
+ store_obj localpart
+ else
+ store_dir localpart
+ | uri when Pcre.pmatch ~rex:obj_RE uri -> (* file hit *)
+ store_obj (Pcre.replace ~rex:til_slash_RE uri)
+ | uri -> () (* miss *));
+ match fmt with
+ | Fmt_text ->
+ let body =
+ (List.fold_left
+ (fun s d -> sprintf "%sdir, %s\n" s d) ""
+ (StringSet.elements !dirs)) ^
+ (Http_getter_misc.hashtbl_sorted_fold
+ (fun uri (annflag, typesflag, bodyflag) cont ->
+ sprintf "%sobject, %s, <%s,%s,%s>\n"
+ cont uri (if annflag then "YES" else "NO")
+ (string_of_ls_flag typesflag) (string_of_ls_flag bodyflag))
+ objs "")
+ in
+ Http_daemon.respond
+ ~headers:(("Content-Type", "text/plain") :: common_headers)
+ ~body outchan
+ | Fmt_xml ->
+ let body =
+ sprintf
"<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>
-<!DOCTYPE ls SYSTEM \"%s/getdtd?uri=ls.dtd\"
+<!DOCTYPE ls SYSTEM \"%s/getdtd?uri=ls.dtd\">
<ls>
%s
</ls>
"
- Http_getter_env.my_own_url
- ("\n" ^
- (String.concat
- "\n"
- (List.map
- (fun d -> "<section>" ^ d ^ "</section>")
- (List.sort compare !dirs))) ^ "\n" ^
- (Http_getter_misc.hashtbl_sorted_fold
- (fun uri (annflag, typesflag, bodyflag) cont ->
- sprintf
+ Http_getter_env.my_own_url
+ ("\n" ^
+ (String.concat
+ "\n"
+ (List.map
+ (fun d -> "<section>" ^ d ^ "</section>")
+ (StringSet.elements !dirs))) ^ "\n" ^
+ (Http_getter_misc.hashtbl_sorted_fold
+ (fun uri (annflag, typesflag, bodyflag) cont ->
+ sprintf
"%s<object name=\"%s\">
-\t<ann value=\"%s\">
-\t<types value=\"%s\">
-\t<body value=\"%s\">
+\t<ann value=\"%s\" />
+\t<types value=\"%s\" />
+\t<body value=\"%s\" />
</object>
"
- cont uri (if annflag then "YES" else "NO")
- (string_of_ls_flag typesflag)
- (string_of_ls_flag bodyflag))
- objs ""))
- in Http_daemon.respond ~headers:["Content-Type", "text/xml"] ~body outchan
+ cont uri (if annflag then "YES" else "NO")
+ (string_of_ls_flag typesflag)
+ (string_of_ls_flag bodyflag))
+ objs ""))
+ in
+ Http_daemon.respond
+ ~headers:(("Content-Type", "text/xml") :: common_headers)
+ ~body outchan
+in
+let (index_line_sep_RE, index_sep_RE, trailing_types_RE,
+ heading_cic_RE, heading_theory_RE, heading_nuprl_RE,
+ heading_rdf_cic_RE, heading_rdf_theory_RE) =
+ (Pcre.regexp "[ \t]+", Pcre.regexp "\r\n|\r|\n",
+ Pcre.regexp "\\.types$",
+ Pcre.regexp "^cic:", Pcre.regexp "^theory:", Pcre.regexp "^nuprl:",
+ Pcre.regexp "^helm:rdf.*//cic:", Pcre.regexp "^helm:rdf.*//theory:")
in
let update_from_server logmsg server_url = (* use global maps *)
+ debug_print ("Updating information from " ^ server_url);
let xml_url_of_uri = function
(* TODO missing sanity checks on server_url, e.g. it can contains $1 *)
- | uri when (Pcre.pmatch ~pat:"^cic://" uri) ->
- Pcre.replace ~pat:"^cic://" ~templ:server_url uri
- | uri when (Pcre.pmatch ~pat:"^theory://" uri) ->
- Pcre.replace ~pat:"^theory://" ~templ:server_url uri
+ | uri when (Pcre.pmatch ~rex:heading_cic_RE uri) ->
+ Pcre.replace ~rex:heading_cic_RE ~templ:server_url uri
+ | uri when (Pcre.pmatch ~rex:heading_theory_RE uri) ->
+ Pcre.replace ~rex:heading_theory_RE ~templ:server_url uri
+ | uri when (Pcre.pmatch ~rex:heading_nuprl_RE uri) ->
+ Pcre.replace ~rex:heading_nuprl_RE ~templ:server_url uri
| uri -> raise (Http_getter_invalid_URI uri)
in
let rdf_url_of_uri = function (* TODO as above *)
- | uri when (Pcre.pmatch ~pat:"^helm:rdf.*//cic:" uri) ->
- Pcre.replace ~pat:"^helm:rdf.*//cic:" ~templ:server_url uri
- | uri when (Pcre.pmatch ~pat:"^helm:rdf.*//theory:" uri) ->
- Pcre.replace ~pat:"^helm:rdf.*//theory:" ~templ:server_url uri
+ | uri when (Pcre.pmatch ~rex:heading_rdf_cic_RE uri) ->
+ Pcre.replace ~rex:heading_rdf_cic_RE ~templ:server_url uri
+ | uri when (Pcre.pmatch ~rex:heading_rdf_theory_RE uri) ->
+ Pcre.replace ~rex:heading_rdf_theory_RE ~templ:server_url uri
| uri -> raise (Http_getter_invalid_URI uri)
in
- let log = ref ("Processing server: " ^ server_url ^ "<br />\n") in
+ let log = ref (logmsg ^ "Processing server: " ^ server_url ^ "<br />\n") in
let (xml_index, rdf_index, xsl_index) =
(* TODO keeps index in memory, is better to keep them on temp files? *)
(http_get (server_url ^ "/" ^ Http_getter_env.xml_index),
| Some xml_index ->
(log := !log ^ "Updating XML db ...<br />\n";
List.iter
- (fun l ->
- try
- (match Pcre.split ~pat:"[ \\t]+" l with
- | [uri; "gz"] -> xml_map#add uri ((xml_url_of_uri uri) ^ ".xml.gz")
- | [uri] -> xml_map#add uri ((xml_url_of_uri uri) ^ ".xml")
- | _ -> log := !log ^ "Ignoring invalid line: " ^ l ^ "<br />\n")
- with Http_getter_invalid_URI uri ->
- log := !log ^ "Ignoring invalid XML URI: " ^ uri)
- (Pcre.split ~pat:"\n+" xml_index)) (* xml_index lines *)
+ (function
+ | l when is_blank_line l -> () (* skip blank and commented lines *)
+ | l ->
+ try
+ (match Pcre.split ~rex:index_line_sep_RE l with
+ | [uri; "gz"] ->
+ assert (is_cic_uri uri || is_nuprl_uri uri) ;
+ (map_of_uri uri)#replace
+ uri ((xml_url_of_uri uri) ^ ".xml.gz")
+ | [uri] ->
+ assert (is_cic_uri uri || is_nuprl_uri uri) ;
+ (map_of_uri uri)#replace
+ uri ((xml_url_of_uri uri) ^ ".xml")
+ | _ ->
+ log := !log ^ "Ignoring invalid line: '" ^ l ^ "'<br />\n")
+ with Http_getter_invalid_URI uri ->
+ log := !log ^ "Ignoring invalid XML URI: '" ^ uri ^ "'<br />\n")
+ (Pcre.split ~rex:index_sep_RE xml_index)) (* xml_index lines *)
| None -> ());
(match rdf_index with
| Some rdf_index ->
List.iter
(fun l ->
try
- (match Pcre.split ~pat:"[ \\t]+" l with
- | [uri; "gz"] -> rdf_map#add uri ((rdf_url_of_uri uri) ^ ".xml.gz")
- | [uri] -> rdf_map#add uri ((rdf_url_of_uri uri) ^ ".xml")
+ (match Pcre.split ~rex:index_line_sep_RE l with
+ | [uri; "gz"] ->
+ rdf_map#replace uri ((rdf_url_of_uri uri) ^ ".xml.gz")
+ | [uri] -> rdf_map#replace uri ((rdf_url_of_uri uri) ^ ".xml")
| _ -> log := !log ^ "Ignoring invalid line: " ^ l ^ "<br />\n")
with Http_getter_invalid_URI uri ->
- log := !log ^ "Ignoring invalid RDF URI: " ^ uri)
- (Pcre.split ~pat:"\n+" rdf_index)) (* rdf_index lines *)
+ log := !log ^ "Ignoring invalid RDF URI: " ^ uri ^ "<br />\n")
+ (Pcre.split ~rex:index_sep_RE rdf_index)) (* rdf_index lines *)
| None -> ());
(match xsl_index with
| Some xsl_index ->
(log := !log ^ "Updating XSLT db ...<br />\n";
List.iter
- (fun l -> xsl_map#add l (server_url ^ "/" ^ l))
- (Pcre.split ~pat:"\n+" xsl_index);
+ (fun l -> xsl_map#replace l (server_url ^ "/" ^ l))
+ (Pcre.split ~rex:index_sep_RE xsl_index);
log := !log ^ "All done!<br />\n")
| None -> ());
+ debug_print "done with this server";
!log
in
+let update_from_all_servers () = (* use global maps *)
+ cic_map#clear; nuprl_map#clear; rdf_map#clear; xsl_map#clear;
+ let log =
+ List.fold_left
+ update_from_server
+ "" (* initial logmsg: empty *)
+ (* reverse order: 1st server is the most important one *)
+ (List.rev !Http_getter_env.servers)
+ in
+ cic_map#sync; nuprl_map#sync; rdf_map#sync; xsl_map#sync;
+ log
+in
(* thread action *)
+
let callback (req: Http_types.request) outchan =
try
debug_print ("Connection from " ^ req#clientAddr);
debug_print ("Received request: " ^ req#path);
(match req#path with
- | "/help" -> return_html_msg Http_getter_const.usage_string outchan
+ | "/help" ->
+ return_html_raw
+ (Http_getter_const.usage_string (Http_getter_env.env_to_string ()))
+ outchan
| "/getxml" | "/getxslt" | "/getdtd" | "/resolve" | "/register" ->
(let uri = req#param "uri" in (* common parameter *)
match req#path with
| "/getxml" ->
- let enc = parse_format req in
- let patch_dtd = parse_patch_dtd req in
+ let enc = parse_enc req in
+ let patch = parse_patch req in
Http_getter_cache.respond_xml
- ~url:(resolve uri) ~uri ~enc ~patch_dtd outchan
+ ~url:(resolve uri) ~uri ~enc ~patch outchan
| "/getxslt" ->
-(* let patch_dtd = parse_patch_dtd req in *)
- (* TODO add support and default value for patch_dtd *)
- Http_getter_cache.respond_xsl ~url:(resolve uri) outchan
+ let patch = parse_patch req in
+ Http_getter_cache.respond_xsl ~url:(resolve uri) ~patch outchan
| "/getdtd" ->
- let patch_dtd = parse_patch_dtd req in
+ let patch = parse_patch req in
Http_getter_cache.respond_dtd
- ~patch_dtd ~url:(Http_getter_env.dtd_dir ^ "/" ^ uri) outchan
+ ~patch ~url:(Http_getter_env.dtd_dir ^ "/" ^ uri) outchan
| "/resolve" ->
(try
- return_xml_msg
+ return_xml_raw
(sprintf "<url value=\"%s\" />\n" (resolve uri))
outchan
with Http_getter_unresolvable_URI uri ->
- return_xml_msg "<unresolved />\n" outchan)
+ return_xml_raw "<unresolved />\n" outchan)
| "/register" ->
let url = req#param "url" in
- xml_map#add uri url;
+ register uri url;
return_html_msg "Register done" outchan
| _ -> assert false)
| "/update" ->
- (xml_map#clear; rdf_map#clear; xsl_map#clear;
- let log =
- List.fold_left
- update_from_server
- "" (* initial logmsg: empty *)
- (* reverse order: 1st server is the most important one *)
- (List.rev Http_getter_env.servers)
+ Http_getter_env.reload (); (* reload servers list from servers file *)
+ let log = update_from_all_servers () in
+ return_html_msg log outchan
+ | "/list_servers" ->
+ return_html_raw
+ (sprintf "<html><body><table>\n%s\n</table></body></html>"
+ (String.concat "\n"
+ (List.map
+ (let i = ref ~-1 in
+ fun s -> incr i; sprintf "<tr><td>%d</td><td>%s</td></tr>" !i s)
+ !Http_getter_env.servers)))
+ outchan
+ | "/add_server" ->
+ let name = req#param "url" in
+ (try
+ let position =
+ try
+ let res = int_of_string (req#param "position") in
+ if res < 0 then
+ raise (Failure "int_of_string");
+ res
+ with Failure "int_of_string" ->
+ raise (Http_getter_bad_request
+ (sprintf "position must be a non negative integer (%s given)"
+ (req#param "position")))
+ in
+ if position = 0 then (* fallback to default value *)
+ raise (Http_types.Param_not_found "foo")
+ else if position > 0 then begin (* add server and update all *)
+ Http_getter_env.add_server ~position name;
+ let log = update_from_all_servers () in
+ return_html_msg
+ (sprintf "Added server %s in position %d)<br />\n%s"
+ name position log)
+ outchan
+ end else (* position < 0 *) (* error! *)
+ assert false (* already checked above *)
+ with Http_types.Param_not_found _ -> (* add as 1st server by default *)
+ Http_getter_env.add_server ~position:0 name;
+ let log = update_from_server (* quick update (new server only) *)
+ (sprintf "Added server %s in head position<br />\n" name) name
+ in
+ return_html_msg log outchan)
+ | "/remove_server" ->
+ let position =
+ try
+ let res = int_of_string (req#param "position") in
+ if res < 0 then
+ raise (Failure "int_of_string");
+ res
+ with Failure "int_of_string" ->
+ raise (Http_getter_bad_request
+ (sprintf "position must be a non negative integer (%s given)"
+ (req#param "position")))
in
- return_html_msg log outchan)
+ let server_name =
+ try
+ List.nth !Http_getter_env.servers position
+ with Failure "nth" ->
+ raise (Http_getter_bad_request
+ (sprintf "no server with position %d" position))
+ in
+ Http_getter_env.remove_server position;
+ let log = update_from_all_servers () in
+ return_html_msg
+ (sprintf "Removed server %s (position %d)<br />\n%s"
+ server_name position log)
+ outchan
| "/getalluris" ->
- return_all_uris
+ return_all_xml_uris
(fun uri ->
- (Pcre.pmatch ~pat:"^cic:" uri) &&
- not (Pcre.pmatch ~pat:"\\.types$" uri))
+ (Pcre.pmatch ~rex:heading_cic_RE uri) &&
+ not (Pcre.pmatch ~rex:trailing_types_RE uri))
outchan
| "/getallrdfuris" ->
(let classs = req#param "class" in
return_all_rdf_uris filter outchan
with Http_getter_invalid_RDF_class c ->
raise (Http_getter_bad_request ("Invalid RDF class: " ^ c)))
- | "/ls" -> return_ls (parse_ls_uri req) (parse_ls_format req) outchan
+ | "/ls" -> return_ls (parse_ls_uri req) (parse_output_format req) outchan
| "/getempty" ->
Http_daemon.respond ~body:Http_getter_const.empty_xml outchan
| invalid_request ->
- Http_daemon.respond_error ~status:(`Client_error `Bad_request) outchan)
+ Http_daemon.respond_error ~status:(`Client_error `Bad_request) outchan);
+ debug_print "Done!\n"
with
| Http_types.Param_not_found attr_name ->
return_400 (sprintf "Parameter '%s' is missing" attr_name) outchan
| Http_getter_bad_request msg -> return_html_error msg outchan
| Http_getter_internal_error msg -> return_html_internal_error msg outchan
+ | Shell.Subprocess_error l ->
+ return_html_internal_error
+ (String.concat "<br />\n"
+ (List.map
+ (fun (cmd, code) ->
+ sprintf "Command '%s' returned %s"
+ cmd (string_of_proc_status code))
+ l))
+ outchan
| exc ->
return_html_error
("Uncaught exception: " ^ (Printexc.to_string exc))
outchan
in
- (* daemon initialization *)
-Http_getter_env.dump_env ();
-flush stdout;
-Unix.putenv "http_proxy" "";
-Http_daemon.start'
- ~timeout:None ~port:Http_getter_env.port ~mode:`Thread callback
+ (* daemon initialization *)
+
+let main () =
+ print_string (Http_getter_env.env_to_string ());
+ flush stdout;
+ Unix.putenv "http_proxy" "";
+ at_exit save_maps;
+ Sys.catch_break true;
+ try
+ Http_daemon.start'
+ ~timeout:(Some 600) ~port:Http_getter_env.port ~mode:`Thread callback
+ with Sys.Break -> () (* 'save_maps' already registered with 'at_exit' *)
+in
+
+main ()