(*
- * Copyright (C) 2000, HELM Team.
+ * Copyright (C) 2003:
+ * Stefano Zacchiroli <zack@cs.unibo.it>
+ * for the HELM Team http://helm.cs.unibo.it/
*
* This file is part of HELM, an Hypertextual, Electronic
* Library of Mathematics, developed at the Computer Science
* MA 02111-1307, USA.
*
* For details, see the HELM World-Wide-Web page,
- * http://cs.unibo.it/helm/.
+ * http://helm.cs.unibo.it/
*)
+(* TODO cache expires control!!! *)
+(* TODO possible race condition, e.g.:
+ two clients require the same URI which is available in cache compressed, the
+ getter needs to uncompress it, send it back to the client, and delete the
+ uncompressed file. Actually the uncompressed file name is the same, a temp
+ file isn't used. *)
+(* TODO possible race condition, e.g.:
+ two clients require the same URI which is not available in cache, cache
+ filling operations can collide *)
+(* TODO uwobo loop:
+ if two large proofs (already in cache) are requested at the same time by two
+ clients, uwobo (java implementation, not yet tested with the OCaml one)
+ starts looping, sending output to one of the clients *)
+
open Http_getter_common;;
+open Http_getter_misc;;
open Http_getter_types;;
open Printf;;
| Enc_normal -> basename
| Enc_gzipped -> basename ^ ".gz")
-let respond_xml ?(enc = Enc_normal) ?(patch_dtd = true) ~url ~uri outchan =
+let respond_xml ?(enc = Enc_normal) ?(patch = true) ~url ~uri outchan =
let resource_type = resource_type_of_url url in
let extension = extension_of_resource_type resource_type in
let downloadname =
- match http_getter_uri_of_string uri with
+ match http_getter_uri_of_string uri with (* parse uri *)
| Xml_uri (Cic baseuri) | Xml_uri (Theory baseuri) ->
(* assumption: baseuri starts with "/" *)
sprintf "%s%s.%s" Http_getter_env.xml_dir baseuri extension
Http_getter_env.rdf_dir escaped_prefix baseuri extension
in
let patch_fun =
- if patch_dtd then Http_getter_common.patch_xml else (fun x -> x)
+ if patch then Http_getter_common.patch_xml else (fun x -> x)
in
let basename = Pcre.replace ~pat:"\\.gz$" downloadname in
if not (is_in_cache basename) then begin (* download and fill cache *)
+ mkdir ~parents:true (Filename.dirname downloadname);
wget ~output:downloadname url;
match (resource_type, Http_getter_env.cache_mode) with
| Enc_normal, Enc_normal ->
| Enc_gzipped, Enc_normal -> Sys.remove (basename ^ ".gz")
(* TODO enc is not yet supported *)
-let respond_xsl ?(enc = Enc_normal) ?(patch_dtd = true) ~url outchan =
+let respond_xsl ?(enc = Enc_normal) ?(patch = true) ~url outchan =
let patch_fun =
- if patch_dtd then Http_getter_common.patch_xsl else (fun x -> x)
+ if patch then Http_getter_common.patch_xsl else (fun x -> x)
in
let fname = tempfile () in
wget ~output:fname url;
return_file ~fname ~contype:"text/xml" ~patch_fun outchan;
Sys.remove fname
- (* TODO patch_dtd and format are fooish, implement them! *)
-let respond_dtd ?(enc = Enc_normal) ?(patch_dtd = true) ~url outchan =
+ (* TODO enc is not yet supported *)
+let respond_dtd ?(enc = Enc_normal) ?(patch = true) ~url outchan =
let patch_fun =
- if patch_dtd then Http_getter_common.patch_dtd else (fun x -> x)
+ if patch then Http_getter_common.patch_dtd else (fun x -> x)
in
if Sys.file_exists url then
- (* TODO check this: old getter here use text/xml *)
+ (* TODO check this: old getter here used text/xml *)
return_file ~fname:url ~contype:"text/plain" ~patch_fun outchan
else
return_html_error ("Can't find DTD: " ^ url) outchan