X-Git-Url: http://matita.cs.unibo.it/gitweb/?a=blobdiff_plain;f=helm%2Fhttp_getter%2Fhttp_getter_cache.ml;h=d132b69a56b2297e6508a0193cf893ace027fd99;hb=a4df9661e15509e5da6ed9c57e3ab6a27a440c3f;hp=faa0c014a6491c50a0550b203a00bfadcb50c254;hpb=32bc9336091a84376f93b06325c08dab58124a0d;p=helm.git

diff --git a/helm/http_getter/http_getter_cache.ml b/helm/http_getter/http_getter_cache.ml
index faa0c014a..d132b69a5 100644
--- a/helm/http_getter/http_getter_cache.ml
+++ b/helm/http_getter/http_getter_cache.ml
@@ -1,5 +1,7 @@
 (*
- * Copyright (C) 2003, HELM Team.
+ * Copyright (C) 2003:
+ *    Stefano Zacchiroli
+ *    for the HELM Team http://helm.cs.unibo.it/
  *
  * This file is part of HELM, an Hypertextual, Electronic
  * Library of Mathematics, developed at the Computer Science
@@ -21,9 +23,23 @@
  * MA 02111-1307, USA.
  *
  * For details, see the HELM World-Wide-Web page,
- * http://cs.unibo.it/helm/.
+ * http://helm.cs.unibo.it/
  *)
 
+(* TODO cache expiry control!!! *)
+(* TODO possible race condition, e.g.:
+  two clients request the same URI, which is available in cache compressed;
+  the getter needs to uncompress it, send it back to the client, and delete
+  the uncompressed file. Currently the uncompressed file name is always the
+  same; a temp file isn't used. *)
+(* TODO possible race condition, e.g.:
+  two clients request the same URI, which is not available in cache; the two
+  cache filling operations can collide *)
+(* TODO uwobo loop:
+  if two large proofs (already in cache) are requested at the same time by
+  two clients, uwobo (Java implementation, not yet tested with the OCaml one)
+  starts looping, sending output to only one of the clients *)
+
 open Http_getter_common;;
 open Http_getter_misc;;
 open Http_getter_types;;
@@ -44,7 +60,7 @@ let is_in_cache basename =
     | Enc_normal -> basename
     | Enc_gzipped -> basename ^ ".gz")
 
-let respond_xml ?(enc = Enc_normal) ?(patch_dtd = true) ~url ~uri outchan =
+let respond_xml ?(enc = Enc_normal) ?(patch = true) ~url ~uri outchan =
   let resource_type = resource_type_of_url url in
   let extension = extension_of_resource_type resource_type in
   let downloadname =
@@ -66,7 +82,7 @@ let respond_xml ?(enc = Enc_normal) ?(patch_dtd = true) ~url ~uri outchan =
           Http_getter_env.rdf_dir escaped_prefix baseuri extension
   in
   let patch_fun =
-    if patch_dtd then Http_getter_common.patch_xml else (fun x -> x)
+    if patch then Http_getter_common.patch_xml else (fun x -> x)
   in
   let basename = Pcre.replace ~pat:"\\.gz$" downloadname in
   if not (is_in_cache basename) then begin  (* download and fill cache *)
@@ -100,9 +116,9 @@ let respond_xml ?(enc = Enc_normal) ?(patch_dtd = true) ~url ~uri outchan =
   | Enc_gzipped, Enc_normal -> Sys.remove (basename ^ ".gz")
 
 (* TODO enc is not yet supported *)
-let respond_xsl ?(enc = Enc_normal) ?(patch_dtd = true) ~url outchan =
+let respond_xsl ?(enc = Enc_normal) ?(patch = true) ~url outchan =
   let patch_fun =
-    if patch_dtd then Http_getter_common.patch_xsl else (fun x -> x)
+    if patch then Http_getter_common.patch_xsl else (fun x -> x)
   in
   let fname = tempfile () in
   wget ~output:fname url;
@@ -110,9 +126,9 @@ let respond_xsl ?(enc = Enc_normal) ?(patch_dtd = true) ~url outchan =
   Sys.remove fname
 
 (* TODO enc is not yet supported *)
-let respond_dtd ?(enc = Enc_normal) ?(patch_dtd = true) ~url outchan =
+let respond_dtd ?(enc = Enc_normal) ?(patch = true) ~url outchan =
   let patch_fun =
-    if patch_dtd then Http_getter_common.patch_dtd else (fun x -> x)
+    if patch then Http_getter_common.patch_dtd else (fun x -> x)
   in
   if Sys.file_exists url then  (* TODO check this: old getter here used
 text/xml *)
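
The first race-condition TODO added above (a shared uncompressed file that one request may delete while another client is still being served) could be avoided by decompressing into a per-request temporary file. The following is only a standalone sketch, not the getter's actual code: send_file and respond_uncompressed are hypothetical names, and it shells out to gzip instead of using the helpers in Http_getter_misc.

(* Sketch: serve a gzipped cache entry via a per-request temporary file,
   so concurrent requests for the same URI never share (or delete) the same
   uncompressed file. *)

let send_file fname outchan =
  (* copy [fname] to the client's output channel in 64 KB chunks *)
  let ic = open_in_bin fname in
  try
    let buf = Bytes.create 65536 in
    let rec copy () =
      let n = input ic buf 0 (Bytes.length buf) in
      if n > 0 then (output outchan buf 0 n; copy ())
    in
    copy ();
    close_in ic
  with e -> close_in_noerr ic; raise e

let respond_uncompressed ~gz_fname outchan =
  (* unique temp file per request: no collision between concurrent clients *)
  let tmp = Filename.temp_file "http_getter_" ".xml" in
  let cmd =
    Printf.sprintf "gzip -dc %s > %s"
      (Filename.quote gz_fname) (Filename.quote tmp)
  in
  let cleanup () = try Sys.remove tmp with Sys_error _ -> () in
  try
    if Sys.command cmd <> 0 then failwith ("decompression failed: " ^ gz_fname);
    send_file tmp outchan;
    cleanup ()
  with e -> cleanup (); raise e

Each client gets its own temporary copy, so deleting it after the response cannot affect another in-flight request, at the cost of some extra disk usage.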
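The second race-condition TODO (two concurrent cache-filling operations for the same missing URI) could be serialized by taking a lock around the check-then-download step. Again a hypothetical sketch, assuming a threads-based server with the threads library linked: ensure_in_cache and download_and_fill are placeholder names, and a single global mutex is used for simplicity.

(* Sketch: serialize cache filling so two requests for the same missing URI
   cannot clobber each other's download. *)

let cache_lock = Mutex.create ()

let with_lock m f =
  (* run [f ()] while holding [m], releasing the lock even on exceptions *)
  Mutex.lock m;
  match f () with
  | v -> Mutex.unlock m; v
  | exception e -> Mutex.unlock m; raise e

let download_and_fill ~url ~basename =
  (* placeholder for the real wget + store-in-cache step *)
  ignore url; ignore basename

let ensure_in_cache ~url ~basename =
  (* re-check under the lock: another thread may have filled the cache
     while we were waiting *)
  with_lock cache_lock (fun () ->
    if not (Sys.file_exists basename) then
      download_and_fill ~url ~basename)

A single global mutex blocks unrelated requests while a download is in progress; a per-URI lock table would be finer-grained, but the structure of the check-then-fill critical section stays the same.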