X-Git-Url: http://matita.cs.unibo.it/gitweb/?a=blobdiff_plain;f=helm%2Fhttp_getter%2Fhttp_getter_cache.ml;h=d132b69a56b2297e6508a0193cf893ace027fd99;hb=89262281b6e83bd2321150f81f1a0583645eb0c8;hp=dae28417d510529450a4bb6afca9ce066180c32a;hpb=22cc7ae953e37e4c725d689f9b6087acec6ce609;p=helm.git

diff --git a/helm/http_getter/http_getter_cache.ml b/helm/http_getter/http_getter_cache.ml
index dae28417d..d132b69a5 100644
--- a/helm/http_getter/http_getter_cache.ml
+++ b/helm/http_getter/http_getter_cache.ml
@@ -1,5 +1,7 @@
 (*
- * Copyright (C) 2000, HELM Team.
+ * Copyright (C) 2003:
+ *    Stefano Zacchiroli
+ *    for the HELM Team http://helm.cs.unibo.it/
  *
  * This file is part of HELM, an Hypertextual, Electronic
  * Library of Mathematics, developed at the Computer Science
@@ -21,9 +23,23 @@
  * MA  02111-1307, USA.
  *
  * For details, see the HELM World-Wide-Web page,
- * http://cs.unibo.it/helm/.
+ * http://helm.cs.unibo.it/
  *)
 
+(* TODO cache expires control!!! *)
+(* TODO possible race condition, e.g.:
+  two clients require the same URI which is available in cache compressed, the
+  getter need to uncompress it, send back to client, and delete the
+  uncompressed file. Actually the uncompressed file name is the same, a temp
+  file isn't used. *)
+(* TODO possible race condition, e.g.:
+  two clients require the same URI which is not available in cache, cache
+  filling operation can collide *)
+(* TODO uwobo loop:
+  if two large proof (already in cache) are requested at the same time by two
+  clients, uwobo (java implementation, not yet tested with the OCaml one)
+  starts looping sending output to one of the client *)
+
 open Http_getter_common;;
 open Http_getter_misc;;
 open Http_getter_types;;
@@ -44,7 +60,7 @@ let is_in_cache basename =
     | Enc_normal -> basename
     | Enc_gzipped -> basename ^ ".gz")
 
-let respond_xml ?(enc = Enc_normal) ?(patch_dtd = true) ~url ~uri outchan =
+let respond_xml ?(enc = Enc_normal) ?(patch = true) ~url ~uri outchan =
   let resource_type = resource_type_of_url url in
   let extension = extension_of_resource_type resource_type in
   let downloadname =
@@ -66,7 +82,7 @@ let respond_xml ?(enc = Enc_normal) ?(patch_dtd = true) ~url ~uri outchan =
       Http_getter_env.rdf_dir escaped_prefix baseuri extension
   in
   let patch_fun =
-    if patch_dtd then Http_getter_common.patch_xml else (fun x -> x)
+    if patch then Http_getter_common.patch_xml else (fun x -> x)
   in
   let basename = Pcre.replace ~pat:"\\.gz$" downloadname in
   if not (is_in_cache basename) then begin (* download and fill cache *)
@@ -100,9 +116,9 @@
     | Enc_gzipped, Enc_normal -> Sys.remove (basename ^ ".gz")
 
 (* TODO enc is not yet supported *)
-let respond_xsl ?(enc = Enc_normal) ?(patch_dtd = true) ~url outchan =
+let respond_xsl ?(enc = Enc_normal) ?(patch = true) ~url outchan =
   let patch_fun =
-    if patch_dtd then Http_getter_common.patch_xsl else (fun x -> x)
+    if patch then Http_getter_common.patch_xsl else (fun x -> x)
   in
   let fname = tempfile () in
   wget ~output:fname url;
@@ -110,9 +126,9 @@
   Sys.remove fname
 
 (* TODO enc is not yet supported *)
-let respond_dtd ?(enc = Enc_normal) ?(patch_dtd = true) ~url outchan =
+let respond_dtd ?(enc = Enc_normal) ?(patch = true) ~url outchan =
   let patch_fun =
-    if patch_dtd then Http_getter_common.patch_dtd else (fun x -> x)
+    if patch then Http_getter_common.patch_dtd else (fun x -> x)
   in
   if Sys.file_exists url then (* TODO check this: old getter here used
       text/xml *)
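
A minimal sketch (not part of the patch above) of how the "a temp file isn't used" race noted in the TODO comments could be avoided: decompress into a per-request temporary file and remove it after serving, so two clients requesting the same cached .gz never collide on a shared path. The names with_uncompressed and decompress are hypothetical, not functions defined in the getter; only OCaml stdlib calls are used.

(* Serve [gz_file] through [serve] via a unique temporary file.
   [decompress ~src ~dst] is assumed to write the uncompressed data of
   [src] into [dst]; the getter's real gzip helper may differ. *)
let with_uncompressed ~decompress gz_file serve =
  let tmp = Filename.temp_file "http_getter_" ".xml" in  (* unique per request *)
  try
    decompress ~src:gz_file ~dst:tmp;   (* hypothetical helper *)
    let result = serve tmp in           (* e.g. stream tmp to the client *)
    Sys.remove tmp;
    result
  with e ->
    (try Sys.remove tmp with _ -> ());  (* best-effort cleanup on failure *)
    raise e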