*)
open Http_getter_common;;
open Http_getter_misc;;
open Http_getter_types;;
open Printf;;
(* NOTE(review): this span is a fragment — the enclosing function's header
   is above the visible window, and several lines below still carry
   leftover "+"/"-" patch markers from an unresolved diff; the file cannot
   compile until they are resolved.  Code left byte-identical here,
   comments only added. *)
(* Classify the remote resource (e.g. gzipped vs. plain) and pick the
   matching file-name extension. *)
let resource_type = resource_type_of_url url in
let extension = extension_of_resource_type resource_type in
(* Local path the remote resource will be downloaded to. *)
let downloadname =
(* NOTE(review): leftover diff pair — the "+" line supersedes the "-" one;
   keep exactly one of them. *)
- match http_getter_uri_of_string uri with
+ match http_getter_uri_of_string uri with (* parse uri *)
| Xml_uri (Cic baseuri) | Xml_uri (Theory baseuri) ->
(* assumption: baseuri starts with "/" *)
sprintf "%s%s.%s" Http_getter_env.xml_dir baseuri extension
in
(* Cache key: download name without a trailing ".gz".  Pcre.replace's
   default replacement template is the empty string, so the match is
   deleted — presumably intended; confirm against pcre-ocaml docs. *)
let basename = Pcre.replace ~pat:"\\.gz$" downloadname in
if not (is_in_cache basename) then begin (* download and fill cache *)
(* NOTE(review): stray "+" diff marker below — must be dropped to compile.
   The mkdir ensures the target directory exists before wget writes to it. *)
+ mkdir ~parents:true (Filename.dirname downloadname);
wget ~output:downloadname url;
(* Reconcile the downloaded encoding with the cache's configured mode.
   NOTE(review): match is truncated in this view — other constructor
   combinations are presumably handled past the visible window. *)
match (resource_type, Http_getter_env.cache_mode) with
| Enc_normal, Enc_normal ->
(* Serve the file, then remove the temporary.
   NOTE(review): [fname] is not bound anywhere in this fragment —
   presumably defined earlier in the enclosing function; verify. *)
return_file ~fname ~contype:"text/xml" ~patch_fun outchan;
Sys.remove fname
(* TODO enc is not yet supported *)
(** [respond_dtd ?enc ?patch_dtd ~url outchan] serves the DTD stored at the
    local file system path [url] on [outchan].

    Unless [patch_dtd] is [false], the DTD text is run through
    {!Http_getter_common.patch_dtd} before being sent; otherwise it is sent
    verbatim ([fun x -> x]).  [enc] is accepted for interface compatibility
    but currently unused (see TODO above).  If no file exists at [url], an
    HTML error page is written to [outchan] instead. *)
let respond_dtd ?(enc = Enc_normal) ?(patch_dtd = true) ~url outchan =
  let patch_fun =
    if patch_dtd then Http_getter_common.patch_dtd else (fun x -> x)
  in
  if Sys.file_exists url then
    (* TODO check this: old getter here used text/xml *)
    return_file ~fname:url ~contype:"text/plain" ~patch_fun outchan
  else
    return_html_error ("Can't find DTD: " ^ url) outchan