From: Stefano Zacchiroli
Date: Wed, 8 Jan 2003 17:15:16 +0000 (+0000)
Subject: - rewritten cp, wget, mkdir in OCaml
X-Git-Tag: v0_3_99~65
X-Git-Url: http://matita.cs.unibo.it/gitweb/?a=commitdiff_plain;h=5e2135da18eeca0970692c12a0174690e6db7bd6;p=helm.git

- rewritten cp, wget, mkdir in OCaml
- added http_get_iter_buf, an iter over remote HTTP GET resources
---

diff --git a/helm/http_getter/http_getter_misc.ml b/helm/http_getter/http_getter_misc.ml
index 0bc701685..5cb713f20 100644
--- a/helm/http_getter/http_getter_misc.ml
+++ b/helm/http_getter/http_getter_misc.ml
@@ -29,6 +29,16 @@
 open Http_getter_debugger;;
 open Printf;;
 
+let trailing_dot_gz_RE = Pcre.regexp "\\.gz$" (* for g{,un}zip *)
+let url_RE = Pcre.regexp "^([\\w.]+)(:(\\d+))?(/.*)?$"
+let http_scheme_RE = Pcre.regexp ~flags:[`CASELESS] "^http://"
+let file_scheme_RE = Pcre.regexp ~flags:[`CASELESS] "^file://"
+let dir_sep_RE = Pcre.regexp "/"
+let heading_slash_RE = Pcre.regexp "^/"
+
+let bufsiz = 16384 (* for file system I/O *)
+let tcp_bufsiz = 4096 (* for TCP I/O *)
+
 let fold_file f init fname =
   let inchan = open_in fname in
   let rec fold_lines' value =
@@ -49,36 +59,76 @@ let hashtbl_sorted_fold f tbl init =
   List.fold_left (fun acc k -> f k (Hashtbl.find tbl k) acc) init sorted_keys
 
 let cp src dst =
-  Shell.call
-    ~stdout:Shell.to_dev_null ~stderr:Shell.to_dev_null
-    [Shell.cmd "cp" [src; dst]]
+  let (ic, oc) = (open_in src, open_out dst) in
+  let buf = String.create bufsiz in
+  (try
+    while true do
+      let bytes = input ic buf 0 bufsiz in
+      if bytes = 0 then raise End_of_file else output oc buf 0 bytes
+    done
+  with End_of_file -> ());
+  close_in ic; close_out oc
 
-let file_scheme_RE = Pcre.regexp "^file://"
-let wget ?output url =
-  let use_wget () =
-    let flags =
-      (match output with Some file -> ["-O"; file] | None -> []) @ [url]
+let parse_url url =
+  try
+    let subs =
+      Pcre.extract ~rex:url_RE (Pcre.replace ~rex:http_scheme_RE url)
     in
-    debug_print ("wget " ^ String.concat " " flags);
-    Shell.call
-      ~stdout:Shell.to_dev_null ~stderr:Shell.to_dev_null
-      [Shell.cmd "wget" flags]
-  in
-  if Pcre.pmatch ~rex:file_scheme_RE url then begin (* file:// URL *)
-    let src_fname = Pcre.replace ~rex:file_scheme_RE url in
-    match output with
-    | Some dst_fname -> cp src_fname dst_fname
-    | None ->
-        let dst_fname = Filename.basename src_fname in
-        if src_fname <> dst_fname then
-          cp src_fname dst_fname
-        else (* src and dst are the same: do nothing *)
-          ()
-  end else (* other URL, pass it to wget *)
-    use_wget ()
-
-let bufsiz = 16384 (* for g{,un}zip *)
-let trailing_dot_gz_RE = Pcre.regexp "\\.gz$" (* for g{,un}zip *)
+    (subs.(1),
+    (if subs.(2) = "" then 80 else int_of_string subs.(3)),
+    (if subs.(4) = "" then "/" else subs.(4)))
+  with exc ->
+    failwith
+      (sprintf "Can't parse url: %s (exception: %s)"
+        url (Printexc.to_string exc))
+let init_socket addr port =
+  let inet_addr = (Unix.gethostbyname addr).Unix.h_addr_list.(0) in
+  let sockaddr = Unix.ADDR_INET (inet_addr, port) in
+  let suck = Unix.socket Unix.PF_INET Unix.SOCK_STREAM 0 in
+  Unix.connect suck sockaddr;
+  let outchan = Unix.out_channel_of_descr suck in
+  let inchan = Unix.in_channel_of_descr suck in
+  (inchan, outchan)
+let http_get_iter_buf ~callback url =
+  let (address, port, path) = parse_url url in
+  let buf = String.create tcp_bufsiz in
+  let (inchan, outchan) = init_socket address port in
+  output_string outchan (sprintf "GET %s HTTP/1.0\r\n\r\n" path);
+  flush outchan;
+  (try
+    while true do
+      match input inchan buf 0 tcp_bufsiz with
+      | 0 -> raise End_of_file
+      | bytes when bytes = tcp_bufsiz -> (* buffer full, no need to slice it *)
+          callback buf
+      | bytes when bytes < tcp_bufsiz -> (* buffer not full, slice it *)
+          callback (String.sub buf 0 bytes)
+      | _ -> (* ( bytes < 0 ) || ( bytes > tcp_bufsiz ) *)
+          assert false
+    done
+  with End_of_file -> ());
+  close_in inchan (* closes outchan too, same fd *)
+
+let wget ?output url =
+  match url with
+  | url when Pcre.pmatch ~rex:file_scheme_RE url -> (* file:// *)
+      (let src_fname = Pcre.replace ~rex:file_scheme_RE url in
+      match output with
+      | Some dst_fname -> cp src_fname dst_fname
+      | None ->
+          let dst_fname = Filename.basename src_fname in
+          if src_fname <> dst_fname then
+            cp src_fname dst_fname
+          else (* src and dst are the same: do nothing *)
+            ())
+  | url when Pcre.pmatch ~rex:http_scheme_RE url -> (* http:// *)
+      (let oc =
+        open_out (match output with Some f -> f | None -> Filename.basename url)
+      in
+      http_get_iter_buf ~callback:(fun data -> output_string oc data) url;
+      close_out oc)
+  | scheme -> (* unsupported scheme *)
+      failwith ("Http_getter_misc.wget: unsupported scheme: " ^ scheme)
 
 let gzip ?(keep = false) fname =
   debug_print (sprintf "gzipping %s (keep: %b)" fname keep);
@@ -90,8 +140,7 @@ let gzip ?(keep = false) fname =
       if bytes = 0 then raise End_of_file else Gzip.output oc buf 0 bytes
     done
   with End_of_file -> ());
-  close_in ic;
-  Gzip.close_out oc;
+  close_in ic; Gzip.close_out oc;
   if not keep then Sys.remove fname
 
 let gunzip ?(keep = false) fname =
@@ -106,23 +155,43 @@
       if bytes = 0 then raise End_of_file else output oc buf 0 bytes
     done
   with End_of_file -> ());
-  Gzip.close_in ic;
-  close_out oc;
+  Gzip.close_in ic; close_out oc;
  if not keep then Sys.remove fname
 
-let tempfile () =
-  let buf = Buffer.create 28 in (* strlen("/tmp/fileSzb3Mw_http_getter") *)
-  Shell.call
-    ~stdout:(Shell.to_buffer buf)
-    [Shell.cmd "tempfile" ["--suffix=_http_getter"]];
-  Pcre.replace ~pat:"\n" (Buffer.contents buf)
+let tempfile () = Filename.temp_file "http_getter_" ""
+
+exception Mkdir_failure of string * string;; (* dirname, failure reason *)
+let dir_perm = 0o755
 
 let mkdir ?(parents = false) dirname =
-  if not (Sys.file_exists dirname) then begin
-    let flags = if parents then ["-p"; dirname] else [dirname] in
-    debug_print ("mkdir " ^ String.concat " " flags);
-    Shell.call [Shell.cmd "mkdir" flags]
-  end
+  let mkdirhier () =
+    let (pieces, hd) =
+      let split = Pcre.split ~rex:dir_sep_RE dirname in
+      if Pcre.pmatch ~rex:heading_slash_RE dirname then
+        (List.tl split, "/")
+      else
+        (split, "")
+    in
+    ignore
+      (List.fold_left
+        (fun pre dir ->
+          let next_dir =
+            sprintf "%s%s%s" pre (match pre with "/" | "" -> "" | _ -> "/") dir
+          in
+          (try
+            (match (Unix.stat next_dir).Unix.st_kind with
+            | Unix.S_DIR -> () (* dir component already exists, go on! *)
+            | _ -> (* dir component already exists but isn't a dir, abort! *)
+                raise
+                  (Mkdir_failure (dirname,
+                    sprintf "'%s' already exists but is not a dir" next_dir)))
+          with Unix.Unix_error (Unix.ENOENT, "stat", _) ->
+            (* dir component doesn't exist, create it and go on! *)
+            Unix.mkdir next_dir dir_perm);
+          next_dir)
+        hd pieces)
+  in
+  if parents then mkdirhier () else Unix.mkdir dirname dir_perm
 
 let string_of_proc_status = function
   | Unix.WEXITED code -> sprintf "[Exited: %d]" code
diff --git a/helm/http_getter/http_getter_misc.mli b/helm/http_getter/http_getter_misc.mli
index 469ce8159..946446dc6 100644
--- a/helm/http_getter/http_getter_misc.mli
+++ b/helm/http_getter/http_getter_misc.mli
@@ -26,6 +26,10 @@
  * http://helm.cs.unibo.it/
  *)
 
+  (** 'mkdir' failed; arguments are the name of the directory to be created
+  and the failure reason *)
+exception Mkdir_failure of string * string
+
   (** "fold_left" like function on file lines, trailing newline is not passed
   to the given function *)
 val fold_file : ('a -> string -> 'a) -> 'a -> string -> 'a
@@ -61,4 +65,7 @@ val string_of_proc_status : Unix.process_status -> string
   None if an error occured while downloading. This function support also
   "file://" scheme for filesystem resources *)
 val http_get: string -> string option
+  (** 'iter'-like function that iterates over string slices (of unspecified
+  length) of a remote resource fetched via an HTTP GET request *)
+val http_get_iter_buf: callback:(string -> unit) -> string -> unit
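
For reference, a minimal usage sketch of the interface added by this patch. It is not part of the commit: it assumes the two files above are built as module Http_getter_misc inside the helm tree, and the URL and target paths below are illustrative placeholders.

(* usage sketch: exercises the new mkdir ~parents, http_get_iter_buf and wget;
 * URL and paths are placeholders, not taken from the patch *)
let () =
  (* create a nested directory, "mkdir -p" style; the new Mkdir_failure
   * exception signals a path component that exists but is not a directory *)
  (try Http_getter_misc.mkdir ~parents:true "/tmp/http_getter_example/cache"
  with Http_getter_misc.Mkdir_failure (dir, reason) ->
    Printf.eprintf "mkdir %s failed: %s\n" dir reason);
  (* stream a remote resource, counting bytes as the slices arrive; the
   * callback sees the raw HTTP response, status line and headers included *)
  let received = ref 0 in
  Http_getter_misc.http_get_iter_buf
    ~callback:(fun slice -> received := !received + String.length slice)
    "http://helm.cs.unibo.it/";
  Printf.printf "received %d bytes\n" !received;
  (* fetch the same resource to a local file; file:// URLs are served by cp *)
  Http_getter_misc.wget ~output:"/tmp/http_getter_example/index.html"
    "http://helm.cs.unibo.it/"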