From: Enrico Tassi Date: Mon, 13 Mar 2006 12:20:25 +0000 (+0000) Subject: Huge commit for the release. Includes: X-Git-Tag: 0.4.95@7852~1609 X-Git-Url: http://matita.cs.unibo.it/gitweb/?a=commitdiff_plain;h=5cb95a2e44f979183a8c3e39baa3b4e7cfaf8182;p=helm.git Huge commit for the release. Includes: - getter_storage modifications to honor multiple (overlapping) ro/rw repositories - removes of ~basedir - addition of the 'publish' matitamake command used to install in system tables/directory a development (that is compiling with -system) - removal of .build,.user,.devel config files (only one is needed) - fixed make dist/install to strip binaries, install the library with the new method - fixed compilation of the getter daemon - deletion of grafite misc that is no more needed --- diff --git a/Makefile b/Makefile index 6a16621dc..5b069ddf8 100644 --- a/Makefile +++ b/Makefile @@ -20,7 +20,7 @@ rec@%: ifeq ($(DISTRIBUTED),yes) library: library-stamp library-stamp: - $(MAKE) -C matita/ dist_library_clean dist_library + $(MAKE) -C matita/ dist_library touch $@ endif @@ -58,7 +58,6 @@ dist_export: dist/configure svn export components $(DISTDIR)/components svn export matita $(DISTDIR)/matita (cd $(DISTDIR) && rm -f $(CLEAN_ON_DIST)) - ln -fs matita.conf.xml.user $(DISTDIR)/matita/matita.conf.xml cp $< $(DISTDIR)/configure cp -r $(EXTRA_DIST) $(DISTDIR) dist_mktarball: @@ -71,7 +70,6 @@ dist_test: (cd $(DISTDIR)/ \ && ./configure \ && $(MAKE) world \ - && $(MAKE) library \ && $(MAKE) install DESTDIR=`pwd`/install) .PHONY: dist dist_export dist_mktarball distcheck dist_extract dist_test dist_autotools diff --git a/components/extlib/hExtlib.ml b/components/extlib/hExtlib.ml index 5f96e0f84..93eabf2ac 100644 --- a/components/extlib/hExtlib.ml +++ b/components/extlib/hExtlib.ml @@ -52,7 +52,7 @@ let profile ?(enable = true) = in at_exit (fun () -> - if !profiling_printings () then + if !profiling_printings () && !total <> 0. then prerr_endline ("!! 
TOTAL TIME SPENT IN " ^ s ^ ": " ^ string_of_float !total)); { profile = profile } diff --git a/components/getter/http_getter.ml b/components/getter/http_getter.ml index 1b47a6c38..d2993575a 100644 --- a/components/getter/http_getter.ml +++ b/components/getter/http_getter.ml @@ -103,10 +103,11 @@ let ls_remote lsuri = not_implemented "ls_remote" let exists_remote uri = not_implemented "exists_remote" (* *) -let resolve_remote uri = +let resolve_remote ~writable uri = (* deliver resolve request to http_getter *) let doc = - Http_getter_wget.get (sprintf "%sresolve?uri=%s" (getter_url ()) uri) + Http_getter_wget.get (sprintf "%sresolve?uri=%s&writable=%b" (getter_url ()) + uri writable) in let res = ref Unknown in let start_element tag attrs = @@ -150,13 +151,23 @@ let exists uri = let uri = deref_index_theory uri in Http_getter_storage.exists (uri ^ xml_suffix) -let resolve uri = +let resolve ~writable uri = if remote () then - resolve_remote uri + resolve_remote ~writable uri else let uri = deref_index_theory uri in try - Http_getter_storage.resolve (uri ^ xml_suffix) + Http_getter_storage.resolve ~writable (uri ^ xml_suffix) + with Http_getter_storage.Resource_not_found _ -> raise (Key_not_found uri) + +let filename ~writable uri = + if remote () then + raise (Key_not_found uri) + else + let uri = deref_index_theory uri in + try + Http_getter_storage.resolve + ~must_exists:false ~writable uri with Http_getter_storage.Resource_not_found _ -> raise (Key_not_found uri) let getxml uri = @@ -345,8 +356,9 @@ let getalluris () = (* Shorthands from now on *) let getxml' uri = getxml (UriManager.string_of_uri uri) -let resolve' uri = resolve (UriManager.string_of_uri uri) +let resolve' ~writable uri = resolve ~writable (UriManager.string_of_uri uri) let exists' uri = exists (UriManager.string_of_uri uri) +let filename' ~writable uri = filename ~writable (UriManager.string_of_uri uri) let tilde_expand_key k = try diff --git a/components/getter/http_getter.mli b/components/getter/http_getter.mli index 4bbc447bd..845a17a10 100644 --- a/components/getter/http_getter.mli +++ b/components/getter/http_getter.mli @@ -40,7 +40,11 @@ val help: unit -> string (** @raise Http_getter_types.Unresolvable_URI _ * @raise Http_getter_types.Key_not_found _ *) -val resolve: string -> string (* uri -> url *) +val resolve: writable:bool -> string -> string (* uri -> url *) + + (** as resolve, but does not check if the resource exists + * @raise Http_getter_types.Key_not_found *) +val filename: writable:bool -> string -> string (* uri -> url *) val exists: string -> bool @@ -57,8 +61,9 @@ val ls: string -> ls_item list (** {2 UriManager shorthands} *) val getxml' : UriManager.uri -> string -val resolve' : UriManager.uri -> string +val resolve' : writable:bool -> UriManager.uri -> string val exists' : UriManager.uri -> bool +val filename' : writable:bool -> UriManager.uri -> string (** {2 Misc} *) diff --git a/components/getter/http_getter_storage.ml b/components/getter/http_getter_storage.ml index fc6f415ac..3650d79b9 100644 --- a/components/getter/http_getter_storage.ml +++ b/components/getter/http_getter_storage.ml @@ -35,6 +35,8 @@ exception Resource_not_found of string * string (** method, uri *) let index_fname = "INDEX" +(******************************* HELPERS **************************************) + let trailing_slash_RE = Pcre.regexp "/$" let relative_RE_raw = "(^[^/]+(/[^/]+)*/?$)" let relative_RE = Pcre.regexp relative_RE_raw @@ -47,6 +49,7 @@ let cic_scheme_sep_RE = Pcre.regexp ":/" let gz_suffix = ".gz" 
let gz_suffix_len = String.length gz_suffix + (* file:///bla -> bla, bla -> bla *) let path_of_file_url url = assert (Pcre.pmatch ~rex:file_scheme_RE url); if Pcre.pmatch ~rex:relative_RE url then @@ -54,80 +57,80 @@ let path_of_file_url url = else (* absolute path, add heading "/" if missing *) "/" ^ (Pcre.replace ~rex:extended_file_scheme_RE url) +let strip_gz_suffix fname = + if extension fname = gz_suffix then + String.sub fname 0 (String.length fname - gz_suffix_len) + else + fname + +let normalize_root uri = (* add trailing slash to roots *) + try + if uri.[String.length uri - 1] = ':' then uri ^ "/" + else uri + with Invalid_argument _ -> uri + +let remove_duplicates l = + Http_getter_misc.list_uniq (List.stable_sort Pervasives.compare l) + +let has_rdonly l = List.exists ((=) `Read_only) l +let has_legacy l = List.exists ((=) `Legacy) l +let is_readwrite attrs = (not (has_legacy attrs) && not (has_rdonly attrs)) + +let is_file_schema url = Pcre.pmatch ~rex:file_scheme_RE url +let is_http_schema url = Pcre.pmatch ~rex:http_scheme_RE url + +let is_empty_listing files = + List.for_all (fun s -> s.[String.length s - 1] = '/') files + +(************************* GLOBALS PREFIXES **********************************) + (** associative list regular expressions -> url prefixes * sorted with longest prefixes first *) -let prefix_map = lazy ( - let map_w_length = - List.map - (fun (uri_prefix, (url_prefix, attrs)) -> - let uri_prefix = normalize_dir uri_prefix in - let url_prefix = normalize_dir url_prefix in - let regexp = Pcre.regexp ("^(" ^ Pcre.quote uri_prefix ^ ")") in - (regexp, String.length uri_prefix, uri_prefix, url_prefix, attrs)) - (Lazy.force Http_getter_env.prefixes) - in - let decreasing_length (_, len1, _, _, _) (_, len2, _, _, _) = - compare len2 len1 in +let prefix_map_ref = ref (lazy ( List.map - (fun (regexp, len, uri_prefix, url_prefix, attrs) -> - (regexp, strip_trailing_slash uri_prefix, url_prefix, attrs)) - (List.fast_sort decreasing_length map_w_length)) + (fun (uri_prefix, (url_prefix, attrs)) -> + let uri_prefix = normalize_dir uri_prefix in + let url_prefix = normalize_dir url_prefix in + let regexp = Pcre.regexp ("^(" ^ Pcre.quote uri_prefix ^ ")") in + regexp, strip_trailing_slash uri_prefix, url_prefix, attrs) + (List.rev (Lazy.force Http_getter_env.prefixes)))) + +let prefix_map () = !prefix_map_ref + (** given an uri returns the prefixes for it *) let lookup uri = let matches = - List.filter (fun (rex, _, _, _) -> Pcre.pmatch ~rex uri) - (Lazy.force prefix_map) in + List.filter (fun (rex, _, l, _) -> Pcre.pmatch ~rex uri) + (Lazy.force (prefix_map ())) in if matches = [] then raise (Unresolvable_URI uri); matches -let resolve_prefix uri = - match lookup uri with - | (rex, _, url_prefix, _) :: _ -> - Pcre.replace_first ~rex ~templ:url_prefix uri - | [] -> assert false - -let resolve_prefixes uri = - let matches = lookup uri in - List.map - (fun (rex, _, url_prefix, _) -> - Pcre.replace_first ~rex ~templ:url_prefix uri) - matches - -let get_attrs uri = - match lookup uri with - | (_, _, _, attrs) :: _ -> attrs - | [] -> assert false - -let is_legacy uri = List.exists ((=) `Legacy) (get_attrs uri) - -let is_read_only uri = - is_legacy uri || List.exists ((=) `Read_only) (get_attrs uri) +let get_attrs uri = List.map (fun (_, _, _, attrs) -> attrs) (lookup uri) +(*************************** ACTIONS ******************************************) + let exists_http _ url = Http_getter_wget.exists (url ^ gz_suffix) || Http_getter_wget.exists url let exists_file _ fname = 
Sys.file_exists (fname ^ gz_suffix) || Sys.file_exists fname -let resolve_http _ url = +let resolve_http ~must_exists _ url = try - List.find Http_getter_wget.exists [ url ^ gz_suffix; url ] + if must_exists then + List.find Http_getter_wget.exists [ url ^ gz_suffix; url ] + else + url with Not_found -> raise Not_found' -let resolve_file _ fname = +let resolve_file ~must_exists _ fname = try - List.find Sys.file_exists [ fname ^ gz_suffix; fname ] + if must_exists then + List.find Sys.file_exists [ fname ^ gz_suffix; fname ] + else + fname with Not_found -> raise Not_found' -let strip_gz_suffix fname = - if extension fname = gz_suffix then - String.sub fname 0 (String.length fname - gz_suffix_len) - else - fname - -let remove_duplicates l = - Http_getter_misc.list_uniq (List.fast_sort Pervasives.compare l) - let ls_file_single _ path_prefix = let is_dir fname = (Unix.stat fname).Unix.st_kind = Unix.S_DIR in let is_useless dir = try dir.[0] = '.' with _ -> false in @@ -196,36 +199,77 @@ let remove_http _ _ = prerr_endline "Http_getter_storage.remove: not implemented for HTTP scheme"; assert false +(**************************** RESOLUTION OF PREFIXES ************************) + +let resolve_prefixes write exists uri = + let exists_test new_uri = + if is_file_schema new_uri then + exists_file () (path_of_file_url new_uri) + else if is_http_schema new_uri then + exists_http () new_uri + else false + in + let rec aux = function + | (rex, _, url_prefix, attrs) :: tl -> + (match write, is_readwrite attrs, exists with + | true ,false, _ -> aux tl + | true ,true ,true + | false,_ ,true -> + let new_uri = (Pcre.replace_first ~rex ~templ:url_prefix uri) in + if exists_test new_uri then new_uri::aux tl else aux tl + | true ,true ,false + | false,_ ,false -> + (Pcre.replace_first ~rex ~templ:url_prefix uri) :: (aux tl)) + | [] -> [] + in + aux (lookup uri) + +let resolve_prefix w e u = + match resolve_prefixes w e u with + | hd :: _ -> hd + | [] -> + raise + (Resource_not_found + (Printf.sprintf "resolve_prefix write:%b exists:%b" w e,u)) + +(* uncomment to debug prefix resolution *) +(* +let resolve_prefix w e u = + prerr_endline + ("XXX w=" ^ string_of_bool w ^ " e=" ^ string_of_bool e ^" :" ^ u); + let rc = resolve_prefix w e u in + prerr_endline ("YYY :" ^ rc ^ "\n"); + rc +*) + +(************************* DISPATCHERS ***************************************) + type 'a storage_method = { name: string; + write: bool; + exists: bool; file: string -> string -> 'a; (* unresolved uri, resolved uri *) http: string -> string -> 'a; (* unresolved uri, resolved uri *) } -let normalize_root uri = (* add trailing slash to roots *) - try - if uri.[String.length uri - 1] = ':' then uri ^ "/" - else uri - with Invalid_argument _ -> uri - let invoke_method storage_method uri url = try - if Pcre.pmatch ~rex:file_scheme_RE url then + if is_file_schema url then storage_method.file uri (path_of_file_url url) - else if Pcre.pmatch ~rex:http_scheme_RE url then + else if is_http_schema url then storage_method.http uri url else raise (Unsupported_scheme url) with Not_found' -> raise (Resource_not_found (storage_method.name, uri)) - + let dispatch_single storage_method uri = assert (extension uri <> gz_suffix); let uri = normalize_root uri in - let url = resolve_prefix uri in + let url = resolve_prefix storage_method.write storage_method.exists uri in invoke_method storage_method uri url let dispatch_multi storage_method uri = - let urls = resolve_prefixes uri in + let urls = resolve_prefixes storage_method.write 
storage_method.exists uri in let rec aux = function | [] -> raise (Resource_not_found (storage_method.name, uri)) | url :: tl -> @@ -235,31 +279,54 @@ let dispatch_multi storage_method uri = in aux urls -let exists = - dispatch_single { name = "exists"; file = exists_file; http = exists_http } - -let resolve = - dispatch_single { name = "resolve"; file = resolve_file; http = resolve_http } - -let ls_single = - dispatch_single { name = "ls"; file = ls_file_single; http = ls_http_single } +let dispatch_all storage_method uri = + let urls = resolve_prefixes storage_method.write storage_method.exists uri in + List.map (fun url -> invoke_method storage_method uri url) urls + +(******************************** EXPORTED FUNCTIONS *************************) + +let exists s = + try + dispatch_single + { write = false; + name = "exists"; + exists = true; + file = exists_file; http = exists_http; } s + with Resource_not_found _ -> false + +let resolve ?(must_exists=true) ~writable = + dispatch_single + { write = writable; + name="resolve"; + exists = must_exists; + file = resolve_file ~must_exists; + http = resolve_http ~must_exists; } let remove = - dispatch_single { name = "remove"; file = remove_file; http = remove_http } + dispatch_single + { write = false; + name = "remove"; + exists=true; + file = remove_file; http = remove_http; } let filename ?(find = false) = - if find then - dispatch_multi { name = "filename"; file = get_file; http = get_http } - else - dispatch_single { name = "filename"; file = get_file; http = get_http } + (if find then dispatch_multi else dispatch_single) + { write = false; + name = "filename"; + exists=true; + file = get_file; http = get_http; } - (* ls_single performs ls only below a single prefix, but prefixes which have - * common prefix (sorry) with a given one may need to be considered as well - * for example: when doing "ls cic:/" we would like to see the "cic:/matita" - * directory *) let ls uri_prefix = -(* prerr_endline ("Http_getter_storage.ls " ^ uri_prefix); *) - let direct_results = ls_single uri_prefix in + let ls_all s = + try + dispatch_all + { write=false; + name = "ls"; + exists=true; + file = ls_file_single; http = ls_http_single; } s + with Resource_not_found _ -> [] + in + let direct_results = List.flatten (ls_all uri_prefix) in List.fold_left (fun results (_, uri_prefix', _, _) -> if Filename.dirname uri_prefix' = strip_trailing_slash uri_prefix then @@ -267,9 +334,67 @@ let ls uri_prefix = else results) direct_results - (Lazy.force prefix_map) + (Lazy.force (prefix_map ())) let clean_cache () = ignore (Sys.command (sprintf "rm -rf %s/" (Lazy.force Http_getter_env.cache_dir))) +let list_writable_prefixes _ = + HExtlib.filter_map + (fun (_,_,url,attrs) -> + if is_readwrite attrs then + Some url + else + None) + (Lazy.force (prefix_map ())) + +let is_legacy uri = List.for_all has_legacy (get_attrs uri) + +(* implement this in a fast way! 
*) +let is_empty buri = + let buri = strip_trailing_slash buri ^ "/" in + let files = ls buri in + is_empty_listing files + +let is_read_only uri = + let is_empty_dir path = + let files = + if is_file_schema path then + ls_file_single () (path_of_file_url path) + else if is_http_schema path then + ls_http_single () path + else + assert false + in + is_empty_listing files + in + let rec aux found_writable = function + | (rex, _, url_prefix, attrs)::tl -> + let new_url = (Pcre.replace_first ~rex ~templ:url_prefix uri) in + let rdonly = has_legacy attrs || has_rdonly attrs in + (match rdonly, is_empty_dir new_url, found_writable with + | true, false, _ -> true + | true, true, _ -> aux found_writable tl + | false, _, _ -> aux true tl) + | [] -> not found_writable (* if found_writable then false else true *) + in + aux false (lookup uri) + +let activate_system_mode () = + let map = Lazy.force (prefix_map ()) in + let map = + HExtlib.filter_map + (fun ((rex, urip, urlp, attrs) as entry) -> + if has_legacy attrs then + Some entry + else if has_rdonly attrs then + Some (rex, urip, urlp, List.filter ((<>) `Read_only) attrs) + else + None) + map + in + let map = map in (* just to remember that ocamlc 'lazy' is a ... *) + prefix_map_ref := (lazy map) + +(* eof *) diff --git a/components/getter/http_getter_storage.mli b/components/getter/http_getter_storage.mli index 24fc329c9..04af29f0c 100644 --- a/components/getter/http_getter_storage.mli +++ b/components/getter/http_getter_storage.mli @@ -61,11 +61,14 @@ val filename: ?find:bool -> string -> string val remove: string -> unit val exists: string -> bool -val resolve: string -> string +val resolve: ?must_exists:bool -> writable:bool -> string -> string (* val get_attrs: string -> Http_getter_types.prefix_attr list *) val is_read_only: string -> bool val is_legacy: string -> bool +val is_empty: string -> bool val clean_cache: unit -> unit +val activate_system_mode: unit -> unit +val list_writable_prefixes: unit -> string list diff --git a/components/grafite_engine/.depend b/components/grafite_engine/.depend index d0e9a3a86..b0d4b7048 100644 --- a/components/grafite_engine/.depend +++ b/components/grafite_engine/.depend @@ -4,9 +4,5 @@ grafiteTypes.cmo: grafiteTypes.cmi grafiteTypes.cmx: grafiteTypes.cmi grafiteSync.cmo: grafiteTypes.cmi grafiteSync.cmi grafiteSync.cmx: grafiteTypes.cmx grafiteSync.cmi -grafiteMisc.cmo: grafiteMisc.cmi -grafiteMisc.cmx: grafiteMisc.cmi -grafiteEngine.cmo: grafiteTypes.cmi grafiteSync.cmi grafiteMisc.cmi \ - grafiteEngine.cmi -grafiteEngine.cmx: grafiteTypes.cmx grafiteSync.cmx grafiteMisc.cmx \ - grafiteEngine.cmi +grafiteEngine.cmo: grafiteTypes.cmi grafiteSync.cmi grafiteEngine.cmi +grafiteEngine.cmx: grafiteTypes.cmx grafiteSync.cmx grafiteEngine.cmi diff --git a/components/grafite_engine/Makefile b/components/grafite_engine/Makefile index d810e1be2..b6eed1e88 100644 --- a/components/grafite_engine/Makefile +++ b/components/grafite_engine/Makefile @@ -4,7 +4,6 @@ PREDICATES = INTERFACE_FILES = \ grafiteTypes.mli \ grafiteSync.mli \ - grafiteMisc.mli \ grafiteEngine.mli \ $(NULL) IMPLEMENTATION_FILES = $(INTERFACE_FILES:%.mli=%.ml) diff --git a/components/grafite_engine/grafiteEngine.ml b/components/grafite_engine/grafiteEngine.ml index bfd0aff59..59e3d5cf9 100644 --- a/components/grafite_engine/grafiteEngine.ml +++ b/components/grafite_engine/grafiteEngine.ml @@ -416,9 +416,8 @@ let refinement_toolkit = { } let eval_coercion status ~add_composites uri = - let basedir = Helm_registry.get "matita.basedir" in let 
status,compounds = - GrafiteSync.add_coercion ~basedir ~add_composites refinement_toolkit status uri in + GrafiteSync.add_coercion ~add_composites refinement_toolkit status uri in let moo_content = coercion_moo_statement_of uri in let status = GrafiteTypes.add_moo_content [moo_content] status in {status with GrafiteTypes.proof_status = GrafiteTypes.No_proof}, @@ -554,22 +553,26 @@ let add_coercions_of_record_to_moo obj lemmas status = lemmas let add_obj uri obj status = - let basedir = Helm_registry.get "matita.basedir" in - let status,lemmas = GrafiteSync.add_obj ~basedir refinement_toolkit uri obj status in + let status,lemmas = GrafiteSync.add_obj refinement_toolkit uri obj status in status, lemmas let rec eval_command = {ec_go = fun ~disambiguate_command opts status cmd -> let status,cmd = disambiguate_command status cmd in - let basedir = Helm_registry.get "matita.basedir" in let status,uris = match cmd with | GrafiteAst.Default (loc, what, uris) as cmd -> LibraryObjects.set_default what uris; GrafiteTypes.add_moo_content [cmd] status,[] | GrafiteAst.Include (loc, baseuri) -> - let moopath = LibraryMisc.obj_file_of_baseuri ~basedir ~baseuri in - if not (Sys.file_exists moopath) then - raise (IncludedFileNotCompiled moopath); + let moopath_rw, moopath_r = + LibraryMisc.obj_file_of_baseuri ~baseuri ~writable:true, + LibraryMisc.obj_file_of_baseuri ~baseuri ~writable:false + in + let moopath = + if Sys.file_exists moopath_r then moopath_r else + if Sys.file_exists moopath_rw then moopath_rw else + raise (IncludedFileNotCompiled moopath_rw) + in let status = eval_from_moo.efm_go status moopath in status,[] | GrafiteAst.Set (loc, name, value) -> @@ -585,10 +588,13 @@ let rec eval_command = {ec_go = fun ~disambiguate_command opts status cmd -> HLog.error (sprintf "uri %s belongs to a read-only repository" value); raise (ReadOnlyUri value) end; - if not (GrafiteMisc.is_empty value) && opts.clean_baseuri then begin + if not (Http_getter_storage.is_empty value) && + opts.clean_baseuri + then begin HLog.message ("baseuri " ^ value ^ " is not empty"); HLog.message ("cleaning baseuri " ^ value); - LibraryClean.clean_baseuris ~basedir [value]; + LibraryClean.clean_baseuris [value]; + assert (Http_getter_storage.is_empty value); end; end; GrafiteTypes.set_option status name value,[] diff --git a/components/grafite_engine/grafiteMisc.ml b/components/grafite_engine/grafiteMisc.ml deleted file mode 100644 index 5b86293db..000000000 --- a/components/grafite_engine/grafiteMisc.ml +++ /dev/null @@ -1,33 +0,0 @@ -(* Copyright (C) 2004-2005, HELM Team. - * - * This file is part of HELM, an Hypertextual, Electronic - * Library of Mathematics, developed at the Computer Science - * Department, University of Bologna, Italy. - * - * HELM is free software; you can redistribute it and/or - * modify it under the terms of the GNU General Public License - * as published by the Free Software Foundation; either version 2 - * of the License, or (at your option) any later version. - * - * HELM is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with HELM; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, - * MA 02111-1307, USA. 
- * - * For details, see the HELM World-Wide-Web page, - * http://helm.cs.unibo.it/ - *) - -(* $Id$ *) - -let is_empty buri = - List.for_all - (function - Http_getter_types.Ls_section _ -> true - | Http_getter_types.Ls_object _ -> false) - (Http_getter.ls (Http_getter_misc.strip_trailing_slash buri ^ "/")) diff --git a/components/grafite_engine/grafiteMisc.mli b/components/grafite_engine/grafiteMisc.mli deleted file mode 100644 index 833bb6360..000000000 --- a/components/grafite_engine/grafiteMisc.mli +++ /dev/null @@ -1,27 +0,0 @@ -(* Copyright (C) 2004-2005, HELM Team. - * - * This file is part of HELM, an Hypertextual, Electronic - * Library of Mathematics, developed at the Computer Science - * Department, University of Bologna, Italy. - * - * HELM is free software; you can redistribute it and/or - * modify it under the terms of the GNU General Public License - * as published by the Free Software Foundation; either version 2 - * of the License, or (at your option) any later version. - * - * HELM is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with HELM; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, - * MA 02111-1307, USA. - * - * For details, see the HELM World-Wide-Web page, - * http://helm.cs.unibo.it/ - *) - - (** check whether no objects are defined below a given baseuri *) -val is_empty: string -> bool diff --git a/components/grafite_engine/grafiteSync.ml b/components/grafite_engine/grafiteSync.ml index f23b42ecc..4809bd2e5 100644 --- a/components/grafite_engine/grafiteSync.ml +++ b/components/grafite_engine/grafiteSync.ml @@ -27,13 +27,13 @@ open Printf -let add_obj refinement_toolkit ~basedir uri obj status = - let lemmas = LibrarySync.add_obj refinement_toolkit uri obj basedir in +let add_obj refinement_toolkit uri obj status = + let lemmas = LibrarySync.add_obj refinement_toolkit uri obj in {status with GrafiteTypes.objects = uri::status.GrafiteTypes.objects}, lemmas -let add_coercion refinement_toolkit ~basedir ~add_composites status uri = - let compounds = LibrarySync.add_coercion ~add_composites ~basedir refinement_toolkit uri in +let add_coercion refinement_toolkit ~add_composites status uri = + let compounds = LibrarySync.add_coercion ~add_composites refinement_toolkit uri in {status with GrafiteTypes.coercions = uri :: status.GrafiteTypes.coercions}, compounds diff --git a/components/grafite_engine/grafiteSync.mli b/components/grafite_engine/grafiteSync.mli index f686eb2d8..5e384a47a 100644 --- a/components/grafite_engine/grafiteSync.mli +++ b/components/grafite_engine/grafiteSync.mli @@ -25,12 +25,12 @@ val add_obj: RefinementTool.kit -> - basedir:string -> UriManager.uri -> Cic.obj -> GrafiteTypes.status -> + UriManager.uri -> Cic.obj -> GrafiteTypes.status -> GrafiteTypes.status * UriManager.uri list val add_coercion: RefinementTool.kit -> - basedir:string -> add_composites:bool -> GrafiteTypes.status -> + add_composites:bool -> GrafiteTypes.status -> UriManager.uri -> GrafiteTypes.status * UriManager.uri list diff --git a/components/lexicon/lexiconEngine.ml b/components/lexicon/lexiconEngine.ml index 4adfe624b..6e3c1b53d 100644 --- a/components/lexicon/lexiconEngine.ml +++ b/components/lexicon/lexiconEngine.ml @@ -108,20 +108,30 @@ let rec eval_command 
status cmd = let notation_ids' = CicNotation.process_notation cmd in let status = { status with notation_ids = notation_ids' @ status.notation_ids } in - let basedir = Helm_registry.get "matita.basedir" in match cmd with | LexiconAst.Include (loc, baseuri) -> - let lexiconpath = LibraryMisc.lexicon_file_of_baseuri ~basedir ~baseuri in - if not (Sys.file_exists lexiconpath) then - raise (IncludedFileNotCompiled lexiconpath); + let lexiconpath_rw, lexiconpath_r = + LibraryMisc.lexicon_file_of_baseuri ~writable:true ~baseuri, + LibraryMisc.lexicon_file_of_baseuri ~writable:false ~baseuri + in + let lexiconpath = + if Sys.file_exists lexiconpath_rw then lexiconpath_rw else + if Sys.file_exists lexiconpath_r then lexiconpath_r else + raise (IncludedFileNotCompiled lexiconpath_rw) + in let lexicon = LexiconMarshal.load_lexicon lexiconpath in let status = List.fold_left eval_command status lexicon in if Helm_registry.get_bool "db.nodb" then - let metadatapath = LibraryMisc.metadata_file_of_baseuri ~basedir ~baseuri in - if not (Sys.file_exists metadatapath) then - raise (MetadataNotFound metadatapath) - else - add_metadata (LibraryNoDb.load_metadata ~fname:metadatapath) status + let metadatapath_rw, metadatapath_r = + LibraryMisc.metadata_file_of_baseuri ~baseuri ~writable:true, + LibraryMisc.metadata_file_of_baseuri ~baseuri ~writable:false + in + let metadatapath = + if Sys.file_exists metadatapath_rw then metadatapath_rw else + if Sys.file_exists metadatapath_r then metadatapath_r else + raise (MetadataNotFound metadatapath_rw) + in + add_metadata (LibraryNoDb.load_metadata ~fname:metadatapath) status else status | LexiconAst.Alias (loc, spec) -> diff --git a/components/library/libraryClean.ml b/components/library/libraryClean.ml index 6f72ff495..8b32e97c4 100644 --- a/components/library/libraryClean.ml +++ b/components/library/libraryClean.ml @@ -147,7 +147,7 @@ let moo_root_dir = lazy ( String.sub url 7 (String.length url - 7) (* remove heading "file:///" *) ) -let close_nodb ~basedir buris = +let close_nodb buris = let rev_deps = Hashtbl.create 97 in let all_metadata = HExtlib.find ~test:(fun name -> Filename.check_suffix name ".metadata") @@ -157,6 +157,8 @@ let close_nodb ~basedir buris = (fun path -> let metadata = LibraryNoDb.load_metadata ~fname:path in let baseuri_of_current_metadata = + prerr_endline "ERROR, add to the getter reverse lookup"; + let basedir = "/fake" in let dirname = Filename.dirname path in let basedirlen = String.length basedir in assert (String.sub dirname 0 basedirlen = basedir); @@ -190,7 +192,7 @@ let close_nodb ~basedir buris = in objects_to_remove -let clean_baseuris ?(verbose=true) ~basedir buris = +let clean_baseuris ?(verbose=true) buris = Hashtbl.clear cache_of_processed_baseuri; let buris = List.map Http_getter_misc.strip_trailing_slash buris in debug_prerr "clean_baseuris called on:"; @@ -198,7 +200,7 @@ let clean_baseuris ?(verbose=true) ~basedir buris = List.iter debug_prerr buris; let l = if Helm_registry.get_bool "db.nodb" then - close_nodb ~basedir buris + close_nodb buris else close_db [] buris in @@ -208,10 +210,15 @@ let clean_baseuris ?(verbose=true) ~basedir buris = if debug then List.iter (fun u -> debug_prerr (UriManager.string_of_uri u)) l; List.iter - (fun buri -> - HExtlib.safe_remove (LibraryMisc.obj_file_of_baseuri basedir buri); - HExtlib.safe_remove (LibraryMisc.metadata_file_of_baseuri basedir buri); - HExtlib.safe_remove (LibraryMisc.lexicon_file_of_baseuri basedir buri)) + (fun baseuri -> + try + HExtlib.safe_remove + 
(LibraryMisc.obj_file_of_baseuri ~writable:true ~baseuri); + HExtlib.safe_remove + (LibraryMisc.metadata_file_of_baseuri ~writable:true ~baseuri); + HExtlib.safe_remove + (LibraryMisc.lexicon_file_of_baseuri ~writable:true ~baseuri) + with Http_getter_types.Key_not_found _ -> ()) (HExtlib.list_uniq (List.fast_sort Pervasives.compare (List.map (UriManager.buri_of_uri) l))); List.iter diff --git a/components/library/libraryClean.mli b/components/library/libraryClean.mli index deca8f4a7..f4ce6de57 100644 --- a/components/library/libraryClean.mli +++ b/components/library/libraryClean.mli @@ -23,4 +23,4 @@ * http://helm.cs.unibo.it/ *) -val clean_baseuris : ?verbose:bool -> basedir:string -> string list -> unit +val clean_baseuris : ?verbose:bool -> string list -> unit diff --git a/components/library/libraryDb.ml b/components/library/libraryDb.ml index 3ea0f481a..858e4c4ff 100644 --- a/components/library/libraryDb.ml +++ b/components/library/libraryDb.ml @@ -69,7 +69,8 @@ let clean_owner_environment () = List.iter (fun suffix -> try - HExtlib.safe_remove (Http_getter.resolve (uri ^ suffix)) + HExtlib.safe_remove + (Http_getter.resolve ~writable:true (uri ^ suffix)) with Http_getter_types.Key_not_found _ -> ()) [""; ".body"; ".types"]) owned_uris; diff --git a/components/library/libraryMisc.ml b/components/library/libraryMisc.ml index 3f1931e42..148b5f8bc 100644 --- a/components/library/libraryMisc.ml +++ b/components/library/libraryMisc.ml @@ -25,14 +25,12 @@ (* $Id$ *) -let obj_file_of_baseuri ~basedir ~baseuri = - let path = basedir ^ "/xml" ^ Pcre.replace ~pat:"^cic:" ~templ:"" baseuri in - path ^ ".moo" +let resolve ~writable baseuri = Http_getter.filename ~writable baseuri -let lexicon_file_of_baseuri ~basedir ~baseuri = - let path = basedir ^ "/xml" ^ Pcre.replace ~pat:"^cic:" ~templ:"" baseuri in - path ^ ".lexicon" +let obj_file_of_baseuri ~writable ~baseuri = + resolve ~writable baseuri ^ ".moo" +let lexicon_file_of_baseuri ~writable ~baseuri = + resolve ~writable baseuri ^ ".lexicon" +let metadata_file_of_baseuri~writable ~baseuri = + resolve ~writable baseuri ^ ".metadata" -let metadata_file_of_baseuri ~basedir ~baseuri = - let path = basedir ^ "/xml" ^ Pcre.replace ~pat:"^cic:" ~templ:"" baseuri in - path ^ ".metadata" diff --git a/components/library/libraryMisc.mli b/components/library/libraryMisc.mli index e4d07faf7..08ac4638d 100644 --- a/components/library/libraryMisc.mli +++ b/components/library/libraryMisc.mli @@ -23,6 +23,6 @@ * http://helm.cs.unibo.it/ *) -val obj_file_of_baseuri: basedir:string -> baseuri:string -> string -val lexicon_file_of_baseuri: basedir:string -> baseuri:string -> string -val metadata_file_of_baseuri: basedir:string -> baseuri:string -> string +val obj_file_of_baseuri: writable:bool -> baseuri:string -> string +val lexicon_file_of_baseuri: writable:bool -> baseuri:string -> string +val metadata_file_of_baseuri: writable:bool -> baseuri:string -> string diff --git a/components/library/librarySync.ml b/components/library/librarySync.ml index 91a838ddf..9ce5801fd 100644 --- a/components/library/librarySync.ml +++ b/components/library/librarySync.ml @@ -43,25 +43,22 @@ let uris_of_obj uri = let univgraphuri = UriManager.univgraphuri_of_uri uri in innertypesuri,bodyuri,univgraphuri -let paths_and_uris_of_obj uri ~basedir = - let basedir = basedir ^ "/xml" in +let paths_and_uris_of_obj uri = + let resolved = Http_getter.filename' ~writable:true uri in + let basepath = Filename.dirname resolved in let innertypesuri, bodyuri, univgraphuri = uris_of_obj uri in 
- let innertypesfilename = Str.replace_first (Str.regexp "^cic:") "" - (UriManager.string_of_uri innertypesuri) ^ ".xml.gz" in - let innertypespath = basedir ^ "/" ^ innertypesfilename in - let xmlfilename = Str.replace_first (Str.regexp "^cic:/") "" - (UriManager.string_of_uri uri) ^ ".xml.gz" in - let xmlpath = basedir ^ "/" ^ xmlfilename in - let xmlbodyfilename = Str.replace_first (Str.regexp "^cic:/") "" - (UriManager.string_of_uri uri) ^ ".body.xml.gz" in - let xmlbodypath = basedir ^ "/" ^ xmlbodyfilename in - let xmlunivgraphfilename = Str.replace_first (Str.regexp "^cic:/") "" - (UriManager.string_of_uri univgraphuri) ^ ".xml.gz" in - let xmlunivgraphpath = basedir ^ "/" ^ xmlunivgraphfilename in + let innertypesfilename=(UriManager.nameext_of_uri innertypesuri)^".xml.gz"in + let innertypespath = basepath ^ "/" ^ innertypesfilename in + let xmlfilename = (UriManager.nameext_of_uri uri) ^ ".xml.gz" in + let xmlpath = basepath ^ "/" ^ xmlfilename in + let xmlbodyfilename = (UriManager.nameext_of_uri uri) ^ ".body.xml.gz" in + let xmlbodypath = basepath ^ "/" ^ xmlbodyfilename in + let xmlunivgraphfilename=(UriManager.nameext_of_uri univgraphuri)^".xml.gz"in + let xmlunivgraphpath = basepath ^ "/" ^ xmlunivgraphfilename in xmlpath, xmlbodypath, innertypespath, bodyuri, innertypesuri, xmlunivgraphpath, univgraphuri -let save_object_to_disk ~basedir uri obj ugraph univlist = +let save_object_to_disk uri obj ugraph univlist = let ensure_path_exists path = let dir = Filename.dirname path in HExtlib.mkdir dir @@ -75,7 +72,7 @@ let save_object_to_disk ~basedir uri obj ugraph univlist = in let xmlpath, xmlbodypath, innertypespath, bodyuri, innertypesuri, xmlunivgraphpath, univgraphuri = - paths_and_uris_of_obj uri basedir + paths_and_uris_of_obj uri in List.iter HExtlib.mkdir (List.map Filename.dirname [xmlpath]); (* now write to disk *) @@ -104,7 +101,7 @@ let index_obj = fun ~dbd ~uri -> profiler.HExtlib.profile (fun uri -> MetadataDb.index_obj ~dbd ~uri) uri -let add_single_obj uri obj refinement_toolkit ~basedir = +let add_single_obj uri obj refinement_toolkit = let module RT = RefinementTool in let obj = if (*List.mem `Generated (CicUtil.attributes_of_obj obj) &&*) @@ -134,7 +131,7 @@ let add_single_obj uri obj refinement_toolkit ~basedir = index_obj ~dbd ~uri; (* 2 must be in the env *) try (*3*) - let new_stuff = save_object_to_disk ~basedir uri obj ugraph univlist in + let new_stuff = save_object_to_disk uri obj ugraph univlist in try HLog.message (Printf.sprintf "%s defined" (UriManager.string_of_uri uri)) @@ -162,7 +159,7 @@ let remove_single_obj uri = List.iter (fun uri -> (try - let file = Http_getter.resolve' uri in + let file = Http_getter.resolve' ~writable:true uri in HExtlib.safe_remove file; HExtlib.rmdir_descend (Filename.dirname file) with Http_getter_types.Key_not_found _ -> ()); @@ -173,12 +170,12 @@ let remove_single_obj uri = (*** GENERATION OF AUXILIARY LEMMAS ***) -let generate_elimination_principles ~basedir uri refinement_toolkit = +let generate_elimination_principles uri refinement_toolkit = let uris = ref [] in let elim sort = try let uri,obj = CicElim.elim_of ~sort uri 0 in - add_single_obj uri obj refinement_toolkit ~basedir; + add_single_obj uri obj refinement_toolkit; uris := uri :: !uris with CicElim.Can_t_eliminate -> () in @@ -195,7 +192,7 @@ let remove_all_coercions () = UriManager.UriHashtbl.clear coercion_hashtbl; CoercDb.remove_coercion (fun (_,_,u1) -> true) -let add_coercion ~basedir ~add_composites refinement_toolkit uri = +let add_coercion 
~add_composites refinement_toolkit uri = let coer_ty,_ = let coer = CicUtil.term_of_uri uri in CicTypeChecker.type_of_aux' [] [] coer CicUniv.empty_ugraph @@ -236,7 +233,7 @@ let add_coercion ~basedir ~add_composites refinement_toolkit uri = if add_composites then List.fold_left (fun acc (_,_,uri,obj) -> - add_single_obj ~basedir uri obj refinement_toolkit; + add_single_obj uri obj refinement_toolkit; uri::acc) composite_uris new_coercions else @@ -274,7 +271,7 @@ let remove_coercion uri = Not_found -> () (* mhh..... *) -let generate_projections ~basedir refinement_toolkit uri fields = +let generate_projections refinement_toolkit uri fields = let uris = ref [] in let projections = CicRecord.projections_of uri (List.map fst fields) in try @@ -285,10 +282,10 @@ let generate_projections ~basedir refinement_toolkit uri fields = CicTypeChecker.type_of_aux' [] [] bo CicUniv.empty_ugraph in let attrs = [`Class `Projection; `Generated] in let obj = Cic.Constant (name,Some bo,ty,[],attrs) in - add_single_obj ~basedir uri obj refinement_toolkit; + add_single_obj uri obj refinement_toolkit; let composites = if coercion then - add_coercion ~basedir ~add_composites:true refinement_toolkit uri + add_coercion ~add_composites:true refinement_toolkit uri else [] in @@ -308,19 +305,17 @@ let generate_projections ~basedir refinement_toolkit uri fields = List.iter remove_single_obj !uris; raise exn - let build_inversion_principle = ref (fun a b -> assert false);; -let generate_inversion refinement_toolkit ~basedir uri obj = +let generate_inversion refinement_toolkit uri obj = match !build_inversion_principle uri obj with None -> [] | Some (ind_uri,ind_obj) -> - add_single_obj ind_uri ind_obj refinement_toolkit ~basedir; + add_single_obj ind_uri ind_obj refinement_toolkit; [ind_uri] - -let add_obj refinement_toolkit uri obj ~basedir = - add_single_obj uri obj refinement_toolkit ~basedir; +let add_obj refinement_toolkit uri obj = + add_single_obj uri obj refinement_toolkit; let uris = ref [] in try begin @@ -328,8 +323,8 @@ let add_obj refinement_toolkit uri obj ~basedir = | Cic.Constant _ -> () | Cic.InductiveDefinition (_,_,_,attrs) -> uris := !uris @ - generate_elimination_principles ~basedir uri refinement_toolkit; - uris := !uris @ generate_inversion refinement_toolkit ~basedir uri obj; + generate_elimination_principles uri refinement_toolkit; + (* uris := !uris @ generate_inversion refinement_toolkit uri obj; *) let rec get_record_attrs = function | [] -> None @@ -340,7 +335,7 @@ let add_obj refinement_toolkit uri obj ~basedir = | None -> () (* not a record *) | Some fields -> uris := !uris @ - (generate_projections ~basedir refinement_toolkit uri fields)) + (generate_projections refinement_toolkit uri fields)) | Cic.CurrentProof _ | Cic.Variable _ -> assert false end; diff --git a/components/library/librarySync.mli b/components/library/librarySync.mli index e9f2bd036..b29a0aa03 100644 --- a/components/library/librarySync.mli +++ b/components/library/librarySync.mli @@ -33,7 +33,7 @@ val build_inversion_principle: (UriManager.uri-> Cic.obj -> (UriManager.uri * Ci (* it returns the list of the uris of the auxiliary lemmas generated *) val add_obj: RefinementTool.kit -> - UriManager.uri -> Cic.obj -> basedir:string -> + UriManager.uri -> Cic.obj -> UriManager.uri list (* inverse of add_obj; *) @@ -46,7 +46,7 @@ val remove_obj: UriManager.uri -> unit (* is true are added to the library. *) (* The list of added objects is returned. 
*) val add_coercion: - basedir:string -> add_composites:bool -> + add_composites:bool -> RefinementTool.kit -> UriManager.uri -> UriManager.uri list diff --git a/components/metadata/sqlStatements.ml b/components/metadata/sqlStatements.ml index a08073965..42fcebec0 100644 --- a/components/metadata/sqlStatements.ml +++ b/components/metadata/sqlStatements.ml @@ -198,3 +198,11 @@ let fill_hits refObj hits = hits refObj ] +let move_content (name1, tbl1) (name2, tbl2) buri = + assert (tbl1 = tbl2); + sprintf + "INSERT INTRO %s SELECT * FROM %s WHERE source LIKE \"%s%%\";" + name2 name1 buri + + + diff --git a/components/metadata/sqlStatements.mli b/components/metadata/sqlStatements.mli index 9f9af55ef..90a6b64b4 100644 --- a/components/metadata/sqlStatements.mli +++ b/components/metadata/sqlStatements.mli @@ -43,3 +43,8 @@ val rename_tables: (string * string) list -> string list * @param hits name of the hits table *) val fill_hits: string -> string -> string list +(** move content [t1] [t2] [buri] + * moves all the tuples with 'source' that match regex '^buri' from t1 to t2 + * *) +val move_content: (string * tbl) -> (string * tbl) -> string -> string + diff --git a/components/urimanager/uriManager.ml b/components/urimanager/uriManager.ml index 9ff6a7966..eb0388cc2 100644 --- a/components/urimanager/uriManager.ml +++ b/components/urimanager/uriManager.ml @@ -55,6 +55,13 @@ let name_of_uri (uri, _) = let index1 = String.rindex_from uri xpointer_offset '/' + 1 in let index2 = String.rindex uri '.' in String.sub uri index1 (index2 - index1) + +let nameext_of_uri (uri, _) = + let xpointer_offset, mah = + try String.rindex uri '#', 0 with Not_found -> String.length uri - 1, 1 + in + let index1 = String.rindex_from uri xpointer_offset '/' + 1 in + String.sub uri index1 (xpointer_offset - index1 + mah) let buri_of_uri (uri,_) = let xpointer_offset = diff --git a/components/urimanager/uriManager.mli b/components/urimanager/uriManager.mli index 8250cc839..1b8317816 100644 --- a/components/urimanager/uriManager.mli +++ b/components/urimanager/uriManager.mli @@ -34,6 +34,7 @@ val uri_of_string : string -> uri val string_of_uri : uri -> string (* complete uri *) val name_of_uri : uri -> string (* name only (without extension)*) +val nameext_of_uri : uri -> string (* name only (with extension)*) val buri_of_uri : uri -> string (* base uri only, without trailing '/' *) (* given an uri, returns the uri of the corresponding cic file, *) diff --git a/configure.ac b/configure.ac index 00ae3a37d..d3f381e67 100644 --- a/configure.ac +++ b/configure.ac @@ -144,14 +144,26 @@ AC_ARG_WITH(runtime-dir, [ --with-runtime-dir Runtime directory (current working directory if not given)], [ RT_BASE_DIR="${withval}" ], [ RT_BASE_DIR="$RT_BASE_DIR_DEFAULT" ]) -AC_MSG_RESULT($RT_BASE_DIR) +MSG="$RT_BASE_DIR" +if test "yes" = "$RT_BASE_DIR"; then + MSG=" +** error: ** +** empty --with-runtime-dir argument, please use --with-runtime-dir=value **" +fi +AC_MSG_RESULT($MSG) AC_MSG_CHECKING(--with-dbhost argument) AC_ARG_WITH(dbhost, [ --with-dbhost SQL database hostname], [ DBHOST="${withval}" ], [ DBHOST="$DEFAULT_DBHOST" ]) -AC_MSG_RESULT($DBHOST) +MSG="$DBHOST" +if test "yes" = "$DBHOST"; then + MSG=" +** error: ** +** empty --with-dbhost argument, please use --with-dbhost=value **" +fi +AC_MSG_RESULT($MSG) AC_SUBST(CAMLP4O) AC_SUBST(DBHOST) @@ -170,9 +182,7 @@ AC_SUBST(TRANSFORMER_MODULE) AC_OUTPUT([ components/extlib/componentsConf.ml - matita/matita.conf.xml.devel - matita/matita.conf.xml.user - matita/matita.conf.xml.build + 
matita/matita.conf.xml matita/buildTimeConf.ml matita/gtkmathview.matita.conf.xml matita/help/C/version.txt diff --git a/daemons/http_getter/Makefile b/daemons/http_getter/Makefile index d7b1089af..e8e9280d5 100644 --- a/daemons/http_getter/Makefile +++ b/daemons/http_getter/Makefile @@ -3,7 +3,7 @@ NAME = http_getter REQUIRES = helm-getter helm-logger helm-registry netstring COMMONOPTS = -package "$(REQUIRES)" -pp camlp4o -thread -OCAMLFIND = OCAMLPATH=../ocaml/METAS ocamlfind +OCAMLFIND = OCAMLPATH=../../components/METAS ocamlfind OCAMLC = $(OCAMLFIND) ocamlc -g $(COMMONOPTS) OCAMLOPT = $(OCAMLFIND) opt $(COMMONOPTS) diff --git a/daemons/http_getter/main.ml b/daemons/http_getter/main.ml index 3117a85c9..c200c161d 100644 --- a/daemons/http_getter/main.ml +++ b/daemons/http_getter/main.ml @@ -191,10 +191,10 @@ let return_ls regexp fmt outchan = let return_help outchan = return_html_raw (Http_getter.help ()) outchan -let return_resolve uri outchan = +let return_resolve writable uri outchan = try return_xml_raw - (sprintf "\n" (Http_getter.resolve uri)) + (sprintf "\n" (Http_getter.resolve ~writable uri)) outchan with | Unresolvable_URI _ -> return_xml_raw "\n" outchan @@ -272,7 +272,8 @@ let callback (req: Http_types.request) outchan = | "/getxml" -> let uri = req#param "uri" in let fname = Http_getter.getxml uri in (* local name, in cache *) - let remote_name = Http_getter.resolve uri in (* remote name *) + (* remote name *) + let remote_name = Http_getter.resolve ~writable:false uri in let src_enc = if is_gzip fname then `Gzipped else `Normal in let enc = parse_enc req in let fname, cleanup = convert_file ~from_enc:src_enc ~to_enc:enc fname in @@ -293,7 +294,13 @@ let callback (req: Http_types.request) outchan = | "/getdtd" -> let fname = Http_getter.getdtd (req#param "uri") in respond_dtd (parse_patch req) fname outchan - | "/resolve" -> return_resolve (req#param "uri") outchan + | "/resolve" -> + let writable = + match req#param ~default:"false" "writable" with + | "true" -> true + | _ -> false + in + return_resolve writable (req#param "uri") outchan | "/clean_cache" -> Http_getter.clean_cache (); return_html_msg "Done." 
outchan diff --git a/matita/Makefile b/matita/Makefile index 272711ad7..00c32a712 100644 --- a/matita/Makefile +++ b/matita/Makefile @@ -102,28 +102,28 @@ world: all endif matita: matita.ml $(LIB_DEPS) $(CMOS) - @echo "OCAMLC $<" + @echo " OCAMLC $<" $(H)$(OCAMLC) $(PKGS) -linkpkg -o $@ $(CMOS) matita.ml matita.opt: matita.ml $(LIBX_DEPS) $(CMXS) - @echo "OCAMLOPT $<" + @echo " OCAMLOPT $<" $(H)$(OCAMLOPT) $(PKGS) -linkpkg -o $@ $(CMXS) matita.ml dump_moo: dump_moo.ml buildTimeConf.cmo - @echo "OCAMLC $<" + @echo " OCAMLC $<" $(H)$(OCAMLC) $(PKGS) -linkpkg -o $@ buildTimeConf.cmo $< dump_moo.opt: dump_moo.ml buildTimeConf.cmx @echo "OCAMLOPT $<" $(H)$(OCAMLOPT) $(PKGS) -linkpkg -o $@ buildTimeConf.cmx $< matitac: matitac.ml $(CLIB_DEPS) $(CCMOS) $(MAINCMOS) - @echo "OCAMLC $<" + @echo " OCAMLC $<" $(H)$(OCAMLC) $(CPKGS) -linkpkg -o $@ $(CCMOS) $(MAINCMOS) matitac.ml matitac.opt: matitac.ml $(CLIBX_DEPS) $(CCMXS) $(MAINCMXS) - @echo "OCAMLOPT $<" + @echo " OCAMLOPT $<" $(H)$(OCAMLOPT) $(CPKGS) -linkpkg -o $@ $(CCMXS) $(MAINCMXS) matitac.ml matitatop: matitatop.ml $(CLIB_DEPS) $(CCMOS) - @echo "OCAMLC $<" + @echo " OCAMLC $<" $(H)$(OCAMLC) $(CPKGS) -linkpkg -o $@ toplevellib.cma $(CCMOS) $< matitadep: matitac @@ -209,21 +209,22 @@ endif ifeq ($(DISTRIBUTED),yes) dist_library: dist_library@library -dist_library_clean: - @echo "MATITACLEAN -system all" - $(H)./matitaclean$(BEST_EXT) \ - -system -conffile `pwd`/matita.conf.xml.build all dist_library@%: - @echo "MATITAMAKE -system init" + @echo "MATITAMAKE init $*" $(H)MATITA_RT_BASE_DIR=`pwd` \ - MATITA_FLAGS="-system -conffile `pwd`/matita.conf.xml.build" \ - ./matitamake$(BEST_EXT) -conffile `pwd`/matita.conf.xml.build \ + MATITA_FLAGS="-conffile `pwd`/matita.conf.xml" \ + ./matitamake$(BEST_EXT) -conffile `pwd`/matita.conf.xml \ init $* `pwd`/$* - @echo "MATITAMAKE -system build" + @echo "MATITAMAKE publish $*" $(H)MATITA_RT_BASE_DIR=`pwd` \ - MATITA_FLAGS="-system -conffile `pwd`/matita.conf.xml.build" \ - ./matitamake$(BEST_EXT) -conffile `pwd`/matita.conf.xml.build \ - build $* + MATITA_FLAGS="-conffile `pwd`/matita.conf.xml" \ + ./matitamake$(BEST_EXT) -conffile `pwd`/matita.conf.xml \ + publish $* + @echo "MATITAMAKE destroy $*" + $(H)MATITA_RT_BASE_DIR=`pwd` \ + MATITA_FLAGS="-conffile `pwd`/matita.conf.xml" \ + ./matitamake$(BEST_EXT) -conffile `pwd`/matita.conf.xml \ + destroy $* touch $@ endif @@ -236,33 +237,33 @@ INSTALL_STUFF = \ matita.ma.templ \ core_notation.moo \ matita.conf.xml \ - matita.conf.xml.user \ closed.xml \ gtkmathview.matita.conf.xml \ template_makefile.in \ AUTHORS \ LICENSE \ $(NULL) + + ifeq ($(HAVE_OCAMLOPT),yes) -INSTALL_STUFF += $(PROGRAMS_OPT) +INSTALL_STUFF_BIN = $(PROGRAMS_OPT) else -INSTALL_STUFF += $(PROGRAMS_BYTE) +INSTALL_STUFF_BIN = $(PROGRAMS_BYTE) endif -install: - # install main dir and executables - install -d $(DESTDIR) +install: install_preliminaries + #dist_library + +install_preliminaries: + install -d $(DESTDIR)/ma/ cp -a $(INSTALL_STUFF) $(DESTDIR) - # install the library and corresponding scripts - if [ -d $(DESTDIR)/library ]; then rm -rf $(DESTDIR)/library; fi - cp -a .matita/xml/matita/ $(DESTDIR)/library/ - if [ -d $(DESTDIR)/ma ]; then rm -rf $(DESTDIR)/ma; fi - install -d $(DESTDIR)/ma + install -s $(INSTALL_STUFF_BIN) $(DESTDIR) ifeq ($(HAVE_OCAMLOPT),yes) - for p in $(PROGRAMS_BYTE); do ln -s $$p.opt $(DESTDIR)/$$p; done + for p in $(PROGRAMS_BYTE); do ln -fs $$p.opt $(DESTDIR)/$$p; done endif - cp -a library/ $(DESTDIR)/ma/stdlib/ - cp -a contribs/ $(DESTDIR)/ma/contribs/ + cp -a 
library/ $(DESTDIR)/ma/standard-library + cp -a contribs/ $(DESTDIR)/ma/ + uninstall: rm -rf $(DESTDIR) @@ -346,16 +347,16 @@ depend: include .depend %.cmi: %.mli - @echo "OCAMLC $<" + @echo " OCAMLC $<" $(H)$(OCAMLC) $(PKGS) -c $< %.cmo %.cmi: %.ml - @echo "OCAMLC $<" + @echo " OCAMLC $<" $(H)$(OCAMLC) $(PKGS) -c $< %.cmx: %.ml - @echo "OCAMLOPT $<" + @echo " OCAMLOPT $<" $(H)$(OCAMLOPT) $(PKGS) -c $< %.annot: %.ml - @echo "OCAMLC -dtypes $<" + @echo " OCAMLC -dtypes $<" $(H)$(OCAMLC) -dtypes $(PKGS) -c $< $(CMOS): $(LIB_DEPS) diff --git a/matita/buildTimeConf.ml.in b/matita/buildTimeConf.ml.in index 0be6f2c86..5a448cde2 100644 --- a/matita/buildTimeConf.ml.in +++ b/matita/buildTimeConf.ml.in @@ -51,6 +51,7 @@ let matita_conf = runtime_base_dir ^ "/matita.conf.xml" let closed_xml = runtime_base_dir ^ "/closed.xml" let gtkmathview_conf = runtime_base_dir ^ "/gtkmathview.matita.conf.xml" let matitamake_makefile_template = runtime_base_dir ^ "/template_makefile.in" -let stdlib_dir = runtime_base_dir ^ "/library" +let stdlib_dir_devel = runtime_base_dir ^ "/library" +let stdlib_dir_installed = runtime_base_dir ^ "/ma/standard-library" let help_dir = runtime_base_dir ^ "/help" diff --git a/matita/buildTimeConf.mli b/matita/buildTimeConf.mli index bd87f6872..5477c6c20 100644 --- a/matita/buildTimeConf.mli +++ b/matita/buildTimeConf.mli @@ -45,7 +45,8 @@ val phrase_sep : string val runtime_base_dir : string val script_font : string val script_template : string -val stdlib_dir : string +val stdlib_dir_devel : string +val stdlib_dir_installed : string val undo_history_size : int val version : string diff --git a/matita/library/algebra/groups.ma b/matita/library/algebra/groups.ma index ea345bc54..6cb994812 100644 --- a/matita/library/algebra/groups.ma +++ b/matita/library/algebra/groups.ma @@ -316,10 +316,11 @@ record normal_subgroup (G:Group) : Type ≝ theorem foo: ∀G.∀H:normal_subgroup G.∀x.x*H=H*x. *) - +(* theorem member_of_left_coset_mk_left_coset_x_H_a_to_member_of_left_coset_mk_left_coset_y_H_b_to_member_of_left_coset_mk_left_coset_op_x_y_H_op_a_b: ∀G.∀H:normal_subgroup G.∀x,y,a,b. a ∈ (x*H) → b ∈ (y*H) → (a·b) ∈ ((x·y)*H). intros; simplify; -qed. \ No newline at end of file +qed. +*) diff --git a/matita/matita.conf.xml b/matita/matita.conf.xml deleted file mode 120000 index 7f7b7b8e1..000000000 --- a/matita/matita.conf.xml +++ /dev/null @@ -1 +0,0 @@ -matita.conf.xml.devel \ No newline at end of file diff --git a/matita/matita.conf.xml.build.in b/matita/matita.conf.xml.build.in deleted file mode 100644 index ff78ecb0a..000000000 --- a/matita/matita.conf.xml.build.in +++ /dev/null @@ -1,27 +0,0 @@ - - -
-  [XML markup lost in rendering; recoverable settings:]
-  user home:        $(HOME)
-  matita basedir:   @SRCROOT@/matita/.matita    (owner: nobody)
-  db:               @DBHOST@ / helm / matita
-  getter cache:     .matita/getter/cache
-  getter prefixes:  cic:/matita/  ->  file://$(matita.basedir)/xml/matita/
-                    cic:/         ->  file:///does_not_exists/    (legacy)
diff --git a/matita/matita.conf.xml.devel.in b/matita/matita.conf.xml.devel.in deleted file mode 100644 index 3a4e7bb70..000000000 --- a/matita/matita.conf.xml.devel.in +++ /dev/null @@ -1,68 +0,0 @@ - - -
-  [XML markup lost in rendering; recoverable settings:]
-  user home:        $(HOME)
-  matita basedir:   $(user.home)/.matita    (owner: $(user.name))
-  db:               @DBHOST@ / helm / matita
-  getter cache:     $(user.home)/.matita/getter/cache
-  getter prefixes:  cic:/matita/  ->  file://$(user.home)/.matita/xml/matita/
-                    cic:/         ->  file:///projects/helm/library/coq_contribs/    (legacy)
diff --git a/matita/matita.conf.xml.in b/matita/matita.conf.xml.in new file mode 100644 index 000000000..2d2f79fd6 --- /dev/null +++ b/matita/matita.conf.xml.in @@ -0,0 +1,73 @@ + + +
+  [XML markup lost in rendering; recoverable settings:]
+  user home:        $(HOME)
+  matita basedir:   $(user.home)/.matita    (owner: $(user.name))
+  db:               @DBHOST@ / helm / matita
+  getter cache:     $(user.home)/.matita/getter/cache
+  getter prefixes:  cic:/matita/  ->  file://@RT_BASE_DIR@/stantard-library/    (ro)
+                    cic:/matita/  ->  file://$(user.home)/.matita/xml/matita/
+                    cic:/         ->  file://@RT_BASE_DIR@/legacy-library/coq/    (legacy)
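
The overlapping prefixes above (a read-only system copy plus a writable copy under $(user.home)/.matita) are what the new writable-aware getter calls resolve against. Below is a minimal usage sketch, not part of the patch: the URIs are placeholders, and only the functions and their labelled arguments are taken from the interfaces changed above.

  (* after the usual start-up, as done in matitacLib.ml / matitaclean.ml *)
  let () =
    MatitaInit.initialize_all ();
    (* reading an existing object may resolve into the installed ro library;
       raises Http_getter_types.Key_not_found if the object is nowhere *)
    let src = Http_getter.resolve ~writable:false "cic:/matita/nat/plus.con" in
    (* the .moo to (re)write must resolve below a writable prefix *)
    let moo =
      LibraryMisc.obj_file_of_baseuri ~writable:true ~baseuri:"cic:/matita/nat/"
    in
    Printf.printf "read from: %s\nwrite moo to: %s\n" src moo
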
diff --git a/matita/matita.conf.xml.user.in b/matita/matita.conf.xml.user.in deleted file mode 100644 index ff4be401e..000000000 --- a/matita/matita.conf.xml.user.in +++ /dev/null @@ -1,73 +0,0 @@ - - -
-  [XML markup lost in rendering; recoverable settings:]
-  user home:        $(HOME)
-  matita basedir:   $(user.home)/.matita    (owner: $(user.name))
-  db:               @DBHOST@ / helm / matita
-  getter cache:     $(user.home)/.matita/getter/cache
-  getter prefixes:  cic:/matita/               ->  file://@RT_BASE_DIR@/library/    (ro)
-                    cic:/matita/$(user.name)/  ->  file://$(user.home)/.matita/xml/matita/
-                    cic:/                      ->  file://@RT_BASE_DIR@/legacy/coq/    (legacy)
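
The 'publish' path mentioned in the commit message relies on the getter's new system mode: according to activate_system_mode above, prefixes marked "ro" lose the read-only attribute, legacy prefixes are kept as they are, and the user's read-write prefixes are dropped, so the system repositories become the writable targets. A sketch of how that switch is meant to be driven, assuming the "matita.system" registry flag is set as matitaInit.ml does below; the wrapper name enter_system_mode is made up for the example.

  let enter_system_mode () =
    if Helm_registry.get_bool "matita.system" then begin
      Http_getter_storage.activate_system_mode ();
      (* the formerly read-only system repositories now show up here *)
      List.iter prerr_endline (Http_getter_storage.list_writable_prefixes ())
    end
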
diff --git a/matita/matita.glade b/matita/matita.glade index 71506d55a..1518409e8 100644 --- a/matita/matita.glade +++ b/matita/matita.glade @@ -19,6 +19,7 @@ GDK_WINDOW_TYPE_HINT_NORMAL GDK_GRAVITY_NORTH_WEST True + False @@ -472,6 +473,7 @@ GDK_WINDOW_TYPE_HINT_DIALOG GDK_GRAVITY_NORTH_WEST True + False True @@ -561,6 +563,7 @@ GDK_WINDOW_TYPE_HINT_DIALOG GDK_GRAVITY_NORTH_WEST True + False True @@ -654,6 +657,7 @@ GDK_WINDOW_TYPE_HINT_DIALOG GDK_GRAVITY_NORTH_WEST True + False True @@ -692,6 +696,7 @@ GDK_WINDOW_TYPE_HINT_DIALOG GDK_GRAVITY_NORTH_WEST True + False True @@ -836,6 +841,7 @@ GDK_WINDOW_TYPE_HINT_NORMAL GDK_GRAVITY_NORTH_WEST True + False @@ -859,6 +865,8 @@ True + GTK_PACK_DIRECTION_LTR + GTK_PACK_DIRECTION_LTR @@ -2694,6 +2702,7 @@ GDK_WINDOW_TYPE_HINT_DIALOG GDK_GRAVITY_NORTH_WEST True + False True @@ -2819,6 +2828,7 @@ GDK_WINDOW_TYPE_HINT_DIALOG GDK_GRAVITY_NORTH_WEST True + False True @@ -3172,6 +3182,7 @@ GDK_WINDOW_TYPE_HINT_DIALOG GDK_GRAVITY_NORTH_WEST True + False @@ -3445,6 +3456,7 @@ GDK_WINDOW_TYPE_HINT_UTILITY GDK_GRAVITY_NORTH_WEST True + False @@ -3680,6 +3692,7 @@ GDK_WINDOW_TYPE_HINT_NORMAL GDK_GRAVITY_NORTH_WEST True + False @@ -3944,6 +3957,84 @@ + + + True + True + GTK_RELIEF_NORMAL + True + + + + True + 0.5 + 0.5 + 0 + 0 + 0 + 0 + 0 + 0 + + + + True + False + 2 + + + + True + gtk-convert + 4 + 0.5 + 0.5 + 0 + 0 + + + 0 + False + False + + + + + + True + Publish + True + False + GTK_JUSTIFY_LEFT + False + False + 0.5 + 0.5 + 0 + 0 + PANGO_ELLIPSIZE_NONE + -1 + False + 0 + + + 0 + False + False + + + + + + + + + 0 + False + False + + + True diff --git a/matita/matitaGui.ml b/matita/matitaGui.ml index b3ebebd90..e2274c387 100644 --- a/matita/matitaGui.ml +++ b/matita/matitaGui.ml @@ -66,19 +66,17 @@ class console ~(buffer: GText.buffer) () = let clean_current_baseuri grafite_status = try let baseuri = GrafiteTypes.get_string_option grafite_status "baseuri" in - let basedir = Helm_registry.get "matita.basedir" in - LibraryClean.clean_baseuris ~basedir [baseuri] + LibraryClean.clean_baseuris [baseuri] with GrafiteTypes.Option_error _ -> () let ask_and_save_moo_if_needed parent fname lexicon_status grafite_status = - let basedir = Helm_registry.get "matita.basedir" in let baseuri = DependenciesParser.baseuri_of_script ~include_paths:[] fname in - let moo_fname = LibraryMisc.obj_file_of_baseuri ~basedir ~baseuri in + let moo_fname = LibraryMisc.obj_file_of_baseuri ~baseuri ~writable:true in let save () = let metadata_fname = - LibraryMisc.metadata_file_of_baseuri ~basedir ~baseuri in + LibraryMisc.metadata_file_of_baseuri ~baseuri ~writable:true in let lexicon_fname = - LibraryMisc.lexicon_file_of_baseuri ~basedir ~baseuri + LibraryMisc.lexicon_file_of_baseuri ~baseuri ~writable:true in GrafiteMarshal.save_moo moo_fname grafite_status.GrafiteTypes.moo_content_rev; @@ -472,6 +470,15 @@ class gui () = (fun () -> MatitamakeLib.clean_development_in_bg refresh d) in ignore(clean ()))); + connect_button develList#publishButton + (locker (fun () -> + match get_devel_selected () with + | None -> () + | Some d -> + let clean = locker + (fun () -> MatitamakeLib.publish_development_in_bg refresh d) + in + ignore(clean ()))); connect_button develList#closeButton (fun () -> develList#toplevel#misc#hide()); ignore(develList#toplevel#event#connect#delete @@ -613,7 +620,7 @@ class gui () = HLog.set_log_callback self#console#log_callback; GtkSignal.user_handler := (function - | MatitaScript.ActionCancelled -> () + | MatitaScript.ActionCancelled s -> HLog.error s | exn -> if 
not (Helm_registry.get_bool "matita.debug") then let floc, msg = MatitaExcPp.to_string exn in diff --git a/matita/matitaInit.ml b/matita/matitaInit.ml index 19927a37e..06f7815b9 100644 --- a/matita/matitaInit.ml +++ b/matita/matitaInit.ml @@ -107,6 +107,8 @@ let initialize_environment init_status = if not (already_configured [Getter;Environment] init_status) then begin Http_getter.init (); + if Helm_registry.get_bool "matita.system" then + Http_getter_storage.activate_system_mode (); CicEnvironment.set_trust (* environment trust *) (let trust = Helm_registry.get_opt_default Helm_registry.get_bool @@ -177,6 +179,11 @@ Usage: matitamake [ OPTION ... ] (init | clean | list | destroy | build) build Parameters: name (the name of the development to build, required) Description: completely builds the develpoment. + publish + Parameters: name (the name of the development to publish, required) + Description: cleans the development in the user space, rebuilds it + in the system space ('ro' repositories, that for this operation + becames writable). Notes: If target is omitted an 'all' will be used as the default. With -build you can build a development wherever it is. @@ -200,7 +207,10 @@ let add_cmdline_spec l = extra_cmdline_specs := l @ !extra_cmdline_specs let parse_cmdline init_status = if not (already_configured [CmdLine] init_status) then begin - let includes = ref [ BuildTimeConf.stdlib_dir ] in + let includes = ref [ + BuildTimeConf.stdlib_dir_installed ; + BuildTimeConf.stdlib_dir_devel ] + in let args = ref [] in let add_l l = fun s -> l := s :: !l in let reduce_verbosity () = @@ -284,4 +294,6 @@ let parse_cmdline () = let fill_registry () = status := fill_registry !status +;; +Inversion_principle.init () diff --git a/matita/matitaScript.ml b/matita/matitaScript.ml index d243ebb2e..891d09a50 100644 --- a/matita/matitaScript.ml +++ b/matita/matitaScript.ml @@ -36,7 +36,7 @@ let debug_print = if debug then prerr_endline else ignore (** raised when one of the script margins (top or bottom) is reached *) exception Margin exception NoUnfinishedProof -exception ActionCancelled +exception ActionCancelled of string let safe_substring s i j = try String.sub s i j with Invalid_argument _ -> assert false @@ -161,7 +161,7 @@ let wrap_with_developments guistuff f arg = else f arg in - let do_nothing () = raise ActionCancelled in + let do_nothing () = raise (ActionCancelled "Inclusion not performed") in let handle_with_devel d = let name = MatitamakeLib.name_for_development d in let title = "Unable to include " ^ what in @@ -324,7 +324,9 @@ and eval_executable include_paths (buffer : GText.buffer) guistuff lexicon_statu begin match ex with | TA.Command (_,TA.Set (_,"baseuri",u)) -> - if not (GrafiteMisc.is_empty u) then + if Http_getter_storage.is_read_only u then + raise (ActionCancelled ("baseuri " ^ u ^ " is readonly")); + if not (Http_getter_storage.is_empty u) then (match guistuff.ask_confirmation ~title:"Baseuri redefinition" @@ -333,9 +335,7 @@ and eval_executable include_paths (buffer : GText.buffer) guistuff lexicon_statu "Do you want to redefine the corresponding "^ "part of the library?") with - | `YES -> - let basedir = Helm_registry.get "matita.basedir" in - LibraryClean.clean_baseuris ~basedir [u] + | `YES -> LibraryClean.clean_baseuris [u] | `NO -> () | `CANCEL -> raise MatitaTypes.Cancel) | _ -> () diff --git a/matita/matitaScript.mli b/matita/matitaScript.mli index cfc465541..bf0a873f2 100644 --- a/matita/matitaScript.mli +++ b/matita/matitaScript.mli @@ -24,7 +24,7 @@ *) exception 
NoUnfinishedProof -exception ActionCancelled +exception ActionCancelled of string class type script = object diff --git a/matita/matitacLib.ml b/matita/matitacLib.ml index 1297a4747..8135e2472 100644 --- a/matita/matitacLib.ml +++ b/matita/matitacLib.ml @@ -103,8 +103,7 @@ let clean_exit n = | Some grafite_status -> try let baseuri = GrafiteTypes.get_string_option grafite_status "baseuri" in - let basedir = Helm_registry.get "matita.basedir" in - LibraryClean.clean_baseuris ~basedir ~verbose:false [baseuri]; + LibraryClean.clean_baseuris ~verbose:false [baseuri]; opt_exit n with GrafiteTypes.Option_error("baseuri", "not found") -> (* no baseuri ==> nothing to clean yet *) @@ -158,6 +157,7 @@ let main ~mode = MatitaInit.initialize_all (); (* must be called after init since args are set by cmdline parsing *) let fname = fname () in + let system_mode = Helm_registry.get_bool "matita.system" in let include_paths = Helm_registry.get_list Helm_registry.string "matita.includes" in grafite_status := Some (GrafiteSync.init ()); @@ -166,6 +166,7 @@ let main ~mode = BuildTimeConf.core_notation_script); Sys.catch_break true; let origcb = HLog.get_log_callback () in + let origcb t s = origcb t ((if system_mode then "[S] " else "") ^ s) in let newcb tag s = match tag with | `Debug | `Message -> () @@ -216,13 +217,16 @@ let main ~mode = end else begin - let basedir = Helm_registry.get "matita.basedir" in let baseuri = DependenciesParser.baseuri_of_script ~include_paths fname in - let moo_fname = LibraryMisc.obj_file_of_baseuri ~basedir ~baseuri in - let lexicon_fname= LibraryMisc.lexicon_file_of_baseuri ~basedir ~baseuri in + let moo_fname = + LibraryMisc.obj_file_of_baseuri ~baseuri ~writable:true + in + let lexicon_fname= + LibraryMisc.lexicon_file_of_baseuri ~baseuri ~writable:true + in let metadata_fname = - LibraryMisc.metadata_file_of_baseuri ~basedir ~baseuri + LibraryMisc.metadata_file_of_baseuri ~baseuri ~writable:true in GrafiteMarshal.save_moo moo_fname moo_content_rev; LibraryNoDb.save_metadata metadata_fname metadata; diff --git a/matita/matitaclean.ml b/matita/matitaclean.ml index 826a4a282..151bb0115 100644 --- a/matita/matitaclean.ml +++ b/matita/matitaclean.ml @@ -34,21 +34,31 @@ let clean_suffixes = [ ".moo"; ".lexicon"; ".metadata"; ".xml.gz" ] let main () = let _ = MatitaInit.initialize_all () in - let basedir = Helm_registry.get "matita.basedir" in match Helm_registry.get_list Helm_registry.string "matita.args" with | [ "all" ] -> LibraryDb.clean_owner_environment (); - let xmldir = basedir ^ "/xml" in - let clean_pat = - String.concat " -o " - (List.map (fun suf -> "-name \\*" ^ suf) clean_suffixes) in - let clean_cmd = - sprintf "find %s \\( %s \\) -exec rm \\{\\} \\; 2> /dev/null" - xmldir clean_pat in - ignore (Sys.command clean_cmd); - ignore - (Sys.command ("find " ^ xmldir ^ - " -type d -exec rmdir -p {} \\; 2> /dev/null")); + let prefixes = + HExtlib.filter_map + (fun s -> + if String.sub s 0 5 = "file:" then + Some (Str.replace_first (Str.regexp "^file://") "" s) + else + None) + (Http_getter_storage.list_writable_prefixes ()) + in + List.iter + (fun xmldir -> + let clean_pat = + String.concat " -o " + (List.map (fun suf -> "-name \\*" ^ suf) clean_suffixes) in + let clean_cmd = + sprintf "find %s \\( %s \\) -exec rm \\{\\} \\; 2> /dev/null" + xmldir clean_pat in + ignore (Sys.command clean_cmd); + ignore + (Sys.command ("find " ^ xmldir ^ + " -type d -exec rmdir -p {} \\; 2> /dev/null"))) + prefixes; exit 0 | [] -> MatitaInit.die_usage () | files -> @@ -70,4 +80,4 @@ let 
main () = in uri::uris_to_remove) [] files in - LibraryClean.clean_baseuris ~basedir uris_to_remove + LibraryClean.clean_baseuris uris_to_remove diff --git a/matita/matitadep.ml b/matita/matitadep.ml index c1ada6aea..919a3ec03 100644 --- a/matita/matitadep.ml +++ b/matita/matitadep.ml @@ -42,7 +42,6 @@ let main () = MatitaInit.load_configuration_file (); let include_paths = Helm_registry.get_list Helm_registry.string "matita.includes" in - let basedir = Helm_registry.get "matita.basedir" in List.iter (fun ma_file -> let ic = open_in ma_file in @@ -64,7 +63,7 @@ let main () = DependenciesParser.baseuri_of_script ~include_paths path in if not (Http_getter_storage.is_legacy baseuri) then let moo_file = - LibraryMisc.obj_file_of_baseuri ~basedir ~baseuri in + LibraryMisc.obj_file_of_baseuri ~baseuri ~writable:false in Hashtbl.add include_deps ma_file moo_file with Sys_error _ -> HLog.warn @@ -78,7 +77,7 @@ let main () = | None -> () | Some u -> Hashtbl.add include_deps file - (LibraryMisc.obj_file_of_baseuri ~basedir ~baseuri:u)) + (LibraryMisc.obj_file_of_baseuri ~baseuri:u ~writable:false)) uri_deps; List.iter (fun ma_file -> @@ -87,7 +86,7 @@ let main () = let deps = HExtlib.list_uniq deps in let deps = ma_file :: deps in let baseuri = Hashtbl.find baseuri_of ma_file in - let moo = LibraryMisc.obj_file_of_baseuri ~basedir ~baseuri in + let moo = LibraryMisc.obj_file_of_baseuri ~baseuri ~writable:false in Printf.printf "%s: %s\n" moo (String.concat " " deps); Printf.printf "%s: %s\n" (Pcre.replace ~pat:"ma$" ~templ:"mo" ma_file) moo) (Helm_registry.get_list Helm_registry.string "matita.args") diff --git a/matita/matitamake.ml b/matita/matitamake.ml index 36cf79b36..09bc6c70b 100644 --- a/matita/matitamake.ml +++ b/matita/matitamake.ml @@ -96,6 +96,14 @@ let main () = | true -> exit 0 | false -> exit 1 in + let publish_dev args = + if List.length args <> 1 then !usage (); + let name = (List.hd args) in + let dev = dev_of_name name in + match MK.publish_development dev with + | true -> exit 0 + | false -> exit 1 + in let target args = if List.length args < 1 then !usage (); let dev = dev_for_dir (Unix.getcwd ()) in @@ -110,6 +118,7 @@ let main () = "list", list_dev; "destroy", destroy_dev; "build", build_dev; + "publish", publish_dev; ] in usage := MatitaInit.die_usage; @@ -117,8 +126,12 @@ let main () = match args with | [] -> target [ "all" ] | s :: tl -> - try (List.assoc s params) tl - with Not_found -> if s.[0] = '-' then !usage () else target args + let f, args = + try + (List.assoc s params), tl + with Not_found -> + if s.[0] = '-' then (!usage ();assert false) else target, args + in + f args in parse (Helm_registry.get_list Helm_registry.string "matita.args") - diff --git a/matita/matitamakeLib.ml b/matita/matitamakeLib.ml index 38f812388..10a543869 100644 --- a/matita/matitamakeLib.ml +++ b/matita/matitamakeLib.ml @@ -163,8 +163,6 @@ let make chdir args = try Unix.chdir chdir; let cmd = String.concat " " ("make" :: List.map Filename.quote args) in - if Helm_registry.get_int "matita.verbosity" > 1 then - HLog.debug (sprintf "MATITAMAKE: %s" cmd); let rc = Unix.system cmd in Unix.chdir old; match rc with @@ -175,7 +173,12 @@ let make chdir args = logger `Warning ("Unix Error: " ^ cmd ^ ": " ^ err); false -let call_make development target make = +let call_make ?matita_flags development target make = + let matita_flags = + match matita_flags with + | None -> (try Sys.getenv "MATITA_FLAGS" with Not_found -> "") + | Some s -> s + in rebuild_makefile development; let makefile = 
makefile_for_development development in let nodb = @@ -185,14 +188,19 @@ let call_make development target make = let flags = flags @ if nodb then ["NODB=true"] else [] in let flags = try - flags @ [ sprintf "MATITA_FLAGS=%s" (Sys.getenv "MATITA_FLAGS") ] + flags @ [ sprintf "MATITA_FLAGS=%s" matita_flags ] with Not_found -> flags in - make development.root - (["--no-print-directory"; "-s"; "-k"; "-f"; makefile; target] - @ flags) + let args = + ["--no-print-directory"; "-s"; "-k"; "-f"; makefile; target] @ flags + in +(* + prerr_endline + ("callink make from "^development.root^" args: "^String.concat " " args); +*) + make development.root args -let build_development ?(target="all") development = - call_make development target make +let build_development ?matita_flags ?(target="all") development = + call_make ?matita_flags development target make (* not really good vt100 *) let vt100 s = @@ -257,15 +265,15 @@ let mk_maker refresh_cb = | Unix.Unix_error (_,"read",_) | Unix.Unix_error (_,"select",_) -> true) -let build_development_in_bg ?(target="all") refresh_cb development = - call_make development target (mk_maker refresh_cb) +let build_development_in_bg ?matita_flags ?(target="all") refresh_cb development = + call_make ?matita_flags development target (mk_maker refresh_cb) ;; -let clean_development development = - call_make development "clean" make +let clean_development ?matita_flags development = + call_make ?matita_flags development "clean" make -let clean_development_in_bg refresh_cb development = - call_make development "clean" (mk_maker refresh_cb) +let clean_development_in_bg ?matita_flags refresh_cb development = + call_make development ?matita_flags "clean" (mk_maker refresh_cb) let destroy_development_aux development clean_development = let delete_development development = @@ -298,12 +306,45 @@ let destroy_development_aux development clean_development = end; delete_development development -let destroy_development development = - destroy_development_aux development clean_development +let destroy_development ?matita_flags development = + destroy_development_aux development (clean_development ?matita_flags) -let destroy_development_in_bg refresh development = - destroy_development_aux development (clean_development_in_bg refresh) +let destroy_development_in_bg ?matita_flags refresh development = + destroy_development_aux development + (clean_development_in_bg refresh ?matita_flags ) let root_for_development development = development.root let name_for_development development = development.name +let publish_development_bstract build clean devel = + let matita_flags = "\"-system\"" in + HLog.message "cleaning the development before publishing"; + if clean ~matita_flags:"" devel then + begin + HLog.message "rebuilding the development in 'system' space"; + if build ~matita_flags devel then + begin + HLog.message "publishing succeded"; + true + end + else + begin + HLog.error "building process failed, reverting"; + if not (clean ~matita_flags devel) then + HLog.error "cleaning failed, end of the world (2)"; + false + end + end + else + (HLog.error "unable to clean the development, publishing failed"; false) + +let publish_development devel = + publish_development_bstract + (fun ~matita_flags devel -> build_development ~matita_flags devel) + (fun ~matita_flags devel -> clean_development ~matita_flags devel) devel +let publish_development_in_bg cb devel = + publish_development_bstract + (fun ~matita_flags devel -> build_development_in_bg cb ~matita_flags devel) + (fun ~matita_flags devel 
-> clean_development_in_bg cb ~matita_flags devel) + devel + diff --git a/matita/matitamakeLib.mli b/matita/matitamakeLib.mli index 4aaab47b1..98a46666e 100644 --- a/matita/matitamakeLib.mli +++ b/matita/matitamakeLib.mli @@ -29,13 +29,17 @@ type development * ask matitamake to record [dir] as the root for the development [name] *) val initialize_development: string -> string -> development option (* make target [default all] *) -val build_development: ?target:string -> development -> bool +val build_development: ?matita_flags:string -> ?target:string -> development -> bool (* make target [default all], the refresh cb is called after every output *) val build_development_in_bg: - ?target:string -> (unit -> unit) -> development -> bool + ?matita_flags:string -> ?target:string -> (unit -> unit) -> development -> bool (* make clean *) -val clean_development: development -> bool -val clean_development_in_bg: (unit -> unit) -> development -> bool +val clean_development: ?matita_flags:string -> development -> bool +val clean_development_in_bg: ?matita_flags:string -> (unit -> unit) -> development -> bool + +val publish_development_in_bg: (unit -> unit) -> development -> bool +val publish_development: development -> bool + (* return the development that handles dir *) val development_for_dir: string -> development option (* return the development *) @@ -43,8 +47,8 @@ val development_for_name: string -> development option (* return the known list of name, development_root *) val list_known_developments: unit -> (string * string ) list (* cleans the development, forgetting about it *) -val destroy_development: development -> unit -val destroy_development_in_bg: (unit -> unit) -> development -> unit +val destroy_development: ?matita_flags:string -> development -> unit +val destroy_development_in_bg: ?matita_flags:string -> (unit -> unit) -> development -> unit (* initialize internal data structures *) val initialize : unit -> unit (* gives back the root *)
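
The matitamake/matitamakeLib hunks above add the publish machinery end to end: new MatitamakeLib entry points, an optional ?matita_flags threaded through the make invocations, and a read-only/writable distinction enforced via Http_getter_storage. The stand-alone OCaml sketches below approximate those pieces; any name that does not appear in the patch is an illustrative stand-in, not part of the commit.

A client of the updated MatitamakeLib interface only needs the signatures declared in the .mli above. This fragment assumes the Matita sources and is not compilable on its own:

  (* clean and rebuild a named development in user space, then publish it
     to the system space *)
  let rebuild_and_publish name =
    match MatitamakeLib.development_for_name name with
    | None -> prerr_endline ("no development named " ^ name); false
    | Some dev ->
        ignore (MatitamakeLib.clean_development dev);
        if MatitamakeLib.build_development ~target:"all" dev
        then MatitamakeLib.publish_development dev
        else false

publish_development_bstract implements publish as: clean the development in user space, rebuild it with "-system" so the objects land in the (normally read-only) system repositories, and clean the system space again if that build fails. The same control flow in isolation, with build/clean passed as closures as in the patch and printing standing in for HLog:

  let publish ~build ~clean =
    let system_flags = "\"-system\"" in
    print_endline "cleaning the development before publishing";
    if not (clean ~matita_flags:"") then begin
      prerr_endline "unable to clean the development, publishing failed"; false
    end else begin
      print_endline "rebuilding the development in 'system' space";
      if build ~matita_flags:system_flags then begin
        print_endline "publishing succeeded"; true
      end else begin
        prerr_endline "building process failed, reverting";
        if not (clean ~matita_flags:system_flags) then
          prerr_endline "cleaning of the system space failed too";
        false
      end
    end

  let () =
    (* dummy build/clean closures, just to exercise the flow *)
    ignore (publish ~build:(fun ~matita_flags:_ -> true)
                    ~clean:(fun ~matita_flags:_ -> true))

call_make now accepts ?matita_flags and falls back to the MATITA_FLAGS environment variable, so publish can force "-system" while interactive builds keep honouring the user's environment. The defaulting logic in isolation:

  let resolve_matita_flags ?matita_flags () =
    match matita_flags with
    | Some s -> s
    | None -> (try Sys.getenv "MATITA_FLAGS" with Not_found -> "")

  let () =
    print_endline ("default: " ^ resolve_matita_flags ());
    print_endline ("forced: " ^ resolve_matita_flags ~matita_flags:"-system" ())

Finally, matitaScript.ml now refuses to (re)define a baseuri that lives under a read-only repository and reports the reason through the message-carrying ActionCancelled. A rough sketch of that guard, with a plain prefix test standing in for Http_getter_storage.is_read_only (the real check consults the configured ro/rw prefixes):

  exception ActionCancelled of string

  (* illustrative stand-in for Http_getter_storage.is_read_only *)
  let is_read_only ~ro_prefixes uri =
    List.exists
      (fun p ->
        String.length uri >= String.length p
        && String.sub uri 0 (String.length p) = p)
      ro_prefixes

  let set_baseuri ~ro_prefixes u =
    if is_read_only ~ro_prefixes u then
      raise (ActionCancelled ("baseuri " ^ u ^ " is readonly"));
    Printf.printf "baseuri set to %s\n" u

  let () = set_baseuri ~ro_prefixes:["cic:/matita/legacy/"] "cic:/matita/tests/"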