ifeq ($(DISTRIBUTED),yes)
library: library-stamp
library-stamp:
- $(MAKE) -C matita/ dist_library_clean dist_library
+ $(MAKE) -C matita/ dist_library
touch $@
endif
svn export components $(DISTDIR)/components
svn export matita $(DISTDIR)/matita
(cd $(DISTDIR) && rm -f $(CLEAN_ON_DIST))
- ln -fs matita.conf.xml.user $(DISTDIR)/matita/matita.conf.xml
cp $< $(DISTDIR)/configure
cp -r $(EXTRA_DIST) $(DISTDIR)
dist_mktarball:
(cd $(DISTDIR)/ \
&& ./configure \
&& $(MAKE) world \
- && $(MAKE) library \
&& $(MAKE) install DESTDIR=`pwd`/install)
.PHONY: dist dist_export dist_mktarball distcheck dist_extract dist_test dist_autotools
in
at_exit
(fun () ->
- if !profiling_printings () then
+ if !profiling_printings () && !total <> 0. then
prerr_endline
("!! TOTAL TIME SPENT IN " ^ s ^ ": " ^ string_of_float !total));
{ profile = profile }
let exists_remote uri = not_implemented "exists_remote"
(* </TODO> *)
-let resolve_remote uri =
+let resolve_remote ~writable uri =
(* deliver resolve request to http_getter *)
let doc =
- Http_getter_wget.get (sprintf "%sresolve?uri=%s" (getter_url ()) uri)
+ Http_getter_wget.get (sprintf "%sresolve?uri=%s&writable=%b" (getter_url ())
+ uri writable)
in
let res = ref Unknown in
let start_element tag attrs =
let uri = deref_index_theory uri in
Http_getter_storage.exists (uri ^ xml_suffix)
-let resolve uri =
+let resolve ~writable uri =
if remote () then
- resolve_remote uri
+ resolve_remote ~writable uri
else
let uri = deref_index_theory uri in
try
- Http_getter_storage.resolve (uri ^ xml_suffix)
+ Http_getter_storage.resolve ~writable (uri ^ xml_suffix)
+ with Http_getter_storage.Resource_not_found _ -> raise (Key_not_found uri)
+
+let filename ~writable uri =
+ if remote () then
+ raise (Key_not_found uri)
+ else
+ let uri = deref_index_theory uri in
+ try
+ Http_getter_storage.resolve
+ ~must_exists:false ~writable uri
with Http_getter_storage.Resource_not_found _ -> raise (Key_not_found uri)
let getxml uri =
(* Shorthands from now on *)
let getxml' uri = getxml (UriManager.string_of_uri uri)
-let resolve' uri = resolve (UriManager.string_of_uri uri)
+let resolve' ~writable uri = resolve ~writable (UriManager.string_of_uri uri)
let exists' uri = exists (UriManager.string_of_uri uri)
+let filename' ~writable uri = filename ~writable (UriManager.string_of_uri uri)
let tilde_expand_key k =
try
(** @raise Http_getter_types.Unresolvable_URI _
* @raise Http_getter_types.Key_not_found _ *)
-val resolve: string -> string (* uri -> url *)
+val resolve: writable:bool -> string -> string (* uri -> url *)
+
+ (** as resolve, but does not check if the resource exists
+ * @raise Http_getter_types.Key_not_found *)
+val filename: writable:bool -> string -> string (* uri -> url *)
val exists: string -> bool
(** {2 UriManager shorthands} *)
val getxml' : UriManager.uri -> string
-val resolve' : UriManager.uri -> string
+val resolve' : writable:bool -> UriManager.uri -> string
val exists' : UriManager.uri -> bool
+val filename' : writable:bool -> UriManager.uri -> string
(** {2 Misc} *)
let index_fname = "INDEX"
+(******************************* HELPERS **************************************)
+
let trailing_slash_RE = Pcre.regexp "/$"
let relative_RE_raw = "(^[^/]+(/[^/]+)*/?$)"
let relative_RE = Pcre.regexp relative_RE_raw
let gz_suffix = ".gz"
let gz_suffix_len = String.length gz_suffix
+ (* file:///bla -> bla, bla -> bla *)
let path_of_file_url url =
assert (Pcre.pmatch ~rex:file_scheme_RE url);
if Pcre.pmatch ~rex:relative_RE url then
else (* absolute path, add heading "/" if missing *)
"/" ^ (Pcre.replace ~rex:extended_file_scheme_RE url)
+let strip_gz_suffix fname =
+ if extension fname = gz_suffix then
+ String.sub fname 0 (String.length fname - gz_suffix_len)
+ else
+ fname
+
+let normalize_root uri = (* add trailing slash to roots *)
+ try
+ if uri.[String.length uri - 1] = ':' then uri ^ "/"
+ else uri
+ with Invalid_argument _ -> uri
+
+let remove_duplicates l =
+ Http_getter_misc.list_uniq (List.stable_sort Pervasives.compare l)
+
+let has_rdonly l = List.exists ((=) `Read_only) l
+let has_legacy l = List.exists ((=) `Legacy) l
+let is_readwrite attrs = (not (has_legacy attrs) && not (has_rdonly attrs))
+
+let is_file_schema url = Pcre.pmatch ~rex:file_scheme_RE url
+let is_http_schema url = Pcre.pmatch ~rex:http_scheme_RE url
+
+let is_empty_listing files =
+ List.for_all (fun s -> s.[String.length s - 1] = '/') files
+
+(************************* GLOBALS PREFIXES **********************************)
+
(** associative list regular expressions -> url prefixes
* sorted with longest prefixes first *)
-let prefix_map = lazy (
- let map_w_length =
- List.map
- (fun (uri_prefix, (url_prefix, attrs)) ->
- let uri_prefix = normalize_dir uri_prefix in
- let url_prefix = normalize_dir url_prefix in
- let regexp = Pcre.regexp ("^(" ^ Pcre.quote uri_prefix ^ ")") in
- (regexp, String.length uri_prefix, uri_prefix, url_prefix, attrs))
- (Lazy.force Http_getter_env.prefixes)
- in
- let decreasing_length (_, len1, _, _, _) (_, len2, _, _, _) =
- compare len2 len1 in
+let prefix_map_ref = ref (lazy (
List.map
- (fun (regexp, len, uri_prefix, url_prefix, attrs) ->
- (regexp, strip_trailing_slash uri_prefix, url_prefix, attrs))
- (List.fast_sort decreasing_length map_w_length))
+ (fun (uri_prefix, (url_prefix, attrs)) ->
+ let uri_prefix = normalize_dir uri_prefix in
+ let url_prefix = normalize_dir url_prefix in
+ let regexp = Pcre.regexp ("^(" ^ Pcre.quote uri_prefix ^ ")") in
+ regexp, strip_trailing_slash uri_prefix, url_prefix, attrs)
+ (List.rev (Lazy.force Http_getter_env.prefixes))))
+
+let prefix_map () = !prefix_map_ref
+ (** given an uri returns the prefixes for it *)
let lookup uri =
let matches =
- List.filter (fun (rex, _, _, _) -> Pcre.pmatch ~rex uri)
- (Lazy.force prefix_map) in
+ List.filter (fun (rex, _, _, _) -> Pcre.pmatch ~rex uri)
+ (Lazy.force (prefix_map ())) in
if matches = [] then raise (Unresolvable_URI uri);
matches
-let resolve_prefix uri =
- match lookup uri with
- | (rex, _, url_prefix, _) :: _ ->
- Pcre.replace_first ~rex ~templ:url_prefix uri
- | [] -> assert false
-
-let resolve_prefixes uri =
- let matches = lookup uri in
- List.map
- (fun (rex, _, url_prefix, _) ->
- Pcre.replace_first ~rex ~templ:url_prefix uri)
- matches
-
-let get_attrs uri =
- match lookup uri with
- | (_, _, _, attrs) :: _ -> attrs
- | [] -> assert false
-
-let is_legacy uri = List.exists ((=) `Legacy) (get_attrs uri)
-
-let is_read_only uri =
- is_legacy uri || List.exists ((=) `Read_only) (get_attrs uri)
+let get_attrs uri = List.map (fun (_, _, _, attrs) -> attrs) (lookup uri)
+(*************************** ACTIONS ******************************************)
+
let exists_http _ url =
Http_getter_wget.exists (url ^ gz_suffix) || Http_getter_wget.exists url
let exists_file _ fname =
Sys.file_exists (fname ^ gz_suffix) || Sys.file_exists fname
-let resolve_http _ url =
+let resolve_http ~must_exists _ url =
try
- List.find Http_getter_wget.exists [ url ^ gz_suffix; url ]
+ if must_exists then
+ List.find Http_getter_wget.exists [ url ^ gz_suffix; url ]
+ else
+ url
with Not_found -> raise Not_found'
-let resolve_file _ fname =
+let resolve_file ~must_exists _ fname =
try
- List.find Sys.file_exists [ fname ^ gz_suffix; fname ]
+ if must_exists then
+ List.find Sys.file_exists [ fname ^ gz_suffix; fname ]
+ else
+ fname
with Not_found -> raise Not_found'
-let strip_gz_suffix fname =
- if extension fname = gz_suffix then
- String.sub fname 0 (String.length fname - gz_suffix_len)
- else
- fname
-
-let remove_duplicates l =
- Http_getter_misc.list_uniq (List.fast_sort Pervasives.compare l)
-
let ls_file_single _ path_prefix =
let is_dir fname = (Unix.stat fname).Unix.st_kind = Unix.S_DIR in
let is_useless dir = try dir.[0] = '.' with _ -> false in
prerr_endline "Http_getter_storage.remove: not implemented for HTTP scheme";
assert false
+(**************************** RESOLUTION OF PREFIXES ************************)
+
+let resolve_prefixes write exists uri =
+ let exists_test new_uri =
+ if is_file_schema new_uri then
+ exists_file () (path_of_file_url new_uri)
+ else if is_http_schema new_uri then
+ exists_http () new_uri
+ else false
+ in
+ let rec aux = function
+ | (rex, _, url_prefix, attrs) :: tl ->
+ (match write, is_readwrite attrs, exists with
+ | true ,false, _ -> aux tl
+ | true ,true ,true
+ | false,_ ,true ->
+ let new_uri = (Pcre.replace_first ~rex ~templ:url_prefix uri) in
+ if exists_test new_uri then new_uri::aux tl else aux tl
+ | true ,true ,false
+ | false,_ ,false ->
+ (Pcre.replace_first ~rex ~templ:url_prefix uri) :: (aux tl))
+ | [] -> []
+ in
+ aux (lookup uri)
+
+let resolve_prefix w e u =
+ match resolve_prefixes w e u with
+ | hd :: _ -> hd
+ | [] ->
+ raise
+ (Resource_not_found
+ (Printf.sprintf "resolve_prefix write:%b exists:%b" w e,u))
+
+(* uncomment to debug prefix resolution *)
+(*
+let resolve_prefix w e u =
+ prerr_endline
+ ("XXX w=" ^ string_of_bool w ^ " e=" ^ string_of_bool e ^" :" ^ u);
+ let rc = resolve_prefix w e u in
+ prerr_endline ("YYY :" ^ rc ^ "\n");
+ rc
+*)
+
+(************************* DISPATCHERS ***************************************)
+
type 'a storage_method = {
name: string;
+ write: bool;
+ exists: bool;
file: string -> string -> 'a; (* unresolved uri, resolved uri *)
http: string -> string -> 'a; (* unresolved uri, resolved uri *)
}
-let normalize_root uri = (* add trailing slash to roots *)
- try
- if uri.[String.length uri - 1] = ':' then uri ^ "/"
- else uri
- with Invalid_argument _ -> uri
-
let invoke_method storage_method uri url =
try
- if Pcre.pmatch ~rex:file_scheme_RE url then
+ if is_file_schema url then
storage_method.file uri (path_of_file_url url)
- else if Pcre.pmatch ~rex:http_scheme_RE url then
+ else if is_http_schema url then
storage_method.http uri url
else
raise (Unsupported_scheme url)
with Not_found' -> raise (Resource_not_found (storage_method.name, uri))
-
+
let dispatch_single storage_method uri =
assert (extension uri <> gz_suffix);
let uri = normalize_root uri in
- let url = resolve_prefix uri in
+ let url = resolve_prefix storage_method.write storage_method.exists uri in
invoke_method storage_method uri url
let dispatch_multi storage_method uri =
- let urls = resolve_prefixes uri in
+ let urls = resolve_prefixes storage_method.write storage_method.exists uri in
let rec aux = function
| [] -> raise (Resource_not_found (storage_method.name, uri))
| url :: tl ->
in
aux urls
-let exists =
- dispatch_single { name = "exists"; file = exists_file; http = exists_http }
-
-let resolve =
- dispatch_single { name = "resolve"; file = resolve_file; http = resolve_http }
-
-let ls_single =
- dispatch_single { name = "ls"; file = ls_file_single; http = ls_http_single }
+let dispatch_all storage_method uri =
+ let urls = resolve_prefixes storage_method.write storage_method.exists uri in
+ List.map (fun url -> invoke_method storage_method uri url) urls
+
+(******************************** EXPORTED FUNCTIONS *************************)
+
+let exists s =
+ try
+ dispatch_single
+ { write = false;
+ name = "exists";
+ exists = true;
+ file = exists_file; http = exists_http; } s
+ with Resource_not_found _ -> false
+
+let resolve ?(must_exists=true) ~writable =
+ dispatch_single
+ { write = writable;
+ name="resolve";
+ exists = must_exists;
+ file = resolve_file ~must_exists;
+ http = resolve_http ~must_exists; }
let remove =
- dispatch_single { name = "remove"; file = remove_file; http = remove_http }
+ dispatch_single
+ { write = false;
+ name = "remove";
+ exists=true;
+ file = remove_file; http = remove_http; }
let filename ?(find = false) =
- if find then
- dispatch_multi { name = "filename"; file = get_file; http = get_http }
- else
- dispatch_single { name = "filename"; file = get_file; http = get_http }
+ (if find then dispatch_multi else dispatch_single)
+ { write = false;
+ name = "filename";
+ exists=true;
+ file = get_file; http = get_http; }
- (* ls_single performs ls only below a single prefix, but prefixes which have
- * common prefix (sorry) with a given one may need to be considered as well
- * for example: when doing "ls cic:/" we would like to see the "cic:/matita"
- * directory *)
let ls uri_prefix =
-(* prerr_endline ("Http_getter_storage.ls " ^ uri_prefix); *)
- let direct_results = ls_single uri_prefix in
+ let ls_all s =
+ try
+ dispatch_all
+ { write=false;
+ name = "ls";
+ exists=true;
+ file = ls_file_single; http = ls_http_single; } s
+ with Resource_not_found _ -> []
+ in
+ let direct_results = List.flatten (ls_all uri_prefix) in
List.fold_left
(fun results (_, uri_prefix', _, _) ->
if Filename.dirname uri_prefix' = strip_trailing_slash uri_prefix then
else
results)
direct_results
- (Lazy.force prefix_map)
+ (Lazy.force (prefix_map ()))
let clean_cache () =
ignore (Sys.command
(sprintf "rm -rf %s/" (Lazy.force Http_getter_env.cache_dir)))
+let list_writable_prefixes _ =
+ HExtlib.filter_map
+ (fun (_,_,url,attrs) ->
+ if is_readwrite attrs then
+ Some url
+ else
+ None)
+ (Lazy.force (prefix_map ()))
+
+let is_legacy uri = List.for_all has_legacy (get_attrs uri)
+
+(* implement this in a fast way! *)
+let is_empty buri =
+ let buri = strip_trailing_slash buri ^ "/" in
+ let files = ls buri in
+ is_empty_listing files
+
+let is_read_only uri =
+ let is_empty_dir path =
+ let files =
+ if is_file_schema path then
+ ls_file_single () (path_of_file_url path)
+ else if is_http_schema path then
+ ls_http_single () path
+ else
+ assert false
+ in
+ is_empty_listing files
+ in
+ let rec aux found_writable = function
+ | (rex, _, url_prefix, attrs)::tl ->
+ let new_url = (Pcre.replace_first ~rex ~templ:url_prefix uri) in
+ let rdonly = has_legacy attrs || has_rdonly attrs in
+ (match rdonly, is_empty_dir new_url, found_writable with
+ | true, false, _ -> true
+ | true, true, _ -> aux found_writable tl
+ | false, _, _ -> aux true tl)
+ | [] -> not found_writable (* if found_writable then false else true *)
+ in
+ aux false (lookup uri)
+
+let activate_system_mode () =
+ let map = Lazy.force (prefix_map ()) in
+ let map =
+ HExtlib.filter_map
+ (fun ((rex, urip, urlp, attrs) as entry) ->
+ if has_legacy attrs then
+ Some entry
+ else if has_rdonly attrs then
+ Some (rex, urip, urlp, List.filter ((<>) `Read_only) attrs)
+ else
+ None)
+ map
+ in
+ let map = map in (* force the filtered map eagerly so the new lazy captures the value, not a thunk over the old prefix_map_ref *)
+ prefix_map_ref := (lazy map)
+
+(* eof *)
val remove: string -> unit
val exists: string -> bool
-val resolve: string -> string
+val resolve: ?must_exists:bool -> writable:bool -> string -> string
(* val get_attrs: string -> Http_getter_types.prefix_attr list *)
val is_read_only: string -> bool
val is_legacy: string -> bool
+val is_empty: string -> bool
val clean_cache: unit -> unit
+val activate_system_mode: unit -> unit
+val list_writable_prefixes: unit -> string list
grafiteTypes.cmx: grafiteTypes.cmi
grafiteSync.cmo: grafiteTypes.cmi grafiteSync.cmi
grafiteSync.cmx: grafiteTypes.cmx grafiteSync.cmi
-grafiteMisc.cmo: grafiteMisc.cmi
-grafiteMisc.cmx: grafiteMisc.cmi
-grafiteEngine.cmo: grafiteTypes.cmi grafiteSync.cmi grafiteMisc.cmi \
- grafiteEngine.cmi
-grafiteEngine.cmx: grafiteTypes.cmx grafiteSync.cmx grafiteMisc.cmx \
- grafiteEngine.cmi
+grafiteEngine.cmo: grafiteTypes.cmi grafiteSync.cmi grafiteEngine.cmi
+grafiteEngine.cmx: grafiteTypes.cmx grafiteSync.cmx grafiteEngine.cmi
INTERFACE_FILES = \
grafiteTypes.mli \
grafiteSync.mli \
- grafiteMisc.mli \
grafiteEngine.mli \
$(NULL)
IMPLEMENTATION_FILES = $(INTERFACE_FILES:%.mli=%.ml)
}
let eval_coercion status ~add_composites uri =
- let basedir = Helm_registry.get "matita.basedir" in
let status,compounds =
- GrafiteSync.add_coercion ~basedir ~add_composites refinement_toolkit status uri in
+ GrafiteSync.add_coercion ~add_composites refinement_toolkit status uri in
let moo_content = coercion_moo_statement_of uri in
let status = GrafiteTypes.add_moo_content [moo_content] status in
{status with GrafiteTypes.proof_status = GrafiteTypes.No_proof},
lemmas
let add_obj uri obj status =
- let basedir = Helm_registry.get "matita.basedir" in
- let status,lemmas = GrafiteSync.add_obj ~basedir refinement_toolkit uri obj status in
+ let status,lemmas = GrafiteSync.add_obj refinement_toolkit uri obj status in
status, lemmas
let rec eval_command = {ec_go = fun ~disambiguate_command opts status cmd ->
let status,cmd = disambiguate_command status cmd in
- let basedir = Helm_registry.get "matita.basedir" in
let status,uris =
match cmd with
| GrafiteAst.Default (loc, what, uris) as cmd ->
LibraryObjects.set_default what uris;
GrafiteTypes.add_moo_content [cmd] status,[]
| GrafiteAst.Include (loc, baseuri) ->
- let moopath = LibraryMisc.obj_file_of_baseuri ~basedir ~baseuri in
- if not (Sys.file_exists moopath) then
- raise (IncludedFileNotCompiled moopath);
+ let moopath_rw, moopath_r =
+ LibraryMisc.obj_file_of_baseuri ~baseuri ~writable:true,
+ LibraryMisc.obj_file_of_baseuri ~baseuri ~writable:false
+ in
+ let moopath =
+ if Sys.file_exists moopath_r then moopath_r else
+ if Sys.file_exists moopath_rw then moopath_rw else
+ raise (IncludedFileNotCompiled moopath_rw)
+ in
let status = eval_from_moo.efm_go status moopath in
status,[]
| GrafiteAst.Set (loc, name, value) ->
HLog.error (sprintf "uri %s belongs to a read-only repository" value);
raise (ReadOnlyUri value)
end;
- if not (GrafiteMisc.is_empty value) && opts.clean_baseuri then begin
+ if not (Http_getter_storage.is_empty value) &&
+ opts.clean_baseuri
+ then begin
HLog.message ("baseuri " ^ value ^ " is not empty");
HLog.message ("cleaning baseuri " ^ value);
- LibraryClean.clean_baseuris ~basedir [value];
+ LibraryClean.clean_baseuris [value];
+ assert (Http_getter_storage.is_empty value);
end;
end;
GrafiteTypes.set_option status name value,[]
+++ /dev/null
-(* Copyright (C) 2004-2005, HELM Team.
- *
- * This file is part of HELM, an Hypertextual, Electronic
- * Library of Mathematics, developed at the Computer Science
- * Department, University of Bologna, Italy.
- *
- * HELM is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License
- * as published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- * HELM is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with HELM; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place - Suite 330, Boston,
- * MA 02111-1307, USA.
- *
- * For details, see the HELM World-Wide-Web page,
- * http://helm.cs.unibo.it/
- *)
-
-(* $Id$ *)
-
-let is_empty buri =
- List.for_all
- (function
- Http_getter_types.Ls_section _ -> true
- | Http_getter_types.Ls_object _ -> false)
- (Http_getter.ls (Http_getter_misc.strip_trailing_slash buri ^ "/"))
+++ /dev/null
-(* Copyright (C) 2004-2005, HELM Team.
- *
- * This file is part of HELM, an Hypertextual, Electronic
- * Library of Mathematics, developed at the Computer Science
- * Department, University of Bologna, Italy.
- *
- * HELM is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License
- * as published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- * HELM is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with HELM; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place - Suite 330, Boston,
- * MA 02111-1307, USA.
- *
- * For details, see the HELM World-Wide-Web page,
- * http://helm.cs.unibo.it/
- *)
-
- (** check whether no objects are defined below a given baseuri *)
-val is_empty: string -> bool
open Printf
-let add_obj refinement_toolkit ~basedir uri obj status =
- let lemmas = LibrarySync.add_obj refinement_toolkit uri obj basedir in
+let add_obj refinement_toolkit uri obj status =
+ let lemmas = LibrarySync.add_obj refinement_toolkit uri obj in
{status with GrafiteTypes.objects = uri::status.GrafiteTypes.objects},
lemmas
-let add_coercion refinement_toolkit ~basedir ~add_composites status uri =
- let compounds = LibrarySync.add_coercion ~add_composites ~basedir refinement_toolkit uri in
+let add_coercion refinement_toolkit ~add_composites status uri =
+ let compounds = LibrarySync.add_coercion ~add_composites refinement_toolkit uri in
{status with GrafiteTypes.coercions = uri :: status.GrafiteTypes.coercions},
compounds
val add_obj:
RefinementTool.kit ->
- basedir:string -> UriManager.uri -> Cic.obj -> GrafiteTypes.status ->
+ UriManager.uri -> Cic.obj -> GrafiteTypes.status ->
GrafiteTypes.status * UriManager.uri list
val add_coercion:
RefinementTool.kit ->
- basedir:string -> add_composites:bool -> GrafiteTypes.status ->
+ add_composites:bool -> GrafiteTypes.status ->
UriManager.uri ->
GrafiteTypes.status * UriManager.uri list
let notation_ids' = CicNotation.process_notation cmd in
let status =
{ status with notation_ids = notation_ids' @ status.notation_ids } in
- let basedir = Helm_registry.get "matita.basedir" in
match cmd with
| LexiconAst.Include (loc, baseuri) ->
- let lexiconpath = LibraryMisc.lexicon_file_of_baseuri ~basedir ~baseuri in
- if not (Sys.file_exists lexiconpath) then
- raise (IncludedFileNotCompiled lexiconpath);
+ let lexiconpath_rw, lexiconpath_r =
+ LibraryMisc.lexicon_file_of_baseuri ~writable:true ~baseuri,
+ LibraryMisc.lexicon_file_of_baseuri ~writable:false ~baseuri
+ in
+ let lexiconpath =
+ if Sys.file_exists lexiconpath_rw then lexiconpath_rw else
+ if Sys.file_exists lexiconpath_r then lexiconpath_r else
+ raise (IncludedFileNotCompiled lexiconpath_rw)
+ in
let lexicon = LexiconMarshal.load_lexicon lexiconpath in
let status = List.fold_left eval_command status lexicon in
if Helm_registry.get_bool "db.nodb" then
- let metadatapath = LibraryMisc.metadata_file_of_baseuri ~basedir ~baseuri in
- if not (Sys.file_exists metadatapath) then
- raise (MetadataNotFound metadatapath)
- else
- add_metadata (LibraryNoDb.load_metadata ~fname:metadatapath) status
+ let metadatapath_rw, metadatapath_r =
+ LibraryMisc.metadata_file_of_baseuri ~baseuri ~writable:true,
+ LibraryMisc.metadata_file_of_baseuri ~baseuri ~writable:false
+ in
+ let metadatapath =
+ if Sys.file_exists metadatapath_rw then metadatapath_rw else
+ if Sys.file_exists metadatapath_r then metadatapath_r else
+ raise (MetadataNotFound metadatapath_rw)
+ in
+ add_metadata (LibraryNoDb.load_metadata ~fname:metadatapath) status
else
status
| LexiconAst.Alias (loc, spec) ->
String.sub url 7 (String.length url - 7) (* remove heading "file:///" *)
)
-let close_nodb ~basedir buris =
+let close_nodb buris =
let rev_deps = Hashtbl.create 97 in
let all_metadata =
HExtlib.find ~test:(fun name -> Filename.check_suffix name ".metadata")
(fun path ->
let metadata = LibraryNoDb.load_metadata ~fname:path in
let baseuri_of_current_metadata =
+ prerr_endline "ERROR, add to the getter reverse lookup";
+ let basedir = "/fake" in
let dirname = Filename.dirname path in
let basedirlen = String.length basedir in
assert (String.sub dirname 0 basedirlen = basedir);
in
objects_to_remove
-let clean_baseuris ?(verbose=true) ~basedir buris =
+let clean_baseuris ?(verbose=true) buris =
Hashtbl.clear cache_of_processed_baseuri;
let buris = List.map Http_getter_misc.strip_trailing_slash buris in
debug_prerr "clean_baseuris called on:";
List.iter debug_prerr buris;
let l =
if Helm_registry.get_bool "db.nodb" then
- close_nodb ~basedir buris
+ close_nodb buris
else
close_db [] buris
in
if debug then
List.iter (fun u -> debug_prerr (UriManager.string_of_uri u)) l;
List.iter
- (fun buri ->
- HExtlib.safe_remove (LibraryMisc.obj_file_of_baseuri basedir buri);
- HExtlib.safe_remove (LibraryMisc.metadata_file_of_baseuri basedir buri);
- HExtlib.safe_remove (LibraryMisc.lexicon_file_of_baseuri basedir buri))
+ (fun baseuri ->
+ try
+ HExtlib.safe_remove
+ (LibraryMisc.obj_file_of_baseuri ~writable:true ~baseuri);
+ HExtlib.safe_remove
+ (LibraryMisc.metadata_file_of_baseuri ~writable:true ~baseuri);
+ HExtlib.safe_remove
+ (LibraryMisc.lexicon_file_of_baseuri ~writable:true ~baseuri)
+ with Http_getter_types.Key_not_found _ -> ())
(HExtlib.list_uniq (List.fast_sort Pervasives.compare
(List.map (UriManager.buri_of_uri) l)));
List.iter
* http://helm.cs.unibo.it/
*)
-val clean_baseuris : ?verbose:bool -> basedir:string -> string list -> unit
+val clean_baseuris : ?verbose:bool -> string list -> unit
List.iter
(fun suffix ->
try
- HExtlib.safe_remove (Http_getter.resolve (uri ^ suffix))
+ HExtlib.safe_remove
+ (Http_getter.resolve ~writable:true (uri ^ suffix))
with Http_getter_types.Key_not_found _ -> ())
[""; ".body"; ".types"])
owned_uris;
(* $Id$ *)
-let obj_file_of_baseuri ~basedir ~baseuri =
- let path = basedir ^ "/xml" ^ Pcre.replace ~pat:"^cic:" ~templ:"" baseuri in
- path ^ ".moo"
+let resolve ~writable baseuri = Http_getter.filename ~writable baseuri
-let lexicon_file_of_baseuri ~basedir ~baseuri =
- let path = basedir ^ "/xml" ^ Pcre.replace ~pat:"^cic:" ~templ:"" baseuri in
- path ^ ".lexicon"
+let obj_file_of_baseuri ~writable ~baseuri =
+ resolve ~writable baseuri ^ ".moo"
+let lexicon_file_of_baseuri ~writable ~baseuri =
+ resolve ~writable baseuri ^ ".lexicon"
+let metadata_file_of_baseuri ~writable ~baseuri =
+ resolve ~writable baseuri ^ ".metadata"
-let metadata_file_of_baseuri ~basedir ~baseuri =
- let path = basedir ^ "/xml" ^ Pcre.replace ~pat:"^cic:" ~templ:"" baseuri in
- path ^ ".metadata"
* http://helm.cs.unibo.it/
*)
-val obj_file_of_baseuri: basedir:string -> baseuri:string -> string
-val lexicon_file_of_baseuri: basedir:string -> baseuri:string -> string
-val metadata_file_of_baseuri: basedir:string -> baseuri:string -> string
+val obj_file_of_baseuri: writable:bool -> baseuri:string -> string
+val lexicon_file_of_baseuri: writable:bool -> baseuri:string -> string
+val metadata_file_of_baseuri: writable:bool -> baseuri:string -> string
let univgraphuri = UriManager.univgraphuri_of_uri uri in
innertypesuri,bodyuri,univgraphuri
-let paths_and_uris_of_obj uri ~basedir =
- let basedir = basedir ^ "/xml" in
+let paths_and_uris_of_obj uri =
+ let resolved = Http_getter.filename' ~writable:true uri in
+ let basepath = Filename.dirname resolved in
let innertypesuri, bodyuri, univgraphuri = uris_of_obj uri in
- let innertypesfilename = Str.replace_first (Str.regexp "^cic:") ""
- (UriManager.string_of_uri innertypesuri) ^ ".xml.gz" in
- let innertypespath = basedir ^ "/" ^ innertypesfilename in
- let xmlfilename = Str.replace_first (Str.regexp "^cic:/") ""
- (UriManager.string_of_uri uri) ^ ".xml.gz" in
- let xmlpath = basedir ^ "/" ^ xmlfilename in
- let xmlbodyfilename = Str.replace_first (Str.regexp "^cic:/") ""
- (UriManager.string_of_uri uri) ^ ".body.xml.gz" in
- let xmlbodypath = basedir ^ "/" ^ xmlbodyfilename in
- let xmlunivgraphfilename = Str.replace_first (Str.regexp "^cic:/") ""
- (UriManager.string_of_uri univgraphuri) ^ ".xml.gz" in
- let xmlunivgraphpath = basedir ^ "/" ^ xmlunivgraphfilename in
+ let innertypesfilename=(UriManager.nameext_of_uri innertypesuri)^".xml.gz"in
+ let innertypespath = basepath ^ "/" ^ innertypesfilename in
+ let xmlfilename = (UriManager.nameext_of_uri uri) ^ ".xml.gz" in
+ let xmlpath = basepath ^ "/" ^ xmlfilename in
+ let xmlbodyfilename = (UriManager.nameext_of_uri uri) ^ ".body.xml.gz" in
+ let xmlbodypath = basepath ^ "/" ^ xmlbodyfilename in
+ let xmlunivgraphfilename=(UriManager.nameext_of_uri univgraphuri)^".xml.gz"in
+ let xmlunivgraphpath = basepath ^ "/" ^ xmlunivgraphfilename in
xmlpath, xmlbodypath, innertypespath, bodyuri, innertypesuri,
xmlunivgraphpath, univgraphuri
-let save_object_to_disk ~basedir uri obj ugraph univlist =
+let save_object_to_disk uri obj ugraph univlist =
let ensure_path_exists path =
let dir = Filename.dirname path in
HExtlib.mkdir dir
in
let xmlpath, xmlbodypath, innertypespath, bodyuri, innertypesuri,
xmlunivgraphpath, univgraphuri =
- paths_and_uris_of_obj uri basedir
+ paths_and_uris_of_obj uri
in
List.iter HExtlib.mkdir (List.map Filename.dirname [xmlpath]);
(* now write to disk *)
fun ~dbd ~uri ->
profiler.HExtlib.profile (fun uri -> MetadataDb.index_obj ~dbd ~uri) uri
-let add_single_obj uri obj refinement_toolkit ~basedir =
+let add_single_obj uri obj refinement_toolkit =
let module RT = RefinementTool in
let obj =
if (*List.mem `Generated (CicUtil.attributes_of_obj obj) &&*)
index_obj ~dbd ~uri; (* 2 must be in the env *)
try
(*3*)
- let new_stuff = save_object_to_disk ~basedir uri obj ugraph univlist in
+ let new_stuff = save_object_to_disk uri obj ugraph univlist in
try
HLog.message
(Printf.sprintf "%s defined" (UriManager.string_of_uri uri))
List.iter
(fun uri ->
(try
- let file = Http_getter.resolve' uri in
+ let file = Http_getter.resolve' ~writable:true uri in
HExtlib.safe_remove file;
HExtlib.rmdir_descend (Filename.dirname file)
with Http_getter_types.Key_not_found _ -> ());
(*** GENERATION OF AUXILIARY LEMMAS ***)
-let generate_elimination_principles ~basedir uri refinement_toolkit =
+let generate_elimination_principles uri refinement_toolkit =
let uris = ref [] in
let elim sort =
try
let uri,obj = CicElim.elim_of ~sort uri 0 in
- add_single_obj uri obj refinement_toolkit ~basedir;
+ add_single_obj uri obj refinement_toolkit;
uris := uri :: !uris
with CicElim.Can_t_eliminate -> ()
in
UriManager.UriHashtbl.clear coercion_hashtbl;
CoercDb.remove_coercion (fun (_,_,u1) -> true)
-let add_coercion ~basedir ~add_composites refinement_toolkit uri =
+let add_coercion ~add_composites refinement_toolkit uri =
let coer_ty,_ =
let coer = CicUtil.term_of_uri uri in
CicTypeChecker.type_of_aux' [] [] coer CicUniv.empty_ugraph
if add_composites then
List.fold_left
(fun acc (_,_,uri,obj) ->
- add_single_obj ~basedir uri obj refinement_toolkit;
+ add_single_obj uri obj refinement_toolkit;
uri::acc)
composite_uris new_coercions
else
Not_found -> () (* mhh..... *)
-let generate_projections ~basedir refinement_toolkit uri fields =
+let generate_projections refinement_toolkit uri fields =
let uris = ref [] in
let projections = CicRecord.projections_of uri (List.map fst fields) in
try
CicTypeChecker.type_of_aux' [] [] bo CicUniv.empty_ugraph in
let attrs = [`Class `Projection; `Generated] in
let obj = Cic.Constant (name,Some bo,ty,[],attrs) in
- add_single_obj ~basedir uri obj refinement_toolkit;
+ add_single_obj uri obj refinement_toolkit;
let composites =
if coercion then
- add_coercion ~basedir ~add_composites:true refinement_toolkit uri
+ add_coercion ~add_composites:true refinement_toolkit uri
else
[]
in
List.iter remove_single_obj !uris;
raise exn
-
let build_inversion_principle = ref (fun a b -> assert false);;
-let generate_inversion refinement_toolkit ~basedir uri obj =
+let generate_inversion refinement_toolkit uri obj =
match !build_inversion_principle uri obj with
None -> []
| Some (ind_uri,ind_obj) ->
- add_single_obj ind_uri ind_obj refinement_toolkit ~basedir;
+ add_single_obj ind_uri ind_obj refinement_toolkit;
[ind_uri]
-
-let add_obj refinement_toolkit uri obj ~basedir =
- add_single_obj uri obj refinement_toolkit ~basedir;
+let add_obj refinement_toolkit uri obj =
+ add_single_obj uri obj refinement_toolkit;
let uris = ref [] in
try
begin
| Cic.Constant _ -> ()
| Cic.InductiveDefinition (_,_,_,attrs) ->
uris := !uris @
- generate_elimination_principles ~basedir uri refinement_toolkit;
- uris := !uris @ generate_inversion refinement_toolkit ~basedir uri obj;
+ generate_elimination_principles uri refinement_toolkit;
+ (* uris := !uris @ generate_inversion refinement_toolkit uri obj; *)
let rec get_record_attrs =
function
| [] -> None
| None -> () (* not a record *)
| Some fields ->
uris := !uris @
- (generate_projections ~basedir refinement_toolkit uri fields))
+ (generate_projections refinement_toolkit uri fields))
| Cic.CurrentProof _
| Cic.Variable _ -> assert false
end;
(* it returns the list of the uris of the auxiliary lemmas generated *)
val add_obj:
RefinementTool.kit ->
- UriManager.uri -> Cic.obj -> basedir:string ->
+ UriManager.uri -> Cic.obj ->
UriManager.uri list
(* inverse of add_obj; *)
(* is true are added to the library. *)
(* The list of added objects is returned. *)
val add_coercion:
- basedir:string -> add_composites:bool ->
+ add_composites:bool ->
RefinementTool.kit -> UriManager.uri ->
UriManager.uri list
hits refObj ]
+let move_content (name1, tbl1) (name2, tbl2) buri =
+ assert (tbl1 = tbl2);
+ sprintf
+    "INSERT INTO %s SELECT * FROM %s WHERE source LIKE \"%s%%\";"
+ name2 name1 buri
+
+
+
* @param hits name of the hits table *)
val fill_hits: string -> string -> string list
+(** [move_content t1 t2 buri]
+ * moves all the tuples with 'source' that match regex '^buri' from t1 to t2
+ * *)
+val move_content: (string * tbl) -> (string * tbl) -> string -> string
+
let index1 = String.rindex_from uri xpointer_offset '/' + 1 in
let index2 = String.rindex uri '.' in
String.sub uri index1 (index2 - index1)
+
+let nameext_of_uri (uri, _) =
+ let xpointer_offset, mah =
+ try String.rindex uri '#', 0 with Not_found -> String.length uri - 1, 1
+ in
+ let index1 = String.rindex_from uri xpointer_offset '/' + 1 in
+ String.sub uri index1 (xpointer_offset - index1 + mah)
let buri_of_uri (uri,_) =
let xpointer_offset =
val string_of_uri : uri -> string (* complete uri *)
val name_of_uri : uri -> string (* name only (without extension)*)
+val nameext_of_uri : uri -> string (* name only (with extension)*)
val buri_of_uri : uri -> string (* base uri only, without trailing '/' *)
(* given an uri, returns the uri of the corresponding cic file, *)
[ --with-runtime-dir Runtime directory (current working directory if not given)],
[ RT_BASE_DIR="${withval}" ],
[ RT_BASE_DIR="$RT_BASE_DIR_DEFAULT" ])
-AC_MSG_RESULT($RT_BASE_DIR)
+MSG="$RT_BASE_DIR"
+if test "yes" = "$RT_BASE_DIR"; then
+ MSG="
+** error: **
+** empty --with-runtime-dir argument, please use --with-runtime-dir=value **"
+fi
+AC_MSG_RESULT($MSG)
AC_MSG_CHECKING(--with-dbhost argument)
AC_ARG_WITH(dbhost,
[ --with-dbhost SQL database hostname],
[ DBHOST="${withval}" ],
[ DBHOST="$DEFAULT_DBHOST" ])
-AC_MSG_RESULT($DBHOST)
+MSG="$DBHOST"
+if test "yes" = "$DBHOST"; then
+ MSG="
+** error: **
+** empty --with-dbhost argument, please use --with-dbhost=value **"
+fi
+AC_MSG_RESULT($MSG)
AC_SUBST(CAMLP4O)
AC_SUBST(DBHOST)
AC_OUTPUT([
components/extlib/componentsConf.ml
- matita/matita.conf.xml.devel
- matita/matita.conf.xml.user
- matita/matita.conf.xml.build
+ matita/matita.conf.xml
matita/buildTimeConf.ml
matita/gtkmathview.matita.conf.xml
matita/help/C/version.txt
REQUIRES = helm-getter helm-logger helm-registry netstring
COMMONOPTS = -package "$(REQUIRES)" -pp camlp4o -thread
-OCAMLFIND = OCAMLPATH=../ocaml/METAS ocamlfind
+OCAMLFIND = OCAMLPATH=../../components/METAS ocamlfind
OCAMLC = $(OCAMLFIND) ocamlc -g $(COMMONOPTS)
OCAMLOPT = $(OCAMLFIND) opt $(COMMONOPTS)
let return_help outchan = return_html_raw (Http_getter.help ()) outchan
-let return_resolve uri outchan =
+let return_resolve writable uri outchan =
try
return_xml_raw
- (sprintf "<url value=\"%s\" />\n" (Http_getter.resolve uri))
+ (sprintf "<url value=\"%s\" />\n" (Http_getter.resolve ~writable uri))
outchan
with
| Unresolvable_URI _ -> return_xml_raw "<unresolvable />\n" outchan
| "/getxml" ->
let uri = req#param "uri" in
let fname = Http_getter.getxml uri in (* local name, in cache *)
- let remote_name = Http_getter.resolve uri in (* remote name *)
+ (* remote name *)
+ let remote_name = Http_getter.resolve ~writable:false uri in
let src_enc = if is_gzip fname then `Gzipped else `Normal in
let enc = parse_enc req in
let fname, cleanup = convert_file ~from_enc:src_enc ~to_enc:enc fname in
| "/getdtd" ->
let fname = Http_getter.getdtd (req#param "uri") in
respond_dtd (parse_patch req) fname outchan
- | "/resolve" -> return_resolve (req#param "uri") outchan
+ | "/resolve" ->
+ let writable =
+ match req#param ~default:"false" "writable" with
+ | "true" -> true
+ | _ -> false
+ in
+ return_resolve writable (req#param "uri") outchan
| "/clean_cache" ->
Http_getter.clean_cache ();
return_html_msg "Done." outchan
endif
matita: matita.ml $(LIB_DEPS) $(CMOS)
- @echo "OCAMLC $<"
+ @echo " OCAMLC $<"
$(H)$(OCAMLC) $(PKGS) -linkpkg -o $@ $(CMOS) matita.ml
matita.opt: matita.ml $(LIBX_DEPS) $(CMXS)
- @echo "OCAMLOPT $<"
+ @echo " OCAMLOPT $<"
$(H)$(OCAMLOPT) $(PKGS) -linkpkg -o $@ $(CMXS) matita.ml
dump_moo: dump_moo.ml buildTimeConf.cmo
- @echo "OCAMLC $<"
+ @echo " OCAMLC $<"
$(H)$(OCAMLC) $(PKGS) -linkpkg -o $@ buildTimeConf.cmo $<
dump_moo.opt: dump_moo.ml buildTimeConf.cmx
@echo "OCAMLOPT $<"
$(H)$(OCAMLOPT) $(PKGS) -linkpkg -o $@ buildTimeConf.cmx $<
matitac: matitac.ml $(CLIB_DEPS) $(CCMOS) $(MAINCMOS)
- @echo "OCAMLC $<"
+ @echo " OCAMLC $<"
$(H)$(OCAMLC) $(CPKGS) -linkpkg -o $@ $(CCMOS) $(MAINCMOS) matitac.ml
matitac.opt: matitac.ml $(CLIBX_DEPS) $(CCMXS) $(MAINCMXS)
- @echo "OCAMLOPT $<"
+ @echo " OCAMLOPT $<"
$(H)$(OCAMLOPT) $(CPKGS) -linkpkg -o $@ $(CCMXS) $(MAINCMXS) matitac.ml
matitatop: matitatop.ml $(CLIB_DEPS) $(CCMOS)
- @echo "OCAMLC $<"
+ @echo " OCAMLC $<"
$(H)$(OCAMLC) $(CPKGS) -linkpkg -o $@ toplevellib.cma $(CCMOS) $<
matitadep: matitac
ifeq ($(DISTRIBUTED),yes)
dist_library: dist_library@library
-dist_library_clean:
- @echo "MATITACLEAN -system all"
- $(H)./matitaclean$(BEST_EXT) \
- -system -conffile `pwd`/matita.conf.xml.build all
dist_library@%:
- @echo "MATITAMAKE -system init"
+ @echo "MATITAMAKE init $*"
$(H)MATITA_RT_BASE_DIR=`pwd` \
- MATITA_FLAGS="-system -conffile `pwd`/matita.conf.xml.build" \
- ./matitamake$(BEST_EXT) -conffile `pwd`/matita.conf.xml.build \
+ MATITA_FLAGS="-conffile `pwd`/matita.conf.xml" \
+ ./matitamake$(BEST_EXT) -conffile `pwd`/matita.conf.xml \
init $* `pwd`/$*
- @echo "MATITAMAKE -system build"
+ @echo "MATITAMAKE publish $*"
$(H)MATITA_RT_BASE_DIR=`pwd` \
- MATITA_FLAGS="-system -conffile `pwd`/matita.conf.xml.build" \
- ./matitamake$(BEST_EXT) -conffile `pwd`/matita.conf.xml.build \
- build $*
+ MATITA_FLAGS="-conffile `pwd`/matita.conf.xml" \
+ ./matitamake$(BEST_EXT) -conffile `pwd`/matita.conf.xml \
+ publish $*
+ @echo "MATITAMAKE destroy $*"
+ $(H)MATITA_RT_BASE_DIR=`pwd` \
+ MATITA_FLAGS="-conffile `pwd`/matita.conf.xml" \
+ ./matitamake$(BEST_EXT) -conffile `pwd`/matita.conf.xml \
+ destroy $*
touch $@
endif
matita.ma.templ \
core_notation.moo \
matita.conf.xml \
- matita.conf.xml.user \
closed.xml \
gtkmathview.matita.conf.xml \
template_makefile.in \
AUTHORS \
LICENSE \
$(NULL)
+
+
ifeq ($(HAVE_OCAMLOPT),yes)
-INSTALL_STUFF += $(PROGRAMS_OPT)
+INSTALL_STUFF_BIN = $(PROGRAMS_OPT)
else
-INSTALL_STUFF += $(PROGRAMS_BYTE)
+INSTALL_STUFF_BIN = $(PROGRAMS_BYTE)
endif
-install:
- # install main dir and executables
- install -d $(DESTDIR)
+install: install_preliminaries
+ #dist_library
+
+install_preliminaries:
+ install -d $(DESTDIR)/ma/
cp -a $(INSTALL_STUFF) $(DESTDIR)
- # install the library and corresponding scripts
- if [ -d $(DESTDIR)/library ]; then rm -rf $(DESTDIR)/library; fi
- cp -a .matita/xml/matita/ $(DESTDIR)/library/
- if [ -d $(DESTDIR)/ma ]; then rm -rf $(DESTDIR)/ma; fi
- install -d $(DESTDIR)/ma
+ install -s $(INSTALL_STUFF_BIN) $(DESTDIR)
ifeq ($(HAVE_OCAMLOPT),yes)
- for p in $(PROGRAMS_BYTE); do ln -s $$p.opt $(DESTDIR)/$$p; done
+ for p in $(PROGRAMS_BYTE); do ln -fs $$p.opt $(DESTDIR)/$$p; done
endif
- cp -a library/ $(DESTDIR)/ma/stdlib/
- cp -a contribs/ $(DESTDIR)/ma/contribs/
+ cp -a library/ $(DESTDIR)/ma/standard-library
+ cp -a contribs/ $(DESTDIR)/ma/
+
uninstall:
rm -rf $(DESTDIR)
include .depend
%.cmi: %.mli
- @echo "OCAMLC $<"
+ @echo " OCAMLC $<"
$(H)$(OCAMLC) $(PKGS) -c $<
%.cmo %.cmi: %.ml
- @echo "OCAMLC $<"
+ @echo " OCAMLC $<"
$(H)$(OCAMLC) $(PKGS) -c $<
%.cmx: %.ml
- @echo "OCAMLOPT $<"
+ @echo " OCAMLOPT $<"
$(H)$(OCAMLOPT) $(PKGS) -c $<
%.annot: %.ml
- @echo "OCAMLC -dtypes $<"
+ @echo " OCAMLC -dtypes $<"
$(H)$(OCAMLC) -dtypes $(PKGS) -c $<
$(CMOS): $(LIB_DEPS)
let closed_xml = runtime_base_dir ^ "/closed.xml"
let gtkmathview_conf = runtime_base_dir ^ "/gtkmathview.matita.conf.xml"
let matitamake_makefile_template = runtime_base_dir ^ "/template_makefile.in"
-let stdlib_dir = runtime_base_dir ^ "/library"
+let stdlib_dir_devel = runtime_base_dir ^ "/library"
+let stdlib_dir_installed = runtime_base_dir ^ "/ma/standard-library"
let help_dir = runtime_base_dir ^ "/help"
val runtime_base_dir : string
val script_font : string
val script_template : string
-val stdlib_dir : string
+val stdlib_dir_devel : string
+val stdlib_dir_installed : string
val undo_history_size : int
val version : string
theorem foo:
∀G.∀H:normal_subgroup G.∀x.x*H=H*x.
*)
-
+(*
theorem member_of_left_coset_mk_left_coset_x_H_a_to_member_of_left_coset_mk_left_coset_y_H_b_to_member_of_left_coset_mk_left_coset_op_x_y_H_op_a_b:
∀G.∀H:normal_subgroup G.∀x,y,a,b.
a ∈ (x*H) → b ∈ (y*H) → (a·b) ∈ ((x·y)*H).
intros;
simplify;
-qed.
\ No newline at end of file
+qed.
+*)
+++ /dev/null
-matita.conf.xml.devel
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0" encoding="utf-8"?>
-<helm_registry>
- <section name="user">
- <key name="home">$(HOME)</key>
- </section>
- <section name="matita">
- <key name="basedir">@SRCROOT@/matita/.matita</key>
- <key name="owner">nobody</key>
- </section>
- <section name="db">
- <key name="host">@DBHOST@</key>
- <key name="user">helm</key>
- <key name="database">matita</key>
- </section>
- <section name="getter">
- <key name="cache_dir">.matita/getter/cache</key>
- <key name="prefix">
- cic:/matita/
- file://$(matita.basedir)/xml/matita/
- </key>
- <key name="prefix">
- cic:/
- file:///does_not_exists/
- legacy
- </key>
- </section>
-</helm_registry>
+++ /dev/null
-<?xml version="1.0" encoding="utf-8"?>
-<helm_registry>
- <section name="user">
- <!-- User home directory. Here a ".matita" directory will be created
- and used to store the part of the library developed by the user. -->
- <key name="home">$(HOME)</key>
- <!-- User name. It is used down in this configuration file. If left
- unspecified, name of the user executing matita will be used (as per
- getent) -->
- <!-- <key name="name">foo</key> -->
- </section>
- <section name="matita">
- <!-- Debug only. Stay away. -->
- <!-- <key name="auto_disambiguation">true</key> -->
- <!-- Debug only. Stay away. -->
- <!-- <key name="environment_trust">true</key> -->
- <key name="basedir">$(user.home)/.matita</key>
- <!-- Metadata owner. It will be used to create user-specific tables
- in the SQL database. -->
- <key name="owner">$(user.name)</key>
- <!-- Initial GUI font size. -->
- <!-- <key name="font_size">10</key> -->
- </section>
- <section name="db">
- <!-- Access parameter to the (MySql) metadata database. They are not
- needed if Matita is always run with -nodb, but this is _not_
- recommended since a lot of features wont work.
- Hint. The simplest way to create a database is:
- 0) # become an user with database administration privileges
- 1) mysqladmin create matita
- 2) echo "grant all privileges on matita.* to helm;" | mysql matita
- Note that this way the database will be open to anyone, apply
- stricter permissions if needed.
- -->
- <key name="host">@DBHOST@</key>
- <key name="user">helm</key>
- <key name="database">matita</key>
- </section>
- <section name="getter">
- <!-- Cache dir for CIC XML documents downloaded from the net.
- Beware that this dir may become really space-consuming. It wont be
- used if all prefexises below are local (i.e. "file:///" URI scheme).
- -->
- <key name="cache_dir">$(user.home)/.matita/getter/cache</key>
- <!-- "Prefixes", i.e.: mappings URI -> URL of the global library
- Each prefix mapps an URI of the cic:/ namespace to an URL where the
- documents can actually be accessed. URL can be in the "file://" or
- "http://" scheme. Only "file://" scheme can be used to store
- documents created by the user.
- Each prefix may be given a list of attributes. Currently supported
- attributes are:
- - "legacy" for parts of the library not generated by Matita (e.g.
- exported from Coq)
- - "ro" for parts of the library which are not writable by the user
- (e.g. the Matita standard library)
- "legacy" implies "ro"
- -->
- <key name="prefix">
- cic:/matita/
- file://$(user.home)/.matita/xml/matita/
- </key>
- <key name="prefix">
- cic:/
- file:///projects/helm/library/coq_contribs/
- legacy
- </key>
- </section>
-</helm_registry>
--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<helm_registry>
+ <section name="user">
+ <!-- User home directory. Here a ".matita" directory will be created
+ and used to store the part of the library developed by the user. -->
+ <key name="home">$(HOME)</key>
+ <!-- User name. It is used down in this configuration file. If left
+ unspecified, name of the user executing matita will be used (as per
+ getent) -->
+ <!-- <key name="name">foo</key> -->
+ </section>
+ <section name="matita">
+ <!-- Debug only. Stay away. -->
+ <!-- <key name="auto_disambiguation">true</key> -->
+ <!-- Debug only. Stay away. -->
+ <!-- <key name="environment_trust">true</key> -->
+ <key name="basedir">$(user.home)/.matita</key>
+ <!-- Metadata owner. It will be used to create user-specific tables
+ in the SQL database. -->
+ <key name="owner">$(user.name)</key>
+ <!-- Initial GUI font size. -->
+ <!-- <key name="font_size">10</key> -->
+ </section>
+ <section name="db">
+ <!-- Access parameter to the (MySql) metadata database. They are not
+ needed if Matita is always run with -nodb, but this is _not_
+ recommended since a lot of features wont work.
+ Hint. The simplest way to create a database is:
+ 0) # become an user with database administration privileges
+ 1) mysqladmin create matita
+ 2) echo "grant all privileges on matita.* to helm;" | mysql matita
+ Note that this way the database will be open to anyone, apply
+ stricter permissions if needed.
+ -->
+ <key name="host">@DBHOST@</key>
+ <key name="user">helm</key>
+ <key name="database">matita</key>
+ </section>
+ <section name="getter">
+ <!-- Cache dir for CIC XML documents downloaded from the net.
+         Beware that this dir may become really space-consuming. It won't be
+         used if all prefixes below are local (i.e. "file:///" URI scheme).
+ -->
+ <key name="cache_dir">$(user.home)/.matita/getter/cache</key>
+ <!-- "Prefixes", i.e.: mappings URI -> URL of the global library
+         Each prefix maps a URI of the cic:/ namespace to an URL where the
+ documents can actually be accessed. URL can be in the "file://" or
+ "http://" scheme. Only "file://" scheme can be used to store
+ documents created by the user.
+ Each prefix may be given a list of attributes. Currently supported
+ attributes are:
+ - "legacy" for parts of the library not generated by Matita (e.g.
+ exported from Coq)
+ - "ro" for parts of the library which are not writable by the user
+ (e.g. the Matita standard library)
+ "legacy" implies "ro"
+ -->
+ <key name="prefix">
+ cic:/matita/
+      file://@RT_BASE_DIR@/standard-library/
+ ro
+ </key>
+ <key name="prefix">
+ cic:/matita/
+ file://$(user.home)/.matita/xml/matita/
+ </key>
+ <key name="prefix">
+ cic:/
+ file://@RT_BASE_DIR@/legacy-library/coq/
+ legacy
+ </key>
+ </section>
+</helm_registry>
+++ /dev/null
-<?xml version="1.0" encoding="utf-8"?>
-<helm_registry>
- <section name="user">
- <!-- User home directory. Here a ".matita" directory will be created
- and used to store the part of the library developed by the user. -->
- <key name="home">$(HOME)</key>
- <!-- User name. It is used down in this configuration file. If left
- unspecified, name of the user executing matita will be used (as per
- getent) -->
- <!-- <key name="name">foo</key> -->
- </section>
- <section name="matita">
- <!-- Debug only. Stay away. -->
- <!-- <key name="auto_disambiguation">true</key> -->
- <!-- Debug only. Stay away. -->
- <!-- <key name="environment_trust">true</key> -->
- <key name="basedir">$(user.home)/.matita</key>
- <!-- Metadata owner. It will be used to create user-specific tables
- in the SQL database. -->
- <key name="owner">$(user.name)</key>
- <!-- Initial GUI font size. -->
- <!-- <key name="font_size">10</key> -->
- </section>
- <section name="db">
- <!-- Access parameter to the (MySql) metadata database. They are not
- needed if Matita is always run with -nodb, but this is _not_
- recommended since a lot of features wont work.
- Hint. The simplest way to create a database is:
- 0) # become an user with database administration privileges
- 1) mysqladmin create matita
- 2) echo "grant all privileges on matita.* to helm;" | mysql matita
- Note that this way the database will be open to anyone, apply
- stricter permissions if needed.
- -->
- <key name="host">@DBHOST@</key>
- <key name="user">helm</key>
- <key name="database">matita</key>
- </section>
- <section name="getter">
- <!-- Cache dir for CIC XML documents downloaded from the net.
- Beware that this dir may become really space-consuming. It wont be
- used if all prefexises below are local (i.e. "file:///" URI scheme).
- -->
- <key name="cache_dir">$(user.home)/.matita/getter/cache</key>
- <!-- "Prefixes", i.e.: mappings URI -> URL of the global library
- Each prefix mapps an URI of the cic:/ namespace to an URL where the
- documents can actually be accessed. URL can be in the "file://" or
- "http://" scheme. Only "file://" scheme can be used to store
- documents created by the user.
- Each prefix may be given a list of attributes. Currently supported
- attributes are:
- - "legacy" for parts of the library not generated by Matita (e.g.
- exported from Coq)
- - "ro" for parts of the library which are not writable by the user
- (e.g. the Matita standard library)
- "legacy" implies "ro"
- -->
- <key name="prefix">
- cic:/matita/
- file://@RT_BASE_DIR@/library/
- ro
- </key>
- <key name="prefix">
- cic:/matita/$(user.name)/
- file://$(user.home)/.matita/xml/matita/
- </key>
- <key name="prefix">
- cic:/
- file://@RT_BASE_DIR@/legacy/coq/
- legacy
- </key>
- </section>
-</helm_registry>
<property name="type_hint">GDK_WINDOW_TYPE_HINT_NORMAL</property>
<property name="gravity">GDK_GRAVITY_NORTH_WEST</property>
<property name="focus_on_map">True</property>
+ <property name="urgency_hint">False</property>
<child>
<widget class="GtkEventBox" id="BrowserWinEventBox">
<property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
<property name="gravity">GDK_GRAVITY_NORTH_WEST</property>
<property name="focus_on_map">True</property>
+ <property name="urgency_hint">False</property>
<property name="has_separator">True</property>
<child internal-child="vbox">
<property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
<property name="gravity">GDK_GRAVITY_NORTH_WEST</property>
<property name="focus_on_map">True</property>
+ <property name="urgency_hint">False</property>
<property name="has_separator">True</property>
<child internal-child="vbox">
<property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
<property name="gravity">GDK_GRAVITY_NORTH_WEST</property>
<property name="focus_on_map">True</property>
+ <property name="urgency_hint">False</property>
<property name="show_fileops">True</property>
<child internal-child="cancel_button">
<property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
<property name="gravity">GDK_GRAVITY_NORTH_WEST</property>
<property name="focus_on_map">True</property>
+ <property name="urgency_hint">False</property>
<property name="has_separator">True</property>
<child internal-child="vbox">
<property name="type_hint">GDK_WINDOW_TYPE_HINT_NORMAL</property>
<property name="gravity">GDK_GRAVITY_NORTH_WEST</property>
<property name="focus_on_map">True</property>
+ <property name="urgency_hint">False</property>
<child>
<widget class="GtkEventBox" id="MainWinEventBox">
<child>
<widget class="GtkMenuBar" id="menubar1">
<property name="visible">True</property>
+ <property name="pack_direction">GTK_PACK_DIRECTION_LTR</property>
+ <property name="child_pack_direction">GTK_PACK_DIRECTION_LTR</property>
<child>
<widget class="GtkMenuItem" id="fileMenu">
<property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
<property name="gravity">GDK_GRAVITY_NORTH_WEST</property>
<property name="focus_on_map">True</property>
+ <property name="urgency_hint">False</property>
<property name="has_separator">True</property>
<child internal-child="vbox">
<property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
<property name="gravity">GDK_GRAVITY_NORTH_WEST</property>
<property name="focus_on_map">True</property>
+ <property name="urgency_hint">False</property>
<property name="has_separator">True</property>
<child internal-child="vbox">
<property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
<property name="gravity">GDK_GRAVITY_NORTH_WEST</property>
<property name="focus_on_map">True</property>
+ <property name="urgency_hint">False</property>
<child>
<widget class="GtkTable" id="table1">
<property name="type_hint">GDK_WINDOW_TYPE_HINT_UTILITY</property>
<property name="gravity">GDK_GRAVITY_NORTH_WEST</property>
<property name="focus_on_map">True</property>
+ <property name="urgency_hint">False</property>
<child>
<widget class="GtkVBox" id="vbox10">
<property name="type_hint">GDK_WINDOW_TYPE_HINT_NORMAL</property>
<property name="gravity">GDK_GRAVITY_NORTH_WEST</property>
<property name="focus_on_map">True</property>
+ <property name="urgency_hint">False</property>
<child>
<widget class="GtkVBox" id="vbox12">
</packing>
</child>
+ <child>
+ <widget class="GtkButton" id="publishButton">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="relief">GTK_RELIEF_NORMAL</property>
+ <property name="focus_on_click">True</property>
+
+ <child>
+ <widget class="GtkAlignment" id="alignment16">
+ <property name="visible">True</property>
+ <property name="xalign">0.5</property>
+ <property name="yalign">0.5</property>
+ <property name="xscale">0</property>
+ <property name="yscale">0</property>
+ <property name="top_padding">0</property>
+ <property name="bottom_padding">0</property>
+ <property name="left_padding">0</property>
+ <property name="right_padding">0</property>
+
+ <child>
+ <widget class="GtkHBox" id="hbox25">
+ <property name="visible">True</property>
+ <property name="homogeneous">False</property>
+ <property name="spacing">2</property>
+
+ <child>
+ <widget class="GtkImage" id="image907">
+ <property name="visible">True</property>
+ <property name="stock">gtk-convert</property>
+ <property name="icon_size">4</property>
+ <property name="xalign">0.5</property>
+ <property name="yalign">0.5</property>
+ <property name="xpad">0</property>
+ <property name="ypad">0</property>
+ </widget>
+ <packing>
+ <property name="padding">0</property>
+ <property name="expand">False</property>
+ <property name="fill">False</property>
+ </packing>
+ </child>
+
+ <child>
+ <widget class="GtkLabel" id="label24">
+ <property name="visible">True</property>
+ <property name="label" translatable="yes">Publish</property>
+ <property name="use_underline">True</property>
+ <property name="use_markup">False</property>
+ <property name="justify">GTK_JUSTIFY_LEFT</property>
+ <property name="wrap">False</property>
+ <property name="selectable">False</property>
+ <property name="xalign">0.5</property>
+ <property name="yalign">0.5</property>
+ <property name="xpad">0</property>
+ <property name="ypad">0</property>
+ <property name="ellipsize">PANGO_ELLIPSIZE_NONE</property>
+ <property name="width_chars">-1</property>
+ <property name="single_line_mode">False</property>
+ <property name="angle">0</property>
+ </widget>
+ <packing>
+ <property name="padding">0</property>
+ <property name="expand">False</property>
+ <property name="fill">False</property>
+ </packing>
+ </child>
+ </widget>
+ </child>
+ </widget>
+ </child>
+ </widget>
+ <packing>
+ <property name="padding">0</property>
+ <property name="expand">False</property>
+ <property name="fill">False</property>
+ </packing>
+ </child>
+
<child>
<widget class="GtkButton" id="closeButton">
<property name="visible">True</property>
let clean_current_baseuri grafite_status =
try
let baseuri = GrafiteTypes.get_string_option grafite_status "baseuri" in
- let basedir = Helm_registry.get "matita.basedir" in
- LibraryClean.clean_baseuris ~basedir [baseuri]
+ LibraryClean.clean_baseuris [baseuri]
with GrafiteTypes.Option_error _ -> ()
let ask_and_save_moo_if_needed parent fname lexicon_status grafite_status =
- let basedir = Helm_registry.get "matita.basedir" in
let baseuri = DependenciesParser.baseuri_of_script ~include_paths:[] fname in
- let moo_fname = LibraryMisc.obj_file_of_baseuri ~basedir ~baseuri in
+ let moo_fname = LibraryMisc.obj_file_of_baseuri ~baseuri ~writable:true in
let save () =
let metadata_fname =
- LibraryMisc.metadata_file_of_baseuri ~basedir ~baseuri in
+ LibraryMisc.metadata_file_of_baseuri ~baseuri ~writable:true in
let lexicon_fname =
- LibraryMisc.lexicon_file_of_baseuri ~basedir ~baseuri
+ LibraryMisc.lexicon_file_of_baseuri ~baseuri ~writable:true
in
GrafiteMarshal.save_moo moo_fname
grafite_status.GrafiteTypes.moo_content_rev;
(fun () -> MatitamakeLib.clean_development_in_bg refresh d)
in
ignore(clean ())));
+ connect_button develList#publishButton
+ (locker (fun () ->
+ match get_devel_selected () with
+ | None -> ()
+ | Some d ->
+ let clean = locker
+ (fun () -> MatitamakeLib.publish_development_in_bg refresh d)
+ in
+ ignore(clean ())));
connect_button develList#closeButton
(fun () -> develList#toplevel#misc#hide());
ignore(develList#toplevel#event#connect#delete
HLog.set_log_callback self#console#log_callback;
GtkSignal.user_handler :=
(function
- | MatitaScript.ActionCancelled -> ()
+ | MatitaScript.ActionCancelled s -> HLog.error s
| exn ->
if not (Helm_registry.get_bool "matita.debug") then
let floc, msg = MatitaExcPp.to_string exn in
if not (already_configured [Getter;Environment] init_status) then
begin
Http_getter.init ();
+ if Helm_registry.get_bool "matita.system" then
+ Http_getter_storage.activate_system_mode ();
CicEnvironment.set_trust (* environment trust *)
(let trust =
Helm_registry.get_opt_default Helm_registry.get_bool
build
Parameters: name (the name of the development to build, required)
Description: completely builds the develpoment.
+ publish
+ Parameters: name (the name of the development to publish, required)
+    Description: cleans the development in the user space, rebuilds it
+      in the system space ('ro' repositories, which for this operation
+      become writable).
Notes:
If target is omitted an 'all' will be used as the default.
With -build you can build a development wherever it is.
let parse_cmdline init_status =
if not (already_configured [CmdLine] init_status) then begin
- let includes = ref [ BuildTimeConf.stdlib_dir ] in
+ let includes = ref [
+ BuildTimeConf.stdlib_dir_installed ;
+ BuildTimeConf.stdlib_dir_devel ]
+ in
let args = ref [] in
let add_l l = fun s -> l := s :: !l in
let reduce_verbosity () =
let fill_registry () =
status := fill_registry !status
+;;
+Inversion_principle.init ()
(** raised when one of the script margins (top or bottom) is reached *)
exception Margin
exception NoUnfinishedProof
-exception ActionCancelled
+exception ActionCancelled of string
let safe_substring s i j =
try String.sub s i j with Invalid_argument _ -> assert false
else
f arg
in
- let do_nothing () = raise ActionCancelled in
+ let do_nothing () = raise (ActionCancelled "Inclusion not performed") in
let handle_with_devel d =
let name = MatitamakeLib.name_for_development d in
let title = "Unable to include " ^ what in
begin
match ex with
| TA.Command (_,TA.Set (_,"baseuri",u)) ->
- if not (GrafiteMisc.is_empty u) then
+ if Http_getter_storage.is_read_only u then
+ raise (ActionCancelled ("baseuri " ^ u ^ " is readonly"));
+ if not (Http_getter_storage.is_empty u) then
(match
guistuff.ask_confirmation
~title:"Baseuri redefinition"
"Do you want to redefine the corresponding "^
"part of the library?")
with
- | `YES ->
- let basedir = Helm_registry.get "matita.basedir" in
- LibraryClean.clean_baseuris ~basedir [u]
+ | `YES -> LibraryClean.clean_baseuris [u]
| `NO -> ()
| `CANCEL -> raise MatitaTypes.Cancel)
| _ -> ()
*)
exception NoUnfinishedProof
-exception ActionCancelled
+exception ActionCancelled of string
class type script =
object
| Some grafite_status ->
try
let baseuri = GrafiteTypes.get_string_option grafite_status "baseuri" in
- let basedir = Helm_registry.get "matita.basedir" in
- LibraryClean.clean_baseuris ~basedir ~verbose:false [baseuri];
+ LibraryClean.clean_baseuris ~verbose:false [baseuri];
opt_exit n
with GrafiteTypes.Option_error("baseuri", "not found") ->
(* no baseuri ==> nothing to clean yet *)
MatitaInit.initialize_all ();
(* must be called after init since args are set by cmdline parsing *)
let fname = fname () in
+ let system_mode = Helm_registry.get_bool "matita.system" in
let include_paths =
Helm_registry.get_list Helm_registry.string "matita.includes" in
grafite_status := Some (GrafiteSync.init ());
BuildTimeConf.core_notation_script);
Sys.catch_break true;
let origcb = HLog.get_log_callback () in
+ let origcb t s = origcb t ((if system_mode then "[S] " else "") ^ s) in
let newcb tag s =
match tag with
| `Debug | `Message -> ()
end
else
begin
- let basedir = Helm_registry.get "matita.basedir" in
let baseuri =
DependenciesParser.baseuri_of_script ~include_paths fname in
- let moo_fname = LibraryMisc.obj_file_of_baseuri ~basedir ~baseuri in
- let lexicon_fname= LibraryMisc.lexicon_file_of_baseuri ~basedir ~baseuri in
+ let moo_fname =
+ LibraryMisc.obj_file_of_baseuri ~baseuri ~writable:true
+ in
+ let lexicon_fname=
+ LibraryMisc.lexicon_file_of_baseuri ~baseuri ~writable:true
+ in
let metadata_fname =
- LibraryMisc.metadata_file_of_baseuri ~basedir ~baseuri
+ LibraryMisc.metadata_file_of_baseuri ~baseuri ~writable:true
in
GrafiteMarshal.save_moo moo_fname moo_content_rev;
LibraryNoDb.save_metadata metadata_fname metadata;
let main () =
let _ = MatitaInit.initialize_all () in
- let basedir = Helm_registry.get "matita.basedir" in
match Helm_registry.get_list Helm_registry.string "matita.args" with
| [ "all" ] ->
LibraryDb.clean_owner_environment ();
- let xmldir = basedir ^ "/xml" in
- let clean_pat =
- String.concat " -o "
- (List.map (fun suf -> "-name \\*" ^ suf) clean_suffixes) in
- let clean_cmd =
- sprintf "find %s \\( %s \\) -exec rm \\{\\} \\; 2> /dev/null"
- xmldir clean_pat in
- ignore (Sys.command clean_cmd);
- ignore
- (Sys.command ("find " ^ xmldir ^
- " -type d -exec rmdir -p {} \\; 2> /dev/null"));
+ let prefixes =
+ HExtlib.filter_map
+ (fun s ->
+ if String.sub s 0 5 = "file:" then
+ Some (Str.replace_first (Str.regexp "^file://") "" s)
+ else
+ None)
+ (Http_getter_storage.list_writable_prefixes ())
+ in
+ List.iter
+ (fun xmldir ->
+ let clean_pat =
+ String.concat " -o "
+ (List.map (fun suf -> "-name \\*" ^ suf) clean_suffixes) in
+ let clean_cmd =
+ sprintf "find %s \\( %s \\) -exec rm \\{\\} \\; 2> /dev/null"
+ xmldir clean_pat in
+ ignore (Sys.command clean_cmd);
+ ignore
+ (Sys.command ("find " ^ xmldir ^
+ " -type d -exec rmdir -p {} \\; 2> /dev/null")))
+ prefixes;
exit 0
| [] -> MatitaInit.die_usage ()
| files ->
in
uri::uris_to_remove) [] files
in
- LibraryClean.clean_baseuris ~basedir uris_to_remove
+ LibraryClean.clean_baseuris uris_to_remove
MatitaInit.load_configuration_file ();
let include_paths =
Helm_registry.get_list Helm_registry.string "matita.includes" in
- let basedir = Helm_registry.get "matita.basedir" in
List.iter
(fun ma_file ->
let ic = open_in ma_file in
DependenciesParser.baseuri_of_script ~include_paths path in
if not (Http_getter_storage.is_legacy baseuri) then
let moo_file =
- LibraryMisc.obj_file_of_baseuri ~basedir ~baseuri in
+ LibraryMisc.obj_file_of_baseuri ~baseuri ~writable:false in
Hashtbl.add include_deps ma_file moo_file
with Sys_error _ ->
HLog.warn
| None -> ()
| Some u ->
Hashtbl.add include_deps file
- (LibraryMisc.obj_file_of_baseuri ~basedir ~baseuri:u))
+ (LibraryMisc.obj_file_of_baseuri ~baseuri:u ~writable:false))
uri_deps;
List.iter
(fun ma_file ->
let deps = HExtlib.list_uniq deps in
let deps = ma_file :: deps in
let baseuri = Hashtbl.find baseuri_of ma_file in
- let moo = LibraryMisc.obj_file_of_baseuri ~basedir ~baseuri in
+ let moo = LibraryMisc.obj_file_of_baseuri ~baseuri ~writable:false in
Printf.printf "%s: %s\n" moo (String.concat " " deps);
Printf.printf "%s: %s\n" (Pcre.replace ~pat:"ma$" ~templ:"mo" ma_file) moo)
(Helm_registry.get_list Helm_registry.string "matita.args")
| true -> exit 0
| false -> exit 1
in
+ let publish_dev args =
+ if List.length args <> 1 then !usage ();
+ let name = (List.hd args) in
+ let dev = dev_of_name name in
+ match MK.publish_development dev with
+ | true -> exit 0
+ | false -> exit 1
+ in
let target args =
if List.length args < 1 then !usage ();
let dev = dev_for_dir (Unix.getcwd ()) in
"list", list_dev;
"destroy", destroy_dev;
"build", build_dev;
+ "publish", publish_dev;
]
in
usage := MatitaInit.die_usage;
match args with
| [] -> target [ "all" ]
| s :: tl ->
- try (List.assoc s params) tl
- with Not_found -> if s.[0] = '-' then !usage () else target args
+ let f, args =
+ try
+ (List.assoc s params), tl
+ with Not_found ->
+ if s.[0] = '-' then (!usage ();assert false) else target, args
+ in
+ f args
in
parse (Helm_registry.get_list Helm_registry.string "matita.args")
-
try
Unix.chdir chdir;
let cmd = String.concat " " ("make" :: List.map Filename.quote args) in
- if Helm_registry.get_int "matita.verbosity" > 1 then
- HLog.debug (sprintf "MATITAMAKE: %s" cmd);
let rc = Unix.system cmd in
Unix.chdir old;
match rc with
logger `Warning ("Unix Error: " ^ cmd ^ ": " ^ err);
false
-let call_make development target make =
+let call_make ?matita_flags development target make =
+ let matita_flags =
+ match matita_flags with
+ | None -> (try Sys.getenv "MATITA_FLAGS" with Not_found -> "")
+ | Some s -> s
+ in
rebuild_makefile development;
let makefile = makefile_for_development development in
let nodb =
let flags = flags @ if nodb then ["NODB=true"] else [] in
let flags =
try
- flags @ [ sprintf "MATITA_FLAGS=%s" (Sys.getenv "MATITA_FLAGS") ]
+ flags @ [ sprintf "MATITA_FLAGS=%s" matita_flags ]
with Not_found -> flags in
- make development.root
- (["--no-print-directory"; "-s"; "-k"; "-f"; makefile; target]
- @ flags)
+ let args =
+ ["--no-print-directory"; "-s"; "-k"; "-f"; makefile; target] @ flags
+ in
+(*
+ prerr_endline
+ ("callink make from "^development.root^" args: "^String.concat " " args);
+*)
+ make development.root args
-let build_development ?(target="all") development =
- call_make development target make
+let build_development ?matita_flags ?(target="all") development =
+ call_make ?matita_flags development target make
(* not really good vt100 *)
let vt100 s =
| Unix.Unix_error (_,"read",_)
| Unix.Unix_error (_,"select",_) -> true)
-let build_development_in_bg ?(target="all") refresh_cb development =
- call_make development target (mk_maker refresh_cb)
+let build_development_in_bg ?matita_flags ?(target="all") refresh_cb development =
+ call_make ?matita_flags development target (mk_maker refresh_cb)
;;
-let clean_development development =
- call_make development "clean" make
+let clean_development ?matita_flags development =
+ call_make ?matita_flags development "clean" make
-let clean_development_in_bg refresh_cb development =
- call_make development "clean" (mk_maker refresh_cb)
+let clean_development_in_bg ?matita_flags refresh_cb development =
+ call_make development ?matita_flags "clean" (mk_maker refresh_cb)
let destroy_development_aux development clean_development =
let delete_development development =
end;
delete_development development
-let destroy_development development =
- destroy_development_aux development clean_development
+let destroy_development ?matita_flags development =
+ destroy_development_aux development (clean_development ?matita_flags)
-let destroy_development_in_bg refresh development =
- destroy_development_aux development (clean_development_in_bg refresh)
+let destroy_development_in_bg ?matita_flags refresh development =
+ destroy_development_aux development
+ (clean_development_in_bg refresh ?matita_flags )
let root_for_development development = development.root
let name_for_development development = development.name
+let publish_development_bstract build clean devel =
+ let matita_flags = "\"-system\"" in
+ HLog.message "cleaning the development before publishing";
+ if clean ~matita_flags:"" devel then
+ begin
+ HLog.message "rebuilding the development in 'system' space";
+ if build ~matita_flags devel then
+ begin
+         HLog.message "publishing succeeded";
+ true
+ end
+ else
+ begin
+ HLog.error "building process failed, reverting";
+ if not (clean ~matita_flags devel) then
+ HLog.error "cleaning failed, end of the world (2)";
+ false
+ end
+ end
+ else
+ (HLog.error "unable to clean the development, publishing failed"; false)
+
+let publish_development devel =
+ publish_development_bstract
+ (fun ~matita_flags devel -> build_development ~matita_flags devel)
+ (fun ~matita_flags devel -> clean_development ~matita_flags devel) devel
+let publish_development_in_bg cb devel =
+ publish_development_bstract
+ (fun ~matita_flags devel -> build_development_in_bg cb ~matita_flags devel)
+ (fun ~matita_flags devel -> clean_development_in_bg cb ~matita_flags devel)
+ devel
+
* ask matitamake to recorder [dir] as the root for thedevelopment [name] *)
val initialize_development: string -> string -> development option
(* make target [default all] *)
-val build_development: ?target:string -> development -> bool
+val build_development: ?matita_flags:string -> ?target:string -> development -> bool
(* make target [default all], the refresh cb is called after every output *)
val build_development_in_bg:
- ?target:string -> (unit -> unit) -> development -> bool
+ ?matita_flags:string -> ?target:string -> (unit -> unit) -> development -> bool
(* make clean *)
-val clean_development: development -> bool
-val clean_development_in_bg: (unit -> unit) -> development -> bool
+val clean_development: ?matita_flags:string -> development -> bool
+val clean_development_in_bg: ?matita_flags:string -> (unit -> unit) -> development -> bool
+
+val publish_development_in_bg: (unit -> unit) -> development -> bool
+val publish_development: development -> bool
+
(* return the development that handles dir *)
val development_for_dir: string -> development option
(* return the development *)
(* return the known list of name, development_root *)
val list_known_developments: unit -> (string * string ) list
(* cleans the development, forgetting about it *)
-val destroy_development: development -> unit
-val destroy_development_in_bg: (unit -> unit) -> development -> unit
+val destroy_development: ?matita_flags:string -> development -> unit
+val destroy_development_in_bg: ?matita_flags:string -> (unit -> unit) -> development -> unit
(* initiale internal data structures *)
val initialize : unit -> unit
(* gives back the root *)