--- /dev/null
+v8Parser.cmi: types.cmx
+grafite.cmi: types.cmx
+v8Parser.cmo: types.cmx v8Parser.cmi
+v8Parser.cmx: types.cmx v8Parser.cmi
+v8Lexer.cmo: v8Parser.cmi
+v8Lexer.cmx: v8Parser.cmx
+grafite.cmo: types.cmx grafite.cmi
+grafite.cmx: types.cmx grafite.cmi
+engine.cmo: v8Parser.cmi v8Lexer.cmx types.cmx grafite.cmi engine.cmi
+engine.cmx: v8Parser.cmx v8Lexer.cmx types.cmx grafite.cmx engine.cmi
+top.cmo: engine.cmi
+top.cmx: engine.cmx
let get_coercion st str =
try List.assoc str st.coercions with Not_found -> ""
+let make_path path =
+ List.fold_left Filename.concat "" (List.rev path)
+
+let make_prefix path =
+ String.concat "__" (List.rev path) ^ "__"
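+(* illustrative note (added comment, not part of the original change): path
+   holds section names innermost-first, so with path = ["sub"; "top"] the
+   generated prefix is "top__sub__", and make_path joins the same components
+   with Filename.concat, giving a directory like top/sub *)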
+
let commit st name =
let i = get_index st name in
let script = st.scripts.(i) in
let produce st name =
let in_base_uri = Filename.concat st.input_base_uri name in
let out_base_uri = Filename.concat st.output_base_uri name in
- let filter = function
- | T.Inline (k, obj) ->
+ let filter path = function
+ | T.Inline (b, k, obj, p) ->
+ let obj, p =
+ if b then Filename.concat (make_path path) obj, make_prefix path
+ else obj, p
+ in
let s = obj ^ G.string_of_inline_kind k in
- Some (T.Inline (k, Filename.concat in_base_uri s))
- | T.Include s ->
+ path, Some (T.Inline (b, k, Filename.concat in_base_uri s, p))
+ | T.Include s ->
begin
- try Some (T.Include (List.assoc s st.requires))
- with Not_found -> None
+ try path, Some (T.Include (List.assoc s st.requires))
+ with Not_found -> path, None
end
- | T.Coercion (b, obj) ->
+ | T.Coercion (b, obj) ->
let str = get_coercion st obj in
let base_uri =
if str <> "" then str else
if b then out_base_uri else in_base_uri
in
let s = obj ^ G.string_of_inline_kind T.Con in
- Some (T.Coercion (b, Filename.concat base_uri s))
- | item -> Some item
+ path, Some (T.Coercion (b, Filename.concat base_uri s))
+ | T.Section (b, id, _) as item ->
+ let path = if b then id :: path else List.tl path in
+ path, Some item
+ | item -> path, Some item
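+    (* illustrative note (added comment): a T.Section (true, "A", _) item pushes
+       "A" onto path and the matching T.Section (false, ...) pops it, so a local
+       T.Inline met in between is relocated under make_path ["A"] and given the
+       generated prefix "A__" *)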
in
Printf.eprintf "processing file name: %s ...\n" name; flush stderr;
let file = Filename.concat st.input_path (name ^ st.script_ext) in
let lexbuf = Lexing.from_channel ich in
try
let items = V8Parser.items V8Lexer.token lexbuf in
- close_in ich;
- let items = List.rev (X.list_rev_map_filter filter items) in
+ close_in ich;
+ let _, rev_items = X.list_rev_map_filter_fold filter [] items in
+ let items = List.rev rev_items in
let local_items, global_items = List.partition partition items in
let comment = T.Line (Printf.sprintf "From %s" name) in
if global_items <> [] then
let coercion value =
command_of_obj (G.Coercion (floc, UM.uri_of_string value, true, 0))
-let inline value =
- command_of_macro (G.Inline (floc, value))
+let inline (uri, prefix) =
+ command_of_macro (G.Inline (floc, uri, prefix))
let commit och items =
+ let trd (_, _, x) = x in
+ let trd_rth (_, _, x, y) = x, y in
let commit = function
| T.Heading heading -> out_preamble och heading
| T.Line line -> out_line_comment och line
| T.Include script -> out_command och (require script)
| T.Coercion specs -> out_command och (coercion (snd specs))
| T.Notation specs -> out_unexported och "NOTATION" (snd specs) (**)
- | T.Inline specs -> out_command och (inline (snd specs))
+ | T.Inline specs -> out_command och (inline (trd_rth specs))
+ | T.Section specs -> out_unexported och "UNEXPORTED" (trd specs)
| T.Comment comment -> out_comment och comment
| T.Unexport unexport -> out_unexported och "UNEXPORTED" unexport
| T.Verbatim verbatim -> out_verbatim och verbatim
type inline_kind = Con | Ind | Var
+type source = string
+
+type prefix = string
+
type item = Heading of (string * int)
| Line of string
| Comment of string
| Include of string
| Coercion of (local * string)
| Notation of (local * string)
- | Inline of (inline_kind * string)
+ | Section of (local * string * string)
+ | Inline of (local * inline_kind * source * prefix)
| Verbatim of string
| Discard of string
let NUM = "-"? FIG+
rule token = parse
- | "Let" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
+ | "Let" { let s = Lexing.lexeme lexbuf in out "LET" s; P.LET s }
| "Remark" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
| "Lemma" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
| "Theorem" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
| "Variables" { let s = Lexing.lexeme lexbuf in out "VAR" s; P.VAR s }
| "Hypothesis" { let s = Lexing.lexeme lexbuf in out "VAR" s; P.VAR s }
| "Hypotheses" { let s = Lexing.lexeme lexbuf in out "VAR" s; P.VAR s }
- | "Axiom" { let s = Lexing.lexeme lexbuf in out "VAR" s; P.VAR s }
+ | "Axiom" { let s = Lexing.lexeme lexbuf in out "AX" s; P.AX s }
| "Inductive" { let s = Lexing.lexeme lexbuf in out "IND" s; P.IND s }
| "Record" { let s = Lexing.lexeme lexbuf in out "IND" s; P.IND s }
| "Section" { let s = Lexing.lexeme lexbuf in out "SEC" s; P.SEC s }
- | "End" { let s = Lexing.lexeme lexbuf in out "SEC" s; P.SEC s }
- | "Hint" { let s = Lexing.lexeme lexbuf in out "SEC" s; P.SEC s }
- | "Unset" { let s = Lexing.lexeme lexbuf in out "SEC" s; P.SEC s }
- | "Print" { let s = Lexing.lexeme lexbuf in out "SEC" s; P.SEC s }
- | "Opaque" { let s = Lexing.lexeme lexbuf in out "SEC" s; P.SEC s }
- | "Transparent" { let s = Lexing.lexeme lexbuf in out "SEC" s; P.SEC s }
- | "Ltac" { let s = Lexing.lexeme lexbuf in out "SEC" s; P.SEC s }
- | "Tactic" { let s = Lexing.lexeme lexbuf in out "SEC" s; P.SEC s }
- | "Declare" { let s = Lexing.lexeme lexbuf in out "SEC" s; P.SEC s }
+ | "End" { let s = Lexing.lexeme lexbuf in out "END" s; P.END s }
+ | "Hint" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "Unset" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "Print" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "Opaque" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "Transparent" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "Ltac" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "Tactic" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "Declare" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
| "Require" { let s = Lexing.lexeme lexbuf in out "REQ" s; P.REQ s }
| "Export" { let s = Lexing.lexeme lexbuf in out "XP" s; P.XP s }
| "Import" { let s = Lexing.lexeme lexbuf in out "IP" s; P.IP s }
| "Infix" { let s = Lexing.lexeme lexbuf in out "NOT" s; P.NOT s }
| "Identity" { let s = Lexing.lexeme lexbuf in out "ID" s; P.ID s }
| "Coercion" { let s = Lexing.lexeme lexbuf in out "CO" s; P.COERC s }
- | "let" { let s = Lexing.lexeme lexbuf in out "LET" s; P.LET s }
+ | "let" { let s = Lexing.lexeme lexbuf in out "ABBR" s; P.ABBR s }
| "in" { let s = Lexing.lexeme lexbuf in out "IN" s; P.IN s }
| SPC { let s = Lexing.lexeme lexbuf in out "SPC" s; P.SPC s }
| ID { let s = Lexing.lexeme lexbuf in out "ID" s; P.IDENT s }
let cat x = String.concat " " x
- let mk_vars idents =
- let map ident = T.Inline (T.Var, trim ident) in
+ let mk_vars local idents =
+ let map ident = T.Inline (local, T.Var, trim ident, "") in
List.map map idents
let strip2 s =
[T.Notation (false, str); T.Notation (true, str)]
%}
%token <string> SPC STR IDENT INT EXTRA AC OP CP OC CC OQ CQ DEF FS COE CN SC
- %token <string> LET IN TH PROOF QED VAR IND SEC REQ XP IP SET NOT LOAD ID COERC
+ %token <string> ABBR IN LET TH PROOF QED VAR AX IND SEC END UNX REQ XP IP SET NOT LOAD ID COERC
%token EOF
%start items
| { "" }
| SPC rspcs { $1 ^ $2 }
;
+ xident:
+ | IDENT { $1 }
+ | LET { $1 }
+ | TH { $1 }
+ | QED { $1 }
+ | PROOF { $1 }
+ | VAR { $1 }
+ | AX { $1 }
+ | IND { $1 }
+ | SEC { $1 }
+ | END { $1 }
+ | UNX { $1 }
+ | REQ { $1 }
+ | LOAD { $1 }
+ | NOT { $1 }
+ | ID { $1 }
+ | COERC { $1 }
+ ;
fs: FS spcs { $1 ^ $2 };
- ident: IDENT spcs { $1 ^ $2 };
+ ident: xident spcs { $1 ^ $2 };
th: TH spcs { $1 ^ $2 };
+ xlet: LET spcs { $1 ^ $2 };
proof: PROOF spcs { $1 ^ $2 };
qed: QED spcs { $1 ^ $2 };
sec: SEC spcs { $1 ^ $2 };
+ xend: END spcs { $1 ^ $2 };
+ unx: UNX spcs { $1 ^ $2 };
def: DEF spcs { $1 ^ $2 };
op: OP spcs { $1 ^ $2 };
- xlet: LET spcs { $1 ^ $2 };
+ abbr: ABBR spcs { $1 ^ $2 };
var: VAR spcs { $1 ^ $2 };
+ ax: AX spcs { $1 ^ $2 };
req: REQ spcs { $1 ^ $2 };
load: LOAD spcs { $1 ^ $2 };
xp: XP spcs { $1 ^ $2 };
| CC { $1 }
| COE { $1 }
| STR { $1 }
- | xlet extends0 IN { $1 ^ $2 ^ $3 }
+ | abbr extends0 IN { $1 ^ $2 ^ $3 }
| op extends1 CP { $1 ^ $2 ^ $3 }
;
| COE { $1 }
| STR { $1 }
| OP { $1 }
- | LET { $1 }
+ | ABBR { $1 }
| IN { $1 }
| CP { $1 }
| DEF { $1 }
| CC { $1 }
| SC { $1 }
| TH { $1 }
+ | LET { $1 }
| VAR { $1 }
+ | AX { $1 }
| IND { $1 }
| SEC { $1 }
+ | END { $1 }
+ | UNX { $1 }
| REQ { $1 }
| XP { $1 }
| IP { $1 }
;
qid:
- | IDENT { [$1] }
+ | xident { [$1] }
| qid AC { strip1 $2 :: $1 }
;
macro_step:
| th ident restricts fs proof FS steps qed FS
- { out "TH" $2; $7 @ [T.Inline (T.Con, trim $2)] }
+ { out "TH" $2; $7 @ [T.Inline (false, T.Con, trim $2, "")] }
| th ident restricts fs proof restricts FS
- { out "TH" $2; [T.Inline (T.Con, trim $2)] }
+ { out "TH" $2; [T.Inline (false, T.Con, trim $2, "")] }
| th ident restricts fs steps qed FS
- { out "TH" $2; $5 @ [T.Inline (T.Con, trim $2)] }
+ { out "TH" $2; $5 @ [T.Inline (false, T.Con, trim $2, "")] }
| th ident restricts def restricts FS
- { out "TH" $2; [T.Inline (T.Con, trim $2)] }
+ { out "TH" $2; [T.Inline (false, T.Con, trim $2, "")] }
| th ident def restricts FS
- { out "TH" $2; [T.Inline (T.Con, trim $2)] }
+ { out "TH" $2; [T.Inline (false, T.Con, trim $2, "")] }
+ | xlet ident restricts fs proof FS steps qed FS
+ { out "LET" $2; $7 @ [T.Inline (true, T.Con, trim $2, "")] }
+ | xlet ident restricts fs proof restricts FS
+ { out "LET" $2; [T.Inline (true, T.Con, trim $2, "")] }
+ | xlet ident restricts fs steps qed FS
+ { out "LET" $2; $5 @ [T.Inline (true, T.Con, trim $2, "")] }
+ | xlet ident restricts def restricts FS
+ { out "LET" $2; [T.Inline (true, T.Con, trim $2, "")] }
+ | xlet ident def restricts FS
+ { out "LET" $2; [T.Inline (true, T.Con, trim $2, "")] }
| var idents xres FS
- { out "VAR" (cat $2); mk_vars $2 }
+ { out "VAR" (cat $2); mk_vars true $2 }
+ | ax idents xres FS
+ { out "AX" (cat $2); mk_vars false $2 }
| ind ident unexports FS
- { out "IND" $2; T.Inline (T.Ind, trim $2) :: snd $3 }
- | sec unexports FS
- { out "UNX" (fst $2); [T.Unexport ($1 ^ fst $2 ^ trim $3)] }
+ { out "IND" $2; T.Inline (false, T.Ind, trim $2, "") :: snd $3 }
+ | sec ident FS
+ { out "SEC" $2; [T.Section (true, trim $2, $1 ^ $2)] }
+ | xend ident FS
+ { out "END" $2; [T.Section (false, trim $2, $1 ^ $2)] }
+ | unx unexports FS
+ { out "UNX" (fst $2); [T.Unexport ($1 ^ fst $2 ^ trim $3)] }
| req xp ident FS
- { out "REQ" $3; [T.Include (trim $3)] }
+ { out "REQ" $3; [T.Include (trim $3)] }
| req ip ident FS
- { out "REQ" $3; [T.Include (trim $3)] }
+ { out "REQ" $3; [T.Include (trim $3)] }
| req ident FS
- { out "REQ" $2; [T.Include (trim $2)] }
+ { out "REQ" $2; [T.Include (trim $2)] }
| load str FS
- { out "REQ" $2; [T.Include (strip2 (trim $2))] }
+ { out "REQ" $2; [T.Include (strip2 (trim $2))] }
| coerc qid spcs cn unexports FS
- { out "COERCE" (hd $2); coercion (hd $2) }
+ { out "COERCE" (hd $2); coercion (hd $2) }
| id coerc qid spcs cn unexports FS
- { out "COERCE" (hd $3); coercion (hd $3) }
+ { out "COERCE" (hd $3); coercion (hd $3) }
| th ident error
- { out "ERROR" $2; failwith ("macro_step " ^ $2) }
+ { out "ERROR" $2; failwith ("macro_step " ^ $2) }
| ind ident error
- { out "ERROR" $2; failwith ("macro_step " ^ $2) }
+ { out "ERROR" $2; failwith ("macro_step " ^ $2) }
| var idents error
{ let vs = cat $2 in
- out "ERROR" vs; failwith ("macro_step " ^ vs) }
+ out "ERROR" vs; failwith ("macro_step " ^ vs) }
+ | ax idents error
+ { let vs = cat $2 in
+ out "ERROR" vs; failwith ("macro_step " ^ vs) }
;
item:
| OQ verbatims CQ { [T.Comment $2] }
in
aux [] l
+let list_rev_map_filter_fold f v l =
+ let rec aux v a = function
+ | [] -> v, a
+ | hd :: tl ->
+ begin match f v hd with
+ | v, None -> aux v a tl
+ | v, Some b -> aux v (b :: a) tl
+ end
+ in
+ aux v [] l
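+(* usage sketch (added comment): the fold threads an accumulator while mapping
+   and filtering, and returns the surviving images in reverse order, e.g.
+     list_rev_map_filter_fold
+       (fun acc x -> acc + x, if x mod 2 = 0 then Some (x * x) else None)
+       0 [1; 2; 3; 4]
+   evaluates to (10, [16; 4]) *)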
+
let list_concat ?(sep = []) =
let rec aux acc =
function
?eq:('a->'a->bool) -> 'a list -> 'a list (** uniq unix filter on lists *)
val filter_map: ('a -> 'b option) -> 'a list -> 'b list (** filter + map *)
val list_rev_map_filter: ('a -> 'b option) -> 'a list -> 'b list
+val list_rev_map_filter_fold: ('c -> 'a -> 'c * 'b option) -> 'c -> 'a list -> 'c * 'b list
val list_concat: ?sep:'a list -> 'a list list -> 'a list (**String.concat-like*)
val list_findopt: ('a -> 'b option) -> 'a list -> 'b option
val flatten_map: ('a -> 'b list) -> 'a list -> 'b list
(* real macros *)
| Check of loc * 'term
| Hint of loc
- | Inline of loc * string (* the string is a URI or a base-uri *)
+ | Inline of loc * string * string (* the first string is a URI or a base-uri, the second a name prefix *)
(** To be increased each time the command type below changes, used for "safe"
* marshalling *)
(* real macros *)
| Check (_, term) -> sprintf "check %s" (term_pp term)
| Hint _ -> "hint"
- | Inline (_,suri) -> sprintf "inline \"%s\"" suri
+ | Inline (_, suri, "") -> sprintf "inline \"%s\"" suri
+ | Inline (_, suri, prefix) -> sprintf "inline \"%s\" \"%s\"" suri prefix
let pp_associativity = function
| Gramext.LeftA -> "left associative"
Tactics.cut ~mk_fresh_name_callback:(namer_of names) term
| GrafiteAst.Decompose (_, types, what, names) ->
let to_type = function
- | GrafiteAst.Type (uri, typeno) -> uri, typeno
+ | GrafiteAst.Type (uri, typeno) -> uri, Some typeno
| GrafiteAst.Ident _ -> assert false
in
let user_types = List.rev_map to_type types in
macro: [
[ [ IDENT "check" ]; t = term ->
GrafiteAst.Check (loc, t)
- | [ IDENT "inline"]; suri = QSTRING ->
- GrafiteAst.Inline (loc,suri)
+ | [ IDENT "inline"]; suri = QSTRING; prefix = OPT QSTRING ->
+ let prefix = match prefix with None -> "" | Some prefix -> prefix in
+ GrafiteAst.Inline (loc,suri,prefix)
| [ IDENT "hint" ] -> GrafiteAst.Hint loc
| [ IDENT "whelp"; "match" ] ; t = term ->
GrafiteAst.WMatch (loc,t)
module F = FreshNamesGenerator
module E = ProofEngineTypes
module H = ProofEngineHelpers
+module R = ReductionTactics
+
+(*
+(* scan the term for inductive types and return them as a list of (uri, typeno) pairs *)
+let search_inductive_types ty =
+ let rec aux types = function
+ | C.MutInd (uri, typeno, _) when (not (List.mem (uri, typeno) types)) ->
+ (uri, typeno) :: types
+ | C.Appl applist -> List.fold_left aux types applist
+ | _ -> types
+ in
+ aux [] ty
+(* N.B.: in a case like (and A forall C:Prop.(or B C)) the inner "or" is *not* selected! *)
+*)
(* unexported tactics *******************************************************)
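+(* explanatory comment (added): type_class classifies the head of a premise's
+   type for elim_clear_unfold_tac below: an inductive type to be eliminated, a
+   decomposable constant to be unfolded (carried as a lazy term), or anything
+   else *)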
+type type_class = Other
+ | Ind
+ | Con of C.lazy_term
+
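+(* added comment: builds a pattern selecting only the hypothesis [what], with an
+   implicit hole for its statement and no conclusion part; used below to restrict
+   unfold_tac to that premise *)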
+let premise_pattern what = None, [what, C.Implicit (Some `Hole)], None
+
let rec scan_tac ~old_context_length ~index ~tactic =
let scan_tac status =
let (proof, goal) = status in
in
try E.apply_tactic tac status
with E.Fail _ -> aux (pred index)
- in aux (index + context_length - old_context_length - 1)
+ in aux (index + context_length - old_context_length)
in
E.mk_tactic scan_tac
-let rec check_inductive_types types = function
- | C.MutInd (uri, typeno, _) -> List.mem (uri, typeno) types
- | C.Appl (hd :: tl) -> check_inductive_types types hd
- | _ -> false
+let rec check_types types = function
+ | C.MutInd (uri, typeno, _) ->
+ if List.mem (uri, Some typeno) types then Ind else Other
+ | C.Const (uri, _) as t ->
+ if List.mem (uri, None) types then Con (E.const_lazy_term t) else Other
+ | C.Appl (hd :: tl) -> check_types types hd
+ | _ -> Other
-let elim_clear_tac ~mk_fresh_name_callback ~types ~what =
- let elim_clear_tac status =
+let elim_clear_unfold_tac ~mk_fresh_name_callback ~types ~what =
+ let elim_clear_unfold_tac status =
let (proof, goal) = status in
let _, metasenv, _, _ = proof in
let _, context, _ = CicUtil.lookup_meta goal metasenv in
let index, ty = H.lookup_type metasenv context what in
- if check_inductive_types types ty then
- let tac = T.then_ ~start:(P.elim_intros_tac ~mk_fresh_name_callback (C.Rel index))
- ~continuation:(S.clear [what])
- in
- E.apply_tactic tac status
- else raise (E.Fail (lazy "unexported elim_clear: not an eliminable type"))
+ match check_types types ty with
+ | Ind ->
+ let tac = T.then_ ~start:(P.elim_intros_tac ~mk_fresh_name_callback (C.Rel index))
+ ~continuation:(S.clear [what])
+ in
+ E.apply_tactic tac status
+ | Con t ->
+ let tac = R.unfold_tac (Some t) ~pattern:(premise_pattern what) in
+ E.apply_tactic tac status
+ | Other ->
+ raise (E.Fail (lazy "unexported elim_clear: not an eliminable type"))
in
- E.mk_tactic elim_clear_tac
+ E.mk_tactic elim_clear_unfold_tac
(* elim type ****************************************************************)
(** debugging print *)
let warn s = debug_print (lazy ("DECOMPOSE: " ^ (Lazy.force s)))
-(* search in term the Inductive Types and return a list of uris as triples like this: (uri,typeno,exp_named_subst) *)
-let search_inductive_types ty =
- let rec aux types = function
- | C.MutInd (uri, typeno, _) when (not (List.mem (uri, typeno) types)) ->
- (uri, typeno) :: types
- | C.Appl applist -> List.fold_left aux types applist
- | _ -> types
- in
- aux [] ty
-(* N.B: in un caso tipo (and A forall C:Prop.(or B C)) l'or *non* viene selezionato! *)
-
(* roba seria ------------------------------------------------------------- *)
let decompose_tac ?(mk_fresh_name_callback = F.mk_fresh_name ~subst:[])
let _, metasenv,_,_ = proof in
let _, context, _ = CicUtil.lookup_meta goal metasenv in
let types = List.rev_append user_types (FwdQueries.decomposables dbd) in
- let tactic = elim_clear_tac ~mk_fresh_name_callback ~types in
+ let tactic = elim_clear_unfold_tac ~mk_fresh_name_callback ~types in
let old_context_length = List.length context in
let tac = match what with
| Some what ->
val decompose_tac:
?mk_fresh_name_callback:ProofEngineTypes.mk_fresh_name_type ->
- ?user_types:((UriManager.uri * int) list) ->
+ ?user_types:((UriManager.uri * int option) list) ->
?what:string -> dbd:HMysql.dbd -> ProofEngineTypes.tactic
let t' =
match t with
| None -> None
- | Some t -> Some (fun _ m u -> t, m, u)
+ | Some t -> Some (const_lazy_term t)
in
t',[],Some (Cic.Implicit (Some `Hole))
+(* GENERATED FILE, DO NOT EDIT. STAMP:Tue Nov 28 11:13:28 CET 2006 *)
val absurd : term:Cic.term -> ProofEngineTypes.tactic
val apply : term:Cic.term -> ProofEngineTypes.tactic
val applyS :
dbd:HMysql.dbd ->
- term:Cic.term ->
- params:(string * string) list ->
- universe:Universe.universe ->
- ProofEngineTypes.tactic
+ term:Cic.term ->
+ params:(string * string) list ->
+ universe:Universe.universe -> ProofEngineTypes.tactic
val assumption : ProofEngineTypes.tactic
val auto :
- params:(string * string) list ->
- dbd:HMysql.dbd ->
- universe:Universe.universe ->
- ProofEngineTypes.tactic
+ params:(string * string) list ->
+ dbd:HMysql.dbd -> universe:Universe.universe -> ProofEngineTypes.tactic
val change :
pattern:ProofEngineTypes.lazy_pattern ->
Cic.lazy_term -> ProofEngineTypes.tactic
Cic.term -> ProofEngineTypes.tactic
val decompose :
?mk_fresh_name_callback:ProofEngineTypes.mk_fresh_name_type ->
- ?user_types:(UriManager.uri * int) list ->
+ ?user_types:(UriManager.uri * int option) list ->
?what:string -> dbd:HMysql.dbd -> ProofEngineTypes.tactic
val demodulate : dbd:HMysql.dbd ->
universe:Universe.universe -> ProofEngineTypes.tactic
| None -> None
| Some str ->
match CicUtil.term_of_uri (UriManager.uri_of_string str) with
- | Cic.MutInd (uri, typeno, _) -> Some (uri, typeno)
+ | Cic.MutInd (uri, typeno, _) -> Some (uri, Some typeno)
+ | Cic.Const (uri, _) -> Some (uri, None)
| _ ->
raise (UriManager.IllFormedUri str)
in
let query = Printf.sprintf "SELECT %s FROM %s" select from in
let decomposables = HMysql.map ~f:map (HMysql.exec dbd query) in
filter_map_n (fun _ x -> x) 0 decomposables
-
*)
val fwd_simpl: dbd:HMysql.dbd -> Cic.term -> UriManager.uri list
-val decomposables: dbd:HMysql.dbd -> (UriManager.uri * int) list
+val decomposables: dbd:HMysql.dbd -> (UriManager.uri * int option) list