let map v (cl, b) =
if I.overlaps synth cl && b then v else meta ""
in
- let rec aux = function
- | [] -> []
- | hd :: tl -> if hd = meta "" then aux tl else List.rev (hd :: tl)
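+ (* aux strips the trailing implicit placeholders (meta "") and reports
+    whether any argument was dropped *)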
+ let rec aux b = function
+ | [] -> b, []
+ | hd :: tl ->
+ if hd = meta "" then aux true tl else b, List.rev (hd :: tl)
in
let args = T.list_rev_map2 map tl classes in
- let args = aux args in
- if args = [] then hd else C.AAppl ("", hd :: args)
+ let b, args = aux false args in
+ if args = [] then b, hd else b, C.AAppl ("", hd :: args)
let mk_convert st ?name sty ety note =
let e = Cn.hole "" in
let synth = mk_synth I.S.empty decurry in
let text = "" (* Printf.sprintf "%u %s" parsno (Cl.to_string h) *) in
let script = List.rev (mk_arg st hd) in
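+ (* b is set by mk_exp_args: when trailing arguments were left implicit
+    the step uses Apply, otherwise Exact *)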
+ let tactic b t n = if b then T.Apply (t, n) else T.Exact (t, n) in
match rc with
| Some (i, j, uri, tyno) ->
let classes2, tl2, _, where = split2_last classes tl in
let qs = proc_bkd_proofs (next st) synth2 names classes2 tl2 in
if List.length qs <> List.length names then
let qs = proc_bkd_proofs (next st) synth [] classes tl in
- let hd = mk_exp_args hd tl classes synth in
- script @ [T.Apply (hd, dtext ^ text); T.Branch (qs, "")]
+ let b, hd = mk_exp_args hd tl classes synth in
+ script @ [tactic b hd (dtext ^ text); T.Branch (qs, "")]
else if is_rewrite_right hd then
script2 @ mk_rewrite st dtext where qs tl2 false what
else if is_rewrite_left hd then
| None ->
let names = get_sub_names hd tl in
let qs = proc_bkd_proofs (next st) synth names classes tl in
- let hd = mk_exp_args hd tl classes synth in
- script @ [T.Apply (hd, dtext ^ text); T.Branch (qs, "")]
+ let b, hd = mk_exp_args hd tl classes synth in
+ script @ [tactic b hd (dtext ^ text); T.Branch (qs, "")]
else
[T.Exact (what, dtext)]
in
let rec count_step a = function
| Note _
- | Statement _
+ | Statement _
+ | Inductive _
+ | Qed _
+(* level 0 *)
+ | Intros (Some 0, [], _)
| Id _
| Exact _
- | Qed _ -> a
- | Branch (pps, _) -> List.fold_left count_steps a pps
- | _ -> succ a
+ | Change _
+ | Clear _
+ | ClearBody _ -> a
+ | Branch (pps, _) -> List.fold_left count_steps a pps
+(* level 1 *)
+ | _ -> succ a
and count_steps a = List.fold_left count_step a
| Note _
| Inductive _
| Statement _
- | Qed _
+ | Qed _
| Id _
| Intros _
| Clear _
-v8Parser.cmi: types.cmo
+gallina8Parser.cmi: types.cmo
+grafiteParser.cmi: types.cmo
grafite.cmi: types.cmo
-engine.cmi:
-types.cmo:
-types.cmx:
-options.cmo:
-options.cmx:
-v8Parser.cmo: types.cmo options.cmo v8Parser.cmi
-v8Parser.cmx: types.cmx options.cmx v8Parser.cmi
-v8Lexer.cmo: v8Parser.cmi options.cmo
-v8Lexer.cmx: v8Parser.cmx options.cmx
+gallina8Parser.cmo: types.cmo options.cmo gallina8Parser.cmi
+gallina8Parser.cmx: types.cmx options.cmx gallina8Parser.cmi
+gallina8Lexer.cmo: options.cmo gallina8Parser.cmi
+gallina8Lexer.cmx: options.cmx gallina8Parser.cmx
+grafiteParser.cmo: types.cmo options.cmo grafiteParser.cmi
+grafiteParser.cmx: types.cmx options.cmx grafiteParser.cmi
+grafiteLexer.cmo: options.cmo grafiteParser.cmi
+grafiteLexer.cmx: options.cmx grafiteParser.cmx
grafite.cmo: types.cmo options.cmo grafite.cmi
grafite.cmx: types.cmx options.cmx grafite.cmi
-engine.cmo: v8Parser.cmi v8Lexer.cmo types.cmo options.cmo grafite.cmi \
- engine.cmi
-engine.cmx: v8Parser.cmx v8Lexer.cmx types.cmx options.cmx grafite.cmx \
- engine.cmi
+engine.cmo: types.cmo options.cmo grafite.cmi gallina8Parser.cmi \
+ gallina8Lexer.cmo engine.cmi
+engine.cmx: types.cmx options.cmx grafite.cmx gallina8Parser.cmx \
+ gallina8Lexer.cmx engine.cmi
top.cmo: options.cmo engine.cmi
top.cmx: options.cmx engine.cmx
-v8Parser.cmi: types.cmx
+gallina8Parser.cmi: types.cmx
+grafiteParser.cmi: types.cmx
grafite.cmi: types.cmx
-engine.cmi:
-types.cmo:
-types.cmx:
-options.cmo:
-options.cmx:
-v8Parser.cmo: types.cmx options.cmx v8Parser.cmi
-v8Parser.cmx: types.cmx options.cmx v8Parser.cmi
-v8Lexer.cmo: v8Parser.cmi options.cmx
-v8Lexer.cmx: v8Parser.cmx options.cmx
+gallina8Parser.cmo: types.cmx options.cmx gallina8Parser.cmi
+gallina8Parser.cmx: types.cmx options.cmx gallina8Parser.cmi
+gallina8Lexer.cmo: options.cmx gallina8Parser.cmi
+gallina8Lexer.cmx: options.cmx gallina8Parser.cmx
+grafiteParser.cmo: types.cmx options.cmx grafiteParser.cmi
+grafiteParser.cmx: types.cmx options.cmx grafiteParser.cmi
+grafiteLexer.cmo: options.cmx grafiteParser.cmi
+grafiteLexer.cmx: options.cmx grafiteParser.cmx
grafite.cmo: types.cmx options.cmx grafite.cmi
grafite.cmx: types.cmx options.cmx grafite.cmi
-engine.cmo: v8Parser.cmi v8Lexer.cmx types.cmx options.cmx grafite.cmi \
- engine.cmi
-engine.cmx: v8Parser.cmx v8Lexer.cmx types.cmx options.cmx grafite.cmx \
- engine.cmi
+engine.cmo: types.cmx options.cmx grafiteParser.cmi grafiteLexer.cmx \
+ grafite.cmi gallina8Parser.cmi gallina8Lexer.cmx engine.cmi
+engine.cmx: types.cmx options.cmx grafiteParser.cmx grafiteLexer.cmx \
+ grafite.cmx gallina8Parser.cmx gallina8Lexer.cmx engine.cmi
top.cmo: options.cmx engine.cmi
top.cmx: options.cmx engine.cmx
REQUIRES = unix str helm-grafite_parser
-MLS = types.ml options.ml v8Parser.ml v8Lexer.ml grafite.ml engine.ml top.ml
-MLIS = v8Parser.mli grafite.mli engine.mli
-CLEAN = v8Parser.ml v8Parser.mli v8Lexer.ml
+MLS = types.ml options.ml \
+ gallina8Parser.ml gallina8Lexer.ml \
+ grafiteParser.ml grafiteLexer.ml \
+ grafite.ml engine.ml top.ml
+MLIS = gallina8Parser.mli grafiteParser.mli grafite.mli engine.mli
+CLEAN = gallina8Parser.ml gallina8Parser.mli gallina8Lexer.ml \
+ grafiteParser.ml grafiteParser.mli grafiteLexer.ml
PACKAGES = CoRN
output_base_uri: string;
input_path: string;
output_path: string;
- input_type: string;
+ input_type: T.input_kind;
output_type: T.output_kind;
+ input_ext: string;
includes: (string * string) list;
coercions: (string * string) list;
files: string list;
let set_files st =
let eof ich = try Some (input_line ich) with End_of_file -> None in
- let trim l = Filename.chop_extension (Str.string_after l 2) in
- let cmd = Printf.sprintf "cd %s && find -name *%s" st.input_path st.input_type in
+ let trim l = Filename.chop_extension (Str.string_after l 2) in
+ let cmd = Printf.sprintf "cd %s && find -name *%s" st.input_path st.input_ext in
let ich = Unix.open_process_in cmd in
let rec aux files =
match eof ich with
let make registry =
let id x = x in
let get_pairs = R.get_list (R.pair id id) in
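+ (* the input type fixes both the parser entry point and the extension of
+    the source files to read *)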
+ let get_input_type key =
+ match R.get_string key with
+ | "gallina8" -> T.Gallina8, ".v"
+ | "grafite" -> T.Grafite, ".ma"
+ | _ -> failwith "unknown input type"
+ in
let get_output_type key =
match R.get_string key with
| "procedural" -> T.Procedural
| _ -> failwith "unknown output type"
in
load_registry registry;
+ let input_type, input_ext = get_input_type "package.input_type" in
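+ (* heading lines are only taken from the registry for non-Grafite input *)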
+ let heading_lines = match input_type with
+ | T.Grafite -> 0
+ | _ -> R.get_int "transcript.heading_lines"
+ in
let st = {
heading_path = R.get_string "transcript.heading_path";
- heading_lines = R.get_int "transcript.heading_lines";
+ heading_lines = heading_lines;
input_package = R.get_string "package.input_name";
output_package = R.get_string "package.output_name";
input_base_uri = R.get_string "package.input_base_uri";
output_base_uri = R.get_string "package.output_base_uri";
input_path = R.get_string "package.input_path";
output_path = R.get_string "package.output_path";
- input_type = R.get_string "package.input_type";
+ input_type = input_type;
output_type = get_output_type "package.output_type";
+ input_ext = input_ext;
includes = get_pairs "package.include";
coercions = get_pairs "package.coercion";
files = [];
} in
let st = {st with
heading_path = Filename.concat !O.cwd st.heading_path;
- output_path = Filename.concat !O.cwd st.output_path;
+ input_path = Filename.concat !O.cwd st.input_path;
+ output_path = Filename.concat !O.cwd st.output_path
} in
prerr_endline "reading file names ...";
let st = set_files st in
let contents = List.rev_append items script.contents in
st.scripts.(i) <- {script with name = name; contents = contents}
-let set_heading st name =
+let set_heading st name =
let heading = st.heading_path, st.heading_lines in
set_items st name [T.Heading heading]
| T.Notation (false, _) -> false
| _ -> true
in
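+ (* pick the lexer/parser pair for the configured input syntax *)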
+ let get_items = match st.input_type with
+ | T.Gallina8 -> Gallina8Parser.items Gallina8Lexer.token
+ | T.Grafite -> GrafiteParser.items GrafiteLexer.token
+ in
let produce st name =
let in_base_uri = Filename.concat st.input_base_uri name in
let out_base_uri = Filename.concat st.output_base_uri name in
with Not_found -> ()
in
Printf.eprintf "processing file name: %s ...\n" name; flush stderr;
- let file = Filename.concat st.input_path (name ^ st.input_type) in
+ let file = Filename.concat st.input_path (name ^ st.input_ext) in
let ich = open_in file in
let lexbuf = Lexing.from_channel ich in
try
- let items = V8Parser.items V8Lexer.token lexbuf in
- close_in ich;
+ let items = get_items lexbuf in close_in ich;
let _, rev_items = X.list_rev_map_filter_fold filter [] items in
let items = List.rev rev_items in
let local_items, global_items = List.partition partition items in
--- /dev/null
+(* Copyright (C) 2000, HELM Team.
+ *
+ * This file is part of HELM, an Hypertextual, Electronic
+ * Library of Mathematics, developed at the Computer Science
+ * Department, University of Bologna, Italy.
+ *
+ * HELM is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * HELM is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with HELM; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place - Suite 330, Boston,
+ * MA 02111-1307, USA.
+ *
+ * For details, see the HELM World-Wide-Web page,
+ * http://cs.unibo.it/helm/.
+ *)
+
+{
+ module O = Options
+ module P = Gallina8Parser
+
+ let out t s = if !O.verbose_lexer then prerr_endline (t ^ " " ^ s)
+
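+ (* bytes above 127 are escaped as \nnn\ (decimal code); an ESCAPED line is
+     printed when verbose_escape is set *)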
+ let check s =
+ let c = Char.code s.[0] in
+ if c <= 127 then s else
+ let escaped = Printf.sprintf "\\%3u\\" c in
+ begin
+ if !O.verbose_escape then Printf.eprintf "ESCAPED: %s %s\n" s escaped;
+ escaped
+ end
+}
+
+let QT = '"'
+let SPC = [' ' '\t' '\n' '\r']+
+let ALPHA = ['A'-'Z' 'a'-'z' '_']
+let FIG = ['0'-'9']
+let ID = ALPHA (ALPHA | FIG | "\'")*
+let RAWID = QT [^ '"']* QT
+let NUM = "-"? FIG+
+
+rule token = parse
+ | "Let" { let s = Lexing.lexeme lexbuf in out "LET" s; P.LET s }
+ | "Remark" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
+ | "Lemma" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
+ | "Theorem" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
+ | "Definition" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
+ | "Fixpoint" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
+ | "CoFixpoint" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
+ | "Qed" { let s = Lexing.lexeme lexbuf in out "QED" s; P.QED s }
+ | "Save" { let s = Lexing.lexeme lexbuf in out "QED" s; P.QED s }
+ | "Defined" { let s = Lexing.lexeme lexbuf in out "QED" s; P.QED s }
+ | "Proof" { let s = Lexing.lexeme lexbuf in out "PRF" s; P.PROOF s }
+ | "Variable" { let s = Lexing.lexeme lexbuf in out "VAR" s; P.VAR s }
+ | "Variables" { let s = Lexing.lexeme lexbuf in out "VAR" s; P.VAR s }
+ | "Hypothesis" { let s = Lexing.lexeme lexbuf in out "VAR" s; P.VAR s }
+ | "Hypotheses" { let s = Lexing.lexeme lexbuf in out "VAR" s; P.VAR s }
+ | "Axiom" { let s = Lexing.lexeme lexbuf in out "AX" s; P.AX s }
+ | "Parameter" { let s = Lexing.lexeme lexbuf in out "AX" s; P.AX s }
+ | "Parameters" { let s = Lexing.lexeme lexbuf in out "AX" s; P.AX s }
+ | "Inductive" { let s = Lexing.lexeme lexbuf in out "IND" s; P.IND s }
+ | "CoInductive" { let s = Lexing.lexeme lexbuf in out "IND" s; P.IND s }
+ | "Record" { let s = Lexing.lexeme lexbuf in out "IND" s; P.IND s }
+ | "Scheme" { let s = Lexing.lexeme lexbuf in out "GEN" s; P.GEN s }
+ | "Section" { let s = Lexing.lexeme lexbuf in out "SEC" s; P.SEC s }
+ | "End" { let s = Lexing.lexeme lexbuf in out "END" s; P.END s }
+ | "Hint" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "Hints" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "Unset" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "Print" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "Opaque" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "Transparent" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "Ltac" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "Tactic" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "Declare" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "Require" { let s = Lexing.lexeme lexbuf in out "REQ" s; P.REQ s }
+ | "Export" { let s = Lexing.lexeme lexbuf in out "XP" s; P.XP s }
+ | "Import" { let s = Lexing.lexeme lexbuf in out "XP" s; P.XP s }
+ | "Load" { let s = Lexing.lexeme lexbuf in out "LOAD" s; P.LOAD s }
+ | "Set" { let s = Lexing.lexeme lexbuf in out "SET" s; P.SET s }
+ | "Reset" { let s = Lexing.lexeme lexbuf in out "SET" s; P.SET s }
+ | "Implicit" { let s = Lexing.lexeme lexbuf in out "SET" s; P.SET s }
+ | "Delimit" { let s = Lexing.lexeme lexbuf in out "SET" s; P.SET s }
+ | "Bind" { let s = Lexing.lexeme lexbuf in out "SET" s; P.SET s }
+ | "Arguments" { let s = Lexing.lexeme lexbuf in out "SET" s; P.SET s }
+ | "Open" { let s = Lexing.lexeme lexbuf in out "SET" s; P.SET s }
+ | "V7only" { let s = Lexing.lexeme lexbuf in out "SET" s; P.SET s }
+ | "Notation" { let s = Lexing.lexeme lexbuf in out "NOT" s; P.NOT s }
+ | "Reserved" { let s = Lexing.lexeme lexbuf in out "NOT" s; P.NOT s }
+ | "Infix" { let s = Lexing.lexeme lexbuf in out "NOT" s; P.NOT s }
+ | "Grammar" { let s = Lexing.lexeme lexbuf in out "NOT" s; P.NOT s }
+ | "Syntax" { let s = Lexing.lexeme lexbuf in out "NOT" s; P.NOT s }
+ | "Add" SPC "Morphism"
+ { let s = Lexing.lexeme lexbuf in out "MOR" s; P.MOR s }
+ | "Add" { let s = Lexing.lexeme lexbuf in out "NOT" s; P.NOT s }
+ | "Identity" { let s = Lexing.lexeme lexbuf in out "ID" s; P.ID s }
+ | "Coercion" { let s = Lexing.lexeme lexbuf in out "CO" s; P.COERC s }
+ | "let" { let s = Lexing.lexeme lexbuf in out "ABBR" s; P.ABBR s }
+ | "in" { let s = Lexing.lexeme lexbuf in out "IN" s; P.IN s }
+ | "with" { let s = Lexing.lexeme lexbuf in out "WITH" s; P.WITH s }
+ | SPC { let s = Lexing.lexeme lexbuf in out "SPC" s; P.SPC s }
+ | ID { let s = Lexing.lexeme lexbuf in out "ID" s; P.IDENT s }
+ | RAWID { let s = Lexing.lexeme lexbuf in out "STR" s; P.STR s }
+ | NUM { let s = Lexing.lexeme lexbuf in out "INT" s; P.INT s }
+ | ":=" { let s = Lexing.lexeme lexbuf in out "DEF" s; P.DEF s }
+ | ":>" { let s = Lexing.lexeme lexbuf in out "COE" s; P.COE s }
+ | "." ID { let s = Lexing.lexeme lexbuf in out "AC" s; P.AC s }
+ | "." SPC { let s = Lexing.lexeme lexbuf in out "FS" s; P.FS s }
+ | "." eof { let s = Lexing.lexeme lexbuf in out "FS" s; P.FS s }
+ | "(*" { let s = Lexing.lexeme lexbuf in out "OQ" s; P.OQ s }
+ | "*)" { let s = Lexing.lexeme lexbuf in out "CQ" s; P.CQ s }
+ | "(" { let s = Lexing.lexeme lexbuf in out "OP" s; P.OP s }
+ | ")" { let s = Lexing.lexeme lexbuf in out "CP" s; P.CP s }
+ | "{" { let s = Lexing.lexeme lexbuf in out "OC" s; P.OC s }
+ | "}" { let s = Lexing.lexeme lexbuf in out "CC" s; P.CC s }
+ | ";" { let s = Lexing.lexeme lexbuf in out "SC" s; P.SC s }
+ | ":" { let s = Lexing.lexeme lexbuf in out "CN" s; P.CN s }
+ | "," { let s = Lexing.lexeme lexbuf in out "CM" s; P.CM s }
+ | _
+ { let s = check (Lexing.lexeme lexbuf) in out "SPEC" s; P.EXTRA s }
+ | eof { let s = Lexing.lexeme lexbuf in out "EOF" s; P.EOF }
--- /dev/null
+/* Copyright (C) 2000, HELM Team.
+ *
+ * This file is part of HELM, an Hypertextual, Electronic
+ * Library of Mathematics, developed at the Computer Science
+ * Department, University of Bologna, Italy.
+ *
+ * HELM is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * HELM is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with HELM; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place - Suite 330, Boston,
+ * MA 02111-1307, USA.
+ *
+ * For details, see the HELM World-Wide-Web page,
+ * http://cs.unibo.it/helm/.
+ */
+
+%{
+ module T = Types
+ module O = Options
+
+ let out t s = if !O.verbose_parser then prerr_endline ("-- " ^ t ^ " " ^ s)
+
+ let trim = HExtlib.trim_blanks
+
+ let hd = List.hd
+
+ let cat x = String.concat " " x
+
+ let mk_vars local idents =
+ let kind = if local then T.Var else T.Con in
+ let map ident = T.Inline (local, kind, trim ident, "", None) in
+ List.map map idents
+
+ let strip2 s =
+ String.sub s 1 (String.length s - 2)
+
+ let strip1 s =
+ String.sub s 1 (String.length s - 1)
+
+ let coercion str =
+ [T.Coercion (false, str); T.Coercion (true, str)]
+
+ let notation str =
+ [T.Notation (false, str); T.Notation (true, str)]
+
+ let mk_flavour str =
+ match trim str with
+ | "Remark" -> Some `Remark
+ | "Lemma" -> Some `Lemma
+ | "Theorem" -> Some `Theorem
+ | "Definition" -> Some `Definition
+ | "Fixpoint" -> Some `Definition
+ | "CoFixpoint" -> Some `Definition
+ | "Let" -> Some `Definition
+ | "Scheme" -> Some `Theorem
+ | _ -> assert false
+
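+ (* an Add Morphism item yields two inline theorems named ext ^ "1" and ext ^ "2" *)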
+ let mk_morphism ext =
+ let generate s = T.Inline (false, T.Con, ext ^ s, "", Some `Theorem) in
+ List.rev_map generate ["2"; "1"]
+
+%}
+ %token <string> SPC STR IDENT INT EXTRA AC OP CP OC CC OQ CQ DEF FS COE CN SC CM
+ %token <string> WITH ABBR IN LET TH PROOF QED VAR AX IND GEN SEC END UNX REQ XP SET NOT LOAD ID COERC MOR
+ %token EOF
+
+ %start items
+ %type <Types.items> items
+%%
+/* SPACED TOKENS ************************************************************/
+
+ ident: xident spcs { $1 ^ $2 };
+ fs: FS spcs { $1 ^ $2 };
+ mor: MOR spcs { $1 ^ $2 };
+ th: TH spcs { $1 ^ $2 };
+ xlet: LET spcs { $1 ^ $2 };
+ proof: PROOF spcs { $1 ^ $2 };
+ qed: QED spcs { $1 ^ $2 };
+ gen: GEN spcs { $1 ^ $2 };
+ sec: SEC spcs { $1 ^ $2 };
+ xend: END spcs { $1 ^ $2 };
+ unx: UNX spcs { $1 ^ $2 };
+ def: DEF spcs { $1 ^ $2 };
+ op: OP spcs { $1 ^ $2 };
+ abbr: ABBR spcs { $1 ^ $2 };
+ var: VAR spcs { $1 ^ $2 };
+ ax: AX spcs { $1 ^ $2 };
+ req: REQ spcs { $1 ^ $2 };
+ load: LOAD spcs { $1 ^ $2 };
+ xp: XP spcs { $1 ^ $2 };
+ ind: IND spcs { $1 ^ $2 };
+ set: SET spcs { $1 ^ $2 };
+ notation: NOT spcs { $1 ^ $2 };
+ cn: CN spcs { $1 ^ $2 };
+ str: STR spcs { $1 ^ $2 };
+ id: ID spcs { $1 ^ $2 };
+ coerc: COERC spcs { $1 ^ $2 };
+ cm: CM spcs { $1 ^ $2 } | { "" }
+ wh: WITH spcs { $1 ^ $2 };
+
+/* SPACES *******************************************************************/
+
+ comment:
+ | OQ verbatims CQ { $1 ^ $2 ^ $3 }
+ ;
+ spc:
+ | SPC { $1 }
+ | comment { $1 }
+ ;
+ spcs:
+ | { "" }
+ | spc spcs { $1 ^ $2 }
+ ;
+ rspcs:
+ | { "" }
+ | SPC rspcs { $1 ^ $2 }
+ ;
+
+/* IDENTIFIERS **************************************************************/
+
+ outer_keyword:
+ | LET { $1 }
+ | TH { $1 }
+ | VAR { $1 }
+ | AX { $1 }
+ | IND { $1 }
+ | GEN { $1 }
+ | SEC { $1 }
+ | END { $1 }
+ | UNX { $1 }
+ | REQ { $1 }
+ | LOAD { $1 }
+ | SET { $1 }
+ | NOT { $1 }
+ | ID { $1 }
+ | COERC { $1 }
+ | MOR { $1 }
+ ;
+ inner_keyword:
+ | XP { $1 }
+ ;
+ xident:
+ | IDENT { $1 }
+ | outer_keyword { $1 }
+ | WITH { $1 }
+ ;
+ qid:
+ | xident { [$1] }
+ | qid AC { strip1 $2 :: $1 }
+ ;
+ idents:
+ | ident { [$1] }
+ | ident cm idents { $1 :: $3 }
+ ;
+
+/* PLAIN SKIP ***************************************************************/
+
+ plain_skip:
+ | IDENT { $1 }
+ | inner_keyword { $1 }
+ | STR { $1 }
+ | INT { $1 }
+ | COE { $1 }
+ | OC { $1 }
+ | CC { $1 }
+ | SC { $1 }
+ | CN { $1 }
+ | CM { $1 }
+ | EXTRA { $1 }
+ ;
+ inner_skip:
+ | plain_skip { $1 }
+ | outer_keyword { $1 }
+ | AC { $1 }
+ | DEF { $1 }
+ | PROOF { $1 }
+ | QED { $1 }
+ | FS { $1 }
+ | WITH { $1 }
+ ;
+
+/* LEFT SKIP ****************************************************************/
+
+ rlskip:
+ | plain_skip { $1, [] }
+ | abbr li_skips IN { $1 ^ fst $2 ^ $3, snd $2 }
+ | op ocp_skips CP { $1 ^ fst $2 ^ $3, snd $2 }
+ | IN { $1, [] }
+ | CP { $1, [] }
+ ;
+ xlskip:
+ | outer_keyword { $1, [] }
+ | AC { $1, [] }
+ | WITH { $1, [] }
+ | rlskip { $1 }
+ ;
+ xlskips:
+ | xlskip spcs { fst $1 ^ $2, snd $1 }
+ | xlskip spcs xlskips { fst $1 ^ $2 ^ fst $3, snd $1 @ snd $3 }
+ ;
+ lskips:
+ | rlskip spcs { fst $1 ^ $2, snd $1 }
+ | rlskip spcs xlskips { fst $1 ^ $2 ^ fst $3, snd $1 @ snd $3 }
+ ;
+ opt_lskips:
+ | lskips { $1 }
+ | { "", [] }
+ ;
+
+/* GENERAL SKIP *************************************************************/
+
+ rskip:
+ | rlskip { $1 }
+ | DEF { $1, [] }
+ ;
+ xskip:
+ | outer_keyword { $1, [] }
+ | AC { $1, [] }
+ | WITH { $1, [] }
+ | rskip { $1 }
+ ;
+ xskips:
+ | xskip spcs { fst $1 ^ $2, snd $1 }
+ | xskip spcs xskips { fst $1 ^ $2 ^ fst $3, snd $1 @ snd $3 }
+ ;
+ skips:
+ | rskip spcs { fst $1 ^ $2, snd $1 }
+ | rskip spcs xskips { fst $1 ^ $2 ^ fst $3, snd $1 @ snd $3 }
+ ;
+
+/* ABBREVIATION SKIP ********************************************************/
+
+ li_skip:
+ | inner_skip { $1, [] }
+ | abbr li_skips IN { $1 ^ fst $2 ^ $3, snd $2 }
+ | op ocp_skips CP { $1 ^ fst $2 ^ $3, snd $2 }
+ ;
+ li_skips:
+ | li_skip spcs { fst $1 ^ $2, snd $1 }
+ | li_skip spcs li_skips { fst $1 ^ $2 ^ fst $3, snd $1 @ snd $3 }
+ ;
+
+/* PARENTHETIC SKIP *********************************************************/
+
+ ocp_skip:
+ | inner_skip { $1, [] }
+ | abbr li_skips IN { $1 ^ fst $2 ^ $3, snd $2 }
+ | op ocp_skips CP { $1 ^ fst $2 ^ $3, snd $2 }
+ | IN { $1, [] }
+ ;
+ ocp_skips:
+ | ocp_skip spcs { fst $1 ^ $2, snd $1 }
+ | ocp_skip spcs ocp_skips { fst $1 ^ $2 ^ fst $3, snd $1 @ snd $3 }
+ ;
+
+/* VERBATIM SKIP ************************************************************/
+
+ verbatim:
+ | comment { $1 }
+ | inner_skip { $1 }
+ | ABBR { $1 }
+ | IN { $1 }
+ | OP { $1 }
+ | CP { $1 }
+ | SPC { $1 }
+ ;
+ verbatims:
+ | { "" }
+ | verbatim verbatims { $1 ^ $2 }
+ ;
+
+/* PROOF STEPS **************************************************************/
+
+ step:
+ | macro_step { $1 }
+ | skips FS { snd $1 }
+ ;
+ steps:
+ | step spcs { $1 }
+ | step spcs steps { $1 @ $3 }
+ ;
+ just:
+ | steps qed { $1 }
+ | proof fs steps qed { $3 }
+ | proof skips { snd $2 }
+ | proof wh skips fs steps qed { snd $3 @ $5 }
+ ;
+ macro_step:
+ | th ident opt_lskips def xskips FS
+ { out "TH" $2;
+ [T.Inline (false, T.Con, trim $2, "", mk_flavour $1)]
+ }
+ | th ident lskips fs just FS
+ { out "TH" $2;
+ $5 @ [T.Inline (false, T.Con, trim $2, "", mk_flavour $1)]
+ }
+ | gen ident def xskips FS
+ { out "TH" $2;
+ [T.Inline (false, T.Con, trim $2, "", mk_flavour $1)]
+ }
+ | mor ident cn ident fs just FS
+ { out "MOR" $4; $6 @ mk_morphism (trim $4) }
+ | xlet ident opt_lskips def xskips FS
+ { out "TH" $2;
+ [T.Inline (true, T.Con, trim $2, "", mk_flavour $1)]
+ }
+ | xlet ident lskips fs just FS
+ { out "TH" $2;
+ $5 @ [T.Inline (true, T.Con, trim $2, "", mk_flavour $1)]
+ }
+ | var idents cn xskips FS
+ { out "VAR" (cat $2); mk_vars true $2 }
+ | ax idents cn xskips FS
+ { out "AX" (cat $2); mk_vars false $2 }
+ | ind ident skips FS
+ { out "IND" $2;
+ T.Inline (false, T.Ind, trim $2, "", None) :: snd $3
+ }
+ | sec ident FS
+ { out "SEC" $2; [T.Section (true, trim $2, $1 ^ $2)] }
+ | xend ident FS
+ { out "END" $2; [T.Section (false, trim $2, $1 ^ $2)] }
+ | unx xskips FS
+ { out "UNX" (fst $2); [T.Unexport ($1 ^ fst $2 ^ trim $3)] }
+ | req xp ident FS
+ { out "REQ" $3; [T.Include (trim $3)] }
+ | req ident FS
+ { out "REQ" $2; [T.Include (trim $2)] }
+ | load str FS
+ { out "REQ" $2; [T.Include (strip2 (trim $2))] }
+ | coerc qid spcs skips FS
+ { out "COERCE" (hd $2); coercion (hd $2) }
+ | id coerc qid spcs skips FS
+ { out "COERCE" (hd $3); coercion (hd $3) }
+ | th ident error
+ { out "ERROR" $2; failwith ("macro_step " ^ $2) }
+ | ind ident error
+ { out "ERROR" $2; failwith ("macro_step " ^ $2) }
+ | var idents error
+ { let vs = cat $2 in
+ out "ERROR" vs; failwith ("macro_step " ^ vs) }
+ | ax idents error
+ { let vs = cat $2 in
+ out "ERROR" vs; failwith ("macro_step " ^ vs) }
+ ;
+
+/* VERNACULAR ITEMS *********************************************************/
+
+ item:
+ | OQ verbatims CQ { [T.Comment $2] }
+ | macro_step { $1 }
+ | set xskips FS { [T.Unexport ($1 ^ fst $2 ^ trim $3)] }
+ | notation xskips FS { notation ($1 ^ fst $2 ^ trim $3) }
+ | error { out "ERROR" "item"; failwith "item" }
+ ;
+ items:
+ | rspcs EOF { [] }
+ | rspcs item items { $2 @ $3 }
+ ;
+
+
+/*
+ oc: OC spcs { $1 ^ $2 };
+ coe: COE spcs { $1 ^ $2 };
+ sc: SC spcs { $1 ^ $2 };
+
+ cnot:
+ | EXTRA { $1 }
+ | INT { $1 }
+ ;
+ cnots:
+ | cnot spcs { $1 ^ $2 }
+ | cnot spcs cnots { $1 ^ $2 ^ $3 }
+ ;
+
+ xstrict:
+ | AC { $1 }
+ | INT { $1 }
+ | EXTRA { $1 }
+ | CN { $1 }
+ | SC { $1 }
+ | OC { $1 }
+ | CC { $1 }
+ | COE { $1 }
+ | STR { $1 }
+ | abbr extends0 IN { $1 ^ $2 ^ $3 }
+ | op extends1 CP { $1 ^ $2 ^ $3 }
+ ;
+ strict:
+ | xstrict { $1 }
+ | IDENT { $1 }
+ | SET { $1 }
+ | NOT { $1 }
+ ;
+ restrict:
+ | strict { $1 }
+ | IN { $1 }
+ | CP { $1 }
+ ;
+ xre:
+ | xstrict { $1 }
+ | IN { $1 }
+ | CP { $1 }
+ ;
+ restricts:
+ | restrict spcs { $1 ^ $2 }
+ | restrict spcs restricts { $1 ^ $2 ^ $3 }
+ ;
+ xres:
+ | xre spcs { $1 ^ $2 }
+ | xre spcs restricts { $1 ^ $2 ^ $3 }
+ ;
+ extend0:
+ | strict { $1 }
+ | DEF { $1 }
+ ;
+ extends0:
+ | extend0 spcs { $1 ^ $2 }
+ | extend0 spcs extends0 { $1 ^ $2 ^ $3 }
+ ;
+ extend1:
+ | strict { $1 }
+ | DEF { $1 }
+ | IN { $1 }
+ ;
+ extends1:
+ | extend1 spcs { $1 ^ $2 }
+ | extend1 spcs extends1 { $1 ^ $2 ^ $3 }
+ ;
+ unexport_ff:
+ | IDENT { $1 }
+ | AC { $1 }
+ | STR { $1 }
+ | OP { $1 }
+ | ABBR { $1 }
+ | IN { $1 }
+ | CP { $1 }
+ | SET { $1 }
+ ;
+ unexport_rr:
+ | unexport_ff { $1 }
+ | INT { $1 }
+ | EXTRA { $1 }
+ | CN { $1 }
+ | COE { $1 }
+ | DEF { $1 }
+ ;
+ unexport_r:
+ | unexport_rr { $1, [] }
+ | oc fields CC { $1 ^ fst $2 ^ $3, snd $2 }
+ | oc unexport_ff CC { $1, [] }
+ ;
+ unexports_r:
+ | unexport_r spcs { fst $1 ^ $2, snd $1 }
+ | unexport_r spcs unexports_r { fst $1 ^ $2 ^ fst $3, snd $1 @ snd $3 }
+ ;
+ field:
+ | ident cn unexports_r
+ { $1 ^ $2 ^ fst $3, snd $3 }
+ | ident def unexports_r
+ { $1 ^ $2 ^ fst $3, snd $3 }
+ | ident coe unexports_r
+ { $1 ^ $2 ^ fst $3, snd $3 @ coercion (trim $1) }
+ ;
+ fields:
+ | field { $1 }
+ | field sc fields { fst $1 ^ $2 ^ fst $3, snd $1 @ snd $3 }
+ | cnots { $1, [] }
+ | error { out "ERROR" "fields"; failwith "fields" }
+ ;
+ unexport:
+ | unexport_r { $1 }
+ | SC { $1, [] }
+ | CC { $1, [] }
+ | IP { $1, [] }
+ | LET { $1, [] }
+ | VAR { $1, [] }
+
+ ;
+ unexports:
+ | unexport spcs { fst $1 ^ $2, snd $1 }
+ | unexport spcs unexports { fst $1 ^ $2 ^ fst $3, snd $1 @ snd $3 }
+ ;
+ verbatim:
+ | unexport_rr { $1 }
+ | reserved_ident { $1 }
+ | comment { $1 }
+ | OC { $1 }
+ | CC { $1 }
+ | SC { $1 }
+ | XP { $1 }
+ | IP { $1 }
+ | FS { $1 }
+ | SPC { $1 }
+ ;
+ verbatims:
+ | { "" }
+ | verbatim verbatims { $1 ^ $2 }
+ ;
+ step:
+ | macro_step { $1 }
+ | restricts FS { [] }
+ ;
+ steps:
+ | step spcs { [] }
+ | step spcs steps { $1 @ $3 }
+ ;
+
+ macro_step:
+ | th ident restricts fs proof FS steps qed FS
+ { out "TH" $2;
+ $7 @ [T.Inline (false, T.Con, trim $2, "", mk_flavour $1)]
+ }
+ | th ident restricts fs proof restricts FS
+ { out "TH" $2;
+ [T.Inline (false, T.Con, trim $2, "", mk_flavour $1)]
+ }
+ | th ident restricts fs steps qed FS
+ { out "TH" $2;
+ $5 @ [T.Inline (false, T.Con, trim $2, "", mk_flavour $1)]
+ }
+ | th ident restricts def restricts FS
+ { out "TH" $2;
+ [T.Inline (false, T.Con, trim $2, "", mk_flavour $1)]
+ }
+ | th ident def restricts FS
+ { out "TH" $2;
+ [T.Inline (false, T.Con, trim $2, "", mk_flavour $1)]
+ }
+ | gen ident def restricts FS
+ { out "TH" $2;
+ [T.Inline (false, T.Con, trim $2, "", mk_flavour $1)]
+ }
+ | xlet ident restricts fs proof FS steps qed FS
+ { out "LET" $2;
+ $7 @ [T.Inline (true, T.Con, trim $2, "", mk_flavour $1)]
+ }
+ | xlet ident restricts fs proof restricts FS
+ { out "LET" $2;
+ [T.Inline (true, T.Con, trim $2, "", mk_flavour $1)]
+ }
+ | xlet ident restricts fs steps qed FS
+ { out "LET" $2;
+ $5 @ [T.Inline (true, T.Con, trim $2, "", mk_flavour $1)]
+ }
+ | xlet ident restricts def restricts FS
+ { out "LET" $2;
+ [T.Inline (true, T.Con, trim $2, "", mk_flavour $1)]
+ }
+ | xlet ident def restricts FS
+ { out "LET" $2;
+ [T.Inline (true, T.Con, trim $2, "", mk_flavour $1)]
+ }
+ | var idents xres FS
+ { out "VAR" (cat $2); mk_vars true $2 }
+ | ax idents xres FS
+ { out "AX" (cat $2); mk_vars false $2 }
+ | ind ident unexports FS
+ { out "IND" $2;
+ T.Inline (false, T.Ind, trim $2, "", None) :: snd $3
+ }
+ | sec ident FS
+ { out "SEC" $2; [T.Section (true, trim $2, $1 ^ $2)] }
+ | xend ident FS
+ { out "END" $2; [T.Section (false, trim $2, $1 ^ $2)] }
+ | unx unexports FS
+ { out "UNX" (fst $2); [T.Unexport ($1 ^ fst $2 ^ trim $3)] }
+ | req xp ident FS
+ { out "REQ" $3; [T.Include (trim $3)] }
+ | req ip ident FS
+ { out "REQ" $3; [T.Include (trim $3)] }
+ | req ident FS
+ { out "REQ" $2; [T.Include (trim $2)] }
+ | load str FS
+ { out "REQ" $2; [T.Include (strip2 (trim $2))] }
+ | coerc qid spcs cn unexports FS
+ { out "COERCE" (hd $2); coercion (hd $2) }
+ | id coerc qid spcs cn unexports FS
+ { out "COERCE" (hd $3); coercion (hd $3) }
+ | th ident error
+ { out "ERROR" $2; failwith ("macro_step " ^ $2) }
+ | ind ident error
+ { out "ERROR" $2; failwith ("macro_step " ^ $2) }
+ | var idents error
+ { let vs = cat $2 in
+ out "ERROR" vs; failwith ("macro_step " ^ vs) }
+ | ax idents error
+ { let vs = cat $2 in
+ out "ERROR" vs; failwith ("macro_step " ^ vs) }
+ ;
+ item:
+ | OQ verbatims CQ { [T.Comment $2] }
+ | macro_step { $1 }
+ | set unexports FS { [T.Unexport ($1 ^ fst $2 ^ trim $3)] }
+ | notation unexports FS { notation ($1 ^ fst $2 ^ trim $3) }
+ | error { out "ERROR" "item"; failwith "item" }
+ ;
+ items:
+ | rspcs EOF { [] }
+ | rspcs item items { $2 @ $3 }
+ ;
+*/
--- /dev/null
+(* Copyright (C) 2000, HELM Team.
+ *
+ * This file is part of HELM, an Hypertextual, Electronic
+ * Library of Mathematics, developed at the Computer Science
+ * Department, University of Bologna, Italy.
+ *
+ * HELM is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * HELM is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with HELM; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place - Suite 330, Boston,
+ * MA 02111-1307, USA.
+ *
+ * For details, see the HELM World-Wide-Web page,
+ * http://cs.unibo.it/helm/.
+ *)
+
+{
+ module O = Options
+ module P = GrafiteParser
+
+ let out t s = if !O.verbose_lexer then prerr_endline (t ^ " " ^ s)
+}
+
+let SPC = [' ' '\t' '\n' '\r']+
+let ALPHA = ['A'-'Z' 'a'-'z']
+let FIG = ['0'-'9']
+let DECOR = "?" | "'" | "`"
+let IDENT = ALPHA (ALPHA | '_' | FIG )* DECOR*
+
+rule token = parse
+ | "(*" { let s = Lexing.lexeme lexbuf in out "OK" s; P.OK s }
+ | "*)" { let s = Lexing.lexeme lexbuf in out "CK" s; P.CK s }
+ | SPC { let s = Lexing.lexeme lexbuf in out "SPC" s; P.SPC s }
+ | "axiom" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
+ | "definition" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
+ | "theorem" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
+ | "lemma" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
+(*
+ | "fact" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
+*)
+ | "remark" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
+ | "variant" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
+ | "inductive" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
+ | "coinductive" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
+ | "record" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
+ | "let rec" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
+ | "let corec" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
+ | "notation" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "interpretation" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "alias" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "coercion" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "prefer coercion" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "default" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "include" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "inline" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
+ | "qed" { let s = Lexing.lexeme lexbuf in out "QED" s; P.QED s }
+ | "elim" { let s = Lexing.lexeme lexbuf in out "PS" s; P.PS s }
+ | "apply" { let s = Lexing.lexeme lexbuf in out "PS" s; P.PS s }
+ | "intros" { let s = Lexing.lexeme lexbuf in out "PS" s; P.PS s }
+ | "assume" { let s = Lexing.lexeme lexbuf in out "PS" s; P.PS s }
+ | "the" { let s = Lexing.lexeme lexbuf in out "PS" s; P.PS s }
+ | IDENT { let s = Lexing.lexeme lexbuf in out "ID" s; P.ID s }
+ | "." SPC { let s = Lexing.lexeme lexbuf in out "FS" s; P.FS s }
+ | "." { let s = Lexing.lexeme lexbuf in out "FS" s; P.FS s }
+ | _ { let s = Lexing.lexeme lexbuf in out "RAW" s; P.RAW s }
+ | eof { let s = Lexing.lexeme lexbuf in out "EOF" s; P.EOF }
--- /dev/null
+/* Copyright (C) 2000, HELM Team.
+ *
+ * This file is part of HELM, an Hypertextual, Electronic
+ * Library of Mathematics, developed at the Computer Science
+ * Department, University of Bologna, Italy.
+ *
+ * HELM is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * HELM is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with HELM; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place - Suite 330, Boston,
+ * MA 02111-1307, USA.
+ *
+ * For details, see the HELM World-Wide-Web page,
+ * http://cs.unibo.it/helm/.
+ */
+
+%{
+ module T = Types
+ module O = Options
+
+ let out t s = if !O.verbose_parser then prerr_endline ("-- " ^ t ^ " " ^ s)
+
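+ (* map a Grafite keyword to the inline kind and flavour it declares *)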
+ let mk_flavour str =
+ match str with
+ | "record" -> T.Ind, None
+ | "inductive" -> T.Ind, None
+ | "coinductive" -> T.Ind, None
+ | "axiom" -> T.Con, None
+ | "definition" -> T.Con, Some `Definition
+ | "let rec" -> T.Con, Some `Definition
+ | "let corec" -> T.Con, Some `Definition
+ | "theorem" -> T.Con, Some `Theorem
+ | "lemma" -> T.Con, Some `Lemma
+ | "fact" -> T.Con, Some `Fact
+ | "remark" -> T.Con, Some `Remark
+ | "variant" -> T.Con, None
+ | _ -> assert false
+
+%}
+ %token <string> SPC OK CK FS QED TH UNX PS ID RAW
+ %token EOF
+
+ %start items
+ %type <Types.items> items
+%%
+
+/* LINES ITEMS ***************************************************************/
+
+ extra:
+ | ID { $1 }
+ | QED { $1 }
+ | RAW { $1 }
+ ;
+ text:
+ | extra { $1 }
+ | TH { $1 }
+ | UNX { $1 }
+ | PS { $1 }
+ | comment { $1 }
+ ;
+ texts:
+ | { "" }
+ | text texts { $1 ^ $2 }
+ ;
+ line:
+ | texts FS { $1 ^ $2 }
+ ;
+ drop:
+ | extra line { $1 ^ $2 }
+ ;
+ drops:
+ | { "" }
+ | drop drops { $1 ^ $2 }
+ ;
+
+/* SPACE ITEMS *************************************************************/
+
+ verbatim:
+ | text { $1 }
+ | FS { $1 }
+ ;
+ verbatims:
+ | { "" }
+ | verbatim verbatims { $1 ^ $2 }
+ ;
+ comment:
+ | SPC { $1 }
+ | OK verbatims CK { $1 ^ $2 ^ $3 }
+ ;
+
+/* STEPS ********************************************************************/
+
+ step:
+ | comment { $1 }
+ | FS { $1 }
+ | TH { $1 }
+ | UNX { $1 }
+ | PS { $1 }
+ | ID { $1 }
+ | RAW { $1 }
+ ;
+ steps:
+ | QED FS { $1 ^ $2 }
+ | step steps { $1 ^ $2 }
+ ;
+
+/* ITEMS ********************************************************************/
+
+ id:
+ | ID { $1 }
+ | TH { $1 }
+ | UNX { $1 }
+ | PS { $1 }
+ ;
+
+ item:
+ | comment
+ { out "OK" $1; [T.Verbatim $1] }
+ | TH SPC id line drops
+ { out "TH" $3;
+ let a, b = mk_flavour $1 in [T.Inline (true, a, $3, "", b)]
+ }
+ | UNX line drops
+ { out "UNX" $1; [T.Verbatim ($1 ^ $2 ^ $3)] }
+ | PS steps
+ { [] }
+ ;
+ items:
+ | EOF { [] }
+ | item items { $1 @ $2 }
+/* | error { out "ERROR" ""; failwith ("item id " ^ "") } */
+ ;
type inline_kind = Con | Ind | Var
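+(* input syntaxes: Gallina8 (Coq V8 vernacular, ".v") and Grafite (Matita, ".ma") *)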
+type input_kind = Gallina8 | Grafite
+
type output_kind = Declarative | Procedural
type source = string
+++ /dev/null
-{
- module O = Options
- module P = V8Parser
-
- let out t s = if !O.verbose_lexer then prerr_endline (t ^ " " ^ s)
-
- let check s =
- let c = Char.code s.[0] in
- if c <= 127 then s else
- let escaped = Printf.sprintf "\\%3u\\" c in
- begin
- if !O.verbose_escape then Printf.eprintf "ESCAPED: %s %s\n" s escaped;
- escaped
- end
-}
-
-let QT = '"'
-let SPC = [' ' '\t' '\n' '\r']+
-let ALPHA = ['A'-'Z' 'a'-'z' '_']
-let FIG = ['0'-'9']
-let ID = ALPHA (ALPHA | FIG | "\'")*
-let RAWID = QT [^ '"']* QT
-let NUM = "-"? FIG+
-
-rule token = parse
- | "Let" { let s = Lexing.lexeme lexbuf in out "LET" s; P.LET s }
- | "Remark" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
- | "Lemma" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
- | "Theorem" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
- | "Definition" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
- | "Fixpoint" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
- | "CoFixpoint" { let s = Lexing.lexeme lexbuf in out "TH" s; P.TH s }
- | "Qed" { let s = Lexing.lexeme lexbuf in out "QED" s; P.QED s }
- | "Save" { let s = Lexing.lexeme lexbuf in out "QED" s; P.QED s }
- | "Defined" { let s = Lexing.lexeme lexbuf in out "QED" s; P.QED s }
- | "Proof" { let s = Lexing.lexeme lexbuf in out "PRF" s; P.PROOF s }
- | "Variable" { let s = Lexing.lexeme lexbuf in out "VAR" s; P.VAR s }
- | "Variables" { let s = Lexing.lexeme lexbuf in out "VAR" s; P.VAR s }
- | "Hypothesis" { let s = Lexing.lexeme lexbuf in out "VAR" s; P.VAR s }
- | "Hypotheses" { let s = Lexing.lexeme lexbuf in out "VAR" s; P.VAR s }
- | "Axiom" { let s = Lexing.lexeme lexbuf in out "AX" s; P.AX s }
- | "Parameter" { let s = Lexing.lexeme lexbuf in out "AX" s; P.AX s }
- | "Parameters" { let s = Lexing.lexeme lexbuf in out "AX" s; P.AX s }
- | "Inductive" { let s = Lexing.lexeme lexbuf in out "IND" s; P.IND s }
- | "CoInductive" { let s = Lexing.lexeme lexbuf in out "IND" s; P.IND s }
- | "Record" { let s = Lexing.lexeme lexbuf in out "IND" s; P.IND s }
- | "Scheme" { let s = Lexing.lexeme lexbuf in out "GEN" s; P.GEN s }
- | "Section" { let s = Lexing.lexeme lexbuf in out "SEC" s; P.SEC s }
- | "End" { let s = Lexing.lexeme lexbuf in out "END" s; P.END s }
- | "Hint" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
- | "Hints" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
- | "Unset" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
- | "Print" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
- | "Opaque" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
- | "Transparent" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
- | "Ltac" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
- | "Tactic" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
- | "Declare" { let s = Lexing.lexeme lexbuf in out "UNX" s; P.UNX s }
- | "Require" { let s = Lexing.lexeme lexbuf in out "REQ" s; P.REQ s }
- | "Export" { let s = Lexing.lexeme lexbuf in out "XP" s; P.XP s }
- | "Import" { let s = Lexing.lexeme lexbuf in out "XP" s; P.XP s }
- | "Load" { let s = Lexing.lexeme lexbuf in out "LOAD" s; P.LOAD s }
- | "Set" { let s = Lexing.lexeme lexbuf in out "SET" s; P.SET s }
- | "Reset" { let s = Lexing.lexeme lexbuf in out "SET" s; P.SET s }
- | "Implicit" { let s = Lexing.lexeme lexbuf in out "SET" s; P.SET s }
- | "Delimit" { let s = Lexing.lexeme lexbuf in out "SET" s; P.SET s }
- | "Bind" { let s = Lexing.lexeme lexbuf in out "SET" s; P.SET s }
- | "Arguments" { let s = Lexing.lexeme lexbuf in out "SET" s; P.SET s }
- | "Open" { let s = Lexing.lexeme lexbuf in out "SET" s; P.SET s }
- | "V7only" { let s = Lexing.lexeme lexbuf in out "SET" s; P.SET s }
- | "Notation" { let s = Lexing.lexeme lexbuf in out "NOT" s; P.NOT s }
- | "Reserved" { let s = Lexing.lexeme lexbuf in out "NOT" s; P.NOT s }
- | "Infix" { let s = Lexing.lexeme lexbuf in out "NOT" s; P.NOT s }
- | "Grammar" { let s = Lexing.lexeme lexbuf in out "NOT" s; P.NOT s }
- | "Syntax" { let s = Lexing.lexeme lexbuf in out "NOT" s; P.NOT s }
- | "Add" SPC "Morphism"
- { let s = Lexing.lexeme lexbuf in out "MOR" s; P.MOR s }
- | "Add" { let s = Lexing.lexeme lexbuf in out "NOT" s; P.NOT s }
- | "Identity" { let s = Lexing.lexeme lexbuf in out "ID" s; P.ID s }
- | "Coercion" { let s = Lexing.lexeme lexbuf in out "CO" s; P.COERC s }
- | "let" { let s = Lexing.lexeme lexbuf in out "ABBR" s; P.ABBR s }
- | "in" { let s = Lexing.lexeme lexbuf in out "IN" s; P.IN s }
- | "with" { let s = Lexing.lexeme lexbuf in out "WITH" s; P.WITH s }
- | SPC { let s = Lexing.lexeme lexbuf in out "SPC" s; P.SPC s }
- | ID { let s = Lexing.lexeme lexbuf in out "ID" s; P.IDENT s }
- | RAWID { let s = Lexing.lexeme lexbuf in out "STR" s; P.STR s }
- | NUM { let s = Lexing.lexeme lexbuf in out "INT" s; P.INT s }
- | ":=" { let s = Lexing.lexeme lexbuf in out "DEF" s; P.DEF s }
- | ":>" { let s = Lexing.lexeme lexbuf in out "COE" s; P.COE s }
- | "." ID { let s = Lexing.lexeme lexbuf in out "AC" s; P.AC s }
- | "." SPC { let s = Lexing.lexeme lexbuf in out "FS" s; P.FS s }
- | "." eof { let s = Lexing.lexeme lexbuf in out "FS" s; P.FS s }
- | "(*" { let s = Lexing.lexeme lexbuf in out "OQ" s; P.OQ s }
- | "*)" { let s = Lexing.lexeme lexbuf in out "CQ" s; P.CQ s }
- | "(" { let s = Lexing.lexeme lexbuf in out "OP" s; P.OP s }
- | ")" { let s = Lexing.lexeme lexbuf in out "CP" s; P.CP s }
- | "{" { let s = Lexing.lexeme lexbuf in out "OC" s; P.OC s }
- | "}" { let s = Lexing.lexeme lexbuf in out "CC" s; P.CC s }
- | ";" { let s = Lexing.lexeme lexbuf in out "SC" s; P.SC s }
- | ":" { let s = Lexing.lexeme lexbuf in out "CN" s; P.CN s }
- | "," { let s = Lexing.lexeme lexbuf in out "CM" s; P.CM s }
- | _
- { let s = check (Lexing.lexeme lexbuf) in out "SPEC" s; P.EXTRA s }
- | eof { let s = Lexing.lexeme lexbuf in out "EOF" s; P.EOF }
+++ /dev/null
-/* Copyright (C) 2000, HELM Team.
- *
- * This file is part of HELM, an Hypertextual, Electronic
- * Library of Mathematics, developed at the Computer Science
- * Department, University of Bologna, Italy.
- *
- * HELM is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License
- * as published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- * HELM is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with HELM; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place - Suite 330, Boston,
- * MA 02111-1307, USA.
- *
- * For details, see the HELM World-Wide-Web page,
- * http://cs.unibo.it/helm/.
- */
-
-%{
- module T = Types
- module O = Options
-
- let out t s = if !O.verbose_parser then prerr_endline ("-- " ^ t ^ " " ^ s)
-
- let trim = HExtlib.trim_blanks
-
- let hd = List.hd
-
- let cat x = String.concat " " x
-
- let mk_vars local idents =
- let kind = if local then T.Var else T.Con in
- let map ident = T.Inline (local, kind, trim ident, "", None) in
- List.map map idents
-
- let strip2 s =
- String.sub s 1 (String.length s - 2)
-
- let strip1 s =
- String.sub s 1 (String.length s - 1)
-
- let coercion str =
- [T.Coercion (false, str); T.Coercion (true, str)]
-
- let notation str =
- [T.Notation (false, str); T.Notation (true, str)]
-
- let mk_flavour str =
- match trim str with
- | "Remark" -> Some `Remark
- | "Lemma" -> Some `Lemma
- | "Theorem" -> Some `Theorem
- | "Definition" -> Some `Definition
- | "Fixpoint" -> Some `Definition
- | "CoFixpoint" -> Some `Definition
- | "Let" -> Some `Definition
- | "Scheme" -> Some `Theorem
- | _ -> assert false
-
- let mk_morphism ext =
- let generate s = T.Inline (false, T.Con, ext ^ s, "", Some `Theorem) in
- List.rev_map generate ["2"; "1"]
-
-%}
- %token <string> SPC STR IDENT INT EXTRA AC OP CP OC CC OQ CQ DEF FS COE CN SC CM
- %token <string> WITH ABBR IN LET TH PROOF QED VAR AX IND GEN SEC END UNX REQ XP SET NOT LOAD ID COERC MOR
- %token EOF
-
- %start items
- %type <Types.items> items
-%%
-/* SPACED TOKENS ************************************************************/
-
- ident: xident spcs { $1 ^ $2 };
- fs: FS spcs { $1 ^ $2 };
- mor: MOR spcs { $1 ^ $2 };
- th: TH spcs { $1 ^ $2 };
- xlet: LET spcs { $1 ^ $2 };
- proof: PROOF spcs { $1 ^ $2 };
- qed: QED spcs { $1 ^ $2 };
- gen: GEN spcs { $1 ^ $2 };
- sec: SEC spcs { $1 ^ $2 };
- xend: END spcs { $1 ^ $2 };
- unx: UNX spcs { $1 ^ $2 };
- def: DEF spcs { $1 ^ $2 };
- op: OP spcs { $1 ^ $2 };
- abbr: ABBR spcs { $1 ^ $2 };
- var: VAR spcs { $1 ^ $2 };
- ax: AX spcs { $1 ^ $2 };
- req: REQ spcs { $1 ^ $2 };
- load: LOAD spcs { $1 ^ $2 };
- xp: XP spcs { $1 ^ $2 };
- ind: IND spcs { $1 ^ $2 };
- set: SET spcs { $1 ^ $2 };
- notation: NOT spcs { $1 ^ $2 };
- cn: CN spcs { $1 ^ $2 };
- str: STR spcs { $1 ^ $2 };
- id: ID spcs { $1 ^ $2 };
- coerc: COERC spcs { $1 ^ $2 };
- cm: CM spcs { $1 ^ $2 } | { "" }
- wh: WITH spcs { $1 ^ $2 };
-
-/* SPACES *******************************************************************/
-
- comment:
- | OQ verbatims CQ { $1 ^ $2 ^ $3 }
- ;
- spc:
- | SPC { $1 }
- | comment { $1 }
- ;
- spcs:
- | { "" }
- | spc spcs { $1 ^ $2 }
- ;
- rspcs:
- | { "" }
- | SPC rspcs { $1 ^ $2 }
- ;
-
-/* IDENTIFIERS **************************************************************/
-
- outer_keyword:
- | LET { $1 }
- | TH { $1 }
- | VAR { $1 }
- | AX { $1 }
- | IND { $1 }
- | GEN { $1 }
- | SEC { $1 }
- | END { $1 }
- | UNX { $1 }
- | REQ { $1 }
- | LOAD { $1 }
- | SET { $1 }
- | NOT { $1 }
- | ID { $1 }
- | COERC { $1 }
- | MOR { $1 }
- ;
- inner_keyword:
- | XP { $1 }
- ;
- xident:
- | IDENT { $1 }
- | outer_keyword { $1 }
- | WITH { $1 }
- ;
- qid:
- | xident { [$1] }
- | qid AC { strip1 $2 :: $1 }
- ;
- idents:
- | ident { [$1] }
- | ident cm idents { $1 :: $3 }
- ;
-
-/* PLAIN SKIP ***************************************************************/
-
- plain_skip:
- | IDENT { $1 }
- | inner_keyword { $1 }
- | STR { $1 }
- | INT { $1 }
- | COE { $1 }
- | OC { $1 }
- | CC { $1 }
- | SC { $1 }
- | CN { $1 }
- | CM { $1 }
- | EXTRA { $1 }
- ;
- inner_skip:
- | plain_skip { $1 }
- | outer_keyword { $1 }
- | AC { $1 }
- | DEF { $1 }
- | PROOF { $1 }
- | QED { $1 }
- | FS { $1 }
- | WITH { $1 }
- ;
-
-/* LEFT SKIP ****************************************************************/
-
- rlskip:
- | plain_skip { $1, [] }
- | abbr li_skips IN { $1 ^ fst $2 ^ $3, snd $2 }
- | op ocp_skips CP { $1 ^ fst $2 ^ $3, snd $2 }
- | IN { $1, [] }
- | CP { $1, [] }
- ;
- xlskip:
- | outer_keyword { $1, [] }
- | AC { $1, [] }
- | WITH { $1, [] }
- | rlskip { $1 }
- ;
- xlskips:
- | xlskip spcs { fst $1 ^ $2, snd $1 }
- | xlskip spcs xlskips { fst $1 ^ $2 ^ fst $3, snd $1 @ snd $3 }
- ;
- lskips:
- | rlskip spcs { fst $1 ^ $2, snd $1 }
- | rlskip spcs xlskips { fst $1 ^ $2 ^ fst $3, snd $1 @ snd $3 }
- ;
- opt_lskips:
- | lskips { $1 }
- | { "", [] }
- ;
-
-/* GENERAL SKIP *************************************************************/
-
- rskip:
- | rlskip { $1 }
- | DEF { $1, [] }
- ;
- xskip:
- | outer_keyword { $1, [] }
- | AC { $1, [] }
- | WITH { $1, [] }
- | rskip { $1 }
- ;
- xskips:
- | xskip spcs { fst $1 ^ $2, snd $1 }
- | xskip spcs xskips { fst $1 ^ $2 ^ fst $3, snd $1 @ snd $3 }
- ;
- skips:
- | rskip spcs { fst $1 ^ $2, snd $1 }
- | rskip spcs xskips { fst $1 ^ $2 ^ fst $3, snd $1 @ snd $3 }
- ;
-
-/* ABBREVIATION SKIP ********************************************************/
-
- li_skip:
- | inner_skip { $1, [] }
- | abbr li_skips IN { $1 ^ fst $2 ^ $3, snd $2 }
- | op ocp_skips CP { $1 ^ fst $2 ^ $3, snd $2 }
- ;
- li_skips:
- | li_skip spcs { fst $1 ^ $2, snd $1 }
- | li_skip spcs li_skips { fst $1 ^ $2 ^ fst $3, snd $1 @ snd $3 }
- ;
-
-/* PARENTETIC SKIP **********************************************************/
-
- ocp_skip:
- | inner_skip { $1, [] }
- | abbr li_skips IN { $1 ^ fst $2 ^ $3, snd $2 }
- | op ocp_skips CP { $1 ^ fst $2 ^ $3, snd $2 }
- | IN { $1, [] }
- ;
- ocp_skips:
- | ocp_skip spcs { fst $1 ^ $2, snd $1 }
- | ocp_skip spcs ocp_skips { fst $1 ^ $2 ^ fst $3, snd $1 @ snd $3 }
- ;
-
-/* VERBATIM SKIP ************************************************************/
-
- verbatim:
- | comment { $1 }
- | inner_skip { $1 }
- | ABBR { $1 }
- | IN { $1 }
- | OP { $1 }
- | CP { $1 }
- | SPC { $1 }
- ;
- verbatims:
- | { "" }
- | verbatim verbatims { $1 ^ $2 }
- ;
-
-/* PROOF STEPS **************************************************************/
-
- step:
- | macro_step { $1 }
- | skips FS { snd $1 }
- ;
- steps:
- | step spcs { $1 }
- | step spcs steps { $1 @ $3 }
- ;
- just:
- | steps qed { $1 }
- | proof fs steps qed { $3 }
- | proof skips { snd $2 }
- | proof wh skips fs steps qed { snd $3 @ $5 }
- ;
- macro_step:
- | th ident opt_lskips def xskips FS
- { out "TH" $2;
- [T.Inline (false, T.Con, trim $2, "", mk_flavour $1)]
- }
- | th ident lskips fs just FS
- { out "TH" $2;
- $5 @ [T.Inline (false, T.Con, trim $2, "", mk_flavour $1)]
- }
- | gen ident def xskips FS
- { out "TH" $2;
- [T.Inline (false, T.Con, trim $2, "", mk_flavour $1)]
- }
- | mor ident cn ident fs just FS
- { out "MOR" $4; $6 @ mk_morphism (trim $4) }
- | xlet ident opt_lskips def xskips FS
- { out "TH" $2;
- [T.Inline (true, T.Con, trim $2, "", mk_flavour $1)]
- }
- | xlet ident lskips fs just FS
- { out "TH" $2;
- $5 @ [T.Inline (true, T.Con, trim $2, "", mk_flavour $1)]
- }
- | var idents cn xskips FS
- { out "VAR" (cat $2); mk_vars true $2 }
- | ax idents cn xskips FS
- { out "AX" (cat $2); mk_vars false $2 }
- | ind ident skips FS
- { out "IND" $2;
- T.Inline (false, T.Ind, trim $2, "", None) :: snd $3
- }
- | sec ident FS
- { out "SEC" $2; [T.Section (true, trim $2, $1 ^ $2)] }
- | xend ident FS
- { out "END" $2; [T.Section (false, trim $2, $1 ^ $2)] }
- | unx xskips FS
- { out "UNX" (fst $2); [T.Unexport ($1 ^ fst $2 ^ trim $3)] }
- | req xp ident FS
- { out "REQ" $3; [T.Include (trim $3)] }
- | req ident FS
- { out "REQ" $2; [T.Include (trim $2)] }
- | load str FS
- { out "REQ" $2; [T.Include (strip2 (trim $2))] }
- | coerc qid spcs skips FS
- { out "COERCE" (hd $2); coercion (hd $2) }
- | id coerc qid spcs skips FS
- { out "COERCE" (hd $3); coercion (hd $3) }
- | th ident error
- { out "ERROR" $2; failwith ("macro_step " ^ $2) }
- | ind ident error
- { out "ERROR" $2; failwith ("macro_step " ^ $2) }
- | var idents error
- { let vs = cat $2 in
- out "ERROR" vs; failwith ("macro_step " ^ vs) }
- | ax idents error
- { let vs = cat $2 in
- out "ERROR" vs; failwith ("macro_step " ^ vs) }
- ;
-
-/* VERNACULAR ITEMS *********************************************************/
-
- item:
- | OQ verbatims CQ { [T.Comment $2] }
- | macro_step { $1 }
- | set xskips FS { [T.Unexport ($1 ^ fst $2 ^ trim $3)] }
- | notation xskips FS { notation ($1 ^ fst $2 ^ trim $3) }
- | error { out "ERROR" "item"; failwith "item" }
- ;
- items:
- | rspcs EOF { [] }
- | rspcs item items { $2 @ $3 }
- ;
-
-
-/*
- oc: OC spcs { $1 ^ $2 };
- coe: COE spcs { $1 ^ $2 };
- sc: SC spcs { $1 ^ $2 };
-
- cnot:
- | EXTRA { $1 }
- | INT { $1 }
- ;
- cnots:
- | cnot spcs { $1 ^ $2 }
- | cnot spcs cnots { $1 ^ $2 ^ $3 }
- ;
-
- xstrict:
- | AC { $1 }
- | INT { $1 }
- | EXTRA { $1 }
- | CN { $1 }
- | SC { $1 }
- | OC { $1 }
- | CC { $1 }
- | COE { $1 }
- | STR { $1 }
- | abbr extends0 IN { $1 ^ $2 ^ $3 }
- | op extends1 CP { $1 ^ $2 ^ $3 }
- ;
- strict:
- | xstrict { $1 }
- | IDENT { $1 }
- | SET { $1 }
- | NOT { $1 }
- ;
- restrict:
- | strict { $1 }
- | IN { $1 }
- | CP { $1 }
- ;
- xre:
- | xstrict { $1 }
- | IN { $1 }
- | CP { $1 }
- ;
- restricts:
- | restrict spcs { $1 ^ $2 }
- | restrict spcs restricts { $1 ^ $2 ^ $3 }
- ;
- xres:
- | xre spcs { $1 ^ $2 }
- | xre spcs restricts { $1 ^ $2 ^ $3 }
- ;
- extend0:
- | strict { $1 }
- | DEF { $1 }
- ;
- extends0:
- | extend0 spcs { $1 ^ $2 }
- | extend0 spcs extends0 { $1 ^ $2 ^ $3 }
- ;
- extend1:
- | strict { $1 }
- | DEF { $1 }
- | IN { $1 }
- ;
- extends1:
- | extend1 spcs { $1 ^ $2 }
- | extend1 spcs extends1 { $1 ^ $2 ^ $3 }
- ;
- unexport_ff:
- | IDENT { $1 }
- | AC { $1 }
- | STR { $1 }
- | OP { $1 }
- | ABBR { $1 }
- | IN { $1 }
- | CP { $1 }
- | SET { $1 }
- ;
- unexport_rr:
- | unexport_ff { $1 }
- | INT { $1 }
- | EXTRA { $1 }
- | CN { $1 }
- | COE { $1 }
- | DEF { $1 }
- ;
- unexport_r:
- | unexport_rr { $1, [] }
- | oc fields CC { $1 ^ fst $2 ^ $3, snd $2 }
- | oc unexport_ff CC { $1, [] }
- ;
- unexports_r:
- | unexport_r spcs { fst $1 ^ $2, snd $1 }
- | unexport_r spcs unexports_r { fst $1 ^ $2 ^ fst $3, snd $1 @ snd $3 }
- ;
- field:
- | ident cn unexports_r
- { $1 ^ $2 ^ fst $3, snd $3 }
- | ident def unexports_r
- { $1 ^ $2 ^ fst $3, snd $3 }
- | ident coe unexports_r
- { $1 ^ $2 ^ fst $3, snd $3 @ coercion (trim $1) }
- ;
- fields:
- | field { $1 }
- | field sc fields { fst $1 ^ $2 ^ fst $3, snd $1 @ snd $3 }
- | cnots { $1, [] }
- | error { out "ERROR" "fields"; failwith "fields" }
- ;
- unexport:
- | unexport_r { $1 }
- | SC { $1, [] }
- | CC { $1, [] }
- | IP { $1, [] }
- | LET { $1, [] }
- | VAR { $1, [] }
-
- ;
- unexports:
- | unexport spcs { fst $1 ^ $2, snd $1 }
- | unexport spcs unexports { fst $1 ^ $2 ^ fst $3, snd $1 @ snd $3 }
- ;
- verbatim:
- | unexport_rr { $1 }
- | reserved_ident { $1 }
- | comment { $1 }
- | OC { $1 }
- | CC { $1 }
- | SC { $1 }
- | XP { $1 }
- | IP { $1 }
- | FS { $1 }
- | SPC { $1 }
- ;
- verbatims:
- | { "" }
- | verbatim verbatims { $1 ^ $2 }
- ;
- step:
- | macro_step { $1 }
- | restricts FS { [] }
- ;
- steps:
- | step spcs { [] }
- | step spcs steps { $1 @ $3 }
- ;
-
- macro_step:
- | th ident restricts fs proof FS steps qed FS
- { out "TH" $2;
- $7 @ [T.Inline (false, T.Con, trim $2, "", mk_flavour $1)]
- }
- | th ident restricts fs proof restricts FS
- { out "TH" $2;
- [T.Inline (false, T.Con, trim $2, "", mk_flavour $1)]
- }
- | th ident restricts fs steps qed FS
- { out "TH" $2;
- $5 @ [T.Inline (false, T.Con, trim $2, "", mk_flavour $1)]
- }
- | th ident restricts def restricts FS
- { out "TH" $2;
- [T.Inline (false, T.Con, trim $2, "", mk_flavour $1)]
- }
- | th ident def restricts FS
- { out "TH" $2;
- [T.Inline (false, T.Con, trim $2, "", mk_flavour $1)]
- }
- | gen ident def restricts FS
- { out "TH" $2;
- [T.Inline (false, T.Con, trim $2, "", mk_flavour $1)]
- }
- | xlet ident restricts fs proof FS steps qed FS
- { out "LET" $2;
- $7 @ [T.Inline (true, T.Con, trim $2, "", mk_flavour $1)]
- }
- | xlet ident restricts fs proof restricts FS
- { out "LET" $2;
- [T.Inline (true, T.Con, trim $2, "", mk_flavour $1)]
- }
- | xlet ident restricts fs steps qed FS
- { out "LET" $2;
- $5 @ [T.Inline (true, T.Con, trim $2, "", mk_flavour $1)]
- }
- | xlet ident restricts def restricts FS
- { out "LET" $2;
- [T.Inline (true, T.Con, trim $2, "", mk_flavour $1)]
- }
- | xlet ident def restricts FS
- { out "LET" $2;
- [T.Inline (true, T.Con, trim $2, "", mk_flavour $1)]
- }
- | var idents xres FS
- { out "VAR" (cat $2); mk_vars true $2 }
- | ax idents xres FS
- { out "AX" (cat $2); mk_vars false $2 }
- | ind ident unexports FS
- { out "IND" $2;
- T.Inline (false, T.Ind, trim $2, "", None) :: snd $3
- }
- | sec ident FS
- { out "SEC" $2; [T.Section (true, trim $2, $1 ^ $2)] }
- | xend ident FS
- { out "END" $2; [T.Section (false, trim $2, $1 ^ $2)] }
- | unx unexports FS
- { out "UNX" (fst $2); [T.Unexport ($1 ^ fst $2 ^ trim $3)] }
- | req xp ident FS
- { out "REQ" $3; [T.Include (trim $3)] }
- | req ip ident FS
- { out "REQ" $3; [T.Include (trim $3)] }
- | req ident FS
- { out "REQ" $2; [T.Include (trim $2)] }
- | load str FS
- { out "REQ" $2; [T.Include (strip2 (trim $2))] }
- | coerc qid spcs cn unexports FS
- { out "COERCE" (hd $2); coercion (hd $2) }
- | id coerc qid spcs cn unexports FS
- { out "COERCE" (hd $3); coercion (hd $3) }
- | th ident error
- { out "ERROR" $2; failwith ("macro_step " ^ $2) }
- | ind ident error
- { out "ERROR" $2; failwith ("macro_step " ^ $2) }
- | var idents error
- { let vs = cat $2 in
- out "ERROR" vs; failwith ("macro_step " ^ vs) }
- | ax idents error
- { let vs = cat $2 in
- out "ERROR" vs; failwith ("macro_step " ^ vs) }
- ;
- item:
- | OQ verbatims CQ { [T.Comment $2] }
- | macro_step { $1 }
- | set unexports FS { [T.Unexport ($1 ^ fst $2 ^ trim $3)] }
- | notation unexports FS { notation ($1 ^ fst $2 ^ trim $3) }
- | error { out "ERROR" "item"; failwith "item" }
- ;
- items:
- | rspcs EOF { [] }
- | rspcs item items { $2 @ $3 }
- ;
-*/
-utf8Macro.cmi:
-utf8MacroTable.cmo:
-utf8MacroTable.cmx:
utf8Macro.cmo: utf8MacroTable.cmo utf8Macro.cmi
utf8Macro.cmx: utf8MacroTable.cmx utf8Macro.cmi
in
PET.mk_tactic (letin_tac ~mk_fresh_name_callback term)
- (** functional part of the "exact" tactic *)
-let exact_tac ~term =
- let exact_tac ~term (proof, goal) =
- (* Assumption: the term bo must be closed in the current context *)
- let (_,metasenv,_subst,_,_, _) = proof in
- let metano,context,ty = CicUtil.lookup_meta goal metasenv in
- let module T = CicTypeChecker in
- let module R = CicReduction in
- let ty_term,u = T.type_of_aux' metasenv context term CicUniv.oblivion_ugraph in
- let b,_ = R.are_convertible context ty_term ty u in (* TASSI: FIXME *)
- if b then
- begin
- let (newproof, metasenv') =
- ProofEngineHelpers.subst_meta_in_proof proof metano term [] in
- (newproof, [])
- end
- else
- raise (PET.Fail (lazy "The type of the provided term is not the one expected."))
- in
- PET.mk_tactic (exact_tac ~term)
+(* FG: exact_tac := apply_tac as in NTactics *)
+let exact_tac ~term = apply_tac ~term
(* not really "primitive" tactics .... *)
--- /dev/null
+DEVEL = library
+
+include ../Makefile.common
--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<helm_registry>
+ <section name="package">
+ <key name="input_name">library</key>
+ <key name="output_name">library</key>
+ <key name="input_base_uri">cic:/matita</key>
+ <key name="output_base_uri">cic:/matita/procedural/library</key>
+ <key name="input_path">library</key>
+ <key name="output_path">contribs/procedural/library</key>
+ <key name="input_type">grafite</key>
+ <key name="output_type">procedural</key>
+ </section>
+</helm_registry>
--- /dev/null
+(**************************************************************************)
+(* ___ *)
+(* ||M|| *)
+(* ||A|| A project by Andrea Asperti *)
+(* ||T|| *)
+(* ||I|| Developers: *)
+(* ||T|| The HELM team. *)
+(* ||A|| http://helm.cs.unibo.it *)
+(* \ / *)
+(* \ / This file is distributed under the terms of the *)
+(* v GNU General Public License Version 2 *)
+(* *)
+(**************************************************************************)
+
+(*
+
+default "equality"
+ cic:/Coq/Init/Logic/eq.ind
+ cic:/Coq/Init/Logic/sym_eq.con
+ cic:/Coq/Init/Logic/trans_eq.con
+ cic:/Coq/Init/Logic/eq_ind.con
+ cic:/Coq/Init/Logic/eq_ind_r.con
+ cic:/Coq/Init/Logic/eq_rec.con
+ cic:/Coq/Init/Logic/eq_rec_r.con
+ cic:/Coq/Init/Logic/eq_rect.con
+ cic:/Coq/Init/Logic/eq_rect_r.con
+ cic:/Coq/Init/Logic/f_equal.con
+ cic:/matita/procedural/Coq/preamble/f_equal1.con.
+
+default "true"
+ cic:/Coq/Init/Logic/True.ind.
+default "false"
+ cic:/Coq/Init/Logic/False.ind.
+default "absurd"
+ cic:/Coq/Init/Logic/absurd.con.
+
+interpretation "Coq's leibnitz's equality" 'eq x y = (cic:/Coq/Init/Logic/eq.ind#xpointer(1/1) _ x y).
+
+theorem f_equal1 : \forall A,B:Type.\forall f:A\to B.\forall x,y:A.
+ x = y \to (f y) = (f x).
+ intros.
+ symmetry.
+ apply cic:/Coq/Init/Logic/f_equal.con.
+ assumption.
+qed.
+
+alias id "land" = "cic:/matita/procedural/Coq/Init/Logic/and.ind#xpointer(1/1)".
+
+*)
--- /dev/null
+baseuri=cic:/matita/procedural/library