parser.cmi: ast.cmo
+parserTHF.cmi: astTHF.cmo
tptp2grafite.cmi:
ast.cmo:
ast.cmx:
lexer.cmo: parser.cmi
lexer.cmx: parser.cmx
+astTHF.cmo:
+astTHF.cmx:
+lexerTHF.cmo: parserTHF.cmi
+lexerTHF.cmx: parserTHF.cmx
parser.cmo: ast.cmo parser.cmi
parser.cmx: ast.cmx parser.cmi
+parserTHF.cmo: astTHF.cmo parserTHF.cmi
+parserTHF.cmx: astTHF.cmx parserTHF.cmi
tptp2grafite.cmo: parser.cmi lexer.cmo ast.cmo tptp2grafite.cmi
tptp2grafite.cmx: parser.cmx lexer.cmx ast.cmx tptp2grafite.cmi
PACKAGE = tptp_grafite
PREDICATES =
-INTERFACE_FILES= parser.mli tptp2grafite.mli
+INTERFACE_FILES= parser.mli parserTHF.mli tptp2grafite.mli
-IMPLEMENTATION_FILES = ast.ml lexer.ml $(INTERFACE_FILES:%.mli=%.ml)
+IMPLEMENTATION_FILES = ast.ml lexer.ml astTHF.ml lexerTHF.ml $(INTERFACE_FILES:%.mli=%.ml)
EXTRA_OBJECTS_TO_INSTALL =
EXTRA_OBJECTS_TO_CLEAN =
TPTPDIR= /home/$(USER)/work-area/TPTP-v3.2.0/
-all: tptp2grafite
+all: tptp2grafite mainTHF
clean: clean_tests
clean_tests:
-	rm -f tptp2grafite
+	rm -f tptp2grafite mainTHF
-parser.mli parser.ml:parser.mly
- ocamlyacc parser.mly
-lexer.ml:
- ocamllex lexer.mll
+%.mli %.ml: %.mly
+ ocamlyacc $*.mly
+%.ml: %.mll
+ ocamllex $*.mll
LOCAL_LINKOPTS = -package helm-$(PACKAGE) -linkpkg
tptp2grafite: main.ml tptp_grafite.cma
@echo " OCAMLC $<"
@$(OCAMLC) $(LOCAL_LINKOPTS) -o $@ $<
-test: tptp2grafite
+mainTHF: mainTHF.ml tptp_grafite.cma
+ @echo " OCAMLC $<"
+ @$(OCAMLC) $(LOCAL_LINKOPTS) -o $@ $<
+
+test: tptp2grafite mainTHF
testall: tptp2grafite
for X in `cat unit_equality_problems`; do\
--- /dev/null
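+(* AstTHF: abstract syntax for parsed TPTP THF inputs. Each input is a
+   named formula with its role, a comment line, or an include directive. *)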
+type role =
+ Axiom
+| Hypothesis
+| Definition
+| Assumption
+| Lemma
+| Theorem
+| Conjecture
+| Negated_conjecture
+| Lemma_conjecture
+| Plain
+| Fi_domain
+| Fi_functors
+| Fi_predicates
+| Type
+| Unknown
+
+
+type ast =
+ | Formula of string * role * CicNotationPt.term
+ | Comment of string
+ | Inclusion of string * (string list)
--- /dev/null
+{
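+ (* LexerTHF: ocamllex scanner for TPTP THF files; it produces the tokens
+    declared in parserTHF.mly *)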
+ open ParserTHF
+ exception BadToken of string
+
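+ (* record a newline in the lexbuf position, so that parse errors can
+    report accurate line numbers *)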
+ let incr_linenum lexbuf =
+ let pos = lexbuf.Lexing.lex_curr_p in
+ lexbuf.Lexing.lex_curr_p <- { pos with
+ Lexing.pos_lnum = pos.Lexing.pos_lnum + 1;
+ Lexing.pos_bol = pos.Lexing.pos_cnum;
+ }
+ ;;
+
+}
+
+let dust = ' ' | '\t'
+let comment = '%' [^ '\n' ] * '\n'
+let lname =
+ ['a'-'z'] ['a'-'z''A'-'Z''0'-'9''_']*
+let uname =
+ ['A'-'Z'] ['a'-'z''A'-'Z''0'-'9''_']*
+let qstring = '\'' [^ '\'' ]+ '\''
+let type_ =
+ "axiom" | "hypothesis" | "definition" | "lemma" | "theorem" |
+ "conjecture" | "lemma_conjecture" | "negated_conjecture" |
+ "plain" | "unknown" | "type"
+
+let ieq = "="
+let peq = "equal"
+let nieq = "!="
+
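+(* single scanning rule: skip whitespace, track newlines and return one
+   token per lexeme; unrecognized input raises BadToken *)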
+rule yylex = parse
+ | dust { yylex lexbuf }
+ | '\n' { incr_linenum lexbuf; yylex lexbuf }
+ | comment { incr_linenum lexbuf; COMMENT (Lexing.lexeme lexbuf) }
+ | "include" { INCLUSION }
+ | type_ { TYPE (Lexing.lexeme lexbuf) }
+ | "thf" { THF }
+ | "$true" { TRUE }
+ | "$false" { FALSE }
+ | "equal" { PEQ }
+
+ | "$i" { TYPE_I }
+ | "$o" { TYPE_O }
+ | "$tType" { TYPE_U }
+ | ">" { TO }
+
+ | lname { LNAME (Lexing.lexeme lexbuf) }
+ | uname { UNAME (Lexing.lexeme lexbuf) }
+
+ | ['0' - '9']+ { NUM (int_of_string (Lexing.lexeme lexbuf)) }
+ | ',' { COMMA }
+ | '.' { DOT }
+ | '(' { LPAREN }
+ | ')' { RPAREN }
+ | '|' { IOR }
+ | '&' { IAND }
+ | '~' { NOT }
+ | '=' { IEQ }
+ | "=>" { IMPLY }
+ | "!=" { NIEQ }
+ | "!" { FORALL }
+ | "?" { EXISTS }
+ | "^" { LAMBDA }
+ | "[" { BEGINVARLIST }
+ | "]" { ENDVARLIST }
+ | ":" { COLON }
+ | "@" { APPLY }
+ | qstring { QSTRING (Lexing.lexeme lexbuf) }
+ | eof { EOF }
+ | _ { raise (BadToken (Lexing.lexeme lexbuf)) }
+
+{ (* trailer *) }
--- /dev/null
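+(* mainTHF: command-line driver that parses the THF file given on the
+   command line and prints each formula (name and pretty-printed term)
+   to stderr; comments and include directives are skipped *)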
+(* OPTIONS *)
+let tptppath = ref "./";;
+let ng = ref false;;
+let spec = [
+ ("-ng",Arg.Set ng,"Matita ng syntax");
+ ("-tptppath",
+ Arg.String (fun x -> tptppath := x),
+ "Where to find the Axioms/ and Problems/ directory")
+]
+
+(* MAIN *)
+let _ =
+ let usage = "Usage: mainTHF [options] file" in
+ let inputfile = ref "" in
+ Arg.parse spec (fun s -> inputfile := s) usage;
+ if !inputfile = "" then
+ begin
+ prerr_endline usage;
+ exit 1
+ end;
+ let lexbuf = Lexing.from_channel (open_in !inputfile) in
+ List.iter
+ (function
+ | AstTHF.Comment _ -> ()
+ | AstTHF.Inclusion _ -> ()
+ | AstTHF.Formula(name,role,term) ->
+ prerr_endline (name ^ "===" ^ CicNotationPp.pp_term term)
+
+ )
+ (ParserTHF.main LexerTHF.yylex lexbuf);
+ exit 0
+
--- /dev/null
+%{
+ (* header *)
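+  (* ocamlyacc grammar for TPTP THF; the semantic actions build
+     AstTHF.ast values whose formula bodies are CicNotationPt terms *)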
+ open AstTHF
+ open Parsing
+ open Lexing
+ module T = CicNotationPt
+
+ let parse_error s = Printf.eprintf "%s: " s ;;
+ let rm_q s = String.sub s 1 (String.length s - 2) ;;
+
+%}
+ %token <string> TYPE
+ %token <string> COMMENT
+ %token <int> NUM
+ %token <string> LNAME
+ %token <string> UNAME
+ %token <string> QSTRING
+ %token COMMA
+ %token INCLUSION
+ %token LPAREN
+ %token RPAREN
+ %token CNF
+ %token TRUE
+ %token FALSE
+ %token NOT
+ %token IAND
+ %token IOR
+ %token NIEQ
+ %token IEQ
+ %token IMPLY
+ %token PEQ
+ %token DOT
+ %token EOF
+ %token FORALL
+ %token EXISTS
+ %token LAMBDA
+ %token COLON
+ %token BEGINVARLIST
+ %token ENDVARLIST
+ %token APPLY
+ %token TYPE_I
+ %token TYPE_O
+ %token TYPE_U
+ %token TO
+ %token THF
+
+ /* precedences, weakest binding first: implication, disjunction,
+    conjunction, negation, (in)equality, type arrow, application */
+ %right IMPLY
+ %left IOR
+ %left IAND
+ %nonassoc NOT
+ %nonassoc IEQ NIEQ
+ %right TO
+ %left APPLY
+
+ %type <AstTHF.ast list> main
+ %start main
+%%
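+ /* entry point: a THF file is a sequence of annotated formulae, include
+    directives and comments, terminated by EOF */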
+ main:
+ | tptp_input EOF {[$1]}
+ | tptp_input main {$1::$2}
+ | error {
+ let start_pos = rhs_start_pos 1 in
+ let end_pos = rhs_end_pos 1 in
+ Printf.eprintf "from line %d char %d to line %d char %d\n"
+ start_pos.pos_lnum (start_pos.pos_cnum - start_pos.pos_bol)
+ end_pos.pos_lnum (end_pos.pos_cnum - end_pos.pos_bol);
+ exit 1
+ }
+ ;
+
+ tptp_input:
+ | annot_formula {$1}
+ | include_ {$1}
+ | comment {$1}
+ ;
+
+ formula_source_and_infos:
+ | { () }
+ | COMMA { assert false }
+ ;
+
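+ /* annotated formula: thf(name, role, formula). ; source/useful-info
+    annotations after the formula are not handled yet */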
+ annot_formula:
+ | THF LPAREN
+ name COMMA formula_type COMMA term formula_source_and_infos
+ RPAREN DOT {
+ Formula($3,$5,$7)
+ }
+ ;
+
+ formula_type:
+ | TYPE {
+ match $1 with
+ | "axiom" -> Axiom
+ | "hypothesis" -> Hypothesis
+ | "definition" -> Definition
+ | "lemma" -> Lemma
+ | "theorem" -> Theorem
+ | "conjecture" -> Conjecture
+ | "lemma_conjecture" -> Lemma_conjecture
+ | "negated_conjecture" -> Negated_conjecture
+ | "plain" -> Plain
+ | "unknown" -> Unknown
+ | "type" -> Type
+ | _ -> assert false
+ }
+ ;
+
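+ /* THF binders: ! (forall), ? (exists), ^ (lambda), each followed by a
+    [...] list of variables with optional type annotations */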
+ quantifier :
+ | FORALL {`Forall}
+ | EXISTS {`Exists}
+ | LAMBDA {`Lambda}
+
+ vardecl :
+ | UNAME { T.Implicit `JustOne,Some (T.Ident($1,None)) }
+ | UNAME COLON term { $3,Some (T.Ident($1,None)) }
+
+ varlist :
+ | vardecl { [$1] }
+ | vardecl COMMA varlist { $1 :: $3 }
+
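+ /* one shared grammar for THF formulae and types: binders, connectives,
+    equality, application, arrows and atoms */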
+ term:
+ | quantifier BEGINVARLIST varlist ENDVARLIST COLON term {
+ List.fold_right (fun v t -> T.Binder($1,v,t)) $3 $6
+ }
+ | term IMPLY term {
+     (* boolean implication: encoded, like the TO arrow below, as a
+        non-dependent product *)
+     T.Binder (`Pi,($1,None),$3)
+   }
+ | term APPLY term {
+     (* flatten curried application into a single T.Appl node *)
+     match $1 with
+     | T.Appl l -> T.Appl (l @ [$3])
+     | _ -> T.Appl [$1; $3]
+   }
+ | LPAREN term COLON term RPAREN { T.Cast ($2,$4) }
+ | term TO term { T.Binder (`Pi,($1,None),$3) }
+ | term IEQ term { T.Appl [T.Symbol("'eq",0);$1;$3] }
+ | term IAND term { T.Appl [T.Symbol("'and",0);$1;$3] }
+ | term IOR term { T.Appl [T.Symbol("'or",0);$1;$3] }
+ | NOT term { T.Appl [T.Symbol("'not",0);$2] }
+ | LPAREN term RPAREN {$2}
+ | LNAME { T.Ident($1,None) }
+ | UNAME { T.Ident($1,None) }
+ | TYPE_I { T.Symbol("'i",0) }
+ | TYPE_O { T.Symbol("'o",0) }
+ | TYPE_U { T.Sort `Set }
+ | FALSE { T.Symbol("'false",0)}
+ | TRUE { T.Symbol("'true",0)}
+ ;
+
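+ /* file inclusion: include('file') with an optional comma-separated list
+    of formula names */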
+ include_:
+ | INCLUSION LPAREN QSTRING selection_of_formulae RPAREN DOT {
+ let fname = rm_q $3 in
+ Inclusion (fname,$4)
+ }
+ ;
+
+ selection_of_formulae:
+ | { [] }
+ | COMMA name selection_of_formulae { $2::$3 }
+ ;
+
+ comment: COMMENT {Comment $1} ;
+
+ name: NUM { string_of_int $1} | LNAME { $1 } | UNAME { $1 } ;
+%%
+ (* trailer *)