X-Git-Url: http://matita.cs.unibo.it/gitweb/?a=blobdiff_plain;f=helm%2Focaml%2Fmathql_interpreter%2FmQueryTLexer.mll;h=c4abce8e8bb74ba28dac48047fb32d6e203d3d1f;hb=b2bde540ce5ec2c8731f0353815583bd3d4eba26;hp=6969fcb3ea4aad407d490f3f0e27a088742be460;hpb=5bef9ac5a9bfe07b11ce0e44fab51ea4b6eb4057;p=helm.git

diff --git a/helm/ocaml/mathql_interpreter/mQueryTLexer.mll b/helm/ocaml/mathql_interpreter/mQueryTLexer.mll
index 6969fcb3e..c4abce8e8 100644
--- a/helm/ocaml/mathql_interpreter/mQueryTLexer.mll
+++ b/helm/ocaml/mathql_interpreter/mQueryTLexer.mll
@@ -29,6 +29,8 @@
 {
  open MQueryTParser
 
+ let strip s = String.sub s 1 (pred (String.length s))
+
 let debug = false
 
 let out s = if debug then prerr_endline s
@@ -39,6 +41,8 @@ let ALPHA = ['A'-'Z' 'a'-'z' '_']
 let NUM = ['0'-'9']
 let IDEN = ALPHA (NUM | ALPHA)*
 let QSTR = [^ '"' '\\']+
+let Q = ['\\' '^' '\"']
+let NQ = [^ '\\' '^' '\"']
 
 rule comm_token = parse
    | "(*" { comm_token lexbuf; comm_token lexbuf }
@@ -47,7 +51,8 @@ rule comm_token = parse
    | [^ '*' '(']* { comm_token lexbuf }
 and string_token = parse
    | '"' { DQ }
-   | '\\' _ { STR (String.sub (Lexing.lexeme lexbuf) 1 1) }
+   | '\\' Q '^' { STR (String.sub (Lexing.lexeme lexbuf) 1 1) }
+   | '\\' NQ '^' { STR (Lexing.lexeme lexbuf) }
    | QSTR { STR (Lexing.lexeme lexbuf) }
    | eof { EOF }
 and query_token = parse
@@ -61,12 +66,16 @@
    | ']' { out "RB"; RB }
    | '{' { out "LC"; LC }
    | '}' { out "RC"; RC }
-   | '@' { out "AT"; AT }
-   | '%' { out "PC"; PC }
    | '.' { out "FS"; FS }
    | ',' { out "CM"; CM }
    | ';' { out "SC"; SC }
    | '/' { out "SL"; SL }
+   | ';' { out "SC"; SC }
+   | "@" IDEN { let id = Lexing.lexeme lexbuf in
+                out ("AVAR " ^ id); AVAR (strip id) }
+   | "$" IDEN { let id = Lexing.lexeme lexbuf in
+                out ("SVAR " ^ id); SVAR (strip id) }
+   | ";;" { out ";;" ; SEQ }
    | "add" { out "ADD" ; ADD }
    | "align" { out "ALIGN" ; ALIGN }
    | "allbut" { out "BUT" ; BUT }
@@ -84,6 +93,7 @@
    | "false" { out "FALSE" ; FALSE }
    | "for" { out "FOR" ; FOR }
    | "from" { out "FROM" ; FROM }
+   | "gen" { out "GEN" ; GEN }
    | "if" { out "IF" ; IF }
    | "in" { out "IN" ; IN }
    | "inf" { out "INF" ; INF }
@@ -103,8 +113,11 @@
    | "of" { out "OF" ; OF }
    | "or" { out "OR" ; OR }
    | "pattern" { out "PAT" ; PAT }
+   | "peek" { out "PEEK" ; PEEK }
    | "proj" { out "PROJ" ; PROJ }
    | "property" { out "PROP" ; PROP }
+   | "read" { out "READ" ; READ }
+   | "render" { out "RENDER"; RENDER }
    | "select" { out "SELECT"; SELECT }
    | "source" { out "SOURCE"; SOURCE }
    | "stat" { out "STAT" ; STAT }
@@ -115,18 +128,31 @@
    | "true" { out "TRUE" ; TRUE }
    | "union" { out "UNION" ; UNION }
    | "where" { out "WHERE" ; WHERE }
+   | "while" { out "WHILE" ; WHILE }
    | "xor" { out "XOR" ; XOR }
-   | IDEN { let id = Lexing.lexeme lexbuf in
-            out ("ID " ^ id); ID id }
    | eof { out "EOF" ; EOF }
+   | "=" { out "BE" ; BE }
+   | "#" { out "COUNT" ; COUNT }
+   | "!" { out "NOT" ; NOT }
+   | "<" { out "LT" ; LT }
+   | "<=" { out "LE" ; LE }
+   | "==" { out "EQ" ; EQ }
+   | "&&" { out "AND" ; AND }
+   | "||" { out "OR" ; OR }
+   | "\\/" { out "UNION" ; UNION }
+   | "/\\" { out "INTER" ; INTER }
+   | "begin" { out "LP" ; LP }
+   | "end" { out "RP" ; RP }
 and result_token = parse
    | SPC { result_token lexbuf }
    | "(*" { comm_token lexbuf; result_token lexbuf }
-   | '"' { STR (qstr string_token lexbuf) }
-   | '{' { LC }
-   | '}' { RC }
-   | ',' { CM }
-   | ';' { SC }
-   | '=' { IS }
-   | "attr" { ATTR }
-   | eof { EOF }
+   | '"' { let str = qstr string_token lexbuf in
+           out ("STR " ^ str); STR str }
+   | '/' { out "SL" ; SL }
+   | '{' { out "LC" ; LC }
+   | '}' { out "RC" ; RC }
+   | ',' { out "CM" ; CM }
+   | ';' { out "SC" ; SC }
+   | '=' { out "BE" ; BE }
+   | "attr" { out "ATTR"; ATTR }
+   | eof { out "EOF" ; EOF }