X-Git-Url: http://matita.cs.unibo.it/gitweb/?a=blobdiff_plain;f=helm%2FDEVEL%2Fmathml_editor%2Fsrc%2FTTokenizer.cc;h=7a8736cee0080d6b50805604f81cd595a89aec71;hb=85ccebb566c36671ca753debe09e6dd5c9dd0df7;hp=cf74c1d4f2237e52601990fb4d6ee9e6c01e928d;hpb=89262281b6e83bd2321150f81f1a0583645eb0c8;p=helm.git

diff --git a/helm/DEVEL/mathml_editor/src/TTokenizer.cc b/helm/DEVEL/mathml_editor/src/TTokenizer.cc
index cf74c1d4f..7a8736cee 100644
--- a/helm/DEVEL/mathml_editor/src/TTokenizer.cc
+++ b/helm/DEVEL/mathml_editor/src/TTokenizer.cc
@@ -7,14 +7,15 @@
 std::vector<TToken>
 TTokenizer::tokenize(const std::string& s)
 {
-  TPushLexer lexer(*this);
+  TPushLexer lexer(logger, *this);
 
   tokens.clear();
   for (std::string::const_iterator p = s.begin(); p != s.end(); p++)
     lexer.push(*p);
-  //lexer.push('\n');
+
+  lexer.flush();
 
   std::vector<TToken> res;
   res.reserve(tokens.size());
@@ -23,9 +24,21 @@ TTokenizer::tokenize(const std::string& s)
   return res;
 }
 
+void
+TTokenizer::reset()
+{
+  assert(0);
+}
+
 void
 TTokenizer::push(const TToken& token)
 {
   tokens.push_back(token);
 }
+
+std::string
+TTokenizer::drop(bool alt)
+{
+  assert(0);
+  return "";
+}