/**
 * Tokenize the given string and return the resulting token stream.
 *
 * Feeds the input character-by-character into a TPushLexer, which calls
 * back into this tokenizer's push() to append tokens to the `tokens`
 * member. flush() forces the lexer to emit any token still being
 * assembled when the input ends.
 *
 * \param s  text to tokenize
 * \return   a copy of the accumulated tokens
 */
std::vector<TToken>
TTokenizer::tokenize(const std::string& s)
{
  TPushLexer lexer(logger, *this);
  tokens.clear();
  for (std::string::const_iterator p = s.begin();
       p != s.end();
       ++p)
    lexer.push(*p);

  lexer.flush();
  // BUG FIX: the previous code reserved capacity in `res` but never
  // copied `tokens` into it, so an empty vector was always returned.
  // Return a copy of the accumulated tokens instead.
  return std::vector<TToken>(tokens.begin(), tokens.end());
}
+void
+TTokenizer::reset()
+{
+ assert(0);
+}
+
/**
 * Append a single token to the accumulated token stream.
 *
 * Called back by the lexer for every token it recognizes while
 * tokenize() is feeding it input.
 *
 * \param token  token to append (copied into the internal vector)
 */
void
TTokenizer::push(const TToken& token)
{
  tokens.emplace_back(token);
}
+std::string
+TTokenizer::drop(bool alt)
+{
+ assert(0);
+ return "";
+}