X-Git-Url: http://matita.cs.unibo.it/gitweb/?a=blobdiff_plain;f=helm%2FDEVEL%2Fmathml_editor%2Fsrc%2FTTokenizer.cc;h=14eeaadb009557541cb86e0c849d46840d6919e6;hb=4167cea65ca58897d1a3dbb81ff95de5074700cc;hp=cf74c1d4f2237e52601990fb4d6ee9e6c01e928d;hpb=89262281b6e83bd2321150f81f1a0583645eb0c8;p=helm.git diff --git a/helm/DEVEL/mathml_editor/src/TTokenizer.cc b/helm/DEVEL/mathml_editor/src/TTokenizer.cc index cf74c1d4f..14eeaadb0 100644 --- a/helm/DEVEL/mathml_editor/src/TTokenizer.cc +++ b/helm/DEVEL/mathml_editor/src/TTokenizer.cc @@ -1,20 +1,46 @@ +/* This file is part of EdiTeX, an editor of mathematical + * expressions based on TeX syntax. + * + * Copyright (C) 2002-2003 Luca Padovani , + * 2003 Paolo Marinelli . + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + * + * For more information, please visit the project's home page + * http://helm.cs.unibo.it/editex/ + * or send an email to + */ #include <config.h> +#include <cassert> #include "TTokenizer.hh" #include "TPushLexer.hh" std::vector<TToken> TTokenizer::tokenize(const std::string& s) -{ - TPushLexer lexer(*this); +{ + TPushLexer lexer(logger, *this); tokens.clear(); for (std::string::const_iterator p = s.begin(); p != s.end(); p++) lexer.push(*p); - //lexer.push('\n'); + + lexer.flush(); std::vector<TToken> res; res.reserve(tokens.size()); @@ -23,9 +49,21 @@ TTokenizer::tokenize(const std::string& s) return res; } +void +TTokenizer::reset() +{ + assert(0); +} void TTokenizer::push(const TToken& token) { tokens.push_back(token); } +std::string +TTokenizer::drop(bool alt) +{ + assert(0); + return ""; +}