The document is not parsed in full at the beginning anymore.

Instead, new tokens are read on demand. The idea is to eventually be
able to change the encoding on the fly.

Now it is time to see whether idocstream actually works...


git-svn-id: svn://svn.lyx.org/lyx/lyx-devel/trunk@27489 a592a061-630c-0410-9148-cb99ea01b6c8
Jean-Marc Lasgouttes 2008-11-15 20:30:45 +00:00
parent 86438200b0
commit bb04efd5a8
3 changed files with 10 additions and 18 deletions
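
For illustration, here is a minimal, self-contained sketch of the on-demand
tokenization pattern this commit switches to. It is not the actual tex2lyx
Parser: the class name LazyParser is invented, tokens are simply
whitespace-delimited strings, and only the lazy good()/tokenize_one()
interplay is shown.

#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Hypothetical stand-in for the real Parser, reduced to the lazy
// token-reading behaviour introduced by this commit.
class LazyParser {
public:
	explicit LazyParser(std::istream & is) : pos_(0), is_(is) {}

	// Read exactly one (whitespace-delimited) token, if any is left.
	void tokenize_one()
	{
		std::string t;
		if (is_ >> t)
			tokens_.push_back(t);
	}

	// Pull a new token only when the buffer is exhausted, so the
	// underlying stream is consumed lazily and its decoding could in
	// principle be changed between calls.
	bool good()
	{
		if (pos_ < tokens_.size())
			return true;
		tokenize_one();
		return pos_ < tokens_.size();
	}

	// Return the next token and advance.
	std::string const & get_token()
	{
		static const std::string dummy;
		return good() ? tokens_[pos_++] : dummy;
	}

private:
	size_t pos_;
	std::vector<std::string> tokens_;
	std::istream & is_;
};

int main()
{
	std::istringstream in("\\documentclass{article} \\begin{document}");
	LazyParser p(in);
	while (p.good())
		std::cout << p.get_token() << "\n";
}

Because good() now has to read from the stream, it and everything that calls
it (next_token(), isParagraph()) can no longer be const, which is why the
check_space() signature in the last file below drops the const on Parser.
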

View File

@@ -138,14 +138,12 @@ string Token::asInput() const
 Parser::Parser(istream & is)
 	: lineno_(0), pos_(0), iss_(0), is_(is)
 {
-	tokenize();
 }
 
 
 Parser::Parser(string const & s)
 	: lineno_(0), pos_(0), iss_(new istringstream(s)), is_(*iss_)
 {
-	tokenize();
 }
@@ -175,7 +173,7 @@ Token const & Parser::curr_token() const
 }
 
 
-Token const & Parser::next_token() const
+Token const & Parser::next_token()
 {
 	static const Token dummy;
 	return good() ? tokens_[pos_] : dummy;
@@ -190,7 +188,7 @@ Token const & Parser::get_token()
 }
 
 
-bool Parser::isParagraph() const
+bool Parser::isParagraph()
 {
 	// A new paragraph in TeX ist started
 	// - either by a newline, following any amount of whitespace
@@ -256,8 +254,11 @@ void Parser::putback()
 }
 
 
-bool Parser::good() const
+bool Parser::good()
 {
+	if (pos_ < tokens_.size())
+		return true;
+	tokenize_one();
 	return pos_ < tokens_.size();
 }
@@ -438,13 +439,6 @@ void Parser::tokenize_one()
 }
 
 
-void Parser::tokenize()
-{
-	while (is_)
-		tokenize_one();
-}
-
-
 void Parser::dump() const
 {
 	cerr << "\nTokens: ";

View File

@@ -178,8 +178,6 @@ public:
 	void error(std::string const & msg);
 	/// Parses one token from \p is
 	void tokenize_one();
-	/// Parses \p is into tokens
-	void tokenize();
 	///
 	void push_back(Token const & t);
 	/// The previous token.
@@ -187,11 +185,11 @@ public:
 	/// The current token.
 	Token const & curr_token() const;
 	/// The next token.
-	Token const & next_token() const;
+	Token const & next_token();
 	/// Make the next token current and return that.
 	Token const & get_token();
 	/// \return whether the current token starts a new paragraph
-	bool isParagraph() const;
+	bool isParagraph();
 	/// skips spaces (and comments if \p skip_comments is true)
 	void skip_spaces(bool skip_comments = false);
 	/// puts back spaces (and comments if \p skip_comments is true)
@@ -199,7 +197,7 @@ public:
 	///
 	void lex(std::string const & s);
 	///
-	bool good() const;
+	bool good();
 	///
 	std::string verbatim_item();
 	///

View File

@@ -509,7 +509,7 @@ void output_command_layout(ostream & os, Parser & p, bool outer,
  * The drawback is that the logic inside the function becomes
  * complicated, and that is the reason why it is not implemented.
  */
-void check_space(Parser const & p, ostream & os, Context & context)
+void check_space(Parser & p, ostream & os, Context & context)
 {
 	Token const next = p.next_token();
 	Token const curr = p.curr_token();