/**
 * \file tex2lyx/text.C
 * This file is part of LyX, the document processor.
 * Licence details can be found in the file COPYING.
 *
 * \author André Pönitz
 * \author Jean-Marc Lasgouttes
 *
 * Full author contact details are available in file CREDITS.
 */

// {[(

#include <config.h>

#include "tex2lyx.h"
#include "context.h"
#include "FloatList.h"
#include "lengthcommon.h"
#include "support/lstrings.h"
#include "support/tostr.h"
#include "support/filetools.h"

#include <iostream>
#include <map>
#include <sstream>
#include <vector>

using std::cerr;
using std::endl;

using std::map;
using std::ostream;
using std::ostringstream;
using std::istringstream;
using std::string;
using std::vector;

using lyx::support::rtrim;
using lyx::support::suffixIs;


// thin wrapper around parse_text using a string
string parse_text(Parser & p, unsigned flags, const bool outer,
		Context & context)
{
	ostringstream os;
	parse_text(p, os, flags, outer, context);
	return os.str();
}


// parses a subdocument, usually useful in insets (whence the name)
void parse_text_in_inset(Parser & p, ostream & os, unsigned flags, bool outer,
		Context & context)
{
	Context newcontext(true, context.textclass);
	parse_text(p, os, flags, outer, newcontext);
	newcontext.check_end_layout(os);
}


// parses a paragraph snippet, useful for example for \emph{...}
void parse_text_snippet(Parser & p, ostream & os, unsigned flags, bool outer,
		Context & context)
{
	Context newcontext(false, context.textclass);
	parse_text(p, os, flags, outer, newcontext);
	// should not be needed
	newcontext.check_end_layout(os);
}


namespace {

char const * known_latex_commands[] = { "ref", "cite", "label", "index",
"printindex", "pageref", "url", "vref", "vpageref", "prettyref", "eqref", 0 };

// LaTeX names for quotes
char const * known_quotes[] = { "glqq", "grqq", "quotedblbase",
"textquotedblleft", "quotesinglbase", "guilsinglleft", "guilsinglright", 0};

// the same as known_quotes with .lyx names
char const * known_coded_quotes[] = { "gld", "grd", "gld",
"grd", "gls", "fls", "frd", 0};

char const * known_sizes[] = { "tiny", "scriptsize", "footnotesize",
"small", "normalsize", "large", "Large", "LARGE", "huge", "Huge", 0};

char const * known_coded_sizes[] = { "tiny", "scriptsize", "footnotesize",
"small", "normal", "large", "larger", "largest", "huge", "giant", 0};


// splits "x=z, y=b" into a map
map<string, string> split_map(string const & s)
{
	map<string, string> res;
	vector<string> v;
	split(s, v);
	for (size_t i = 0; i < v.size(); ++i) {
		size_t const pos = v[i].find('=');
		string const index = v[i].substr(0, pos);
		string const value = v[i].substr(pos + 1, string::npos);
		res[trim(index)] = trim(value);
	}
	return res;
}
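
// Illustrative behaviour of split_map (a sketch; it assumes split() separates
// the string on commas, as its use for \includegraphics options suggests):
// "width=3cm, height=0.5\textheight" would yield the map
// { "width" -> "3cm", "height" -> "0.5\textheight" }.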


/*!
 * Split a LaTeX length into value and unit.
 * The latter can be a real unit like "pt", or a latex length variable
 * like "\textwidth". The unit may contain additional stuff like glue
 * lengths, but we don't care, because such lengths are ERT anyway.
 * \return true if \param value and \param unit are valid.
 */
bool splitLatexLength(string const & len, string & value, string & unit)
{
	if (len.empty())
		return false;
	const string::size_type i = len.find_first_not_of(" -+0123456789.,");
	// '4,5' is a valid LaTeX length number. Change it to '4.5'
	string const length = lyx::support::subst(len, ',', '.');
	if (i == string::npos)
		return false;
	if (i == 0) {
		if (len[0] == '\\') {
			// We had something like \textwidth without a factor
			value = "1.0";
		} else {
			return false;
		}
	} else {
		value = trim(string(length, 0, i));
	}
	if (value == "-")
		value = "-1.0";
	// 'cM' is a valid LaTeX length unit. Change it to 'cm'
	if (lyx::support::contains(len, '\\'))
		unit = trim(string(len, i));
	else
		unit = lyx::support::lowercase(trim(string(len, i)));
	return true;
}
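
// A few worked examples, traced from the code above (for illustration only):
//   "2.5cm"           -> value == "2.5",  unit == "cm"
//   "4,5mm"           -> value == "4.5",  unit == "mm"   (',' read as '.')
//   "0.7\textwidth"   -> value == "0.7",  unit == "\textwidth"
//   "\columnwidth"    -> value == "1.0",  unit == "\columnwidth"
//   "-\medskipamount" -> value == "-1.0", unit == "\medskipamount"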


// A simple function to translate a LaTeX length into something LyX can
// understand. Not perfect, but rather best-effort.
string translate_len(string const & length)
{
	string unit;
	string valstring;
	// If the input is invalid, return what we have.
	if (!splitLatexLength(length, valstring, unit))
		return length;
	// LyX uses percent values
	double value;
	istringstream iss(valstring);
	iss >> value;
	value *= 100;
	ostringstream oss;
	oss << value;
	string const percentval = oss.str();
	// a normal length
	if (unit.empty() || unit[0] != '\\')
		return valstring + unit;
	const string::size_type i = unit.find(' ');
	string const endlen = (i == string::npos) ? string() : string(unit, i);
	if (unit == "\\textwidth")
		return percentval + "text%" + endlen;
	else if (unit == "\\columnwidth")
		return percentval + "col%" + endlen;
	else if (unit == "\\paperwidth")
		return percentval + "page%" + endlen;
	else if (unit == "\\linewidth")
		return percentval + "line%" + endlen;
	else if (unit == "\\paperheight")
		return percentval + "pheight%" + endlen;
	else if (unit == "\\textheight")
		return percentval + "theight%" + endlen;
	else
		return valstring + unit;
}
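
// Rough examples of the mapping (a sketch; plain units pass through, only the
// length variables listed above become LyX percent lengths):
//   "2.5cm"          -> "2.5cm"
//   "0.5\textwidth"  -> "50text%"
//   "\columnwidth"   -> "100col%"
//   "2in plus 1fil"  -> "2in plus 1fil"   (glue stays as it is and ends up as ERT)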


void begin_inset(ostream & os, string const & name)
{
	os << "\n\\begin_inset " << name;
}


void end_inset(ostream & os)
{
	os << "\n\\end_inset \n\n";
}


void skip_braces(Parser & p)
{
	if (p.next_token().cat() != catBegin)
		return;
	p.get_token();
	if (p.next_token().cat() == catEnd) {
		p.get_token();
		return;
	}
	p.putback();
}
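
// Note: only a completely empty "{}" directly after the current position is
// swallowed; "{foo}" is left untouched (the opening brace is pushed back).
// This is used e.g. after \ldots{} or \LaTeX{} below.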


void handle_ert(ostream & os, string const & s, Context & context,
		bool check_layout = true)
{
	if (check_layout) {
		// We must have a valid layout before outputting the ERT inset.
		context.check_layout(os);
	}
	Context newcontext(true, context.textclass);
	begin_inset(os, "ERT");
	os << "\nstatus Collapsed\n";
	newcontext.check_layout(os);
	for (string::const_iterator it = s.begin(), et = s.end(); it != et; ++it) {
		if (*it == '\\')
			os << "\n\\backslash \n";
		else
			os << *it;
	}
	newcontext.check_end_layout(os);
	end_inset(os);
}
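
// For illustration, handle_ert(os, "\\foo{bar}", context) writes roughly
//   \begin_inset ERT
//   status Collapsed
//   ...
//   \backslash
//   foo{bar}
//   ...
//   \end_inset
// i.e. the string goes verbatim into a collapsed ERT inset with every
// backslash spelled as \backslash; the omitted lines are whatever
// Context::check_layout / check_end_layout emit for the inset paragraph.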


void handle_comment(ostream & os, string const & s, Context & context)
{
	// TODO: Handle this better
	Context newcontext(true, context.textclass);
	begin_inset(os, "ERT");
	os << "\nstatus Collapsed\n";
	newcontext.check_layout(os);
	for (string::const_iterator it = s.begin(), et = s.end(); it != et; ++it) {
		if (*it == '\\')
			os << "\n\\backslash \n";
		else
			os << *it;
	}
	// make sure that our comment is the last thing on the line
	os << "\n\\newline";
	newcontext.check_end_layout(os);
	end_inset(os);
}


struct isLayout {
	isLayout(string const name) : name_(name) {}
	bool operator()(LyXLayout_ptr const & ptr) {
		return ptr.get() && ptr->latexname() == name_;
	}
private:
	string const name_;
};


LyXLayout_ptr findLayout(LyXTextClass const & textclass,
			 string const & name)
{
	LyXTextClass::const_iterator it = textclass.begin();
	LyXTextClass::const_iterator end = textclass.end();
	it = std::find_if(it, end, isLayout(name));
	return (it == end) ? LyXLayout_ptr() : *it;
}


void output_command_layout(ostream & os, Parser & p, bool outer,
			   Context & parent_context,
			   LyXLayout_ptr newlayout)
{
	parent_context.check_end_layout(os);
	Context context(true, parent_context.textclass, newlayout,
			parent_context.layout);
	context.check_deeper(os);
	context.check_layout(os);
	if (context.layout->optionalargs > 0) {
		p.skip_spaces();
		if (p.next_token().character() == '[') {
			p.get_token(); // eat '['
			begin_inset(os, "OptArg\n");
			os << "collapsed true\n\n";
			parse_text_in_inset(p, os, FLAG_BRACK_LAST, outer, context);
			end_inset(os);
		}
	}
	parse_text_snippet(p, os, FLAG_ITEM, outer, context);
	context.check_end_layout(os);
	context.check_end_deeper(os);
	// We don't really need a new paragraph, but we must make sure
	// that the next item gets a \begin_layout.
	parent_context.new_paragraph(os);
}


/*!
 * Output a space if necessary.
 * This function gets called for every whitespace token.
 *
 * We have three cases here:
 * 1. A space must be suppressed. Example: The lyxcode case below
 * 2. A space may be suppressed. Example: Spaces before "\par"
 * 3. A space must not be suppressed. Example: A space between two words
 *
 * We currently handle only 1. and 3. and, from 2., only the case of
 * spaces before newlines as a side effect.
 *
 * 2. could be used to suppress as many spaces as possible. This has two effects:
 * - Reimporting LyX-generated LaTeX files changes almost no whitespace
 * - Superfluous whitespace from non-LyX-generated LaTeX files is removed.
 * The drawback is that the logic inside the function becomes
 * complicated, and that is the reason why it is not implemented.
 */
void check_space(Parser const & p, ostream & os, Context & context)
{
	Token const next = p.next_token();
	Token const curr = p.curr_token();
	// A space before a single newline and vice versa must be ignored.
	// LyX emits a newline before \end{lyxcode}.
	// This newline must be ignored,
	// otherwise LyX will add an additional protected space.
	if (next.cat() == catSpace ||
	    next.cat() == catNewline ||
	    (next.cs() == "end" && context.layout->free_spacing && curr.cat() == catNewline)) {
		return;
	}
	context.check_layout(os);
	os << ' ';
}
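
// Example of case 3 above: in "two words" the space between the two letter
// tokens is written out. Example of case 1: the newline LyX emits before
// "\end{lyxcode}" in a free-spacing layout is dropped, because the next
// token is the "end" control sequence.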


/*!
 * Check whether \param command is a known command. If yes,
 * handle the command with all arguments.
 * \return true if the command was parsed, false otherwise.
 */
bool parse_command(string const & command, Parser & p, ostream & os,
		   bool outer, Context & context)
{
	if (known_commands.find(command) != known_commands.end()) {
		vector<ArgumentType> const & template_arguments = known_commands[command];
		string ert = command;
		size_t no_arguments = template_arguments.size();
		for (size_t i = 0; i < no_arguments; ++i) {
			switch (template_arguments[i]) {
			case required:
				// This argument contains regular LaTeX
				handle_ert(os, ert + '{', context);
				parse_text(p, os, FLAG_ITEM, outer, context);
				ert = "}";
				break;
			case verbatim:
				// This argument may contain special characters
				ert += '{' + p.verbatim_item() + '}';
				break;
			case optional:
				ert += p.getOpt();
				break;
			}
		}
		handle_ert(os, ert, context);
		return true;
	}
	return false;
}
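
// Sketch of the effect: for a command registered in known_commands with the
// argument pattern (optional, required), e.g. a hypothetical "\foo[x]{y}",
// the pieces "\foo[x]{" and "}" are written as ERT, while the required
// argument "y" in between is parsed as normal text.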


void parse_environment(Parser & p, ostream & os, bool outer,
		       Context & parent_context)
{
	LyXLayout_ptr newlayout;
	string const name = p.getArg('{', '}');
	const bool is_starred = suffixIs(name, '*');
	string const unstarred_name = rtrim(name, "*");
	active_environments.push_back(name);
	p.skip_spaces();

	if (is_math_env(name)) {
		parent_context.check_layout(os);
		begin_inset(os, "Formula ");
		os << "\\begin{" << name << "}";
		parse_math(p, os, FLAG_END, MATH_MODE);
		os << "\\end{" << name << "}";
		end_inset(os);
	}

	else if (name == "tabular") {
		parent_context.check_layout(os);
		begin_inset(os, "Tabular ");
		handle_tabular(p, os, parent_context);
		end_inset(os);
	}

	else if (parent_context.textclass.floats().typeExist(unstarred_name)) {
		parent_context.check_layout(os);
		begin_inset(os, "Float " + unstarred_name + "\n");
		if (p.next_token().asInput() == "[") {
			os << "placement " << p.getArg('[', ']') << '\n';
		}
		os << "wide " << tostr(is_starred)
		   << "\ncollapsed false\n\n";
		parse_text_in_inset(p, os, FLAG_END, outer, parent_context);
		end_inset(os);
		// We don't really need a new paragraph, but we must make sure
		// that the next item gets a \begin_layout.
		parent_context.new_paragraph(os);
	}

	else if (name == "minipage") {
		string position = "1";
		string inner_pos = "0";
		string height = "0pt";
		string latex_position;
		string latex_inner_pos;
		string latex_height;
		if (p.next_token().asInput() == "[") {
			latex_position = p.getArg('[', ']');
			switch (latex_position[0]) {
			case 't': position = "0"; break;
			case 'c': position = "1"; break;
			case 'b': position = "2"; break;
			default:
				cerr << "invalid position for minipage"
				     << endl;
				break;
			}
			if (p.next_token().asInput() == "[") {
				latex_height = p.getArg('[', ']');
				height = translate_len(latex_height);

				if (p.next_token().asInput() == "[") {
					latex_inner_pos = p.getArg('[', ']');
					switch (latex_inner_pos[0]) {
					case 'c': inner_pos = "0"; break;
					case 't': inner_pos = "1"; break;
					case 'b': inner_pos = "2"; break;
					case 's': inner_pos = "3"; break;
					default:
						cerr << "invalid inner_pos for minipage"
						     << endl;
						break;
					}
				}
			}
		}
		string width = translate_len(p.verbatim_item());
		if (width[0] == '\\') {
			// lyx can't handle length variables
			ostringstream ss;
			ss << "\\begin{minipage}";
			if (!latex_position.empty())
				ss << '[' << latex_position << ']';
			if (!latex_height.empty())
				ss << '[' << latex_height << ']';
			if (!latex_inner_pos.empty())
				ss << '[' << latex_inner_pos << ']';
			ss << "{" << width << "}";
			handle_ert(os, ss.str(), parent_context);
			parent_context.new_paragraph(os);
			parse_text_in_inset(p, os, FLAG_END, outer, parent_context);
			handle_ert(os, "\\end{minipage}", parent_context);
		} else {
			parent_context.check_layout(os);
			begin_inset(os, "Minipage\n");
			os << "position " << position << '\n';
			os << "inner_position " << inner_pos << '\n';
			os << "height \"" << height << "\"\n";
			os << "width \"" << width << "\"\n";
			os << "collapsed false\n\n";
			parse_text_in_inset(p, os, FLAG_END, outer, parent_context);
			end_inset(os);
		}
	}

	// Alignment settings
	else if (name == "center" || name == "flushleft" || name == "flushright" ||
		 name == "centering" || name == "raggedright" || name == "raggedleft") {
		// We must begin a new paragraph if not already done
		if (! parent_context.atParagraphStart()) {
			parent_context.check_end_layout(os);
			parent_context.new_paragraph(os);
		}
		if (name == "flushleft" || name == "raggedright")
			parent_context.extra_stuff += "\\align left ";
		else if (name == "flushright" || name == "raggedleft")
			parent_context.extra_stuff += "\\align right ";
		else
			parent_context.extra_stuff += "\\align center ";
		parse_text(p, os, FLAG_END, outer, parent_context);
		// Just in case the environment is empty...
		parent_context.extra_stuff.erase();
		// We must begin a new paragraph to reset the alignment
		parent_context.new_paragraph(os);
	}

	// The single '=' is meant here.
	else if ((newlayout = findLayout(parent_context.textclass, name)).get() &&
		 newlayout->isEnvironment()) {
		Context context(true, parent_context.textclass, newlayout,
				parent_context.layout);
		parent_context.check_end_layout(os);
		switch (context.layout->latextype) {
		case LATEX_LIST_ENVIRONMENT:
			context.extra_stuff = "\\labelwidthstring "
				+ p.verbatim_item() + '\n';
			p.skip_spaces();
			break;
		case LATEX_BIB_ENVIRONMENT:
			p.verbatim_item(); // swallow next arg
			p.skip_spaces();
			break;
		default:
			break;
		}
		context.check_deeper(os);
		parse_text(p, os, FLAG_END, outer, context);
		context.check_end_layout(os);
		context.check_end_deeper(os);
		parent_context.new_paragraph(os);
	}

	else if (name == "appendix") {
		// This is not good LaTeX style, but it works and is used in some documents...
		parent_context.check_end_layout(os);
		Context context(true, parent_context.textclass, parent_context.layout,
				parent_context.layout);
		context.check_layout(os);
		os << "\\start_of_appendix\n";
		parse_text(p, os, FLAG_END, outer, context);
		context.check_end_layout(os);
	}

	else if (name == "comment") {
		parent_context.check_layout(os);
		begin_inset(os, "Comment\n");
		os << "collapsed false\n";
		parse_text_in_inset(p, os, FLAG_END, outer, parent_context);
		end_inset(os);
	}

	else if (name == "tabbing") {
		// We need to remember that we have to handle '\=' specially
		handle_ert(os, "\\begin{" + name + "}", parent_context);
		parse_text_snippet(p, os, FLAG_END | FLAG_TABBING, outer, parent_context);
		handle_ert(os, "\\end{" + name + "}", parent_context);
	}

	else {
		handle_ert(os, "\\begin{" + name + "}", parent_context);
		parse_text_snippet(p, os, FLAG_END, outer, parent_context);
		handle_ert(os, "\\end{" + name + "}", parent_context);
	}

	active_environments.pop_back();
	if (name != "math")
		p.skip_spaces();
}
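
// Informal overview of the mapping above: math environments become Formula
// insets, "tabular" becomes a Tabular inset, float types known to the text
// class become Float insets, "minipage" becomes a Minipage inset (or ERT when
// the width is a length variable), the alignment environments set \align on
// the enclosed paragraphs, environments that match a layout of the text class
// are converted to that layout, and everything else is kept as ERT.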


} // anonymous namespace


void parse_text(Parser & p, ostream & os, unsigned flags, bool outer,
		Context & context)
{
	LyXLayout_ptr newlayout;
	// Store the latest bibliographystyle (needed for bibtex inset)
	string bibliographystyle;
	while (p.good()) {
		Token const & t = p.get_token();

#ifdef FILEDEBUG
		cerr << "t: " << t << " flags: " << flags << "\n";
#endif

		if (flags & FLAG_ITEM) {
			if (t.cat() == catSpace)
				continue;

			flags &= ~FLAG_ITEM;
			if (t.cat() == catBegin) {
				// skip the brace and collect everything to the next matching
				// closing brace
				flags |= FLAG_BRACE_LAST;
				continue;
			}

			// handle only this single token, leave the loop if done
			flags |= FLAG_LEAVE;
		}

		if (t.character() == ']' && (flags & FLAG_BRACK_LAST))
			return;

		//
		// cat codes
		//
		if (t.cat() == catMath) {
			// we are inside some text mode thingy, so opening new math is allowed
			context.check_layout(os);
			begin_inset(os, "Formula ");
			Token const & n = p.get_token();
			if (n.cat() == catMath && outer) {
				// TeX's $$...$$ syntax for displayed math
				os << "\\[";
				parse_math(p, os, FLAG_SIMPLE, MATH_MODE);
				os << "\\]";
				p.get_token(); // skip the second '$' token
			} else {
				// simple $...$ stuff
				p.putback();
				os << '$';
				parse_math(p, os, FLAG_SIMPLE, MATH_MODE);
				os << '$';
			}
			end_inset(os);
		}

		else if (t.cat() == catSuper || t.cat() == catSub)
			cerr << "catcode " << t << " illegal in text mode\n";

		// Basic support for English quotes. This should be
		// extended to other quotes, but is not so easy (a
		// left English quote is the same as a right German
		// quote...)
		else if (t.asInput() == "`"
			 && p.next_token().asInput() == "`") {
			context.check_layout(os);
			begin_inset(os, "Quotes ");
			os << "eld";
			end_inset(os);
			p.get_token();
			skip_braces(p);
		}
		else if (t.asInput() == "'"
			 && p.next_token().asInput() == "'") {
			context.check_layout(os);
			begin_inset(os, "Quotes ");
			os << "erd";
			end_inset(os);
			p.get_token();
			skip_braces(p);
		}

		else if (t.cat() == catSpace || (t.cat() == catNewline && t.cs().size() == 1))
			check_space(p, os, context);

		else if (t.cat() == catLetter ||
			 t.cat() == catOther ||
			 t.cat() == catAlign ||
			 t.cat() == catParameter) {
			context.check_layout(os);
			os << t.character();
		}

		else if (t.cat() == catNewline || (t.cat() == catEscape && t.cs() == "par")) {
			p.skip_spaces();
			context.new_paragraph(os);
		}

		else if (t.cat() == catActive) {
			context.check_layout(os);
			if (t.character() == '~') {
				if (context.layout->free_spacing)
					os << ' ';
				else
					os << "\\InsetSpace ~\n";
			} else
				os << t.character();
		}

		else if (t.cat() == catBegin) {
			// special handling of size changes
			context.check_layout(os);
			bool const is_size = is_known(p.next_token().cs(), known_sizes);
			Token const prev = p.prev_token();
			string const s = parse_text(p, FLAG_BRACE_LAST, outer, context);
			if (s.empty() && (p.next_token().character() == '`' ||
					  (prev.character() == '-' && p.next_token().character() == '-')))
				; // ignore it in {}`` or -{}-
			else if (is_size || s == "[" || s == "]" || s == "*")
				os << s;
			else {
				handle_ert(os, "{", context, false);
				// s will end the current layout and begin a new one if necessary
				os << s;
				handle_ert(os, "}", context);
			}
		}

		else if (t.cat() == catEnd) {
			if (flags & FLAG_BRACE_LAST) {
				return;
			}
			cerr << "stray '}' in text\n";
			handle_ert(os, "}", context);
		}

		else if (t.cat() == catComment) {
			context.check_layout(os);
			if (!t.cs().empty()) {
				handle_comment(os, '%' + t.cs(), context);
				if (p.next_token().cat() == catNewline) {
					// A newline after a comment line starts a new paragraph
					context.new_paragraph(os);
					p.skip_spaces();
				}
			} else {
				// "%\n" combination
				p.skip_spaces();
			}
		}

		//
		// control sequences
		//

		else if (t.cs() == "(") {
			context.check_layout(os);
			begin_inset(os, "Formula");
			os << " \\(";
			parse_math(p, os, FLAG_SIMPLE2, MATH_MODE);
			os << "\\)";
			end_inset(os);
		}

		else if (t.cs() == "[") {
			context.check_layout(os);
			begin_inset(os, "Formula");
			os << " \\[";
			parse_math(p, os, FLAG_EQUATION, MATH_MODE);
			os << "\\]";
			end_inset(os);
		}

		else if (t.cs() == "begin")
			parse_environment(p, os, outer, context);

		else if (t.cs() == "end") {
			if (flags & FLAG_END) {
				// eat environment name
				string const name = p.getArg('{', '}');
				if (name != active_environment())
					cerr << "\\end{" + name + "} does not match \\begin{"
						+ active_environment() + "}\n";
				return;
			}
			p.error("found 'end' unexpectedly");
		}

		else if (t.cs() == "item") {
			p.skip_spaces();
			string s;
			bool optarg = false;
			if (p.next_token().character() == '[') {
				p.get_token(); // eat '['
				Context newcontext(false, context.textclass);
				s = parse_text(p, FLAG_BRACK_LAST, outer, newcontext);
				optarg = true;
			}
			context.set_item();
			context.check_layout(os);
			if (optarg) {
				if (context.layout->labeltype != LABEL_MANUAL) {
					// lyx does not support \item[\mybullet] in itemize environments
					handle_ert(os, "[", context);
					os << s;
					handle_ert(os, "]", context);
				} else if (!s.empty()) {
					// The space is needed to separate the item from the rest of the sentence.
					os << s << ' ';
					p.skip_spaces();
				}
			}
		}

		else if (t.cs() == "bibitem") {
			context.set_item();
			context.check_layout(os);
			os << "\\bibitem ";
			os << p.getOpt();
			os << '{' << p.verbatim_item() << '}' << "\n";
		}

		else if (t.cs() == "def") {
			p.skip_spaces();
			context.check_layout(os);
			string name = p.get_token().cs();
			while (p.next_token().cat() != catBegin)
				name += p.get_token().asString();
			handle_ert(os, "\\def\\" + name + '{' + p.verbatim_item() + '}', context);
		}

		else if (t.cs() == "noindent") {
			p.skip_spaces();
			context.extra_stuff += "\\noindent ";
		}

		else if (t.cs() == "appendix") {
			p.skip_spaces();
			context.extra_stuff += "\\start_of_appendix ";
		}

		// Must attempt to parse "Section*" before "Section".
		else if ((p.next_token().asInput() == "*") &&
			 // The single '=' is meant here.
			 (newlayout = findLayout(context.textclass,
						 t.cs() + '*')).get() &&
			 newlayout->isCommand()) {
			p.get_token();
			output_command_layout(os, p, outer, context, newlayout);
			p.skip_spaces();
		}

		// The single '=' is meant here.
		else if ((newlayout = findLayout(context.textclass, t.cs())).get() &&
			 newlayout->isCommand()) {
			output_command_layout(os, p, outer, context, newlayout);
			p.skip_spaces();
		}

		else if (t.cs() == "includegraphics") {
			map<string, string> opts = split_map(p.getArg('[', ']'));
			string name = p.verbatim_item();

			context.check_layout(os);
			begin_inset(os, "Graphics ");
			os << "\n\tfilename " << name << '\n';
			if (opts.find("width") != opts.end())
				os << "\twidth "
				   << translate_len(opts["width"]) << '\n';
			if (opts.find("height") != opts.end())
				os << "\theight "
				   << translate_len(opts["height"]) << '\n';
			if (opts.find("scale") != opts.end()) {
				istringstream iss(opts["scale"]);
				double val;
				iss >> val;
				val = val * 100;
				os << "\tscale " << val << '\n';
			}
			if (opts.find("angle") != opts.end())
				os << "\trotateAngle "
				   << opts["angle"] << '\n';
			if (opts.find("origin") != opts.end()) {
				ostringstream ss;
				string const opt = opts["origin"];
				if (opt.find('l') != string::npos) ss << "left";
				if (opt.find('r') != string::npos) ss << "right";
				if (opt.find('c') != string::npos) ss << "center";
				if (opt.find('t') != string::npos) ss << "Top";
				if (opt.find('b') != string::npos) ss << "Bottom";
				if (opt.find('B') != string::npos) ss << "Baseline";
				if (!ss.str().empty())
					os << "\trotateOrigin " << ss.str() << '\n';
				else
					cerr << "Warning: Ignoring unknown includegraphics origin argument '" << opt << "'\n";
			}
			if (opts.find("keepaspectratio") != opts.end())
				os << "\tkeepAspectRatio\n";
			if (opts.find("clip") != opts.end())
				os << "\tclip\n";
			if (opts.find("draft") != opts.end())
				os << "\tdraft\n";
			if (opts.find("bb") != opts.end())
				os << "\tBoundingBox "
				   << opts["bb"] << '\n';
			int numberOfbbOptions = 0;
			if (opts.find("bbllx") != opts.end())
				numberOfbbOptions++;
			if (opts.find("bblly") != opts.end())
				numberOfbbOptions++;
			if (opts.find("bburx") != opts.end())
				numberOfbbOptions++;
			if (opts.find("bbury") != opts.end())
				numberOfbbOptions++;
			if (numberOfbbOptions == 4)
				os << "\tBoundingBox "
				   << opts["bbllx"] << ' ' << opts["bblly"] << ' '
				   << opts["bburx"] << ' ' << opts["bbury"] << '\n';
			else if (numberOfbbOptions > 0)
				cerr << "Warning: Ignoring incomplete includegraphics boundingbox arguments.\n";
			numberOfbbOptions = 0;
			if (opts.find("natwidth") != opts.end())
				numberOfbbOptions++;
			if (opts.find("natheight") != opts.end())
				numberOfbbOptions++;
			if (numberOfbbOptions == 2)
				os << "\tBoundingBox 0bp 0bp "
				   << opts["natwidth"] << ' ' << opts["natheight"] << '\n';
			else if (numberOfbbOptions > 0)
				cerr << "Warning: Ignoring incomplete includegraphics boundingbox arguments.\n";
			ostringstream special;
			if (opts.find("hiresbb") != opts.end())
				special << "hiresbb,";
			if (opts.find("trim") != opts.end())
				special << "trim,";
			if (opts.find("viewport") != opts.end())
				special << "viewport=" << opts["viewport"] << ',';
			if (opts.find("totalheight") != opts.end())
				special << "totalheight=" << opts["totalheight"] << ',';
			if (opts.find("type") != opts.end())
				special << "type=" << opts["type"] << ',';
			if (opts.find("ext") != opts.end())
				special << "ext=" << opts["ext"] << ',';
			if (opts.find("read") != opts.end())
				special << "read=" << opts["read"] << ',';
			if (opts.find("command") != opts.end())
				special << "command=" << opts["command"] << ',';
			string s_special = special.str();
			if (!s_special.empty()) {
				// We had special arguments. Remove the trailing ','.
				os << "\tspecial " << s_special.substr(0, s_special.size() - 1) << '\n';
			}
			// TODO: Handle the unknown settings better.
			// Warn about invalid options.
			// Check whether some option was given twice.
			end_inset(os);
		}

		else if (t.cs() == "footnote") {
			p.skip_spaces();
			context.check_layout(os);
			begin_inset(os, "Foot\n");
			os << "collapsed true\n\n";
			parse_text_in_inset(p, os, FLAG_ITEM, false, context);
			end_inset(os);
		}

		else if (t.cs() == "marginpar") {
			p.skip_spaces();
			context.check_layout(os);
			begin_inset(os, "Marginal\n");
			os << "collapsed true\n\n";
			parse_text_in_inset(p, os, FLAG_ITEM, false, context);
			end_inset(os);
		}

		else if (t.cs() == "ensuremath") {
			p.skip_spaces();
			context.check_layout(os);
			Context newcontext(false, context.textclass);
			string s = parse_text(p, FLAG_ITEM, false, newcontext);
			if (s == "±" || s == "³" || s == "²" || s == "µ")
				os << s;
			else
				handle_ert(os, "\\ensuremath{" + s + "}",
					   context);
		}

		else if (t.cs() == "hfill") {
			context.check_layout(os);
			os << "\n\\hfill\n";
			skip_braces(p);
			p.skip_spaces();
		}

		else if (t.cs() == "makeindex" || t.cs() == "maketitle") {
			p.skip_spaces();
			skip_braces(p); // swallow this
		}

		else if (t.cs() == "tableofcontents") {
			p.skip_spaces();
			context.check_layout(os);
			begin_inset(os, "LatexCommand \\tableofcontents\n");
			end_inset(os);
			skip_braces(p); // swallow this
		}

		else if (t.cs() == "listoffigures") {
			p.skip_spaces();
			context.check_layout(os);
			begin_inset(os, "FloatList figure\n");
			end_inset(os);
			skip_braces(p); // swallow this
		}

		else if (t.cs() == "listoftables") {
			p.skip_spaces();
			context.check_layout(os);
			begin_inset(os, "FloatList table\n");
			end_inset(os);
			skip_braces(p); // swallow this
		}

		else if (t.cs() == "listof") {
			p.skip_spaces(true);
			string const name = p.get_token().asString();
			if (context.textclass.floats().typeExist(name)) {
				context.check_layout(os);
				begin_inset(os, "FloatList ");
				os << name << "\n";
				end_inset(os);
				p.get_token(); // swallow second arg
			} else
				handle_ert(os, "\\listof{" + name + "}", context);
		}

		else if (t.cs() == "textrm") {
			context.check_layout(os);
			os << "\n\\family roman \n";
			parse_text_snippet(p, os, FLAG_ITEM, outer, context);
			os << "\n\\family default \n";
		}

		else if (t.cs() == "textsf") {
			context.check_layout(os);
			os << "\n\\family sans \n";
			parse_text_snippet(p, os, FLAG_ITEM, outer, context);
			os << "\n\\family default \n";
		}

		else if (t.cs() == "textsl") {
			context.check_layout(os);
			os << "\n\\shape slanted \n";
			parse_text_snippet(p, os, FLAG_ITEM, outer, context);
			os << "\n\\shape default \n";
		}

		else if (t.cs() == "texttt") {
			context.check_layout(os);
			os << "\n\\family typewriter \n";
			parse_text_snippet(p, os, FLAG_ITEM, outer, context);
			os << "\n\\family default \n";
		}

		else if (t.cs() == "textit") {
			context.check_layout(os);
			os << "\n\\shape italic \n";
			parse_text_snippet(p, os, FLAG_ITEM, outer, context);
			os << "\n\\shape default \n";
		}

		else if (t.cs() == "textsc") {
			context.check_layout(os);
			os << "\n\\noun on \n";
			parse_text_snippet(p, os, FLAG_ITEM, outer, context);
			os << "\n\\noun default \n";
		}

		else if (t.cs() == "textbf") {
			context.check_layout(os);
			os << "\n\\series bold \n";
			parse_text_snippet(p, os, FLAG_ITEM, outer, context);
			os << "\n\\series default \n";
		}

		else if (t.cs() == "underbar") {
			context.check_layout(os);
			os << "\n\\bar under \n";
			parse_text_snippet(p, os, FLAG_ITEM, outer, context);
			os << "\n\\bar default \n";
		}

		else if (t.cs() == "emph" || t.cs() == "noun") {
			context.check_layout(os);
			os << "\n\\" << t.cs() << " on \n";
			parse_text_snippet(p, os, FLAG_ITEM, outer, context);
			os << "\n\\" << t.cs() << " default \n";
		}

		else if (is_known(t.cs(), known_latex_commands)) {
			context.check_layout(os);
			begin_inset(os, "LatexCommand ");
			os << '\\' << t.cs();
			os << p.getOpt();
			os << p.getOpt();
			os << '{' << p.verbatim_item() << "}\n";
			end_inset(os);
		}

		else if (is_known(t.cs(), known_quotes)) {
			char const ** where = is_known(t.cs(), known_quotes);
			context.check_layout(os);
			begin_inset(os, "Quotes ");
			os << known_coded_quotes[where - known_quotes];
			end_inset(os);
			skip_braces(p);
		}

		else if (is_known(t.cs(), known_sizes)) {
			char const ** where = is_known(t.cs(), known_sizes);
			context.check_layout(os);
			os << "\n\\size " << known_coded_sizes[where - known_sizes] << "\n";
			p.skip_spaces();
		}

		else if (t.cs() == "LyX" || t.cs() == "TeX"
			 || t.cs() == "LaTeX") {
			context.check_layout(os);
			os << t.cs();
			skip_braces(p); // eat {}
		}

		else if (t.cs() == "LaTeXe") {
			context.check_layout(os);
			os << "LaTeX2e";
			skip_braces(p); // eat {}
		}

		else if (t.cs() == "ldots") {
			context.check_layout(os);
			skip_braces(p);
			os << "\\SpecialChar \\ldots{}\n";
		}

		else if (t.cs() == "lyxarrow") {
			context.check_layout(os);
			os << "\\SpecialChar \\menuseparator\n";
			skip_braces(p);
		}

		else if (t.cs() == "textcompwordmark") {
			context.check_layout(os);
			os << "\\SpecialChar \\textcompwordmark{}\n";
			skip_braces(p);
		}

		else if (t.cs() == "@" && p.next_token().asInput() == ".") {
			context.check_layout(os);
			os << "\\SpecialChar \\@.\n";
			p.get_token();
		}

		else if (t.cs() == "-") {
			context.check_layout(os);
			os << "\\SpecialChar \\-\n";
		}

		else if (t.cs() == "textasciitilde") {
			context.check_layout(os);
			os << '~';
			skip_braces(p);
		}

		else if (t.cs() == "textasciicircum") {
			context.check_layout(os);
			os << '^';
			skip_braces(p);
		}

		else if (t.cs() == "textbackslash") {
			context.check_layout(os);
			os << "\n\\backslash \n";
			skip_braces(p);
		}

		else if (t.cs() == "_" || t.cs() == "&" || t.cs() == "#"
			 || t.cs() == "$" || t.cs() == "{" || t.cs() == "}"
			 || t.cs() == "%") {
			context.check_layout(os);
			os << t.cs();
		}

		else if (t.cs() == "char") {
			context.check_layout(os);
			if (p.next_token().character() == '`') {
				p.get_token();
				if (p.next_token().cs() == "\"") {
					p.get_token();
					os << '"';
					skip_braces(p);
				} else {
					handle_ert(os, "\\char`", context);
				}
			} else {
				handle_ert(os, "\\char", context);
			}
		}

		else if (t.cs() == "\"") {
			context.check_layout(os);
			string const name = p.verbatim_item();
			if (name == "a") os << 'ä';
			else if (name == "o") os << 'ö';
			else if (name == "u") os << 'ü';
			else if (name == "A") os << 'Ä';
			else if (name == "O") os << 'Ö';
			else if (name == "U") os << 'Ü';
			else handle_ert(os, "\"{" + name + "}", context);
		}

		// Problem: \= creates a tabstop inside the tabbing environment
		// and an accent otherwise. In the latter case we would really
		// want \={o} instead of \= o.
		else if (t.cs() == "=" && (flags & FLAG_TABBING))
			handle_ert(os, t.asInput(), context);

		else if (t.cs() == "H" || t.cs() == "c" || t.cs() == "^" || t.cs() == "'"
			 || t.cs() == "~" || t.cs() == "." || t.cs() == "=") {
			// we need the trim as the LyX parser chokes on such spaces
			context.check_layout(os);
			os << "\n\\i \\" << t.cs() << "{"
			   << trim(parse_text(p, FLAG_ITEM, outer, context), " ") << "}\n";
		}

		else if (t.cs() == "ss") {
			context.check_layout(os);
			os << "ß";
		}

		else if (t.cs() == "i" || t.cs() == "j") {
			context.check_layout(os);
			os << "\\" << t.cs() << ' ';
		}

		else if (t.cs() == "\\") {
			context.check_layout(os);
			string const next = p.next_token().asInput();
			if (next == "[")
				handle_ert(os, "\\\\" + p.getOpt(), context);
			else if (next == "*") {
				p.get_token();
				handle_ert(os, "\\\\*" + p.getOpt(), context);
			}
			else {
				os << "\n\\newline \n";
			}
		}

		else if (t.cs() == "input" || t.cs() == "include"
			 || t.cs() == "verbatiminput") {
			string name = '\\' + t.cs();
			if (t.cs() == "verbatiminput"
			    && p.next_token().asInput() == "*")
				name += p.get_token().asInput();
			context.check_layout(os);
			begin_inset(os, "Include ");
			string filename(p.getArg('{', '}'));
			string lyxname(lyx::support::ChangeExtension(filename, ".lyx"));
			if (tex2lyx(filename, lyxname)) {
				os << name << '{' << lyxname << "}\n";
			} else {
				os << name << '{' << filename << "}\n";
			}
			os << "preview false\n";
			end_inset(os);
		}

		else if (t.cs() == "fancyhead") {
			context.check_layout(os);
			ostringstream ss;
			ss << "\\fancyhead";
			ss << p.getOpt();
			ss << '{' << p.verbatim_item() << "}\n";
			handle_ert(os, ss.str(), context);
		}

		else if (t.cs() == "bibliographystyle") {
			// store new bibliographystyle
			bibliographystyle = p.verbatim_item();
			// output new bibliographystyle.
			// This is only necessary if used in some other macro than \bibliography.
			handle_ert(os, "\\bibliographystyle{" + bibliographystyle + "}", context);
		}

		else if (t.cs() == "bibliography") {
			context.check_layout(os);
			begin_inset(os, "LatexCommand ");
			os << "\\bibtex";
			// Do we have a bibliographystyle set?
			if (!bibliographystyle.empty()) {
				os << '[' << bibliographystyle << ']';
			}
			os << '{' << p.verbatim_item() << "}\n";
			end_inset(os);
		}

		else if (t.cs() == "smallskip" ||
			 t.cs() == "medskip" ||
			 t.cs() == "bigskip" ||
			 t.cs() == "vfill") {
			context.check_layout(os);
			begin_inset(os, "VSpace ");
			os << t.cs();
			end_inset(os);
		}

		else if (t.cs() == "vspace") {
			bool starred = false;
			if (p.next_token().asInput() == "*") {
				p.get_token();
				starred = true;
			}
			string const length = p.verbatim_item();
			string unit;
			string valstring;
			bool valid = splitLatexLength(length, valstring, unit);
			bool known_vspace = false;
			bool known_unit = false;
			double value;
			if (valid) {
				istringstream iss(valstring);
				iss >> value;
				if (value == 1.0) {
					if (unit == "\\smallskipamount") {
						unit = "smallskip";
						known_vspace = true;
					} else if (unit == "\\medskipamount") {
						unit = "medskip";
						known_vspace = true;
					} else if (unit == "\\bigskipamount") {
						unit = "bigskip";
						known_vspace = true;
					} else if (unit == "\\fill") {
						unit = "vfill";
						known_vspace = true;
					}
				} else {
					switch (unitFromString(unit)) {
					case LyXLength::SP:
					case LyXLength::PT:
					case LyXLength::BP:
					case LyXLength::DD:
					case LyXLength::MM:
					case LyXLength::PC:
					case LyXLength::CC:
					case LyXLength::CM:
					case LyXLength::IN:
					case LyXLength::EX:
					case LyXLength::EM:
					case LyXLength::MU:
						known_unit = true;
						break;
					default:
						break;
					}
				}
			}

			if (known_unit || known_vspace) {
				// Literal length or known variable
				context.check_layout(os);
				begin_inset(os, "VSpace ");
				if (known_unit)
					os << value;
				os << unit;
				if (starred)
					os << '*';
				end_inset(os);
			} else {
				// LyX can't handle other length variables in Inset VSpace
				string name = t.asInput();
				if (starred)
					name += '*';
				if (valid) {
					if (value == 1.0)
						handle_ert(os, name + '{' + unit + '}', context);
					else if (value == -1.0)
						handle_ert(os, name + "{-" + unit + '}', context);
					else
						handle_ert(os, name + '{' + valstring + unit + '}', context);
				} else
					handle_ert(os, name + '{' + length + '}', context);
			}
		}

		else {
			//cerr << "#: " << t << " mode: " << mode << endl;
			// heuristic: read up to next non-nested space
			/*
			string s = t.asInput();
			string z = p.verbatim_item();
			while (p.good() && z != " " && z.size()) {
				//cerr << "read: " << z << endl;
				s += z;
				z = p.verbatim_item();
			}
			cerr << "found ERT: " << s << endl;
			handle_ert(os, s + ' ', context);
			*/
			string name = t.asInput();
			if (p.next_token().asInput() == "*") {
				// Starred commands like \vspace*{}
				p.get_token(); // Eat '*'
				name += '*';
			}
			if (! parse_command(name, p, os, outer, context))
				handle_ert(os, name, context);
		}

		if (flags & FLAG_LEAVE) {
			flags &= ~FLAG_LEAVE;
			break;
		}
	}
}

// }])