2003-08-19 10:04:35 +00:00
|
|
|
|
/**
|
2007-04-26 04:41:58 +00:00
|
|
|
|
* \file tex2lyx/text.cpp
|
2003-08-19 10:04:35 +00:00
|
|
|
|
* This file is part of LyX, the document processor.
|
|
|
|
|
* Licence details can be found in the file COPYING.
|
|
|
|
|
*
|
2008-11-14 15:58:50 +00:00
|
|
|
|
* \author André Pönitz
|
2003-08-19 10:04:35 +00:00
|
|
|
|
* \author Jean-Marc Lasgouttes
|
2008-11-14 15:58:50 +00:00
|
|
|
|
* \author Uwe Stöhr
|
2003-08-19 10:04:35 +00:00
|
|
|
|
*
|
2003-08-23 00:17:00 +00:00
|
|
|
|
* Full author contact details are available in file CREDITS.
|
2003-04-17 09:47:21 +00:00
|
|
|
|
*/
|
|
|
|
|
|
|
|
|
|
// {[(
|
|
|
|
|
|
|
|
|
|
#include <config.h>
|
|
|
|
|
|
|
|
|
|
#include "tex2lyx.h"
|
2007-10-24 22:55:02 +00:00
|
|
|
|
|
2007-04-26 04:53:06 +00:00
|
|
|
|
#include "Context.h"
|
2008-11-16 23:24:56 +00:00
|
|
|
|
#include "Encoding.h"
|
2003-07-26 00:15:38 +00:00
|
|
|
|
#include "FloatList.h"
|
2011-11-06 17:03:59 +00:00
|
|
|
|
#include "LaTeXPackages.h"
|
2007-10-24 22:55:02 +00:00
|
|
|
|
#include "Layout.h"
|
|
|
|
|
#include "Length.h"
|
2011-10-30 12:47:45 +00:00
|
|
|
|
#include "Preamble.h"
|
2007-10-24 22:55:02 +00:00
|
|
|
|
|
2011-12-13 19:40:05 +00:00
|
|
|
|
#include "insets/ExternalTemplate.h"
|
|
|
|
|
|
2008-04-30 08:26:40 +00:00
|
|
|
|
#include "support/lassert.h"
|
2005-01-06 16:39:35 +00:00
|
|
|
|
#include "support/convert.h"
|
2007-12-17 16:04:46 +00:00
|
|
|
|
#include "support/FileName.h"
|
2003-10-23 11:46:33 +00:00
|
|
|
|
#include "support/filetools.h"
|
2007-12-17 16:04:46 +00:00
|
|
|
|
#include "support/lstrings.h"
|
2011-11-06 17:03:59 +00:00
|
|
|
|
#include "support/lyxtime.h"
|
2003-04-17 09:47:21 +00:00
|
|
|
|
|
2008-03-07 07:21:13 +00:00
|
|
|
|
#include <algorithm>
|
2003-04-17 09:47:21 +00:00
|
|
|
|
#include <iostream>
|
|
|
|
|
#include <map>
|
2003-06-30 11:36:08 +00:00
|
|
|
|
#include <sstream>
|
2003-04-17 09:47:21 +00:00
|
|
|
|
#include <vector>
|
|
|
|
|
|
2007-12-12 10:16:00 +00:00
|
|
|
|
using namespace std;
|
2007-12-12 18:57:56 +00:00
|
|
|
|
using namespace lyx::support;
|
2006-10-21 00:16:43 +00:00
|
|
|
|
|
|
|
|
|
namespace lyx {
|
|
|
|
|
|
2003-07-28 23:50:24 +00:00
|
|
|
|
|
2003-08-04 10:26:10 +00:00
|
|
|
|
/*!
 * Parse the contents of an inset into \p os.
 * A fresh Context is created so that layout state of the surrounding
 * text does not leak into the inset; \p layout (may be null) decides
 * whether the inset forces the plain layout.
 */
void parse_text_in_inset(Parser & p, ostream & os, unsigned flags, bool outer,
	Context const & context, InsetLayout const * layout)
{
	// A null layout means "no special inset layout known": no forcing.
	bool const forcePlainLayout =
		layout ? layout->forcePlainLayout() : false;
	Context newcontext(true, context.textclass);
	if (forcePlainLayout)
		newcontext.layout = &context.textclass.plainLayout();
	else
		// Only non-plain insets inherit the surrounding font.
		newcontext.font = context.font;
	parse_text(p, os, flags, outer, newcontext);
	// Close any layout still open at the end of the inset contents.
	newcontext.check_end_layout(os);
}
|
|
|
|
|
|
|
|
|
|
|
2005-07-26 11:58:43 +00:00
|
|
|
|
namespace {
|
|
|
|
|
|
2011-01-22 12:00:33 +00:00
|
|
|
|
/// Overload that looks up the inset layout by \p name in the current
/// text class and forwards to the InsetLayout pointer version.
void parse_text_in_inset(Parser & p, ostream & os, unsigned flags, bool outer,
	Context const & context, string const & name)
{
	InsetLayout const * layout = 0;
	DocumentClass::InsetLayouts::const_iterator it =
		context.textclass.insetLayouts().find(from_ascii(name));
	if (it != context.textclass.insetLayouts().end())
		layout = &(it->second);
	// An unknown name simply yields layout == 0 (no forced plain layout).
	parse_text_in_inset(p, os, flags, outer, context, layout);
}
|
|
|
|
|
|
2005-07-13 11:38:55 +00:00
|
|
|
|
/// parses a paragraph snippet, useful for example for \\emph{...}
void parse_text_snippet(Parser & p, ostream & os, unsigned flags, bool outer,
	Context & context)
{
	// Parse with a copy of the caller's context ...
	Context newcontext(context);
	// Don't inherit the paragraph-level extra stuff
	newcontext.par_extra_stuff.clear();
	parse_text(p, os, flags, outer, newcontext);
	// ... but propagate the layout bookkeeping back to the caller.
	// Make sure that we don't create invalid .lyx files
	context.need_layout = newcontext.need_layout;
	context.need_end_layout = newcontext.need_end_layout;
}
|
|
|
|
|
|
2003-07-28 21:58:09 +00:00
|
|
|
|
|
2005-07-26 11:58:43 +00:00
|
|
|
|
/*!
 * Thin wrapper around parse_text_snippet() using a string.
 *
 * We completely ignore \c context.need_layout and \c context.need_end_layout,
 * because our return value is not used directly (otherwise the stream version
 * of parse_text_snippet() could be used). That means that the caller needs
 * to do layout management manually.
 * This is intended to parse text that does not create any layout changes.
 */
string parse_text_snippet(Parser & p, unsigned flags, const bool outer,
	Context & context)
{
	Context newcontext(context);
	// Suppress all layout handling; the caller manages layouts itself.
	newcontext.need_layout = false;
	newcontext.need_end_layout = false;
	newcontext.new_layout_allowed = false;
	// Avoid warning by Context::~Context()
	newcontext.par_extra_stuff.clear();
	ostringstream os;
	parse_text_snippet(p, os, flags, outer, newcontext);
	return os.str();
}
|
|
|
|
|
|
2003-04-17 09:47:21 +00:00
|
|
|
|
|
2009-06-11 23:46:41 +00:00
|
|
|
|
char const * const known_ref_commands[] = { "ref", "pageref", "vref",
|
|
|
|
|
"vpageref", "prettyref", "eqref", 0 };
|
2003-04-17 09:47:21 +00:00
|
|
|
|
|
2011-10-22 17:25:34 +00:00
|
|
|
|
char const * const known_coded_ref_commands[] = { "ref", "pageref", "vref",
|
|
|
|
|
"vpageref", "formatted", "eqref", 0 };
|
|
|
|
|
|
2013-02-04 20:37:14 +00:00
|
|
|
|
char const * const known_refstyle_commands[] = { "algref", "chapref", "corref",
|
|
|
|
|
"eqref", "enuref", "figref", "fnref", "lemref", "parref", "partref", "propref",
|
|
|
|
|
"secref", "subref", "tabref", "thmref", 0 };
|
2013-02-04 00:41:56 +00:00
|
|
|
|
|
2013-02-04 20:37:14 +00:00
|
|
|
|
char const * const known_refstyle_prefixes[] = { "alg", "chap", "cor",
|
|
|
|
|
"eq", "enu", "fig", "fn", "lem", "par", "part", "prop",
|
|
|
|
|
"sec", "sub", "tab", "thm", 0 };
|
2013-02-04 00:41:56 +00:00
|
|
|
|
|
|
|
|
|
|
2012-06-24 04:38:33 +00:00
|
|
|
|
/**
|
|
|
|
|
* supported CJK encodings
|
2012-07-07 11:37:26 +00:00
|
|
|
|
* JIS does not work with LyX's encoding conversion
|
2012-06-24 04:38:33 +00:00
|
|
|
|
*/
|
|
|
|
|
const char * const supported_CJK_encodings[] = {
|
2012-12-30 19:39:33 +00:00
|
|
|
|
"EUC-JP", "KS", "GB", "UTF8",
|
|
|
|
|
"Bg5", /*"JIS",*/ "SJIS", 0};
|
2012-06-24 04:38:33 +00:00
|
|
|
|
|
|
|
|
|
/**
|
2012-06-24 12:39:57 +00:00
|
|
|
|
* the same as supported_CJK_encodings with their corresponding LyX language name
|
2012-12-30 19:39:33 +00:00
|
|
|
|
* FIXME: The mapping "UTF8" => "chinese-traditional" is only correct for files
|
|
|
|
|
* created by LyX.
|
|
|
|
|
* NOTE: "Bg5", "JIS" and "SJIS" are not supported by LyX, on re-export the
|
|
|
|
|
* encodings "UTF8", "EUC-JP" and "EUC-JP" will be used.
|
2012-06-24 04:38:33 +00:00
|
|
|
|
* please keep this in sync with supported_CJK_encodings line by line!
|
|
|
|
|
*/
|
2012-10-06 07:38:14 +00:00
|
|
|
|
const char * const supported_CJK_languages[] = {
|
2012-12-30 19:39:33 +00:00
|
|
|
|
"japanese-cjk", "korean", "chinese-simplified", "chinese-traditional",
|
|
|
|
|
"chinese-traditional", /*"japanese-cjk",*/ "japanese-cjk", 0};
|
2012-06-24 04:38:33 +00:00
|
|
|
|
|
2004-08-10 09:40:53 +00:00
|
|
|
|
/*!
|
|
|
|
|
* natbib commands.
|
2010-12-17 21:02:39 +00:00
|
|
|
|
* The starred forms are also known except for "citefullauthor",
|
|
|
|
|
* "citeyear" and "citeyearpar".
|
2004-08-10 09:40:53 +00:00
|
|
|
|
*/
|
|
|
|
|
char const * const known_natbib_commands[] = { "cite", "citet", "citep",
|
|
|
|
|
"citealt", "citealp", "citeauthor", "citeyear", "citeyearpar",
|
|
|
|
|
"citefullauthor", "Citet", "Citep", "Citealt", "Citealp", "Citeauthor", 0 };
|
|
|
|
|
|
|
|
|
|
/*!
|
|
|
|
|
* jurabib commands.
|
|
|
|
|
* No starred form other than "cite*" known.
|
|
|
|
|
*/
|
|
|
|
|
char const * const known_jurabib_commands[] = { "cite", "citet", "citep",
|
2006-10-15 08:32:37 +00:00
|
|
|
|
"citealt", "citealp", "citeauthor", "citeyear", "citeyearpar",
|
2004-08-10 09:40:53 +00:00
|
|
|
|
// jurabib commands not (yet) supported by LyX:
|
2006-10-15 08:32:37 +00:00
|
|
|
|
// "fullcite",
|
2004-08-10 09:40:53 +00:00
|
|
|
|
// "footcite", "footcitet", "footcitep", "footcitealt", "footcitealp",
|
|
|
|
|
// "footciteauthor", "footciteyear", "footciteyearpar",
|
2010-12-17 21:02:39 +00:00
|
|
|
|
"citefield", "citetitle", 0 };
|
2004-08-10 09:40:53 +00:00
|
|
|
|
|
2004-06-28 06:53:12 +00:00
|
|
|
|
/// LaTeX names for quotes
|
2007-12-09 13:40:03 +00:00
|
|
|
|
char const * const known_quotes[] = { "dq", "guillemotleft", "flqq", "og",
|
|
|
|
|
"guillemotright", "frqq", "fg", "glq", "glqq", "textquoteleft", "grq", "grqq",
|
|
|
|
|
"quotedblbase", "textquotedblleft", "quotesinglbase", "textquoteright", "flq",
|
|
|
|
|
"guilsinglleft", "frq", "guilsinglright", 0};
|
2003-04-17 09:47:21 +00:00
|
|
|
|
|
2004-06-28 06:53:12 +00:00
|
|
|
|
/// the same as known_quotes with .lyx names
|
2007-12-09 13:40:03 +00:00
|
|
|
|
char const * const known_coded_quotes[] = { "prd", "ard", "ard", "ard",
|
|
|
|
|
"ald", "ald", "ald", "gls", "gld", "els", "els", "grd",
|
|
|
|
|
"gld", "grd", "gls", "ers", "fls",
|
|
|
|
|
"fls", "frs", "frs", 0};
|
2003-04-23 15:14:43 +00:00
|
|
|
|
|
2004-06-28 06:53:12 +00:00
|
|
|
|
/// LaTeX names for font sizes
|
|
|
|
|
char const * const known_sizes[] = { "tiny", "scriptsize", "footnotesize",
|
2003-04-23 15:14:43 +00:00
|
|
|
|
"small", "normalsize", "large", "Large", "LARGE", "huge", "Huge", 0};
|
|
|
|
|
|
2011-01-07 19:58:31 +00:00
|
|
|
|
/// the same as known_sizes with .lyx names
|
|
|
|
|
char const * const known_coded_sizes[] = { "tiny", "scriptsize", "footnotesize",
|
2008-04-27 10:54:06 +00:00
|
|
|
|
"small", "normal", "large", "larger", "largest", "huge", "giant", 0};
|
2003-04-17 09:47:21 +00:00
|
|
|
|
|
2004-06-28 06:53:12 +00:00
|
|
|
|
/// LaTeX 2.09 names for font families
|
|
|
|
|
char const * const known_old_font_families[] = { "rm", "sf", "tt", 0};
|
|
|
|
|
|
|
|
|
|
/// LaTeX names for font families
|
|
|
|
|
char const * const known_font_families[] = { "rmfamily", "sffamily",
|
|
|
|
|
"ttfamily", 0};
|
|
|
|
|
|
2011-11-20 17:03:00 +00:00
|
|
|
|
/// LaTeX names for font family changing commands
|
|
|
|
|
char const * const known_text_font_families[] = { "textrm", "textsf",
|
|
|
|
|
"texttt", 0};
|
|
|
|
|
|
|
|
|
|
/// The same as known_old_font_families, known_font_families and
|
|
|
|
|
/// known_text_font_families with .lyx names
|
2004-06-28 06:53:12 +00:00
|
|
|
|
char const * const known_coded_font_families[] = { "roman", "sans",
|
|
|
|
|
"typewriter", 0};
|
|
|
|
|
|
|
|
|
|
/// LaTeX 2.09 names for font series
|
|
|
|
|
char const * const known_old_font_series[] = { "bf", 0};
|
|
|
|
|
|
|
|
|
|
/// LaTeX names for font series
|
|
|
|
|
char const * const known_font_series[] = { "bfseries", "mdseries", 0};
|
|
|
|
|
|
2011-11-20 17:03:00 +00:00
|
|
|
|
/// LaTeX names for font series changing commands
|
|
|
|
|
char const * const known_text_font_series[] = { "textbf", "textmd", 0};
|
|
|
|
|
|
|
|
|
|
/// The same as known_old_font_series, known_font_series and
|
|
|
|
|
/// known_text_font_series with .lyx names
|
2004-06-28 06:53:12 +00:00
|
|
|
|
char const * const known_coded_font_series[] = { "bold", "medium", 0};
|
|
|
|
|
|
|
|
|
|
/// LaTeX 2.09 names for font shapes
|
|
|
|
|
char const * const known_old_font_shapes[] = { "it", "sl", "sc", 0};
|
|
|
|
|
|
|
|
|
|
/// LaTeX names for font shapes
|
|
|
|
|
char const * const known_font_shapes[] = { "itshape", "slshape", "scshape",
|
|
|
|
|
"upshape", 0};
|
|
|
|
|
|
2011-11-20 17:03:00 +00:00
|
|
|
|
/// LaTeX names for font shape changing commands
|
|
|
|
|
char const * const known_text_font_shapes[] = { "textit", "textsl", "textsc",
|
|
|
|
|
"textup", 0};
|
|
|
|
|
|
|
|
|
|
/// The same as known_old_font_shapes, known_font_shapes and
|
|
|
|
|
/// known_text_font_shapes with .lyx names
|
2004-06-28 06:53:12 +00:00
|
|
|
|
char const * const known_coded_font_shapes[] = { "italic", "slanted",
|
|
|
|
|
"smallcaps", "up", 0};
|
|
|
|
|
|
2011-11-20 17:03:00 +00:00
|
|
|
|
/// Known special characters which need skip_spaces_braces() afterwards
|
2013-02-24 18:00:17 +00:00
|
|
|
|
char const * const known_special_chars[] = {"ldots",
|
|
|
|
|
"lyxarrow", "textcompwordmark",
|
|
|
|
|
"slash", "textasciitilde", "textasciicircum", "textbackslash", 0};
|
2011-11-20 17:03:00 +00:00
|
|
|
|
|
|
|
|
|
/// the same as known_special_chars with .lyx names
|
2013-02-24 18:00:17 +00:00
|
|
|
|
char const * const known_coded_special_chars[] = {"\\SpecialChar \\ldots{}\n",
|
|
|
|
|
"\\SpecialChar \\menuseparator\n", "\\SpecialChar \\textcompwordmark{}\n",
|
|
|
|
|
"\\SpecialChar \\slash{}\n", "~", "^", "\n\\backslash\n", 0};
|
2011-11-20 17:03:00 +00:00
|
|
|
|
|
2004-07-29 17:03:37 +00:00
|
|
|
|
/*!
|
|
|
|
|
* Graphics file extensions known by the dvips driver of the graphics package.
|
|
|
|
|
* These extensions are used to complete the filename of an included
|
|
|
|
|
* graphics file if it does not contain an extension.
|
|
|
|
|
* The order must be the same that latex uses to find a file, because we
|
|
|
|
|
* will use the first extension that matches.
|
|
|
|
|
* This is only an approximation for the common cases. If we would want to
|
|
|
|
|
* do it right in all cases, we would need to know which graphics driver is
|
|
|
|
|
* used and know the extensions of every driver of the graphics package.
|
|
|
|
|
*/
|
|
|
|
|
char const * const known_dvips_graphics_formats[] = {"eps", "ps", "eps.gz",
|
|
|
|
|
"ps.gz", "eps.Z", "ps.Z", 0};
|
|
|
|
|
|
|
|
|
|
/*!
|
|
|
|
|
* Graphics file extensions known by the pdftex driver of the graphics package.
|
2005-04-15 14:04:13 +00:00
|
|
|
|
* \sa known_dvips_graphics_formats
|
2004-07-29 17:03:37 +00:00
|
|
|
|
*/
|
|
|
|
|
char const * const known_pdftex_graphics_formats[] = {"png", "pdf", "jpg",
|
|
|
|
|
"mps", "tif", 0};
|
|
|
|
|
|
2005-04-15 14:04:13 +00:00
|
|
|
|
/*!
|
|
|
|
|
* Known file extensions for TeX files as used by \\include.
|
|
|
|
|
*/
|
|
|
|
|
char const * const known_tex_extensions[] = {"tex", 0};
|
|
|
|
|
|
2006-08-24 16:34:47 +00:00
|
|
|
|
/// spaces known by InsetSpace
|
2011-10-23 11:16:11 +00:00
|
|
|
|
char const * const known_spaces[] = { " ", "space", ",",
|
|
|
|
|
"thinspace", "quad", "qquad", "enspace", "enskip",
|
|
|
|
|
"negthinspace", "negmedspace", "negthickspace", "textvisiblespace",
|
|
|
|
|
"hfill", "dotfill", "hrulefill", "leftarrowfill", "rightarrowfill",
|
|
|
|
|
"upbracefill", "downbracefill", 0};
|
2006-08-24 16:34:47 +00:00
|
|
|
|
|
|
|
|
|
/// the same as known_spaces with .lyx names
|
|
|
|
|
char const * const known_coded_spaces[] = { "space{}", "space{}",
|
|
|
|
|
"thinspace{}", "thinspace{}", "quad{}", "qquad{}", "enspace{}", "enskip{}",
|
2011-10-23 11:16:11 +00:00
|
|
|
|
"negthinspace{}", "negmedspace{}", "negthickspace{}", "textvisiblespace{}",
|
|
|
|
|
"hfill{}", "dotfill{}", "hrulefill{}", "leftarrowfill{}", "rightarrowfill{}",
|
|
|
|
|
"upbracefill{}", "downbracefill{}", 0};
|
2006-08-24 16:34:47 +00:00
|
|
|
|
|
2010-12-30 21:56:55 +00:00
|
|
|
|
/// These are translated by LyX to commands like "\\LyX{}", so we have to put
|
|
|
|
|
/// them in ERT. "LaTeXe" must come before "LaTeX"!
|
|
|
|
|
char const * const known_phrases[] = {"LyX", "TeX", "LaTeXe", "LaTeX", 0};
|
|
|
|
|
char const * const known_coded_phrases[] = {"LyX", "TeX", "LaTeX2e", "LaTeX", 0};
|
2010-12-31 11:59:33 +00:00
|
|
|
|
int const known_phrase_lengths[] = {3, 5, 7, 0};
|
2010-12-30 21:56:55 +00:00
|
|
|
|
|
2013-02-18 03:50:18 +00:00
|
|
|
|
/// known TIPA combining diacritical marks
|
|
|
|
|
char const * const known_tipa_marks[] = {"textsubwedge", "textsubumlaut",
|
|
|
|
|
"textsubtilde", "textseagull", "textsubbridge", "textinvsubbridge",
|
|
|
|
|
"textsubsquare", "textsubrhalfring", "textsublhalfring", "textsubplus",
|
|
|
|
|
"textovercross", "textsubarch", "textsuperimposetilde", "textraising",
|
|
|
|
|
"textlowering", "textadvancing", "textretracting", "textdoublegrave",
|
2013-02-18 04:03:18 +00:00
|
|
|
|
"texthighrise", "textlowrise", "textrisefall", "textsyllabic",
|
|
|
|
|
"textsubring", 0};
|
2013-02-18 03:50:18 +00:00
|
|
|
|
|
2013-02-18 13:07:13 +00:00
|
|
|
|
/// TIPA tones that need special handling
|
2013-02-18 03:50:18 +00:00
|
|
|
|
char const * const known_tones[] = {"15", "51", "45", "12", "454", 0};
|
|
|
|
|
|
2011-10-27 02:29:03 +00:00
|
|
|
|
// string to store the float type to be able to determine the type of subfloats
|
|
|
|
|
string float_type = "";
|
|
|
|
|
|
2004-06-28 06:53:12 +00:00
|
|
|
|
|
2010-12-12 17:47:36 +00:00
|
|
|
|
/// splits "x=z, y=b" into a map and an ordered keyword vector
void split_map(string const & s, map<string, string> & res, vector<string> & keys)
{
	vector<string> v;
	split(s, v);
	res.clear();
	// keys preserves the original order of appearance, which the map
	// alone cannot do.
	keys.resize(v.size());
	for (size_t i = 0; i < v.size(); ++i) {
		size_t const pos = v[i].find('=');
		// NOTE(review): if an item contains no '=', pos == npos and
		// both index and value become the whole (trimmed) item,
		// because npos + 1 wraps around to 0 — confirm intended.
		string const index = trimSpaceAndEol(v[i].substr(0, pos));
		string const value = trimSpaceAndEol(v[i].substr(pos + 1, string::npos));
		res[index] = value;
		keys[i] = index;
	}
}
|
|
|
|
|
|
2003-12-10 08:33:37 +00:00
|
|
|
|
|
|
|
|
|
/*!
 * Split a LaTeX length into value and unit.
 * The latter can be a real unit like "pt", or a latex length variable
 * like "\textwidth". The unit may contain additional stuff like glue
 * lengths, but we don't care, because such lengths are ERT anyway.
 * \returns true if \p value and \p unit are valid.
 */
bool splitLatexLength(string const & len, string & value, string & unit)
{
	if (len.empty())
		return false;
	// First position that cannot belong to the numeric factor.
	const string::size_type i = len.find_first_not_of(" -+0123456789.,");
	//'4,5' is a valid LaTeX length number. Change it to '4.5'
	string const length = subst(len, ',', '.');
	if (i == string::npos)
		return false;
	if (i == 0) {
		if (len[0] == '\\') {
			// We had something like \textwidth without a factor
			value = "1.0";
		} else {
			return false;
		}
	} else {
		// Numeric part, taken from the comma-fixed copy. Note that
		// the index i was computed on the original string; both have
		// the same length, so this is safe.
		value = trimSpaceAndEol(string(length, 0, i));
	}
	if (value == "-")
		value = "-1.0";
	// 'cM' is a valid LaTeX length unit. Change it to 'cm'
	if (contains(len, '\\'))
		// Length macros like \textwidth are case sensitive: keep as is.
		unit = trimSpaceAndEol(string(len, i));
	else
		unit = ascii_lowercase(trimSpaceAndEol(string(len, i)));
	return true;
}
|
|
|
|
|
|
|
|
|
|
|
2009-06-14 21:29:24 +00:00
|
|
|
|
/// A simple function to translate a latex length to something LyX can
/// understand. Not perfect, but rather best-effort.
/// \returns false only if splitting the length failed; a recognized
/// length macro is rewritten into LyX percent units.
bool translate_len(string const & length, string & valstring, string & unit)
{
	if (!splitLatexLength(length, valstring, unit))
		return false;
	// LyX uses percent values
	double value;
	istringstream iss(valstring);
	iss >> value;
	value *= 100;
	ostringstream oss;
	oss << value;
	string const percentval = oss.str();
	// a normal length
	if (unit.empty() || unit[0] != '\\')
		return true;
	// Anything after the first space (e.g. glue) is kept verbatim and
	// re-appended to the translated unit.
	// NOTE(review): the comparisons below test the *whole* unit string,
	// so a unit carrying a glue suffix never matches and endlen stays
	// unused in that case — confirm this is intended.
	string::size_type const i = unit.find(' ');
	string const endlen = (i == string::npos) ? string() : string(unit, i);
	if (unit == "\\textwidth") {
		valstring = percentval;
		unit = "text%" + endlen;
	} else if (unit == "\\columnwidth") {
		valstring = percentval;
		unit = "col%" + endlen;
	} else if (unit == "\\paperwidth") {
		valstring = percentval;
		unit = "page%" + endlen;
	} else if (unit == "\\linewidth") {
		valstring = percentval;
		unit = "line%" + endlen;
	} else if (unit == "\\paperheight") {
		valstring = percentval;
		unit = "pheight%" + endlen;
	} else if (unit == "\\textheight") {
		valstring = percentval;
		unit = "theight%" + endlen;
	}
	return true;
}
|
|
|
|
|
|
2006-03-16 15:37:35 +00:00
|
|
|
|
}
|
|
|
|
|
|
2003-12-19 10:40:07 +00:00
|
|
|
|
|
|
|
|
|
/// Convenience wrapper: translate a LaTeX length to LyX syntax and
/// return it as one string. Invalid input is returned unchanged.
string translate_len(string const & length)
{
	string val;
	string un;
	bool const ok = translate_len(length, val, un);
	// Fall back to the raw input when it cannot be parsed.
	return ok ? val + un : length;
}
|
|
|
|
|
|
|
|
|
|
|
2006-03-16 15:37:35 +00:00
|
|
|
|
namespace {
|
|
|
|
|
|
2003-12-19 10:40:07 +00:00
|
|
|
|
/*!
 * Translates a LaTeX length into \p value, \p unit and
 * \p special parts suitable for a box inset.
 * The difference from translate_len() is that a box inset knows about
 * some special "units" that are stored in \p special.
 */
void translate_box_len(string const & length, string & value, string & unit, string & special)
{
	if (translate_len(length, value, unit)) {
		if (unit == "\\height" || unit == "\\depth" ||
		    unit == "\\totalheight" || unit == "\\width") {
			// Strip the backslash: "height", "depth", ...
			special = unit.substr(1);
			// The unit is not used, but LyX requires a dummy setting
			unit = "in";
		} else
			special = "none";
	} else {
		// Unparsable: hand the raw string over as the unit.
		value.clear();
		unit = length;
		special = "none";
	}
}
|
2003-04-17 09:47:21 +00:00
|
|
|
|
|
2003-09-09 18:27:24 +00:00
|
|
|
|
|
2004-07-29 17:03:37 +00:00
|
|
|
|
/*!
 * Find a file with basename \p name in path \p path and an extension
 * in \p extensions.
 * \returns the name with the first matching extension appended, or an
 * empty string if no candidate file exists.
 */
string find_file(string const & name, string const & path,
		 char const * const * extensions)
{
	// extensions is a null-terminated array; try them in order, since
	// the order mirrors LaTeX's own search order.
	for (char const * const * what = extensions; *what; ++what) {
		string const trial = addExtension(name, *what);
		if (makeAbsPath(trial, path).exists())
			return trial;
	}
	return string();
}
|
|
|
|
|
|
|
|
|
|
|
2003-04-17 09:47:21 +00:00
|
|
|
|
/// Emit the opening marker of a LyX inset named \p name.
void begin_inset(ostream & os, string const & name)
{
	// Every inset starts on a fresh line.
	os << '\n' << "\\begin_inset " << name;
}
|
|
|
|
|
|
2010-12-17 21:02:39 +00:00
|
|
|
|
|
2009-06-13 15:34:31 +00:00
|
|
|
|
/// Emit the opening of a CommandInset of LyX type \p name that maps to
/// the LaTeX command \p latexname.
void begin_command_inset(ostream & os, string const & name,
	string const & latexname)
{
	// Equivalent to begin_inset(os, "CommandInset ") followed by the
	// command-specific header lines (helper inlined here).
	os << "\n\\begin_inset CommandInset " << name
	   << "\nLatexCommand " << latexname << '\n';
}
|
2009-06-11 23:46:41 +00:00
|
|
|
|
|
2003-04-17 09:47:21 +00:00
|
|
|
|
|
|
|
|
|
/// Emit the closing marker of a LyX inset, followed by a blank line.
void end_inset(ostream & os)
{
	os << "\n\\end_inset" << "\n\n";
}
|
|
|
|
|
|
|
|
|
|
|
2010-12-19 14:54:23 +00:00
|
|
|
|
/// Consume an *empty* brace pair "{}" if it is the next thing in the
/// input. \returns true iff a pair was eaten.
bool skip_braces(Parser & p)
{
	if (p.next_token().cat() != catBegin)
		return false;
	p.get_token();
	if (p.next_token().cat() == catEnd) {
		p.get_token();
		return true;
	}
	// Non-empty group: undo the '{' we consumed.
	p.putback();
	return false;
}
|
|
|
|
|
|
|
|
|
|
|
2011-02-18 20:55:50 +00:00
|
|
|
|
/// replace LaTeX commands in \p s from the unicodesymbols file with their
/// unicode points
docstring convert_unicodesymbols(docstring s)
{
	odocstringstream os;
	for (size_t i = 0; i < s.size();) {
		// Plain characters are copied verbatim; only a backslash
		// starts a candidate LaTeX command.
		if (s[i] != '\\') {
			os.put(s[i++]);
			continue;
		}
		// Try to convert the command at the head of the remainder.
		s = s.substr(i);
		bool termination;
		docstring rem;
		set<string> req;
		docstring parsed = encodings.fromLaTeXCommand(s,
			Encodings::TEXT_CMD, termination, rem, &req);
		// Symbols may require LaTeX packages; record them so the
		// preamble loads them automatically.
		set<string>::const_iterator it = req.begin();
		set<string>::const_iterator en = req.end();
		for (; it != en; ++it)
			preamble.registerAutomaticallyLoadedPackage(*it);
		os << parsed;
		s = rem;
		// If the remainder still starts with a backslash the command
		// could not be converted; restarting at i = 1 avoids retrying
		// the same command forever.
		// NOTE(review): this appears to drop that leading backslash
		// from the output — confirm intended.
		if (s.empty() || s[0] != '\\')
			i = 0;
		else
			i = 1;
	}
	return os.str();
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/// try to convert \p s to a valid InsetCommand argument
/// (unicode symbol commands replaced, newlines flattened to spaces)
string convert_command_inset_arg(string s)
{
	if (isAscii(s))
		// since we don't know the input encoding we can't use from_utf8
		s = to_utf8(convert_unicodesymbols(from_ascii(s)));
	// LyX cannot handle newlines in a latex command
	return subst(s, "\n", " ");
}
|
|
|
|
|
|
|
|
|
|
|
2013-02-04 14:14:30 +00:00
|
|
|
|
/// Write \p s as ERT content: backslashes become the \backslash token
/// and newlines become paragraph breaks.
void output_ert(ostream & os, string const & s, Context & context)
{
	context.check_layout(os);
	for (string::const_iterator it = s.begin(), et = s.end(); it != et; ++it) {
		if (*it == '\\')
			// .lyx files store a literal backslash as \backslash
			os << "\n\\backslash\n";
		else if (*it == '\n') {
			// One source newline = one new paragraph in the ERT.
			context.new_paragraph(os);
			context.check_layout(os);
		} else
			os << *it;
	}
	context.check_end_layout(os);
}
|
|
|
|
|
|
|
|
|
|
|
2013-02-04 14:14:30 +00:00
|
|
|
|
/// Wrap \p s in a collapsed ERT inset and write it to \p os.
void output_ert_inset(ostream & os, string const & s, Context & context)
{
	// We must have a valid layout before outputting the ERT inset.
	context.check_layout(os);
	Context newcontext(true, context.textclass);
	InsetLayout const & layout = context.textclass.insetLayout(from_ascii("ERT"));
	// Respect a forced plain layout of the ERT inset layout, like
	// parse_text_in_inset() does for general insets.
	if (layout.forcePlainLayout())
		newcontext.layout = &context.textclass.plainLayout();
	begin_inset(os, "ERT");
	os << "\nstatus collapsed\n";
	output_ert(os, s, newcontext);
	end_inset(os);
}
|
|
|
|
|
|
|
|
|
|
|
2011-01-23 21:10:20 +00:00
|
|
|
|
/// Look up the paragraph layout \p name; if the text class does not
/// know it, try to load a module that provides it and look again.
/// \returns 0 when the layout cannot be found at all.
Layout const * findLayout(TextClass const & textclass, string const & name, bool command)
{
	// Fast path: the layout is already known to the text class.
	if (Layout const * found = findLayoutWithoutModule(textclass, name, command))
		return found;
	// No module could supply it: give up.
	if (!checkModule(name, command))
		return 0;
	// A module was loaded; repeat the lookup.
	return findLayoutWithoutModule(textclass, name, command);
}
|
|
|
|
|
|
|
|
|
|
|
2011-01-07 21:04:16 +00:00
|
|
|
|
/// Look up the inset layout \p name; if the text class does not know
/// it, try to load a module that provides it and look again.
/// \returns 0 when the inset layout cannot be found at all.
InsetLayout const * findInsetLayout(TextClass const & textclass, string const & name, bool command)
{
	// Fast path: the inset layout is already known to the text class.
	if (InsetLayout const * found = findInsetLayoutWithoutModule(textclass, name, command))
		return found;
	// No module could supply it: give up.
	if (!checkModule(name, command))
		return 0;
	// A module was loaded; repeat the lookup.
	return findInsetLayoutWithoutModule(textclass, name, command);
}
|
|
|
|
|
|
|
|
|
|
|
2004-06-18 06:47:19 +00:00
|
|
|
|
void eat_whitespace(Parser &, ostream &, Context &, bool);
|
|
|
|
|
|
|
|
|
|
|
2010-12-19 14:54:23 +00:00
|
|
|
|
/*!
 * Skips whitespace and braces.
 * This should be called after a command has been parsed that is not put into
 * ERT, and where LyX adds "{}" if needed.
 */
void skip_spaces_braces(Parser & p, bool keepws = false)
{
	/* The following four examples produce the same typeset output and
	   should be handled by this function:
	   - abc \j{} xyz
	   - abc \j {} xyz
	   - abc \j
	     {} xyz
	   - abc \j %comment
	     {} xyz
	*/
	// Unfortunately we need to skip comments, too.
	// We can't use eat_whitespace since writing them after the {}
	// results in different output in some cases.
	bool const skipped_spaces = p.skip_spaces(true);
	bool const skipped_braces = skip_braces(p);
	// With keepws, whitespace that was not followed by "{}" is kept in
	// the stream for later handling.
	if (keepws && skipped_spaces && !skipped_braces)
		// put back the space (it is better handled by check_space)
		p.unskip_spaces(true);
}
|
|
|
|
|
|
|
|
|
|
|
2003-08-04 10:26:10 +00:00
|
|
|
|
/*!
 * Output a LaTeX command that maps to a LyX layout (\p newlayout), e.g. a
 * section heading, together with its optional and required arguments.
 * The parent layout is closed, the new layout is opened, the command
 * arguments are converted to "Argument" insets, and the command body is
 * parsed as the layout contents.
 */
void output_command_layout(ostream & os, Parser & p, bool outer,
			   Context & parent_context,
			   Layout const * newlayout)
{
	TeXFont const oldFont = parent_context.font;
	// save the current font size
	string const size = oldFont.size;
	// reset the font size to default, because the font size switches
	// don't affect section headings and the like
	parent_context.font.size = Context::normalfont.size;
	// we only need to write the font change if we have an open layout
	if (!parent_context.atParagraphStart())
		output_font_change(os, oldFont, parent_context.font);
	parent_context.check_end_layout(os);
	Context context(true, parent_context.textclass, newlayout,
			parent_context.layout, parent_context.font);
	if (parent_context.deeper_paragraph) {
		// We are beginning a nested environment after a
		// deeper paragraph inside the outer list environment.
		// Therefore we don't need to output a "begin deeper".
		context.need_end_deeper = true;
	}
	context.check_deeper(os);
	context.check_layout(os);
	// FIXME: Adjust to format 446!
	// Since format 446, layouts do not require anymore all optional
	// arguments before the required ones. Needs to be implemented!
	// Optional arguments: consume "[...]" groups up to the number the
	// layout declares, each becoming an Argument inset.
	int optargs = 0;
	while (optargs < context.layout->optArgs()) {
		eat_whitespace(p, os, context, false);
		// Stop as soon as the next token cannot start an optional
		// argument (a command token, or anything other than '[').
		if (p.next_token().cat() == catEscape ||
		    p.next_token().character() != '[')
			break;
		p.get_token(); // eat '['
		// FIXME: Just a workaround. InsetArgument::updateBuffer
		// will compute a proper ID for all "999" Arguments
		// (which is also what lyx2lyx produces).
		// However, tex2lyx should be able to output proper IDs
		// itself.
		begin_inset(os, "Argument 999\n");
		os << "status collapsed\n\n";
		parse_text_in_inset(p, os, FLAG_BRACK_LAST, outer, context);
		end_inset(os);
		eat_whitespace(p, os, context, false);
		++optargs;
	}
	// Required arguments: consume "{...}" groups the same way.
	int reqargs = 0;
	while (reqargs < context.layout->requiredArgs()) {
		eat_whitespace(p, os, context, false);
		if (p.next_token().cat() != catBegin)
			break;
		p.get_token(); // eat '{'
		// FIXME: Just a workaround. InsetArgument::updateBuffer
		// will compute a proper ID for all "999" Arguments
		// (which is also what lyx2lyx produces).
		// However, tex2lyx should be able to output proper IDs
		// itself.
		begin_inset(os, "Argument 999\n");
		os << "status collapsed\n\n";
		parse_text_in_inset(p, os, FLAG_BRACE_LAST, outer, context);
		end_inset(os);
		eat_whitespace(p, os, context, false);
		++reqargs;
	}
	parse_text(p, os, FLAG_ITEM, outer, context);
	context.check_end_layout(os);
	if (parent_context.deeper_paragraph) {
		// We must suppress the "end deeper" because we
		// suppressed the "begin deeper" above.
		context.need_end_deeper = false;
	}
	context.check_end_deeper(os);
	// We don't need really a new paragraph, but
	// we must make sure that the next item gets a \begin_layout.
	parent_context.new_paragraph(os);
	// Set the font size to the original value. No need to output it here
	// (Context::begin_layout() will do that if needed)
	parent_context.font.size = size;
}
|
|
|
|
|
|
2003-07-28 21:58:09 +00:00
|
|
|
|
|
2003-11-05 10:14:13 +00:00
|
|
|
|
/*!
|
|
|
|
|
* Output a space if necessary.
|
|
|
|
|
* This function gets called for every whitespace token.
|
|
|
|
|
*
|
|
|
|
|
* We have three cases here:
|
|
|
|
|
* 1. A space must be suppressed. Example: The lyxcode case below
|
|
|
|
|
* 2. A space may be suppressed. Example: Spaces before "\par"
|
|
|
|
|
* 3. A space must not be suppressed. Example: A space between two words
|
|
|
|
|
*
|
|
|
|
|
* We currently handle only 1. and 3 and from 2. only the case of
|
|
|
|
|
* spaces before newlines as a side effect.
|
|
|
|
|
*
|
|
|
|
|
* 2. could be used to suppress as many spaces as possible. This has two effects:
|
|
|
|
|
* - Reimporting LyX generated LaTeX files changes almost no whitespace
|
|
|
|
|
* - Superflous whitespace from non LyX generated LaTeX files is removed.
|
|
|
|
|
* The drawback is that the logic inside the function becomes
|
|
|
|
|
* complicated, and that is the reason why it is not implemented.
|
|
|
|
|
*/
|
2008-11-15 20:30:45 +00:00
|
|
|
|
void check_space(Parser & p, ostream & os, Context & context)
|
2003-11-05 10:14:13 +00:00
|
|
|
|
{
|
|
|
|
|
Token const next = p.next_token();
|
|
|
|
|
Token const curr = p.curr_token();
|
|
|
|
|
// A space before a single newline and vice versa must be ignored
|
|
|
|
|
// LyX emits a newline before \end{lyxcode}.
|
|
|
|
|
// This newline must be ignored,
|
|
|
|
|
// otherwise LyX will add an additional protected space.
|
|
|
|
|
if (next.cat() == catSpace ||
|
|
|
|
|
next.cat() == catNewline ||
|
|
|
|
|
(next.cs() == "end" && context.layout->free_spacing && curr.cat() == catNewline)) {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
os << ' ';
|
|
|
|
|
}
|
|
|
|
|
|
2003-11-19 10:35:50 +00:00
|
|
|
|
|
2005-07-14 15:19:01 +00:00
|
|
|
|
/*!
 * Parse all arguments of \p command.
 * The kind of each argument is given by \p template_arguments; everything
 * that cannot be represented natively is accumulated in the string \c ert
 * and written out as ERT insets, while 'required'/'req_group' arguments are
 * parsed as regular text between ERT fragments.
 */
void parse_arguments(string const & command,
	vector<ArgumentType> const & template_arguments,
	Parser & p, ostream & os, bool outer, Context & context)
{
	// Accumulates the raw LaTeX that must be preserved verbatim.
	string ert = command;
	size_t no_arguments = template_arguments.size();
	for (size_t i = 0; i < no_arguments; ++i) {
		switch (template_arguments[i]) {
		case required:
		case req_group:
			// This argument contains regular LaTeX
			output_ert_inset(os, ert + '{', context);
			eat_whitespace(p, os, context, false);
			if (template_arguments[i] == required)
				parse_text(p, os, FLAG_ITEM, outer, context);
			else
				parse_text_snippet(p, os, FLAG_ITEM, outer, context);
			// Only the closing brace remains to be emitted as ERT.
			ert = "}";
			break;
		case item:
			// This argument consists only of a single item.
			// The presence of '{' or not must be preserved.
			p.skip_spaces();
			if (p.next_token().cat() == catBegin)
				ert += '{' + p.verbatim_item() + '}';
			else
				ert += p.verbatim_item();
			break;
		case displaymath:
		case verbatim:
			// This argument may contain special characters
			ert += '{' + p.verbatim_item() + '}';
			break;
		case optional:
		case opt_group:
			// true because we must not eat whitespace
			// if an optional arg follows we must not strip the
			// brackets from this one
			if (i < no_arguments - 1 &&
			    template_arguments[i+1] == optional)
				ert += p.getFullOpt(true);
			else
				ert += p.getOpt(true);
			break;
		}
	}
	output_ert_inset(os, ert, context);
}
|
|
|
|
|
|
|
|
|
|
|
2003-11-19 10:35:50 +00:00
|
|
|
|
/*!
|
2004-06-18 06:47:19 +00:00
|
|
|
|
* Check whether \p command is a known command. If yes,
|
2003-11-19 10:35:50 +00:00
|
|
|
|
* handle the command with all arguments.
|
|
|
|
|
* \return true if the command was parsed, false otherwise.
|
|
|
|
|
*/
|
|
|
|
|
bool parse_command(string const & command, Parser & p, ostream & os,
|
2004-10-05 10:11:42 +00:00
|
|
|
|
bool outer, Context & context)
|
2003-11-19 10:35:50 +00:00
|
|
|
|
{
|
|
|
|
|
if (known_commands.find(command) != known_commands.end()) {
|
2005-07-14 15:19:01 +00:00
|
|
|
|
parse_arguments(command, known_commands[command], p, os,
|
2006-04-05 23:56:29 +00:00
|
|
|
|
outer, context);
|
2003-11-19 10:35:50 +00:00
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2003-12-19 10:40:07 +00:00
|
|
|
|
/// Parses a minipage or parbox (and, via the outer/inner type parameters,
/// related box constructs such as \framebox, \makebox, \mbox, \fbox and the
/// shaded environment). Depending on whether the box can be represented
/// natively, it is emitted either as a LyX "Box" inset or as ERT.
void parse_box(Parser & p, ostream & os, unsigned outer_flags,
	unsigned inner_flags, bool outer, Context & parent_context,
	string const & outer_type, string const & special,
	string const & inner_type)
{
	string position;
	string inner_pos;
	string hor_pos = "c";
	// We need to set the height to the LaTeX default of 1\\totalheight
	// for the case when no height argument is given
	string height_value = "1";
	string height_unit = "in";
	string height_special = "totalheight";
	string latex_height;
	string width_value;
	string width_unit;
	string latex_width;
	string width_special = "none";
	// Read up to three optional arguments of the inner box:
	// [position][height][inner_pos] (for makebox: [width][hor_pos]).
	if (!inner_type.empty() && p.hasOpt()) {
		if (inner_type != "makebox")
			position = p.getArg('[', ']');
		else {
			// For \makebox the first optional argument is the width.
			latex_width = p.getArg('[', ']');
			translate_box_len(latex_width, width_value, width_unit, width_special);
			position = "t";
		}
		if (position != "t" && position != "c" && position != "b") {
			cerr << "invalid position " << position << " for "
			     << inner_type << endl;
			position = "c";
		}
		if (p.hasOpt()) {
			if (inner_type != "makebox") {
				latex_height = p.getArg('[', ']');
				translate_box_len(latex_height, height_value, height_unit, height_special);
			} else {
				// For \makebox the second optional argument is
				// the horizontal alignment.
				string const opt = p.getArg('[', ']');
				if (!opt.empty()) {
					hor_pos = opt;
					if (hor_pos != "l" && hor_pos != "c" &&
					    hor_pos != "r" && hor_pos != "s") {
						cerr << "invalid hor_pos " << hor_pos
						     << " for " << inner_type << endl;
						hor_pos = "c";
					}
				}
			}

			if (p.hasOpt()) {
				inner_pos = p.getArg('[', ']');
				if (inner_pos != "c" && inner_pos != "t" &&
				    inner_pos != "b" && inner_pos != "s") {
					cerr << "invalid inner_pos "
					     << inner_pos << " for "
					     << inner_type << endl;
					inner_pos = position;
				}
			}
		}
	}
	// Determine the width when there is no inner box: either the LaTeX
	// default or the value stored in 'special' by parse_outer_box().
	if (inner_type.empty()) {
		if (special.empty() && outer_type != "framebox")
			latex_width = "1\\columnwidth";
		else {
			Parser p2(special);
			latex_width = p2.getArg('[', ']');
			string const opt = p2.getArg('[', ']');
			if (!opt.empty()) {
				hor_pos = opt;
				if (hor_pos != "l" && hor_pos != "c" &&
				    hor_pos != "r" && hor_pos != "s") {
					cerr << "invalid hor_pos " << hor_pos
					     << " for " << outer_type << endl;
					hor_pos = "c";
				}
			}
		}
	} else if (inner_type != "makebox")
		latex_width = p.verbatim_item();
	// if e.g. only \ovalbox{content} was used, set the width to 1\columnwidth
	// as this is LyX's standard for such cases (except for makebox)
	// \framebox is more special and handled below
	if (latex_width.empty() && inner_type != "makebox"
		&& outer_type != "framebox")
		latex_width = "1\\columnwidth";

	translate_len(latex_width, width_value, width_unit);

	bool shadedparbox = false;
	// A shaded inner box: skip the begin tokens of the environment here,
	// the contents are parsed below.
	if (inner_type == "shaded") {
		eat_whitespace(p, os, parent_context, false);
		if (outer_type == "parbox") {
			// Eat '{'
			if (p.next_token().cat() == catBegin)
				p.get_token();
			eat_whitespace(p, os, parent_context, false);
			shadedparbox = true;
		}
		p.get_token();
		p.getArg('{', '}');
	}
	// If we already read the inner box we have to push the inner env
	if (!outer_type.empty() && !inner_type.empty() &&
	    (inner_flags & FLAG_END))
		active_environments.push_back(inner_type);
	// LyX can't handle length variables
	bool use_ert = contains(width_unit, '\\') || contains(height_unit, '\\');
	if (!use_ert && !outer_type.empty() && !inner_type.empty()) {
		// Look whether there is some content after the end of the
		// inner box, but before the end of the outer box.
		// If yes, we need to output ERT.
		p.pushPosition();
		if (inner_flags & FLAG_END)
			p.ertEnvironment(inner_type);
		else
			p.verbatim_item();
		p.skip_spaces(true);
		bool const outer_env(outer_type == "framed" || outer_type == "minipage");
		if ((outer_env && p.next_token().asInput() != "\\end") ||
		    (!outer_env && p.next_token().cat() != catEnd)) {
			// something is between the end of the inner box and
			// the end of the outer box, so we need to use ERT.
			use_ert = true;
		}
		p.popPosition();
	}
	// if only \makebox{content} was used we can set its width to 1\width
	// because this is identical, and also identical to \mbox
	// this doesn't work for \framebox{content}, thus we have to use ERT for this
	if (latex_width.empty() && inner_type == "makebox") {
		width_value = "1";
		width_unit = "in";
		width_special = "width";
	} else if (latex_width.empty() && outer_type == "framebox") {
		use_ert = true;
	}
	if (use_ert) {
		// The box cannot be represented natively: reconstruct the
		// original LaTeX in an ERT inset and parse the contents in
		// between as regular text.
		ostringstream ss;
		if (!outer_type.empty()) {
			if (outer_flags & FLAG_END)
				ss << "\\begin{" << outer_type << '}';
			else {
				ss << '\\' << outer_type << '{';
				if (!special.empty())
					ss << special;
			}
		}
		if (!inner_type.empty()) {
			if (inner_type != "shaded") {
				if (inner_flags & FLAG_END)
					ss << "\\begin{" << inner_type << '}';
				else
					ss << '\\' << inner_type;
			}
			if (!position.empty())
				ss << '[' << position << ']';
			if (!latex_height.empty())
				ss << '[' << latex_height << ']';
			if (!inner_pos.empty())
				ss << '[' << inner_pos << ']';
			ss << '{' << latex_width << '}';
			if (!(inner_flags & FLAG_END))
				ss << '{';
		}
		if (inner_type == "shaded")
			ss << "\\begin{shaded}";
		output_ert_inset(os, ss.str(), parent_context);
		if (!inner_type.empty()) {
			parse_text(p, os, inner_flags, outer, parent_context);
			if (inner_flags & FLAG_END)
				output_ert_inset(os, "\\end{" + inner_type + '}',
					parent_context);
			else
				output_ert_inset(os, "}", parent_context);
		}
		if (!outer_type.empty()) {
			// If we already read the inner box we have to pop
			// the inner env
			if (!inner_type.empty() && (inner_flags & FLAG_END))
				active_environments.pop_back();

			// Ensure that the end of the outer box is parsed correctly:
			// The opening brace has been eaten by parse_outer_box()
			if (!outer_type.empty() && (outer_flags & FLAG_ITEM)) {
				outer_flags &= ~FLAG_ITEM;
				outer_flags |= FLAG_BRACE_LAST;
			}
			parse_text(p, os, outer_flags, outer, parent_context);
			if (outer_flags & FLAG_END)
				output_ert_inset(os, "\\end{" + outer_type + '}',
					parent_context);
			else if (inner_type.empty() && outer_type == "framebox")
				// in this case it is already closed later
				;
			else
				output_ert_inset(os, "}", parent_context);
		}
	} else {
		// Native representation: emit a LyX "Box" inset.
		// LyX does not like empty positions, so we have
		// to set them to the LaTeX default values here.
		if (position.empty())
			position = "c";
		if (inner_pos.empty())
			inner_pos = position;
		parent_context.check_layout(os);
		begin_inset(os, "Box ");
		if (outer_type == "framed")
			os << "Framed\n";
		else if (outer_type == "framebox" || outer_type == "fbox")
			os << "Boxed\n";
		else if (outer_type == "shadowbox")
			os << "Shadowbox\n";
		else if ((outer_type == "shaded" && inner_type.empty()) ||
			     (outer_type == "minipage" && inner_type == "shaded") ||
			     (outer_type == "parbox" && inner_type == "shaded")) {
			os << "Shaded\n";
			preamble.registerAutomaticallyLoadedPackage("color");
		} else if (outer_type == "doublebox")
			os << "Doublebox\n";
		else if (outer_type.empty() || outer_type == "mbox")
			os << "Frameless\n";
		else
			os << outer_type << '\n';
		os << "position \"" << position << "\"\n";
		os << "hor_pos \"" << hor_pos << "\"\n";
		if (outer_type == "mbox")
			os << "has_inner_box 1\n";
		else
			os << "has_inner_box " << !inner_type.empty() << "\n";
		os << "inner_pos \"" << inner_pos << "\"\n";
		os << "use_parbox " << (inner_type == "parbox" || shadedparbox)
		   << '\n';
		if (outer_type == "mbox")
			os << "use_makebox 1\n";
		else
			os << "use_makebox " << (inner_type == "makebox") << '\n';
		if (outer_type == "fbox" || outer_type == "mbox")
			os << "width \"-999col%\"\n";
		else
			os << "width \"" << width_value << width_unit << "\"\n";
		os << "special \"" << width_special << "\"\n";
		os << "height \"" << height_value << height_unit << "\"\n";
		os << "height_special \"" << height_special << "\"\n";
		os << "status open\n\n";

		// Unfortunately we can't use parse_text_in_inset:
		// InsetBox::forcePlainLayout() is hard coded and does not
		// use the inset layout. Apart from that do we call parse_text
		// up to two times, but need only one check_end_layout.
		// NOTE(review): the '|| inner_type == "makebox"' term below is
		// redundant (a non-empty inner_type already covers it); possibly
		// '&&' was intended — verify against InsetBox::forcePlainLayout().
		bool const forcePlainLayout =
			(!inner_type.empty() || inner_type == "makebox") &&
			outer_type != "shaded" && outer_type != "framed";
		Context context(true, parent_context.textclass);
		if (forcePlainLayout)
			context.layout = &context.textclass.plainLayout();
		else
			context.font = parent_context.font;

		// If we have no inner box the contents will be read with the outer box
		if (!inner_type.empty())
			parse_text(p, os, inner_flags, outer, context);

		// Ensure that the end of the outer box is parsed correctly:
		// The opening brace has been eaten by parse_outer_box()
		if (!outer_type.empty() && (outer_flags & FLAG_ITEM)) {
			outer_flags &= ~FLAG_ITEM;
			outer_flags |= FLAG_BRACE_LAST;
		}

		// Find end of outer box, output contents if inner_type is
		// empty and output possible comments
		if (!outer_type.empty()) {
			// If we already read the inner box we have to pop
			// the inner env
			if (!inner_type.empty() && (inner_flags & FLAG_END))
				active_environments.pop_back();
			// This does not output anything but comments if
			// inner_type is not empty (see use_ert)
			parse_text(p, os, outer_flags, outer, context);
		}

		context.check_end_layout(os);
		end_inset(os);
#ifdef PRESERVE_LAYOUT
		// LyX puts a % after the end of the minipage
		if (p.next_token().cat() == catNewline && p.next_token().cs().size() > 1) {
			// new paragraph
			//output_ert_inset(os, "%dummy", parent_context);
			p.get_token();
			p.skip_spaces();
			parent_context.new_paragraph(os);
		}
		else if (p.next_token().cat() == catSpace || p.next_token().cat() == catNewline) {
			//output_ert_inset(os, "%dummy", parent_context);
			p.get_token();
			p.skip_spaces();
			// We add a protected space if something real follows
			if (p.good() && p.next_token().cat() != catComment) {
				begin_inset(os, "space ~\n");
				end_inset(os);
			}
		}
#endif
	}
}
|
|
|
|
|
|
|
|
|
|
|
2011-01-21 18:29:10 +00:00
|
|
|
|
/// Parses the outer part of a (possibly nested) box construct:
/// it looks ahead (without consuming input permanently) to detect whether an
/// inner box (\parbox, minipage or shaded environment) follows, then
/// delegates the actual conversion to parse_box().
void parse_outer_box(Parser & p, ostream & os, unsigned flags, bool outer,
	Context & parent_context, string const & outer_type,
	string const & special)
{
	eat_whitespace(p, os, parent_context, false);
	if (flags & FLAG_ITEM) {
		// Eat '{'
		if (p.next_token().cat() == catBegin)
			p.get_token();
		else
			cerr << "Warning: Ignoring missing '{' after \\"
			     << outer_type << '.' << endl;
		eat_whitespace(p, os, parent_context, false);
	}
	string inner;
	unsigned int inner_flags = 0;
	// Lookahead only: the position is restored below.
	p.pushPosition();
	if (outer_type == "minipage" || outer_type == "parbox") {
		// Skip the arguments of the outer box so that we can look at
		// its first content token.
		p.skip_spaces(true);
		while (p.hasOpt()) {
			p.getArg('[', ']');
			p.skip_spaces(true);
		}
		p.getArg('{', '}');
		p.skip_spaces(true);
		if (outer_type == "parbox") {
			// Eat '{'
			if (p.next_token().cat() == catBegin)
				p.get_token();
			p.skip_spaces(true);
		}
	}
	if (outer_type == "shaded" || outer_type == "fbox"
		|| outer_type == "mbox") {
		// These boxes never have an inner box
		;
	} else if (p.next_token().asInput() == "\\parbox") {
		inner = p.get_token().cs();
		inner_flags = FLAG_ITEM;
	} else if (p.next_token().asInput() == "\\begin") {
		// Is this a minipage or shaded box?
		p.pushPosition();
		p.get_token();
		inner = p.getArg('{', '}');
		p.popPosition();
		if (inner == "minipage" || inner == "shaded")
			inner_flags = FLAG_END;
		else
			inner = "";
	}
	p.popPosition();
	if (inner_flags == FLAG_END) {
		// Inner environment: consume its \begin{...} (shaded is
		// handled inside parse_box() instead).
		if (inner != "shaded") {
			p.get_token();
			p.getArg('{', '}');
			eat_whitespace(p, os, parent_context, false);
		}
		parse_box(p, os, flags, FLAG_END, outer, parent_context,
		          outer_type, special, inner);
	} else {
		// Inner command (\parbox): consume the command token.
		if (inner_flags == FLAG_ITEM) {
			p.get_token();
			eat_whitespace(p, os, parent_context, false);
		}
		parse_box(p, os, flags, inner_flags, outer, parent_context,
		          outer_type, special, inner);
	}
}
|
|
|
|
|
|
|
|
|
|
|
2012-03-05 22:04:22 +00:00
|
|
|
|
/// Converts a lstlisting environment or an inline \lstinline snippet
/// (\p in_line) into a LyX listings inset.
void parse_listings(Parser & p, ostream & os, Context & parent_context, bool in_line)
{
	parent_context.check_layout(os);
	begin_inset(os, "listings\n");
	if (p.hasOpt()) {
		string const params = p.verbatimOption();
		os << "lstparams " << '"' << params << '"' << '\n';
		if (params.find("\\color") != string::npos)
			preamble.registerAutomaticallyLoadedPackage("color");
	}
	os << "inline " << (in_line ? "true" : "false") << "\n";
	os << "status collapsed\n";
	Context context(true, parent_context.textclass);
	context.layout = &parent_context.textclass.plainLayout();
	string code;
	if (in_line) {
		// Switch to verbatim catcodes before reading the delimiter,
		// just in case.
		p.setCatcodes(VERBATIM_CATCODES);
		string const delimiter = p.get_token().asInput();
		// FIXME: handle the error condition reported by verbatimStuff()
		code = p.verbatimStuff(delimiter).second;
	} else {
		code = p.verbatimEnvironment("lstlisting");
	}
	output_ert(os, code, context);
	end_inset(os);
}
|
|
|
|
|
|
|
|
|
|
|
2005-07-26 11:58:43 +00:00
|
|
|
|
/// parse an unknown environment
|
|
|
|
|
void parse_unknown_environment(Parser & p, string const & name, ostream & os,
|
2006-04-05 23:56:29 +00:00
|
|
|
|
unsigned flags, bool outer,
|
|
|
|
|
Context & parent_context)
|
2005-07-26 11:58:43 +00:00
|
|
|
|
{
|
|
|
|
|
if (name == "tabbing")
|
|
|
|
|
// We need to remember that we have to handle '\=' specially
|
|
|
|
|
flags |= FLAG_TABBING;
|
|
|
|
|
|
|
|
|
|
// We need to translate font changes and paragraphs inside the
|
|
|
|
|
// environment to ERT if we have a non standard font.
|
|
|
|
|
// Otherwise things like
|
|
|
|
|
// \large\begin{foo}\huge bar\end{foo}
|
|
|
|
|
// will not work.
|
|
|
|
|
bool const specialfont =
|
|
|
|
|
(parent_context.font != parent_context.normalfont);
|
|
|
|
|
bool const new_layout_allowed = parent_context.new_layout_allowed;
|
|
|
|
|
if (specialfont)
|
|
|
|
|
parent_context.new_layout_allowed = false;
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "\\begin{" + name + "}", parent_context);
|
2005-07-26 11:58:43 +00:00
|
|
|
|
parse_text_snippet(p, os, flags, outer, parent_context);
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "\\end{" + name + "}", parent_context);
|
2005-07-26 11:58:43 +00:00
|
|
|
|
if (specialfont)
|
|
|
|
|
parent_context.new_layout_allowed = new_layout_allowed;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2003-07-28 21:58:09 +00:00
|
|
|
|
/*!
 * Parse one complete LaTeX environment and write the LyX equivalent to \p os.
 * The parser is expected to stand just behind the \c \\begin token; the
 * environment name is read here with getArg('{', '}').
 *
 * \param p        input token stream
 * \param os       LyX output stream
 * \param outer    whether we are in the outermost text level
 * \param last_env name of the environment that was closed before this one;
 *                 used to emit a separator when two identical layout
 *                 environments follow each other (bug 5716). Set to the
 *                 current name on exit.
 * \param parent_context layout/font context of the surrounding text
 */
void parse_environment(Parser & p, ostream & os, bool outer,
		string & last_env, Context & parent_context)
{
	Layout const * newlayout;
	InsetLayout const * newinsetlayout = 0;
	string const name = p.getArg('{', '}');
	const bool is_starred = suffixIs(name, '*');
	string const unstarred_name = rtrim(name, "*");
	active_environments.push_back(name);

	if (is_math_env(name)) {
		parent_context.check_layout(os);
		begin_inset(os, "Formula ");
		os << "\\begin{" << name << "}";
		parse_math(p, os, FLAG_END, MATH_MODE);
		os << "\\end{" << name << "}";
		end_inset(os);
		if (is_display_math_env(name)) {
			// Prevent the conversion of a line break to a space
			// (bug 7668). This does not change the output, but
			// looks ugly in LyX.
			eat_whitespace(p, os, parent_context, false);
		}
	}

	else if (is_known(name, preamble.polyglossia_languages)) {
		// We must begin a new paragraph if not already done
		if (! parent_context.atParagraphStart()) {
			parent_context.check_end_layout(os);
			parent_context.new_paragraph(os);
		}
		// save the language in the context so that it is
		// handled by parse_text
		parent_context.font.language = preamble.polyglossia2lyx(name);
		parse_text(p, os, FLAG_END, outer, parent_context);
		// Just in case the environment is empty
		parent_context.extra_stuff.erase();
		// We must begin a new paragraph to reset the language
		parent_context.new_paragraph(os);
		p.skip_spaces();
	}

	else if (unstarred_name == "tabular" || name == "longtable") {
		eat_whitespace(p, os, parent_context, false);
		string width = "0pt";
		if (name == "tabular*") {
			// tabular* carries a mandatory width argument
			width = lyx::translate_len(p.getArg('{', '}'));
			eat_whitespace(p, os, parent_context, false);
		}
		parent_context.check_layout(os);
		begin_inset(os, "Tabular ");
		handle_tabular(p, os, name, width, parent_context);
		end_inset(os);
		p.skip_spaces();
	}

	else if (parent_context.textclass.floats().typeExist(unstarred_name)) {
		eat_whitespace(p, os, parent_context, false);
		string const opt = p.hasOpt() ? p.getArg('[', ']') : string();
		eat_whitespace(p, os, parent_context, false);
		parent_context.check_layout(os);
		begin_inset(os, "Float " + unstarred_name + "\n");
		// store the float type for subfloats
		// subfloats only work with figures and tables
		if (unstarred_name == "figure")
			float_type = unstarred_name;
		else if (unstarred_name == "table")
			float_type = unstarred_name;
		else
			float_type = "";
		if (!opt.empty())
			os << "placement " << opt << '\n';
		if (contains(opt, "H"))
			// the "H" placement specifier requires the float package
			preamble.registerAutomaticallyLoadedPackage("float");
		else {
			Floating const & fl = parent_context.textclass.floats()
				.getType(unstarred_name);
			if (!fl.floattype().empty() && fl.usesFloatPkg())
				preamble.registerAutomaticallyLoadedPackage("float");
		}

		os << "wide " << convert<string>(is_starred)
		   << "\nsideways false"
		   << "\nstatus open\n\n";
		parse_text_in_inset(p, os, FLAG_END, outer, parent_context);
		end_inset(os);
		// We don't really need a new paragraph, but
		// we must make sure that the next item gets a \begin_layout.
		parent_context.new_paragraph(os);
		p.skip_spaces();
		// the float is parsed, thus delete the type
		float_type = "";
	}

	else if (unstarred_name == "sidewaysfigure"
		|| unstarred_name == "sidewaystable") {
		eat_whitespace(p, os, parent_context, false);
		parent_context.check_layout(os);
		if (unstarred_name == "sidewaysfigure")
			begin_inset(os, "Float figure\n");
		else
			begin_inset(os, "Float table\n");
		os << "wide " << convert<string>(is_starred)
		   << "\nsideways true"
		   << "\nstatus open\n\n";
		parse_text_in_inset(p, os, FLAG_END, outer, parent_context);
		end_inset(os);
		// We don't really need a new paragraph, but
		// we must make sure that the next item gets a \begin_layout.
		parent_context.new_paragraph(os);
		p.skip_spaces();
		preamble.registerAutomaticallyLoadedPackage("rotfloat");
	}

	else if (name == "wrapfigure" || name == "wraptable") {
		// syntax is \begin{wrapfigure}[lines]{placement}[overhang]{width}
		eat_whitespace(p, os, parent_context, false);
		parent_context.check_layout(os);
		// default values
		string lines = "0";
		string overhang = "0col%";
		// parse
		if (p.hasOpt())
			lines = p.getArg('[', ']');
		string const placement = p.getArg('{', '}');
		if (p.hasOpt())
			overhang = p.getArg('[', ']');
		string const width = p.getArg('{', '}');
		// write
		if (name == "wrapfigure")
			begin_inset(os, "Wrap figure\n");
		else
			begin_inset(os, "Wrap table\n");
		os << "lines " << lines
		   << "\nplacement " << placement
		   << "\noverhang " << lyx::translate_len(overhang)
		   << "\nwidth " << lyx::translate_len(width)
		   << "\nstatus open\n\n";
		parse_text_in_inset(p, os, FLAG_END, outer, parent_context);
		end_inset(os);
		// We don't really need a new paragraph, but
		// we must make sure that the next item gets a \begin_layout.
		parent_context.new_paragraph(os);
		p.skip_spaces();
		preamble.registerAutomaticallyLoadedPackage("wrapfig");
	}

	else if (name == "minipage") {
		eat_whitespace(p, os, parent_context, false);
		// Test whether this is an outer box of a shaded box
		// (lookahead only; position is restored below)
		p.pushPosition();
		// swallow arguments
		while (p.hasOpt()) {
			p.getArg('[', ']');
			p.skip_spaces(true);
		}
		p.getArg('{', '}');
		p.skip_spaces(true);
		Token t = p.get_token();
		bool shaded = false;
		if (t.asInput() == "\\begin") {
			p.skip_spaces(true);
			if (p.getArg('{', '}') == "shaded")
				shaded = true;
		}
		p.popPosition();
		if (shaded)
			parse_outer_box(p, os, FLAG_END, outer,
			                parent_context, name, "shaded");
		else
			parse_box(p, os, 0, FLAG_END, outer, parent_context,
			          "", "", name);
		p.skip_spaces();
	}

	else if (name == "comment") {
		eat_whitespace(p, os, parent_context, false);
		parent_context.check_layout(os);
		begin_inset(os, "Note Comment\n");
		os << "status open\n";
		parse_text_in_inset(p, os, FLAG_END, outer, parent_context);
		end_inset(os);
		p.skip_spaces();
		skip_braces(p); // eat {} that might be set by LyX behind comments
		preamble.registerAutomaticallyLoadedPackage("verbatim");
	}

	else if (name == "verbatim") {
		// FIXME: this should go in the generic code that
		// handles environments defined in layout file that
		// have "PassThru 1". However, the code over there is
		// already too complicated for my taste.
		parent_context.new_paragraph(os);
		Context context(true, parent_context.textclass,
				&parent_context.textclass[from_ascii("Verbatim")]);
		string s = p.verbatimEnvironment("verbatim");
		output_ert(os, s, context);
		p.skip_spaces();
	}

	else if (name == "IPA") {
		eat_whitespace(p, os, parent_context, false);
		parent_context.check_layout(os);
		begin_inset(os, "IPA\n");
		parse_text_in_inset(p, os, FLAG_END, outer, parent_context);
		end_inset(os);
		p.skip_spaces();
		preamble.registerAutomaticallyLoadedPackage("tipa");
		preamble.registerAutomaticallyLoadedPackage("tipx");
	}

	else if (name == "CJK") {
		// the scheme is \begin{CJK}{encoding}{mapping}text\end{CJK}
		// It is impossible to decide if a CJK environment was in its own paragraph or within
		// a line. We therefore always assume a paragraph since the latter is a rare case.
		eat_whitespace(p, os, parent_context, false);
		parent_context.check_end_layout(os);
		// store the encoding to be able to reset it
		string const encoding_old = p.getEncoding();
		string const encoding = p.getArg('{', '}');
		// FIXME: For some reason JIS does not work. Although the text
		// in tests/CJK.tex is identical with the SJIS version if you
		// convert both snippets using the recode command line utility,
		// the resulting .lyx file contains some extra characters if
		// you set buggy_encoding to false for JIS.
		bool const buggy_encoding = encoding == "JIS";
		if (!buggy_encoding)
			p.setEncoding(encoding, Encoding::CJK);
		else {
			// FIXME: This will read garbage, since the data is not encoded in utf8.
			p.setEncoding("UTF-8");
		}
		// LyX only supports the same mapping for all CJK
		// environments, so we might need to output everything as ERT
		string const mapping = trim(p.getArg('{', '}'));
		char const * const * const where =
			is_known(encoding, supported_CJK_encodings);
		if (!buggy_encoding && !preamble.fontCJKSet())
			preamble.fontCJK(mapping);
		bool knownMapping = mapping == preamble.fontCJK();
		if (buggy_encoding || !knownMapping || !where) {
			// unsupported case: keep the whole environment as ERT
			parent_context.check_layout(os);
			output_ert_inset(os, "\\begin{" + name + "}{" + encoding + "}{" + mapping + "}",
			                 parent_context);
			// we must parse the content as verbatim because e.g. JIS can contain
			// normally invalid characters
			// FIXME: This works only for the most simple cases.
			//        Since TeX control characters are not parsed,
			//        things like comments are completely wrong.
			string const s = p.plainEnvironment("CJK");
			for (string::const_iterator it = s.begin(), et = s.end(); it != et; ++it) {
				if (*it == '\\')
					output_ert_inset(os, "\\", parent_context);
				else if (*it == '$')
					output_ert_inset(os, "$", parent_context);
				else if (*it == '\n' && it + 1 != et && s.begin() + 1 != it)
					os << "\n ";
				else
					os << *it;
			}
			output_ert_inset(os, "\\end{" + name + "}",
			                 parent_context);
		} else {
			string const lang =
				supported_CJK_languages[where - supported_CJK_encodings];
			// store the language because we must reset it at the end
			string const lang_old = parent_context.font.language;
			parent_context.font.language = lang;
			parse_text_in_inset(p, os, FLAG_END, outer, parent_context);
			parent_context.font.language = lang_old;
			parent_context.new_paragraph(os);
		}
		p.setEncoding(encoding_old);
		p.skip_spaces();
	}

	else if (name == "lyxgreyedout") {
		eat_whitespace(p, os, parent_context, false);
		parent_context.check_layout(os);
		begin_inset(os, "Note Greyedout\n");
		os << "status open\n";
		parse_text_in_inset(p, os, FLAG_END, outer, parent_context);
		end_inset(os);
		p.skip_spaces();
		if (!preamble.notefontcolor().empty())
			preamble.registerAutomaticallyLoadedPackage("color");
	}

	else if (name == "framed" || name == "shaded") {
		eat_whitespace(p, os, parent_context, false);
		parse_outer_box(p, os, FLAG_END, outer, parent_context, name, "");
		p.skip_spaces();
	}

	else if (name == "lstlisting") {
		eat_whitespace(p, os, parent_context, false);
		parse_listings(p, os, parent_context, false);
		p.skip_spaces();
	}

	else if (!parent_context.new_layout_allowed)
		parse_unknown_environment(p, name, os, FLAG_END, outer,
		                          parent_context);

	// Alignment and spacing settings
	// FIXME (bug xxxx): These settings can span multiple paragraphs and
	// therefore are totally broken!
	// Note that \centering, raggedright, and raggedleft cannot be handled, as
	// they are commands not environments. They are furthermore switches that
	// can be ended by another switches, but also by commands like \footnote or
	// \parbox. So the only safe way is to leave them untouched.
	else if (name == "center" || name == "centering" ||
	         name == "flushleft" || name == "flushright" ||
	         name == "singlespace" || name == "onehalfspace" ||
	         name == "doublespace" || name == "spacing") {
		eat_whitespace(p, os, parent_context, false);
		// We must begin a new paragraph if not already done
		if (! parent_context.atParagraphStart()) {
			parent_context.check_end_layout(os);
			parent_context.new_paragraph(os);
		}
		if (name == "flushleft")
			parent_context.add_extra_stuff("\\align left\n");
		else if (name == "flushright")
			parent_context.add_extra_stuff("\\align right\n");
		else if (name == "center" || name == "centering")
			parent_context.add_extra_stuff("\\align center\n");
		else if (name == "singlespace")
			parent_context.add_extra_stuff("\\paragraph_spacing single\n");
		else if (name == "onehalfspace") {
			parent_context.add_extra_stuff("\\paragraph_spacing onehalf\n");
			preamble.registerAutomaticallyLoadedPackage("setspace");
		} else if (name == "doublespace") {
			parent_context.add_extra_stuff("\\paragraph_spacing double\n");
			preamble.registerAutomaticallyLoadedPackage("setspace");
		} else if (name == "spacing") {
			parent_context.add_extra_stuff("\\paragraph_spacing other " + p.verbatim_item() + "\n");
			preamble.registerAutomaticallyLoadedPackage("setspace");
		}
		parse_text(p, os, FLAG_END, outer, parent_context);
		// Just in case the environment is empty
		parent_context.extra_stuff.erase();
		// We must begin a new paragraph to reset the alignment
		parent_context.new_paragraph(os);
		p.skip_spaces();
	}

	// The single '=' is meant here.
	else if ((newlayout = findLayout(parent_context.textclass, name, false))) {
		eat_whitespace(p, os, parent_context, false);
		Context context(true, parent_context.textclass, newlayout,
				parent_context.layout, parent_context.font);
		if (parent_context.deeper_paragraph) {
			// We are beginning a nested environment after a
			// deeper paragraph inside the outer list environment.
			// Therefore we don't need to output a "begin deeper".
			context.need_end_deeper = true;
		}
		parent_context.check_end_layout(os);
		if (last_env == name) {
			// we need to output a separator since LyX would export
			// the two environments as one otherwise (bug 5716)
			docstring const sep = from_ascii("--Separator--");
			TeX2LyXDocClass const & textclass(parent_context.textclass);
			if (textclass.hasLayout(sep)) {
				Context newcontext(parent_context);
				newcontext.layout = &(textclass[sep]);
				newcontext.check_layout(os);
				newcontext.check_end_layout(os);
			} else {
				// no separator layout available: fall back to an
				// empty closed note inset between the environments
				parent_context.check_layout(os);
				begin_inset(os, "Note Note\n");
				os << "status closed\n";
				Context newcontext(true, textclass,
						&(textclass.defaultLayout()));
				newcontext.check_layout(os);
				newcontext.check_end_layout(os);
				end_inset(os);
				parent_context.check_end_layout(os);
			}
		}
		switch (context.layout->latextype) {
		case LATEX_LIST_ENVIRONMENT:
			context.add_par_extra_stuff("\\labelwidthstring "
						    + p.verbatim_item() + '\n');
			p.skip_spaces();
			break;
		case LATEX_BIB_ENVIRONMENT:
			p.verbatim_item(); // swallow next arg
			p.skip_spaces();
			break;
		default:
			break;
		}
		context.check_deeper(os);
		// handle known optional and required arguments
		// FIXME: Since format 446, layouts do not require anymore all optional
		// arguments before the required ones. Needs to be implemented!
		// Unfortunately LyX can't handle arguments of list arguments (bug 7468):
		// It is impossible to place anything after the environment name,
		// but before the first \\item.
		if (context.layout->latextype == LATEX_ENVIRONMENT) {
			bool need_layout = true;
			int optargs = 0;
			while (optargs < context.layout->optArgs()) {
				eat_whitespace(p, os, context, false);
				if (p.next_token().cat() == catEscape ||
				    p.next_token().character() != '[')
					break;
				p.get_token(); // eat '['
				if (need_layout) {
					context.check_layout(os);
					need_layout = false;
				}
				// FIXME: Just a workaround. InsetArgument::updateBuffer
				//        will compute a proper ID for all "999" Arguments
				//        (which is also what lyx2lyx produces).
				//        However, tex2lyx should be able to output proper IDs
				//        itself.
				begin_inset(os, "Argument 999\n");
				os << "status collapsed\n\n";
				parse_text_in_inset(p, os, FLAG_BRACK_LAST, outer, context);
				end_inset(os);
				eat_whitespace(p, os, context, false);
				++optargs;
			}
			int reqargs = 0;
			while (reqargs < context.layout->requiredArgs()) {
				eat_whitespace(p, os, context, false);
				if (p.next_token().cat() != catBegin)
					break;
				p.get_token(); // eat '{'
				if (need_layout) {
					context.check_layout(os);
					need_layout = false;
				}
				// FIXME: Just a workaround. InsetArgument::updateBuffer
				//        will compute a proper ID for all "999" Arguments
				//        (which is also what lyx2lyx produces).
				//        However, tex2lyx should be able to output proper IDs
				//        itself.
				begin_inset(os, "Argument 999\n");
				os << "status collapsed\n\n";
				parse_text_in_inset(p, os, FLAG_BRACE_LAST, outer, context);
				end_inset(os);
				eat_whitespace(p, os, context, false);
				++reqargs;
			}
		}
		parse_text(p, os, FLAG_END, outer, context);
		context.check_end_layout(os);
		if (parent_context.deeper_paragraph) {
			// We must suppress the "end deeper" because we
			// suppressed the "begin deeper" above.
			context.need_end_deeper = false;
		}
		context.check_end_deeper(os);
		parent_context.new_paragraph(os);
		p.skip_spaces();
		if (!preamble.titleLayoutFound())
			preamble.titleLayoutFound(newlayout->intitle);
		set<string> const & req = newlayout->requires();
		set<string>::const_iterator it = req.begin();
		set<string>::const_iterator en = req.end();
		for (; it != en; ++it)
			preamble.registerAutomaticallyLoadedPackage(*it);
	}

	// The single '=' is meant here.
	else if ((newinsetlayout = findInsetLayout(parent_context.textclass, name, false))) {
		eat_whitespace(p, os, parent_context, false);
		parent_context.check_layout(os);
		begin_inset(os, "Flex ");
		os << to_utf8(newinsetlayout->name()) << '\n'
		   << "status collapsed\n";
		if (newinsetlayout->isPassThru()) {
			string const arg = p.verbatimEnvironment(name);
			Context context(true, parent_context.textclass,
					&parent_context.textclass.plainLayout(),
					parent_context.layout);
			// NOTE(review): 'context' is constructed but output_ert
			// receives parent_context — looks suspicious; confirm
			// whether 'context' was intended here.
			output_ert(os, arg, parent_context);
		} else
			parse_text_in_inset(p, os, FLAG_END, false, parent_context, newinsetlayout);
		end_inset(os);
	}

	else if (name == "appendix") {
		// This is no good latex style, but it works and is used in some documents...
		eat_whitespace(p, os, parent_context, false);
		parent_context.check_end_layout(os);
		Context context(true, parent_context.textclass, parent_context.layout,
				parent_context.layout, parent_context.font);
		context.check_layout(os);
		os << "\\start_of_appendix\n";
		parse_text(p, os, FLAG_END, outer, context);
		context.check_end_layout(os);
		p.skip_spaces();
	}

	else if (known_environments.find(name) != known_environments.end()) {
		vector<ArgumentType> arguments = known_environments[name];
		// The last "argument" denotes whether we may translate the
		// environment contents to LyX
		// The default required if no argument is given makes us
		// compatible with the reLyXre environment.
		ArgumentType contents = arguments.empty() ?
			required :
			arguments.back();
		if (!arguments.empty())
			arguments.pop_back();
		// See comment in parse_unknown_environment()
		bool const specialfont =
			(parent_context.font != parent_context.normalfont);
		bool const new_layout_allowed =
			parent_context.new_layout_allowed;
		if (specialfont)
			parent_context.new_layout_allowed = false;
		parse_arguments("\\begin{" + name + "}", arguments, p, os,
				outer, parent_context);
		if (contents == verbatim)
			output_ert_inset(os, p.ertEnvironment(name),
			                 parent_context);
		else
			parse_text_snippet(p, os, FLAG_END, outer,
			                   parent_context);
		output_ert_inset(os, "\\end{" + name + "}", parent_context);
		if (specialfont)
			parent_context.new_layout_allowed = new_layout_allowed;
	}

	else
		parse_unknown_environment(p, name, os, FLAG_END, outer,
		                          parent_context);

	last_env = name;
	active_environments.pop_back();
}
|
|
|
|
|
|
2005-05-18 07:21:00 +00:00
|
|
|
|
|
2004-06-18 06:47:19 +00:00
|
|
|
|
/// parses a comment and outputs it to \p os.
|
|
|
|
|
void parse_comment(Parser & p, ostream & os, Token const & t, Context & context)
|
|
|
|
|
{
|
2008-04-10 21:49:34 +00:00
|
|
|
|
LASSERT(t.cat() == catComment, return);
|
2004-06-18 06:47:19 +00:00
|
|
|
|
if (!t.cs().empty()) {
|
2005-05-18 07:21:00 +00:00
|
|
|
|
context.check_layout(os);
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, '%' + t.cs(), context);
|
2004-06-18 06:47:19 +00:00
|
|
|
|
if (p.next_token().cat() == catNewline) {
|
|
|
|
|
// A newline after a comment line starts a new
|
|
|
|
|
// paragraph
|
2005-07-26 11:58:43 +00:00
|
|
|
|
if (context.new_layout_allowed) {
|
|
|
|
|
if(!context.atParagraphStart())
|
|
|
|
|
// Only start a new paragraph if not already
|
|
|
|
|
// done (we might get called recursively)
|
|
|
|
|
context.new_paragraph(os);
|
|
|
|
|
} else
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "\n", context);
|
2004-06-18 06:47:19 +00:00
|
|
|
|
eat_whitespace(p, os, context, true);
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
// "%\n" combination
|
|
|
|
|
p.skip_spaces();
|
|
|
|
|
}
|
|
|
|
|
}
|
2003-08-04 10:26:10 +00:00
|
|
|
|
|
|
|
|
|
|
2004-06-18 06:47:19 +00:00
|
|
|
|
/*!
|
|
|
|
|
* Reads spaces and comments until the first non-space, non-comment token.
|
|
|
|
|
* New paragraphs (double newlines or \\par) are handled like simple spaces
|
|
|
|
|
* if \p eatParagraph is true.
|
|
|
|
|
* Spaces are skipped, but comments are written to \p os.
|
|
|
|
|
*/
|
|
|
|
|
void eat_whitespace(Parser & p, ostream & os, Context & context,
|
2004-10-05 10:11:42 +00:00
|
|
|
|
bool eatParagraph)
|
2004-06-18 06:47:19 +00:00
|
|
|
|
{
|
|
|
|
|
while (p.good()) {
|
|
|
|
|
Token const & t = p.get_token();
|
|
|
|
|
if (t.cat() == catComment)
|
|
|
|
|
parse_comment(p, os, t, context);
|
|
|
|
|
else if ((! eatParagraph && p.isParagraph()) ||
|
2006-04-05 23:56:29 +00:00
|
|
|
|
(t.cat() != catSpace && t.cat() != catNewline)) {
|
2004-06-18 06:47:19 +00:00
|
|
|
|
p.putback();
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2004-06-28 06:53:12 +00:00
|
|
|
|
|
|
|
|
|
/*!
|
|
|
|
|
* Set a font attribute, parse text and reset the font attribute.
|
|
|
|
|
* \param attribute Attribute name (e.g. \\family, \\shape etc.)
|
|
|
|
|
* \param currentvalue Current value of the attribute. Is set to the new
|
|
|
|
|
* value during parsing.
|
|
|
|
|
* \param newvalue New value of the attribute
|
|
|
|
|
*/
|
|
|
|
|
void parse_text_attributes(Parser & p, ostream & os, unsigned flags, bool outer,
|
2004-10-05 10:11:42 +00:00
|
|
|
|
Context & context, string const & attribute,
|
|
|
|
|
string & currentvalue, string const & newvalue)
|
2004-06-28 06:53:12 +00:00
|
|
|
|
{
|
|
|
|
|
context.check_layout(os);
|
2005-07-26 11:58:43 +00:00
|
|
|
|
string const oldvalue = currentvalue;
|
2004-06-28 06:53:12 +00:00
|
|
|
|
currentvalue = newvalue;
|
2004-08-16 11:27:51 +00:00
|
|
|
|
os << '\n' << attribute << ' ' << newvalue << "\n";
|
2004-06-28 06:53:12 +00:00
|
|
|
|
parse_text_snippet(p, os, flags, outer, context);
|
2005-07-26 11:58:43 +00:00
|
|
|
|
context.check_layout(os);
|
2004-08-16 11:27:51 +00:00
|
|
|
|
os << '\n' << attribute << ' ' << oldvalue << "\n";
|
2005-07-26 11:58:43 +00:00
|
|
|
|
currentvalue = oldvalue;
|
2004-06-28 06:53:12 +00:00
|
|
|
|
}
|
|
|
|
|
|
2004-08-10 09:40:53 +00:00
|
|
|
|
|
|
|
|
|
/// get the arguments of a natbib or jurabib citation command
|
2007-11-27 20:30:22 +00:00
|
|
|
|
void get_cite_arguments(Parser & p, bool natbibOrder,
|
|
|
|
|
string & before, string & after)
|
2004-08-10 09:40:53 +00:00
|
|
|
|
{
|
|
|
|
|
// We need to distinguish "" and "[]", so we can't use p.getOpt().
|
|
|
|
|
|
|
|
|
|
// text before the citation
|
2007-11-27 20:30:22 +00:00
|
|
|
|
before.clear();
|
2004-08-10 09:40:53 +00:00
|
|
|
|
// text after the citation
|
2007-11-27 20:30:22 +00:00
|
|
|
|
after = p.getFullOpt();
|
2004-08-10 09:40:53 +00:00
|
|
|
|
|
2005-01-06 13:22:20 +00:00
|
|
|
|
if (!after.empty()) {
|
|
|
|
|
before = p.getFullOpt();
|
|
|
|
|
if (natbibOrder && !before.empty())
|
2007-12-12 19:28:07 +00:00
|
|
|
|
swap(before, after);
|
2004-08-10 09:40:53 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2005-04-15 14:04:13 +00:00
|
|
|
|
|
2007-11-27 20:30:22 +00:00
|
|
|
|
/// Convert filenames with TeX macros and/or quotes to something LyX
/// can understand
string const normalize_filename(string const & name)
{
	// Run the name through the TeX tokenizer so that the escape
	// sequences emitted by LyX (\lyxdot, \space, \string) can be undone.
	Parser p(name);
	ostringstream os;
	while (p.good()) {
		Token const & t = p.get_token();
		if (t.cat() != catEscape)
			// ordinary characters are passed through unchanged
			os << t.asInput();
		else if (t.cs() == "lyxdot") {
			// This is used by LyX for simple dots in relative
			// names
			os << '.';
			p.skip_spaces();
		} else if (t.cs() == "space") {
			os << ' ';
			p.skip_spaces();
		} else if (t.cs() == "string") {
			// Convert \string" to " and \string~ to ~
			// (the quoted character itself is output by a later
			// loop iteration; only the \string prefix is dropped)
			Token const & n = p.next_token();
			if (n.asInput() != "\"" && n.asInput() != "~")
				os << t.asInput();
		} else
			// any other macro is kept verbatim
			os << t.asInput();
	}
	// Strip quotes. This is a bit complicated (see latex_path()).
	string full = os.str();
	if (!full.empty() && full[0] == '"') {
		string base = removeExtension(full);
		string ext = getExtension(full);
		if (!base.empty() && base[base.length()-1] == '"')
			// "a b"
			// "a b".tex
			return addExtension(trim(base, "\""), ext);
		if (full[full.length()-1] == '"')
			// "a b.c"
			// "a b.c".tex
			return trim(full, "\"");
	}
	return full;
}
|
|
|
|
|
|
2005-05-25 16:01:19 +00:00
|
|
|
|
|
|
|
|
|
/// Convert \p name from TeX convention (relative to master file) to LyX
/// convention (relative to .lyx file) if it is relative
void fix_child_filename(string & name)
{
	string const absMasterTeX = getMasterFilePath(true);
	bool const isabs = FileName::isAbsolute(name);
	// convert from "relative to .tex master" to absolute original path
	if (!isabs)
		name = makeAbsPath(name, absMasterTeX).absFileName();
	bool copyfile = copyFiles();
	string const absParentLyX = getParentFilePath(false);
	// absolute name of the file as seen from the LyX side
	string abs = name;
	if (copyfile) {
		// convert from absolute original path to "relative to master file"
		string const rel = to_utf8(makeRelPath(from_utf8(name),
		                                       from_utf8(absMasterTeX)));
		// re-interpret "relative to .tex file" as "relative to .lyx file"
		// (is different if the master .lyx file resides in a
		// different path than the master .tex file)
		string const absMasterLyX = getMasterFilePath(false);
		abs = makeAbsPath(rel, absMasterLyX).absFileName();
		// Do not copy if the new path is impossible to create. Example:
		// absMasterTeX = "/foo/bar/"
		// absMasterLyX = "/bar/"
		// name = "/baz.eps" => new absolute name would be "/../baz.eps"
		if (contains(name, "/../"))
			copyfile = false;
	}
	if (copyfile) {
		if (isabs)
			// absolute names stay absolute, but point to the
			// copied location
			name = abs;
		else {
			// convert from absolute original path to
			// "relative to .lyx file"
			name = to_utf8(makeRelPath(from_utf8(abs),
			                           from_utf8(absParentLyX)));
		}
	}
	else if (!isabs) {
		// convert from absolute original path to "relative to .lyx file"
		name = to_utf8(makeRelPath(from_utf8(name),
		                           from_utf8(absParentLyX)));
	}
	// NOTE(review): absolute names are left untouched when no copying
	// is requested — presumably intentional, since they are valid from
	// anywhere.
}
|
|
|
|
|
|
2007-12-17 16:04:46 +00:00
|
|
|
|
|
2012-10-03 11:23:27 +00:00
|
|
|
|
/*
 * Copy \p src to \p dstname if file copying was requested on the
 * command line. A relative \p dstname is interpreted relative to the
 * parent .lyx file. Missing target directories are created; an existing
 * destination file is only overwritten if the user asked for it.
 * Failures are reported as warnings on stderr, never as errors.
 */
void copy_file(FileName const & src, string dstname)
{
	if (!copyFiles())
		return;
	string const absParent = getParentFilePath(false);
	FileName dst;
	if (FileName::isAbsolute(dstname))
		dst = FileName(dstname);
	else
		dst = makeAbsPath(dstname, absParent);
	// (the previous version also fetched getMasterFilePath(false) here,
	// but the result was never used)
	FileName const srcpath = src.onlyPath();
	FileName const dstpath = dst.onlyPath();
	// Nothing to do if the file would be copied onto itself.
	if (equivalent(srcpath, dstpath))
		return;
	if (!dstpath.isDirectory()) {
		if (!dstpath.createPath()) {
			cerr << "Warning: Could not create directory for file `"
			     << dst.absFileName() << "´." << endl;
			return;
		}
	}
	if (dst.isReadableFile()) {
		if (overwriteFiles())
			cerr << "Warning: Overwriting existing file `"
			     << dst.absFileName() << "´." << endl;
		else {
			cerr << "Warning: Not overwriting existing file `"
			     << dst.absFileName() << "´." << endl;
			return;
		}
	}
	if (!src.copyTo(dst))
		cerr << "Warning: Could not copy file `" << src.absFileName()
		     << "´ to `" << dst.absFileName() << "´." << endl;
}
|
|
|
|
|
|
2006-08-13 09:46:28 +00:00
|
|
|
|
|
2012-11-28 11:54:34 +00:00
|
|
|
|
/// Parse a NoWeb Chunk section. The initial "&lt;&lt;" is already parsed.
bool parse_noweb(Parser & p, ostream & os, Context & context)
{
	// check whether a chunk is possible here.
	if (!context.new_layout_allowed ||
	    !context.textclass.hasLayout(from_ascii("Chunk"))) {
		return false;
	}

	// Remember the parser position so that we can back out cleanly
	// if this turns out not to be a well-formed chunk.
	p.pushPosition();

	// read the parameters
	Parser::Arg stuff = p.verbatimStuff(">>=", false);
	if (!stuff.first) {
		// no closing ">>=" found: not a chunk
		p.popPosition();
		return false;
	}
	// re-assemble the chunk header plus the rest of its line
	string chunk = "<<" + stuff.second + ">>="
	               + p.verbatimStuff("\n").second + '\n';

	// the chunk body runs up to a line starting with "@"
	stuff = p.verbatimStuff("\n@");
	if (!stuff.first) {
		p.popPosition();
		return false;
	}
	chunk += stuff.second + "\n@";
	string post_chunk = p.verbatimStuff("\n").second + '\n';
	// the "@" terminator must be followed by whitespace or end of line
	if (post_chunk[0] != ' ' && post_chunk[0] != '\n') {
		p.popPosition();
		return false;
	}
	chunk += post_chunk;

	// Output the whole chunk as ERT inside a Chunk-layout paragraph.
	context.new_paragraph(os);
	Context newcontext(true, context.textclass,
	                   &context.textclass[from_ascii("Chunk")]);
	output_ert(os, chunk, newcontext);

	// success: the saved position is no longer needed
	p.dropPosition();
	return true;
}
|
|
|
|
|
|
2010-12-19 19:21:53 +00:00
|
|
|
|
|
|
|
|
|
/// detects \\def, \\long\\def and \\global\\long\\def with ws and comments
bool is_macro(Parser & p)
{
	// The current token must be a control sequence to start a macro
	// definition at all.
	Token first = p.curr_token();
	if (first.cat() != catEscape || !p.good())
		return false;
	if (first.cs() == "def")
		return true;
	// Only \global and \long may prefix \def.
	if (first.cs() != "global" && first.cs() != "long")
		return false;
	// Look ahead, counting consumed tokens in `pos` so that all of
	// them can be put back before returning.
	Token second = p.get_token();
	int pos = 1;
	// skip whitespace and comments between the prefix and \def
	while (p.good() && !p.isParagraph() && (second.cat() == catSpace ||
	       second.cat() == catNewline || second.cat() == catComment)) {
		second = p.get_token();
		pos++;
	}
	bool secondvalid = second.cat() == catEscape;
	Token third;
	bool thirdvalid = false;
	// \global may additionally be followed by \long before \def
	if (p.good() && first.cs() == "global" && secondvalid &&
	    second.cs() == "long") {
		third = p.get_token();
		pos++;
		while (p.good() && !p.isParagraph() &&
		       (third.cat() == catSpace ||
		        third.cat() == catNewline ||
		        third.cat() == catComment)) {
			third = p.get_token();
			pos++;
		}
		thirdvalid = third.cat() == catEscape;
	}
	// Undo the lookahead: this function must not consume anything.
	for (int i = 0; i < pos; ++i)
		p.putback();
	if (!secondvalid)
		return false;
	if (!thirdvalid)
		// \global\def or \long\def
		return (first.cs() == "global" || first.cs() == "long") &&
		       second.cs() == "def";
	// \global\long\def
	return first.cs() == "global" && second.cs() == "long" &&
	       third.cs() == "def";
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/// Parse a macro definition (assumes that is_macro() returned true)
void parse_macro(Parser & p, ostream & os, Context & context)
{
	context.check_layout(os);
	// Collect the prefix tokens (\def, or \long\def, or
	// \global\long\def) into `command` for a possible ERT fallback.
	Token first = p.curr_token();
	Token second;
	Token third;
	string command = first.asInput();
	if (first.cs() != "def") {
		p.get_token();
		eat_whitespace(p, os, context, false);
		second = p.curr_token();
		command += second.asInput();
		if (second.cs() != "def") {
			p.get_token();
			eat_whitespace(p, os, context, false);
			third = p.curr_token();
			command += third.asInput();
		}
	}
	eat_whitespace(p, os, context, false);
	// name of the macro being defined
	string const name = p.get_token().cs();
	eat_whitespace(p, os, context, false);

	// parameter text
	// `simple` stays true only if the parameter text is a plain
	// #1#2...#n sequence, which is what LyX's FormulaMacro supports.
	bool simple = true;
	string paramtext;
	int arity = 0;
	while (p.next_token().cat() != catBegin) {
		if (p.next_token().cat() == catParameter) {
			// # found
			p.get_token();
			paramtext += "#";

			// followed by number?
			if (p.next_token().cat() == catOther) {
				string s = p.get_token().asInput();
				paramtext += s;
				// number = current arity + 1?
				if (s.size() == 1 && s[0] == arity + '0' + 1)
					++arity;
				else
					simple = false;
			} else
				paramtext += p.get_token().cs();
		} else {
			// delimited parameters are not representable
			paramtext += p.get_token().cs();
			simple = false;
		}
	}

	// only output simple (i.e. compatible) macro as FormulaMacros
	string ert = '\\' + name + ' ' + paramtext + '{' + p.verbatim_item() + '}';
	if (simple) {
		context.check_layout(os);
		begin_inset(os, "FormulaMacro");
		os << "\n\\def" << ert;
		end_inset(os);
	} else
		// otherwise reproduce the full definition verbatim as ERT
		output_ert_inset(os, command + ert, context);
}
|
|
|
|
|
|
2011-12-13 19:40:05 +00:00
|
|
|
|
|
|
|
|
|
void registerExternalTemplatePackages(string const & name)
{
	external::TemplateManager const & manager = external::TemplateManager::get();
	external::Template const * const tmpl = manager.getTemplateByName(name);
	if (!tmpl)
		return;
	// Prefer the PDFLaTeX format when producing PDF output ...
	external::Template::Formats::const_iterator fit = tmpl->formats.end();
	if (pdflatex)
		fit = tmpl->formats.find("PDFLaTeX");
	if (fit == tmpl->formats.end())
		// ... but if the template has not specified a PDFLaTeX
		// output, we try the LaTeX format.
		fit = tmpl->formats.find("LaTeX");
	if (fit == tmpl->formats.end())
		return;
	// Register every package the chosen format requires.
	vector<string>::const_iterator it = fit->second.requirements.begin();
	vector<string>::const_iterator const end = fit->second.requirements.end();
	for (; it != end; ++it)
		preamble.registerAutomaticallyLoadedPackage(*it);
}
|
|
|
|
|
|
2004-06-18 06:47:19 +00:00
|
|
|
|
} // anonymous namespace
|
2003-08-04 10:26:10 +00:00
|
|
|
|
|
2003-07-28 21:58:09 +00:00
|
|
|
|
|
2003-07-26 00:15:38 +00:00
|
|
|
|
void parse_text(Parser & p, ostream & os, unsigned flags, bool outer,
|
2003-08-04 10:26:10 +00:00
|
|
|
|
Context & context)
|
2003-04-17 09:47:21 +00:00
|
|
|
|
{
|
2008-04-05 19:01:43 +00:00
|
|
|
|
Layout const * newlayout = 0;
|
2011-01-07 21:04:16 +00:00
|
|
|
|
InsetLayout const * newinsetlayout = 0;
|
2011-11-20 17:03:00 +00:00
|
|
|
|
char const * const * where = 0;
|
2013-02-16 04:51:23 +00:00
|
|
|
|
// Store the latest bibliographystyle, addcontentslineContent and
|
|
|
|
|
// nocite{*} option (needed for bibtex inset)
|
2010-12-20 02:47:58 +00:00
|
|
|
|
string btprint;
|
2013-02-16 04:51:23 +00:00
|
|
|
|
string contentslineContent;
|
2011-12-14 01:11:18 +00:00
|
|
|
|
string bibliographystyle = "default";
|
2012-12-28 13:29:46 +00:00
|
|
|
|
bool const use_natbib = isProvided("natbib");
|
|
|
|
|
bool const use_jurabib = isProvided("jurabib");
|
2010-12-12 11:45:09 +00:00
|
|
|
|
string last_env;
|
2003-04-17 09:47:21 +00:00
|
|
|
|
while (p.good()) {
|
2009-05-11 11:02:25 +00:00
|
|
|
|
Token const & t = p.get_token();
|
2003-04-17 09:47:21 +00:00
|
|
|
|
|
2012-06-24 16:10:36 +00:00
|
|
|
|
// it is impossible to determine the correct encoding for non-CJK Japanese.
|
|
|
|
|
// Therefore write a note at the beginning of the document
|
|
|
|
|
if (is_nonCJKJapanese) {
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
begin_inset(os, "Note Note\n");
|
|
|
|
|
os << "status open\n\\begin_layout Plain Layout\n"
|
|
|
|
|
<< "\\series bold\n"
|
|
|
|
|
<< "Important information:\n"
|
|
|
|
|
<< "\\end_layout\n\n"
|
|
|
|
|
<< "\\begin_layout Plain Layout\n"
|
2013-01-19 19:02:33 +00:00
|
|
|
|
<< "The original LaTeX source for this document is in Japanese (pLaTeX).\n"
|
|
|
|
|
<< " It was therefore impossible for tex2lyx to determine the correct encoding.\n"
|
|
|
|
|
<< " The iconv encoding " << p.getEncoding() << " was used.\n"
|
|
|
|
|
<< " If this is incorrect, you must run the tex2lyx program on the command line\n"
|
|
|
|
|
<< " and specify the encoding using the -e command-line switch.\n"
|
|
|
|
|
<< " In addition, you might want to double check that the desired output encoding\n"
|
|
|
|
|
<< " is correctly selected in Document > Settings > Language.\n"
|
2012-06-24 16:10:36 +00:00
|
|
|
|
<< "\\end_layout\n";
|
|
|
|
|
end_inset(os);
|
|
|
|
|
is_nonCJKJapanese = false;
|
|
|
|
|
}
|
|
|
|
|
|
2003-04-17 09:47:21 +00:00
|
|
|
|
#ifdef FILEDEBUG
|
2011-01-05 20:32:45 +00:00
|
|
|
|
debugToken(cerr, t, flags);
|
2003-04-17 09:47:21 +00:00
|
|
|
|
#endif
|
|
|
|
|
|
|
|
|
|
if (flags & FLAG_ITEM) {
|
|
|
|
|
if (t.cat() == catSpace)
|
|
|
|
|
continue;
|
|
|
|
|
|
|
|
|
|
flags &= ~FLAG_ITEM;
|
|
|
|
|
if (t.cat() == catBegin) {
|
|
|
|
|
// skip the brace and collect everything to the next matching
|
|
|
|
|
// closing brace
|
|
|
|
|
flags |= FLAG_BRACE_LAST;
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// handle only this single token, leave the loop if done
|
|
|
|
|
flags |= FLAG_LEAVE;
|
|
|
|
|
}
|
|
|
|
|
|
2011-01-05 20:32:45 +00:00
|
|
|
|
if (t.cat() != catEscape && t.character() == ']' &&
|
|
|
|
|
(flags & FLAG_BRACK_LAST))
|
2003-04-23 15:14:43 +00:00
|
|
|
|
return;
|
2011-01-05 20:32:45 +00:00
|
|
|
|
if (t.cat() == catEnd && (flags & FLAG_BRACE_LAST))
|
2010-06-04 21:50:08 +00:00
|
|
|
|
return;
|
2003-04-17 09:47:21 +00:00
|
|
|
|
|
2010-12-12 11:45:09 +00:00
|
|
|
|
// If there is anything between \end{env} and \begin{env} we
|
|
|
|
|
// don't need to output a separator.
|
|
|
|
|
if (t.cat() != catSpace && t.cat() != catNewline &&
|
|
|
|
|
t.asInput() != "\\begin")
|
|
|
|
|
last_env = "";
|
|
|
|
|
|
2003-04-17 09:47:21 +00:00
|
|
|
|
//
|
|
|
|
|
// cat codes
|
|
|
|
|
//
|
|
|
|
|
if (t.cat() == catMath) {
|
|
|
|
|
// we are inside some text mode thingy, so opening new math is allowed
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2003-04-17 09:47:21 +00:00
|
|
|
|
begin_inset(os, "Formula ");
|
2003-04-23 15:14:43 +00:00
|
|
|
|
Token const & n = p.get_token();
|
2011-11-13 11:29:48 +00:00
|
|
|
|
bool const display(n.cat() == catMath && outer);
|
|
|
|
|
if (display) {
|
2003-04-17 09:47:21 +00:00
|
|
|
|
// TeX's $$...$$ syntax for displayed math
|
|
|
|
|
os << "\\[";
|
|
|
|
|
parse_math(p, os, FLAG_SIMPLE, MATH_MODE);
|
|
|
|
|
os << "\\]";
|
2003-04-23 15:14:43 +00:00
|
|
|
|
p.get_token(); // skip the second '$' token
|
2003-04-17 09:47:21 +00:00
|
|
|
|
} else {
|
|
|
|
|
// simple $...$ stuff
|
|
|
|
|
p.putback();
|
|
|
|
|
os << '$';
|
|
|
|
|
parse_math(p, os, FLAG_SIMPLE, MATH_MODE);
|
|
|
|
|
os << '$';
|
|
|
|
|
}
|
|
|
|
|
end_inset(os);
|
2011-11-13 11:29:48 +00:00
|
|
|
|
if (display) {
|
|
|
|
|
// Prevent the conversion of a line break to a
|
|
|
|
|
// space (bug 7668). This does not change the
|
|
|
|
|
// output, but looks ugly in LyX.
|
|
|
|
|
eat_whitespace(p, os, context, false);
|
|
|
|
|
}
|
2003-04-17 09:47:21 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
else if (t.cat() == catSuper || t.cat() == catSub)
|
|
|
|
|
cerr << "catcode " << t << " illegal in text mode\n";
|
|
|
|
|
|
2003-07-28 15:45:41 +00:00
|
|
|
|
// Basic support for english quotes. This should be
|
|
|
|
|
// extended to other quotes, but is not so easy (a
|
|
|
|
|
// left english quote is the same as a right german
|
|
|
|
|
// quote...)
|
2007-12-09 13:40:03 +00:00
|
|
|
|
else if (t.asInput() == "`" && p.next_token().asInput() == "`") {
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2003-07-28 15:45:41 +00:00
|
|
|
|
begin_inset(os, "Quotes ");
|
|
|
|
|
os << "eld";
|
|
|
|
|
end_inset(os);
|
|
|
|
|
p.get_token();
|
|
|
|
|
skip_braces(p);
|
2003-09-09 18:27:24 +00:00
|
|
|
|
}
|
2007-12-09 13:40:03 +00:00
|
|
|
|
else if (t.asInput() == "'" && p.next_token().asInput() == "'") {
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2003-07-28 15:45:41 +00:00
|
|
|
|
begin_inset(os, "Quotes ");
|
|
|
|
|
os << "erd";
|
|
|
|
|
end_inset(os);
|
|
|
|
|
p.get_token();
|
|
|
|
|
skip_braces(p);
|
2003-09-09 18:27:24 +00:00
|
|
|
|
}
|
2003-07-28 15:45:41 +00:00
|
|
|
|
|
2007-12-09 13:40:03 +00:00
|
|
|
|
else if (t.asInput() == ">" && p.next_token().asInput() == ">") {
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
begin_inset(os, "Quotes ");
|
|
|
|
|
os << "ald";
|
|
|
|
|
end_inset(os);
|
|
|
|
|
p.get_token();
|
|
|
|
|
skip_braces(p);
|
|
|
|
|
}
|
|
|
|
|
|
2012-11-28 11:54:34 +00:00
|
|
|
|
else if (t.asInput() == "<"
|
2013-02-22 14:49:18 +00:00
|
|
|
|
&& p.next_token().asInput() == "<") {
|
|
|
|
|
bool has_noweb = false;
|
|
|
|
|
if (noweb_mode) {
|
|
|
|
|
p.pushPosition();
|
|
|
|
|
p.get_token();
|
|
|
|
|
has_noweb = parse_noweb(p, os, context);
|
|
|
|
|
if (!has_noweb)
|
|
|
|
|
p.popPosition();
|
|
|
|
|
}
|
2012-11-28 11:54:34 +00:00
|
|
|
|
|
2013-02-22 14:49:18 +00:00
|
|
|
|
if (!has_noweb) {
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
begin_inset(os, "Quotes ");
|
|
|
|
|
//FIXME: this is a right danish quote;
|
|
|
|
|
// why not a left french quote?
|
|
|
|
|
os << "ard";
|
|
|
|
|
end_inset(os);
|
|
|
|
|
p.get_token();
|
|
|
|
|
skip_braces(p);
|
|
|
|
|
}
|
2007-12-09 13:40:03 +00:00
|
|
|
|
}
|
|
|
|
|
|
2004-06-18 06:47:19 +00:00
|
|
|
|
else if (t.cat() == catSpace || (t.cat() == catNewline && ! p.isParagraph()))
|
2003-11-05 10:14:13 +00:00
|
|
|
|
check_space(p, os, context);
|
2003-07-28 15:45:41 +00:00
|
|
|
|
|
2006-08-13 09:46:28 +00:00
|
|
|
|
else if (t.character() == '[' && noweb_mode &&
|
2007-05-28 22:27:45 +00:00
|
|
|
|
p.next_token().character() == '[') {
|
2006-08-13 09:46:28 +00:00
|
|
|
|
// These can contain underscores
|
|
|
|
|
p.putback();
|
|
|
|
|
string const s = p.getFullOpt() + ']';
|
|
|
|
|
if (p.next_token().character() == ']')
|
|
|
|
|
p.get_token();
|
|
|
|
|
else
|
|
|
|
|
cerr << "Warning: Inserting missing ']' in '"
|
|
|
|
|
<< s << "'." << endl;
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, s, context);
|
2006-08-13 09:46:28 +00:00
|
|
|
|
}
|
|
|
|
|
|
2010-12-30 21:56:55 +00:00
|
|
|
|
else if (t.cat() == catLetter) {
|
|
|
|
|
context.check_layout(os);
|
2010-12-31 11:59:33 +00:00
|
|
|
|
// Workaround for bug 4752.
|
|
|
|
|
// FIXME: This whole code block needs to be removed
|
|
|
|
|
// when the bug is fixed and tex2lyx produces
|
|
|
|
|
// the updated file format.
|
|
|
|
|
// The replacement algorithm in LyX is so stupid that
|
|
|
|
|
// it even translates a phrase if it is part of a word.
|
|
|
|
|
bool handled = false;
|
|
|
|
|
for (int const * l = known_phrase_lengths; *l; ++l) {
|
|
|
|
|
string phrase = t.cs();
|
|
|
|
|
for (int i = 1; i < *l && p.next_token().isAlnumASCII(); ++i)
|
|
|
|
|
phrase += p.get_token().cs();
|
|
|
|
|
if (is_known(phrase, known_coded_phrases)) {
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, phrase, context);
|
2010-12-31 11:59:33 +00:00
|
|
|
|
handled = true;
|
|
|
|
|
break;
|
|
|
|
|
} else {
|
|
|
|
|
for (size_t i = 1; i < phrase.length(); ++i)
|
|
|
|
|
p.putback();
|
|
|
|
|
}
|
2010-12-30 21:56:55 +00:00
|
|
|
|
}
|
2010-12-31 11:59:33 +00:00
|
|
|
|
if (!handled)
|
|
|
|
|
os << t.cs();
|
2010-12-30 21:56:55 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
else if (t.cat() == catOther ||
|
2003-04-17 09:47:21 +00:00
|
|
|
|
t.cat() == catAlign ||
|
2003-07-28 21:58:09 +00:00
|
|
|
|
t.cat() == catParameter) {
|
2005-03-21 17:26:34 +00:00
|
|
|
|
// This translates "&" to "\\&" which may be wrong...
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2008-11-16 17:02:00 +00:00
|
|
|
|
os << t.cs();
|
2003-07-28 21:58:09 +00:00
|
|
|
|
}
|
2003-04-17 09:47:21 +00:00
|
|
|
|
|
2004-06-18 06:47:19 +00:00
|
|
|
|
else if (p.isParagraph()) {
|
2005-07-26 11:58:43 +00:00
|
|
|
|
if (context.new_layout_allowed)
|
|
|
|
|
context.new_paragraph(os);
|
|
|
|
|
else
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "\\par ", context);
|
2004-06-18 06:47:19 +00:00
|
|
|
|
eat_whitespace(p, os, context, true);
|
2003-04-17 09:47:21 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
else if (t.cat() == catActive) {
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2003-04-17 09:47:21 +00:00
|
|
|
|
if (t.character() == '~') {
|
2003-08-04 10:26:10 +00:00
|
|
|
|
if (context.layout->free_spacing)
|
2003-04-17 09:47:21 +00:00
|
|
|
|
os << ' ';
|
2010-12-19 20:23:55 +00:00
|
|
|
|
else {
|
2010-12-27 18:14:55 +00:00
|
|
|
|
begin_inset(os, "space ~\n");
|
2010-12-19 20:23:55 +00:00
|
|
|
|
end_inset(os);
|
|
|
|
|
}
|
2003-04-17 09:47:21 +00:00
|
|
|
|
} else
|
2008-11-16 17:02:00 +00:00
|
|
|
|
os << t.cs();
|
2003-04-17 09:47:21 +00:00
|
|
|
|
}
|
|
|
|
|
|
2011-11-20 20:30:12 +00:00
|
|
|
|
else if (t.cat() == catBegin) {
|
|
|
|
|
Token const next = p.next_token();
|
|
|
|
|
Token const end = p.next_next_token();
|
|
|
|
|
if (next.cat() == catEnd) {
|
2013-02-22 00:11:30 +00:00
|
|
|
|
// {}
|
|
|
|
|
Token const prev = p.prev_token();
|
|
|
|
|
p.get_token();
|
|
|
|
|
if (p.next_token().character() == '`' ||
|
2013-02-22 00:21:17 +00:00
|
|
|
|
(prev.character() == '-' &&
|
|
|
|
|
p.next_token().character() == '-'))
|
2013-02-22 00:11:30 +00:00
|
|
|
|
; // ignore it in {}`` or -{}-
|
|
|
|
|
else
|
|
|
|
|
output_ert_inset(os, "{}", context);
|
2011-11-20 20:30:12 +00:00
|
|
|
|
} else if (next.cat() == catEscape &&
|
|
|
|
|
is_known(next.cs(), known_quotes) &&
|
|
|
|
|
end.cat() == catEnd) {
|
|
|
|
|
// Something like {\textquoteright} (e.g.
|
|
|
|
|
// from writer2latex). LyX writes
|
|
|
|
|
// \textquoteright{}, so we may skip the
|
|
|
|
|
// braces here for better readability.
|
|
|
|
|
parse_text_snippet(p, os, FLAG_BRACE_LAST,
|
|
|
|
|
outer, context);
|
2013-02-22 00:11:30 +00:00
|
|
|
|
} else if (p.next_token().asInput() == "\\ascii") {
|
|
|
|
|
// handle the \ascii characters
|
|
|
|
|
// (the case without braces is handled later)
|
|
|
|
|
// the code is "{\ascii\xxx}"
|
|
|
|
|
p.get_token(); // eat \ascii
|
|
|
|
|
string name2 = p.get_token().asInput();
|
|
|
|
|
p.get_token(); // eat the final '}'
|
|
|
|
|
string const name = "{\\ascii" + name2 + "}";
|
|
|
|
|
bool termination;
|
|
|
|
|
docstring rem;
|
|
|
|
|
set<string> req;
|
|
|
|
|
// get the character from unicodesymbols
|
|
|
|
|
docstring s = encodings.fromLaTeXCommand(from_utf8(name),
|
|
|
|
|
Encodings::TEXT_CMD, termination, rem, &req);
|
|
|
|
|
if (!s.empty()) {
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
os << to_utf8(s);
|
2013-02-24 14:44:16 +00:00
|
|
|
|
if (!rem.empty())
|
|
|
|
|
output_ert_inset(os,
|
|
|
|
|
to_utf8(rem), context);
|
2013-02-24 18:43:13 +00:00
|
|
|
|
for (set<string>::const_iterator it = req.begin();
|
|
|
|
|
it != req.end(); ++it)
|
|
|
|
|
preamble.registerAutomaticallyLoadedPackage(*it);
|
2013-02-22 00:11:30 +00:00
|
|
|
|
} else
|
|
|
|
|
// we did not find a non-ert version
|
|
|
|
|
output_ert_inset(os, name, context);
|
2011-11-20 20:30:12 +00:00
|
|
|
|
} else {
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2004-06-28 06:53:12 +00:00
|
|
|
|
// special handling of font attribute changes
|
2003-11-05 10:14:13 +00:00
|
|
|
|
Token const prev = p.prev_token();
|
2007-04-29 18:17:15 +00:00
|
|
|
|
TeXFont const oldFont = context.font;
|
2005-07-26 11:58:43 +00:00
|
|
|
|
if (next.character() == '[' ||
|
|
|
|
|
next.character() == ']' ||
|
|
|
|
|
next.character() == '*') {
|
|
|
|
|
p.get_token();
|
|
|
|
|
if (p.next_token().cat() == catEnd) {
|
2008-11-16 17:02:00 +00:00
|
|
|
|
os << next.cs();
|
2005-07-26 11:58:43 +00:00
|
|
|
|
p.get_token();
|
|
|
|
|
} else {
|
|
|
|
|
p.putback();
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "{", context);
|
2005-07-26 11:58:43 +00:00
|
|
|
|
parse_text_snippet(p, os,
|
|
|
|
|
FLAG_BRACE_LAST,
|
|
|
|
|
outer, context);
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "}", context);
|
2005-07-26 11:58:43 +00:00
|
|
|
|
}
|
|
|
|
|
} else if (! context.new_layout_allowed) {
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "{", context);
|
2005-07-26 11:58:43 +00:00
|
|
|
|
parse_text_snippet(p, os, FLAG_BRACE_LAST,
|
2006-04-05 23:56:29 +00:00
|
|
|
|
outer, context);
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "}", context);
|
2005-07-26 11:58:43 +00:00
|
|
|
|
} else if (is_known(next.cs(), known_sizes)) {
|
|
|
|
|
// next will change the size, so we must
|
|
|
|
|
// reset it here
|
|
|
|
|
parse_text_snippet(p, os, FLAG_BRACE_LAST,
|
2006-04-05 23:56:29 +00:00
|
|
|
|
outer, context);
|
2005-02-02 12:40:50 +00:00
|
|
|
|
if (!context.atParagraphStart())
|
2005-02-02 18:25:56 +00:00
|
|
|
|
os << "\n\\size "
|
2005-02-02 12:40:50 +00:00
|
|
|
|
<< context.font.size << "\n";
|
|
|
|
|
} else if (is_known(next.cs(), known_font_families)) {
|
2005-07-26 11:58:43 +00:00
|
|
|
|
// next will change the font family, so we
|
|
|
|
|
// must reset it here
|
|
|
|
|
parse_text_snippet(p, os, FLAG_BRACE_LAST,
|
2006-04-05 23:56:29 +00:00
|
|
|
|
outer, context);
|
2005-02-02 12:40:50 +00:00
|
|
|
|
if (!context.atParagraphStart())
|
|
|
|
|
os << "\n\\family "
|
|
|
|
|
<< context.font.family << "\n";
|
|
|
|
|
} else if (is_known(next.cs(), known_font_series)) {
|
2005-07-26 11:58:43 +00:00
|
|
|
|
// next will change the font series, so we
|
|
|
|
|
// must reset it here
|
|
|
|
|
parse_text_snippet(p, os, FLAG_BRACE_LAST,
|
2006-04-05 23:56:29 +00:00
|
|
|
|
outer, context);
|
2005-02-02 12:40:50 +00:00
|
|
|
|
if (!context.atParagraphStart())
|
|
|
|
|
os << "\n\\series "
|
|
|
|
|
<< context.font.series << "\n";
|
|
|
|
|
} else if (is_known(next.cs(), known_font_shapes)) {
|
2005-07-26 11:58:43 +00:00
|
|
|
|
// next will change the font shape, so we
|
|
|
|
|
// must reset it here
|
|
|
|
|
parse_text_snippet(p, os, FLAG_BRACE_LAST,
|
2006-04-05 23:56:29 +00:00
|
|
|
|
outer, context);
|
2005-02-02 12:40:50 +00:00
|
|
|
|
if (!context.atParagraphStart())
|
|
|
|
|
os << "\n\\shape "
|
|
|
|
|
<< context.font.shape << "\n";
|
|
|
|
|
} else if (is_known(next.cs(), known_old_font_families) ||
|
2006-04-05 23:56:29 +00:00
|
|
|
|
is_known(next.cs(), known_old_font_series) ||
|
|
|
|
|
is_known(next.cs(), known_old_font_shapes)) {
|
2005-07-26 11:58:43 +00:00
|
|
|
|
// next will change the font family, series
|
2004-06-28 06:53:12 +00:00
|
|
|
|
// and shape, so we must reset it here
|
2005-07-26 11:58:43 +00:00
|
|
|
|
parse_text_snippet(p, os, FLAG_BRACE_LAST,
|
2006-04-05 23:56:29 +00:00
|
|
|
|
outer, context);
|
2005-02-02 12:40:50 +00:00
|
|
|
|
if (!context.atParagraphStart())
|
|
|
|
|
os << "\n\\family "
|
|
|
|
|
<< context.font.family
|
|
|
|
|
<< "\n\\series "
|
|
|
|
|
<< context.font.series
|
|
|
|
|
<< "\n\\shape "
|
|
|
|
|
<< context.font.shape << "\n";
|
|
|
|
|
} else {
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "{", context);
|
2005-07-26 11:58:43 +00:00
|
|
|
|
parse_text_snippet(p, os, FLAG_BRACE_LAST,
|
2006-04-05 23:56:29 +00:00
|
|
|
|
outer, context);
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "}", context);
|
2011-11-20 20:30:12 +00:00
|
|
|
|
}
|
2003-04-23 15:14:43 +00:00
|
|
|
|
}
|
2003-04-17 09:47:21 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
else if (t.cat() == catEnd) {
|
2003-07-28 23:50:24 +00:00
|
|
|
|
if (flags & FLAG_BRACE_LAST) {
|
2003-04-17 09:47:21 +00:00
|
|
|
|
return;
|
2003-07-28 23:50:24 +00:00
|
|
|
|
}
|
2003-04-17 09:47:21 +00:00
|
|
|
|
cerr << "stray '}' in text\n";
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "}", context);
|
2003-04-17 09:47:21 +00:00
|
|
|
|
}
|
|
|
|
|
|
2004-06-18 06:47:19 +00:00
|
|
|
|
else if (t.cat() == catComment)
|
|
|
|
|
parse_comment(p, os, t, context);
|
2003-04-17 09:47:21 +00:00
|
|
|
|
|
|
|
|
|
//
|
|
|
|
|
// control sequences
|
|
|
|
|
//
|
|
|
|
|
|
2013-02-24 18:00:17 +00:00
|
|
|
|
else if (t.cs() == "(" || t.cs() == "[") {
|
|
|
|
|
bool const simple = t.cs() == "(";
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2003-04-17 09:47:21 +00:00
|
|
|
|
begin_inset(os, "Formula");
|
2013-02-24 18:00:17 +00:00
|
|
|
|
os << " \\" << t.cs();
|
|
|
|
|
parse_math(p, os, simple ? FLAG_SIMPLE2 : FLAG_EQUATION, MATH_MODE);
|
|
|
|
|
os << '\\' << (simple ? ')' : ']');
|
2003-04-17 09:47:21 +00:00
|
|
|
|
end_inset(os);
|
2013-02-24 18:00:17 +00:00
|
|
|
|
if (!simple) {
|
|
|
|
|
// Prevent the conversion of a line break to a
|
|
|
|
|
// space (bug 7668). This does not change the
|
|
|
|
|
// output, but looks ugly in LyX.
|
|
|
|
|
eat_whitespace(p, os, context, false);
|
|
|
|
|
}
|
2003-04-17 09:47:21 +00:00
|
|
|
|
}
|
|
|
|
|
|
2003-07-28 21:58:09 +00:00
|
|
|
|
else if (t.cs() == "begin")
|
2011-10-30 13:57:49 +00:00
|
|
|
|
parse_environment(p, os, outer, last_env,
|
2012-04-19 18:46:08 +00:00
|
|
|
|
context);
|
2003-04-17 09:47:21 +00:00
|
|
|
|
|
|
|
|
|
else if (t.cs() == "end") {
|
|
|
|
|
if (flags & FLAG_END) {
|
|
|
|
|
// eat environment name
|
|
|
|
|
string const name = p.getArg('{', '}');
|
2003-04-23 15:14:43 +00:00
|
|
|
|
if (name != active_environment())
|
|
|
|
|
cerr << "\\end{" + name + "} does not match \\begin{"
|
|
|
|
|
+ active_environment() + "}\n";
|
2003-04-17 09:47:21 +00:00
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
p.error("found 'end' unexpectedly");
|
|
|
|
|
}
|
|
|
|
|
|
2003-04-23 15:14:43 +00:00
|
|
|
|
else if (t.cs() == "item") {
|
2003-09-09 18:27:24 +00:00
|
|
|
|
string s;
|
2011-11-13 10:40:07 +00:00
|
|
|
|
bool const optarg = p.hasOpt();
|
|
|
|
|
if (optarg) {
|
|
|
|
|
// FIXME: This swallows comments, but we cannot use
|
|
|
|
|
// eat_whitespace() since we must not output
|
|
|
|
|
// anything before the item.
|
2011-11-27 13:29:56 +00:00
|
|
|
|
p.skip_spaces(true);
|
|
|
|
|
s = p.verbatimOption();
|
2011-11-13 10:40:07 +00:00
|
|
|
|
} else
|
|
|
|
|
p.skip_spaces(false);
|
2003-11-05 10:14:13 +00:00
|
|
|
|
context.set_item();
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2005-08-22 09:51:44 +00:00
|
|
|
|
if (context.has_item) {
|
|
|
|
|
// An item in an unknown list-like environment
|
|
|
|
|
// FIXME: Do this in check_layout()!
|
|
|
|
|
context.has_item = false;
|
|
|
|
|
if (optarg)
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "\\item", context);
|
2005-08-22 09:51:44 +00:00
|
|
|
|
else
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "\\item ", context);
|
2005-08-22 09:51:44 +00:00
|
|
|
|
}
|
2003-10-23 11:46:33 +00:00
|
|
|
|
if (optarg) {
|
2003-11-05 10:14:13 +00:00
|
|
|
|
if (context.layout->labeltype != LABEL_MANUAL) {
|
2009-06-14 21:29:24 +00:00
|
|
|
|
// LyX does not support \item[\mybullet]
|
2004-06-28 06:53:12 +00:00
|
|
|
|
// in itemize environments
|
2011-11-13 10:40:07 +00:00
|
|
|
|
Parser p2(s + ']');
|
|
|
|
|
os << parse_text_snippet(p2,
|
|
|
|
|
FLAG_BRACK_LAST, outer, context);
|
2003-11-19 10:35:50 +00:00
|
|
|
|
} else if (!s.empty()) {
|
2011-11-13 10:40:07 +00:00
|
|
|
|
// LyX adds braces around the argument,
|
|
|
|
|
// so we need to remove them here.
|
|
|
|
|
if (s.size() > 2 && s[0] == '{' &&
|
|
|
|
|
s[s.size()-1] == '}')
|
|
|
|
|
s = s.substr(1, s.size()-2);
|
|
|
|
|
// If the argument contains a space we
|
|
|
|
|
// must put it into ERT: Otherwise LyX
|
|
|
|
|
// would misinterpret the space as
|
|
|
|
|
// item delimiter (bug 7663)
|
|
|
|
|
if (contains(s, ' ')) {
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, s, context);
|
2011-11-13 10:40:07 +00:00
|
|
|
|
} else {
|
|
|
|
|
Parser p2(s + ']');
|
|
|
|
|
os << parse_text_snippet(p2,
|
|
|
|
|
FLAG_BRACK_LAST,
|
|
|
|
|
outer, context);
|
|
|
|
|
}
|
2004-06-28 06:53:12 +00:00
|
|
|
|
// The space is needed to separate the
|
|
|
|
|
// item from the rest of the sentence.
|
2011-11-13 10:40:07 +00:00
|
|
|
|
os << ' ';
|
2004-06-18 06:47:19 +00:00
|
|
|
|
eat_whitespace(p, os, context, false);
|
2003-10-23 11:46:33 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2003-04-23 15:14:43 +00:00
|
|
|
|
}
|
2003-04-17 09:47:21 +00:00
|
|
|
|
|
2003-08-06 22:47:22 +00:00
|
|
|
|
else if (t.cs() == "bibitem") {
|
2003-11-05 10:14:13 +00:00
|
|
|
|
context.set_item();
|
2003-08-06 22:47:22 +00:00
|
|
|
|
context.check_layout(os);
|
2011-11-27 13:29:56 +00:00
|
|
|
|
eat_whitespace(p, os, context, false);
|
|
|
|
|
string label = convert_command_inset_arg(p.verbatimOption());
|
2011-02-18 20:55:50 +00:00
|
|
|
|
string key = convert_command_inset_arg(p.verbatim_item());
|
|
|
|
|
if (contains(label, '\\') || contains(key, '\\')) {
|
|
|
|
|
// LyX can't handle LaTeX commands in labels or keys
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, t.asInput() + '[' + label +
|
2011-02-18 20:55:50 +00:00
|
|
|
|
"]{" + p.verbatim_item() + '}',
|
|
|
|
|
context);
|
|
|
|
|
} else {
|
|
|
|
|
begin_command_inset(os, "bibitem", "bibitem");
|
|
|
|
|
os << "label \"" << label << "\"\n"
|
|
|
|
|
"key \"" << key << "\"\n";
|
|
|
|
|
end_inset(os);
|
|
|
|
|
}
|
2003-08-06 22:47:22 +00:00
|
|
|
|
}
|
2008-04-29 18:11:46 +00:00
|
|
|
|
|
2011-11-25 01:01:45 +00:00
|
|
|
|
else if (is_macro(p)) {
|
|
|
|
|
// catch the case of \def\inputGnumericTable
|
2011-12-04 16:00:00 +00:00
|
|
|
|
bool macro = true;
|
2011-11-25 01:01:45 +00:00
|
|
|
|
if (t.cs() == "def") {
|
2011-12-04 16:00:00 +00:00
|
|
|
|
Token second = p.next_token();
|
2011-11-25 01:01:45 +00:00
|
|
|
|
if (second.cs() == "inputGnumericTable") {
|
2011-12-04 16:00:00 +00:00
|
|
|
|
p.pushPosition();
|
|
|
|
|
p.get_token();
|
2011-11-25 01:01:45 +00:00
|
|
|
|
skip_braces(p);
|
|
|
|
|
Token third = p.get_token();
|
2011-12-04 16:00:00 +00:00
|
|
|
|
p.popPosition();
|
2011-11-25 01:01:45 +00:00
|
|
|
|
if (third.cs() == "input") {
|
2011-12-04 16:00:00 +00:00
|
|
|
|
p.get_token();
|
|
|
|
|
skip_braces(p);
|
|
|
|
|
p.get_token();
|
2011-11-25 01:01:45 +00:00
|
|
|
|
string name = normalize_filename(p.verbatim_item());
|
2012-10-03 11:23:27 +00:00
|
|
|
|
string const path = getMasterFilePath(true);
|
2011-11-25 01:01:45 +00:00
|
|
|
|
// We want to preserve relative / absolute filenames,
|
|
|
|
|
// therefore path is only used for testing
|
2011-12-07 22:48:44 +00:00
|
|
|
|
// The file extension is in every case ".tex".
|
|
|
|
|
// So we need to remove this extension and check for
|
|
|
|
|
// the original one.
|
2011-12-11 23:17:02 +00:00
|
|
|
|
name = removeExtension(name);
|
2011-11-25 01:01:45 +00:00
|
|
|
|
if (!makeAbsPath(name, path).exists()) {
|
2011-12-11 23:17:02 +00:00
|
|
|
|
char const * const Gnumeric_formats[] = {"gnumeric",
|
2011-11-25 01:01:45 +00:00
|
|
|
|
"ods", "xls", 0};
|
|
|
|
|
string const Gnumeric_name =
|
|
|
|
|
find_file(name, path, Gnumeric_formats);
|
|
|
|
|
if (!Gnumeric_name.empty())
|
|
|
|
|
name = Gnumeric_name;
|
|
|
|
|
}
|
2012-10-03 11:23:27 +00:00
|
|
|
|
FileName const absname = makeAbsPath(name, path);
|
|
|
|
|
if (absname.exists()) {
|
|
|
|
|
fix_child_filename(name);
|
|
|
|
|
copy_file(absname, name);
|
|
|
|
|
} else
|
2011-11-25 01:01:45 +00:00
|
|
|
|
cerr << "Warning: Could not find file '"
|
|
|
|
|
<< name << "'." << endl;
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
begin_inset(os, "External\n\ttemplate ");
|
|
|
|
|
os << "GnumericSpreadsheet\n\tfilename "
|
|
|
|
|
<< name << "\n";
|
|
|
|
|
end_inset(os);
|
|
|
|
|
context.check_layout(os);
|
2011-12-04 16:00:00 +00:00
|
|
|
|
macro = false;
|
2012-10-03 11:23:27 +00:00
|
|
|
|
// register the packages that are automatically loaded
|
2011-12-07 21:21:22 +00:00
|
|
|
|
// by the Gnumeric template
|
2011-12-13 19:40:05 +00:00
|
|
|
|
registerExternalTemplatePackages("GnumericSpreadsheet");
|
2011-11-25 01:01:45 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2011-12-04 16:00:00 +00:00
|
|
|
|
if (macro)
|
2011-11-25 01:01:45 +00:00
|
|
|
|
parse_macro(p, os, context);
|
|
|
|
|
}
|
2003-04-17 09:47:21 +00:00
|
|
|
|
|
2003-11-05 10:14:13 +00:00
|
|
|
|
else if (t.cs() == "noindent") {
|
2003-04-25 15:54:29 +00:00
|
|
|
|
p.skip_spaces();
|
2008-05-07 13:55:03 +00:00
|
|
|
|
context.add_par_extra_stuff("\\noindent\n");
|
2003-04-23 15:14:43 +00:00
|
|
|
|
}
|
2003-04-17 09:47:21 +00:00
|
|
|
|
|
2003-10-23 11:46:33 +00:00
|
|
|
|
else if (t.cs() == "appendix") {
|
2008-05-07 13:55:03 +00:00
|
|
|
|
context.add_par_extra_stuff("\\start_of_appendix\n");
|
2005-03-21 17:26:34 +00:00
|
|
|
|
// We need to start a new paragraph. Otherwise the
|
|
|
|
|
// appendix in 'bla\appendix\chapter{' would start
|
|
|
|
|
// too late.
|
|
|
|
|
context.new_paragraph(os);
|
|
|
|
|
// We need to make sure that the paragraph is
|
|
|
|
|
// generated even if it is empty. Otherwise the
|
|
|
|
|
// appendix in '\par\appendix\par\chapter{' would
|
|
|
|
|
// start too late.
|
|
|
|
|
context.check_layout(os);
|
2005-03-31 16:37:10 +00:00
|
|
|
|
// FIXME: This is a hack to prevent paragraph
|
|
|
|
|
// deletion if it is empty. Handle this better!
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os,
|
2005-03-31 16:37:10 +00:00
|
|
|
|
"%dummy comment inserted by tex2lyx to "
|
|
|
|
|
"ensure that this paragraph is not empty",
|
|
|
|
|
context);
|
2005-03-21 17:26:34 +00:00
|
|
|
|
// Both measures above may generate an additional
|
|
|
|
|
// empty paragraph, but that does not hurt, because
|
|
|
|
|
// whitespace does not matter here.
|
|
|
|
|
eat_whitespace(p, os, context, true);
|
2003-10-23 11:46:33 +00:00
|
|
|
|
}
|
|
|
|
|
|
2011-10-30 13:21:06 +00:00
|
|
|
|
// Must catch empty dates before findLayout is called below
|
|
|
|
|
else if (t.cs() == "date") {
|
2012-04-19 18:46:08 +00:00
|
|
|
|
eat_whitespace(p, os, context, false);
|
|
|
|
|
p.pushPosition();
|
2011-10-30 13:21:06 +00:00
|
|
|
|
string const date = p.verbatim_item();
|
2012-04-19 18:46:08 +00:00
|
|
|
|
p.popPosition();
|
|
|
|
|
if (date.empty()) {
|
2011-10-30 13:21:06 +00:00
|
|
|
|
preamble.suppressDate(true);
|
2012-04-19 18:46:08 +00:00
|
|
|
|
p.verbatim_item();
|
|
|
|
|
} else {
|
2011-10-30 13:21:06 +00:00
|
|
|
|
preamble.suppressDate(false);
|
|
|
|
|
if (context.new_layout_allowed &&
|
|
|
|
|
(newlayout = findLayout(context.textclass,
|
|
|
|
|
t.cs(), true))) {
|
|
|
|
|
// write the layout
|
|
|
|
|
output_command_layout(os, p, outer,
|
|
|
|
|
context, newlayout);
|
2012-04-19 18:46:08 +00:00
|
|
|
|
parse_text_snippet(p, os, FLAG_ITEM, outer, context);
|
|
|
|
|
if (!preamble.titleLayoutFound())
|
|
|
|
|
preamble.titleLayoutFound(newlayout->intitle);
|
2011-11-29 20:09:40 +00:00
|
|
|
|
set<string> const & req = newlayout->requires();
|
2012-05-28 22:21:22 +00:00
|
|
|
|
set<string>::const_iterator it = req.begin();
|
|
|
|
|
set<string>::const_iterator en = req.end();
|
|
|
|
|
for (; it != en; ++it)
|
2011-11-29 20:09:40 +00:00
|
|
|
|
preamble.registerAutomaticallyLoadedPackage(*it);
|
2011-10-30 13:21:06 +00:00
|
|
|
|
} else
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os,
|
2012-04-19 18:46:08 +00:00
|
|
|
|
"\\date{" + p.verbatim_item() + '}',
|
|
|
|
|
context);
|
2011-10-30 13:21:06 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2008-04-29 00:03:55 +00:00
|
|
|
|
// Starred section headings
|
2003-07-27 00:39:35 +00:00
|
|
|
|
// Must attempt to parse "Section*" before "Section".
|
|
|
|
|
else if ((p.next_token().asInput() == "*") &&
|
2006-04-05 23:56:29 +00:00
|
|
|
|
context.new_layout_allowed &&
|
2011-01-23 21:10:20 +00:00
|
|
|
|
(newlayout = findLayout(context.textclass, t.cs() + '*', true))) {
|
2008-04-29 00:03:55 +00:00
|
|
|
|
// write the layout
|
2003-07-27 00:39:35 +00:00
|
|
|
|
p.get_token();
|
2003-08-04 10:26:10 +00:00
|
|
|
|
output_command_layout(os, p, outer, context, newlayout);
|
2003-11-05 10:14:13 +00:00
|
|
|
|
p.skip_spaces();
|
2012-04-19 18:46:08 +00:00
|
|
|
|
if (!preamble.titleLayoutFound())
|
|
|
|
|
preamble.titleLayoutFound(newlayout->intitle);
|
2011-11-29 20:09:40 +00:00
|
|
|
|
set<string> const & req = newlayout->requires();
|
2012-05-28 20:41:32 +00:00
|
|
|
|
for (set<string>::const_iterator it = req.begin(); it != req.end(); ++it)
|
2011-11-29 20:09:40 +00:00
|
|
|
|
preamble.registerAutomaticallyLoadedPackage(*it);
|
2003-07-27 00:39:35 +00:00
|
|
|
|
}
|
|
|
|
|
|
2008-04-29 00:03:55 +00:00
|
|
|
|
// Section headings and the like
|
2005-07-26 11:58:43 +00:00
|
|
|
|
else if (context.new_layout_allowed &&
|
2011-01-23 21:10:20 +00:00
|
|
|
|
(newlayout = findLayout(context.textclass, t.cs(), true))) {
|
2008-04-29 00:03:55 +00:00
|
|
|
|
// write the layout
|
2003-08-04 10:26:10 +00:00
|
|
|
|
output_command_layout(os, p, outer, context, newlayout);
|
2003-11-05 10:14:13 +00:00
|
|
|
|
p.skip_spaces();
|
2012-04-19 18:46:08 +00:00
|
|
|
|
if (!preamble.titleLayoutFound())
|
|
|
|
|
preamble.titleLayoutFound(newlayout->intitle);
|
2011-11-29 20:09:40 +00:00
|
|
|
|
set<string> const & req = newlayout->requires();
|
2012-05-28 20:41:32 +00:00
|
|
|
|
for (set<string>::const_iterator it = req.begin(); it != req.end(); ++it)
|
2011-11-29 20:09:40 +00:00
|
|
|
|
preamble.registerAutomaticallyLoadedPackage(*it);
|
2003-04-17 09:47:21 +00:00
|
|
|
|
}
|
|
|
|
|
|
2009-06-14 14:21:20 +00:00
|
|
|
|
else if (t.cs() == "caption") {
|
|
|
|
|
p.skip_spaces();
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
p.skip_spaces();
|
2012-12-30 22:08:22 +00:00
|
|
|
|
begin_inset(os, "Caption Standard\n");
|
2012-10-05 21:44:21 +00:00
|
|
|
|
Context newcontext(true, context.textclass, 0, 0, context.font);
|
2011-01-07 19:58:31 +00:00
|
|
|
|
newcontext.check_layout(os);
|
2012-11-20 11:58:36 +00:00
|
|
|
|
// FIXME InsetArgument is now properly implemented in InsetLayout
|
|
|
|
|
// (for captions, but also for others)
|
2011-01-05 20:32:45 +00:00
|
|
|
|
if (p.next_token().cat() != catEscape &&
|
|
|
|
|
p.next_token().character() == '[') {
|
2009-06-14 14:21:20 +00:00
|
|
|
|
p.get_token(); // eat '['
|
2012-11-20 11:58:36 +00:00
|
|
|
|
begin_inset(os, "Argument 1\n");
|
2009-06-14 14:21:20 +00:00
|
|
|
|
os << "status collapsed\n";
|
|
|
|
|
parse_text_in_inset(p, os, FLAG_BRACK_LAST, outer, context);
|
|
|
|
|
end_inset(os);
|
|
|
|
|
eat_whitespace(p, os, context, false);
|
|
|
|
|
}
|
|
|
|
|
parse_text(p, os, FLAG_ITEM, outer, context);
|
|
|
|
|
context.check_end_layout(os);
|
|
|
|
|
// We don't need really a new paragraph, but
|
|
|
|
|
// we must make sure that the next item gets a \begin_layout.
|
|
|
|
|
context.new_paragraph(os);
|
|
|
|
|
end_inset(os);
|
2007-07-02 16:02:41 +00:00
|
|
|
|
p.skip_spaces();
|
2011-01-07 19:58:31 +00:00
|
|
|
|
newcontext.check_end_layout(os);
|
2007-07-02 16:02:41 +00:00
|
|
|
|
}
|
|
|
|
|
|
2011-10-27 02:29:03 +00:00
|
|
|
|
else if (t.cs() == "subfloat") {
|
|
|
|
|
// the syntax is \subfloat[caption]{content}
|
|
|
|
|
// if it is a table of figure depends on the surrounding float
|
|
|
|
|
bool has_caption = false;
|
|
|
|
|
p.skip_spaces();
|
|
|
|
|
// do nothing if there is no outer float
|
|
|
|
|
if (!float_type.empty()) {
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
p.skip_spaces();
|
|
|
|
|
begin_inset(os, "Float " + float_type + "\n");
|
|
|
|
|
os << "wide false"
|
|
|
|
|
<< "\nsideways false"
|
|
|
|
|
<< "\nstatus collapsed\n\n";
|
|
|
|
|
// test for caption
|
|
|
|
|
string caption;
|
|
|
|
|
if (p.next_token().cat() != catEscape &&
|
|
|
|
|
p.next_token().character() == '[') {
|
|
|
|
|
p.get_token(); // eat '['
|
|
|
|
|
caption = parse_text_snippet(p, FLAG_BRACK_LAST, outer, context);
|
|
|
|
|
has_caption = true;
|
|
|
|
|
}
|
|
|
|
|
// the content
|
|
|
|
|
parse_text_in_inset(p, os, FLAG_ITEM, outer, context);
|
|
|
|
|
// the caption comes always as the last
|
|
|
|
|
if (has_caption) {
|
|
|
|
|
// we must make sure that the caption gets a \begin_layout
|
|
|
|
|
os << "\n\\begin_layout Plain Layout";
|
|
|
|
|
p.skip_spaces();
|
2012-12-30 22:08:22 +00:00
|
|
|
|
begin_inset(os, "Caption Standard\n");
|
2012-10-05 21:44:21 +00:00
|
|
|
|
Context newcontext(true, context.textclass,
|
|
|
|
|
0, 0, context.font);
|
2011-10-27 02:29:03 +00:00
|
|
|
|
newcontext.check_layout(os);
|
|
|
|
|
os << caption << "\n";
|
|
|
|
|
newcontext.check_end_layout(os);
|
|
|
|
|
// We don't need really a new paragraph, but
|
|
|
|
|
// we must make sure that the next item gets a \begin_layout.
|
|
|
|
|
//newcontext.new_paragraph(os);
|
|
|
|
|
end_inset(os);
|
|
|
|
|
p.skip_spaces();
|
|
|
|
|
}
|
|
|
|
|
// We don't need really a new paragraph, but
|
|
|
|
|
// we must make sure that the next item gets a \begin_layout.
|
|
|
|
|
if (has_caption)
|
|
|
|
|
context.new_paragraph(os);
|
|
|
|
|
end_inset(os);
|
|
|
|
|
p.skip_spaces();
|
|
|
|
|
context.check_end_layout(os);
|
|
|
|
|
// close the layout we opened
|
|
|
|
|
if (has_caption)
|
|
|
|
|
os << "\n\\end_layout\n";
|
|
|
|
|
} else {
|
|
|
|
|
// if the float type is not supported or there is no surrounding float
|
|
|
|
|
// output it as ERT
|
|
|
|
|
if (p.hasOpt()) {
|
|
|
|
|
string opt_arg = convert_command_inset_arg(p.getArg('[', ']'));
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, t.asInput() + '[' + opt_arg +
|
2011-10-27 02:29:03 +00:00
|
|
|
|
"]{" + p.verbatim_item() + '}', context);
|
|
|
|
|
} else
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, t.asInput() + "{" + p.verbatim_item() + '}', context);
|
2011-10-27 02:29:03 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2003-04-17 09:47:21 +00:00
|
|
|
|
else if (t.cs() == "includegraphics") {
|
2005-02-03 14:20:19 +00:00
|
|
|
|
bool const clip = p.next_token().asInput() == "*";
|
|
|
|
|
if (clip)
|
|
|
|
|
p.get_token();
|
2010-12-12 17:47:36 +00:00
|
|
|
|
string const arg = p.getArg('[', ']');
|
|
|
|
|
map<string, string> opts;
|
|
|
|
|
vector<string> keys;
|
|
|
|
|
split_map(arg, opts, keys);
|
2005-02-03 14:20:19 +00:00
|
|
|
|
if (clip)
|
|
|
|
|
opts["clip"] = string();
|
2005-04-15 14:04:13 +00:00
|
|
|
|
string name = normalize_filename(p.verbatim_item());
|
2003-09-09 18:27:24 +00:00
|
|
|
|
|
2012-10-03 11:23:27 +00:00
|
|
|
|
string const path = getMasterFilePath(true);
|
2004-07-29 17:03:37 +00:00
|
|
|
|
// We want to preserve relative / absolute filenames,
|
|
|
|
|
// therefore path is only used for testing
|
2007-10-18 19:29:32 +00:00
|
|
|
|
if (!makeAbsPath(name, path).exists()) {
|
2004-07-29 17:03:37 +00:00
|
|
|
|
// The file extension is probably missing.
|
|
|
|
|
// Now try to find it out.
|
|
|
|
|
string const dvips_name =
|
|
|
|
|
find_file(name, path,
|
2006-04-05 23:56:29 +00:00
|
|
|
|
known_dvips_graphics_formats);
|
2004-07-29 17:03:37 +00:00
|
|
|
|
string const pdftex_name =
|
|
|
|
|
find_file(name, path,
|
2006-04-05 23:56:29 +00:00
|
|
|
|
known_pdftex_graphics_formats);
|
2004-07-29 17:03:37 +00:00
|
|
|
|
if (!dvips_name.empty()) {
|
|
|
|
|
if (!pdftex_name.empty()) {
|
|
|
|
|
cerr << "This file contains the "
|
2006-04-05 23:56:29 +00:00
|
|
|
|
"latex snippet\n"
|
|
|
|
|
"\"\\includegraphics{"
|
2004-07-29 17:03:37 +00:00
|
|
|
|
<< name << "}\".\n"
|
2006-04-05 23:56:29 +00:00
|
|
|
|
"However, files\n\""
|
2004-07-29 17:03:37 +00:00
|
|
|
|
<< dvips_name << "\" and\n\""
|
|
|
|
|
<< pdftex_name << "\"\n"
|
2006-04-05 23:56:29 +00:00
|
|
|
|
"both exist, so I had to make a "
|
|
|
|
|
"choice and took the first one.\n"
|
|
|
|
|
"Please move the unwanted one "
|
|
|
|
|
"someplace else and try again\n"
|
|
|
|
|
"if my choice was wrong."
|
2004-07-29 17:03:37 +00:00
|
|
|
|
<< endl;
|
|
|
|
|
}
|
|
|
|
|
name = dvips_name;
|
2010-12-30 20:29:33 +00:00
|
|
|
|
} else if (!pdftex_name.empty()) {
|
2004-07-29 17:03:37 +00:00
|
|
|
|
name = pdftex_name;
|
2010-12-30 20:29:33 +00:00
|
|
|
|
pdflatex = true;
|
|
|
|
|
}
|
2004-07-29 17:03:37 +00:00
|
|
|
|
}
|
|
|
|
|
|
2012-10-03 11:23:27 +00:00
|
|
|
|
FileName const absname = makeAbsPath(name, path);
|
|
|
|
|
if (absname.exists()) {
|
|
|
|
|
fix_child_filename(name);
|
|
|
|
|
copy_file(absname, name);
|
|
|
|
|
} else
|
2005-05-25 16:01:19 +00:00
|
|
|
|
cerr << "Warning: Could not find graphics file '"
|
|
|
|
|
<< name << "'." << endl;
|
|
|
|
|
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2003-04-17 09:47:21 +00:00
|
|
|
|
begin_inset(os, "Graphics ");
|
|
|
|
|
os << "\n\tfilename " << name << '\n';
|
|
|
|
|
if (opts.find("width") != opts.end())
|
2003-08-07 22:59:53 +00:00
|
|
|
|
os << "\twidth "
|
|
|
|
|
<< translate_len(opts["width"]) << '\n';
|
2003-04-17 09:47:21 +00:00
|
|
|
|
if (opts.find("height") != opts.end())
|
2003-08-07 22:59:53 +00:00
|
|
|
|
os << "\theight "
|
|
|
|
|
<< translate_len(opts["height"]) << '\n';
|
2003-10-23 11:46:33 +00:00
|
|
|
|
if (opts.find("scale") != opts.end()) {
|
|
|
|
|
istringstream iss(opts["scale"]);
|
|
|
|
|
double val;
|
|
|
|
|
iss >> val;
|
|
|
|
|
val = val*100;
|
|
|
|
|
os << "\tscale " << val << '\n';
|
|
|
|
|
}
|
2010-12-12 17:47:36 +00:00
|
|
|
|
if (opts.find("angle") != opts.end()) {
|
2003-10-23 11:46:33 +00:00
|
|
|
|
os << "\trotateAngle "
|
|
|
|
|
<< opts["angle"] << '\n';
|
2010-12-12 17:47:36 +00:00
|
|
|
|
vector<string>::const_iterator a =
|
|
|
|
|
find(keys.begin(), keys.end(), "angle");
|
|
|
|
|
vector<string>::const_iterator s =
|
|
|
|
|
find(keys.begin(), keys.end(), "width");
|
|
|
|
|
if (s == keys.end())
|
|
|
|
|
s = find(keys.begin(), keys.end(), "height");
|
|
|
|
|
if (s == keys.end())
|
|
|
|
|
s = find(keys.begin(), keys.end(), "scale");
|
|
|
|
|
if (s != keys.end() && distance(s, a) > 0)
|
|
|
|
|
os << "\tscaleBeforeRotation\n";
|
|
|
|
|
}
|
2003-10-23 11:46:33 +00:00
|
|
|
|
if (opts.find("origin") != opts.end()) {
|
|
|
|
|
ostringstream ss;
|
|
|
|
|
string const opt = opts["origin"];
|
|
|
|
|
if (opt.find('l') != string::npos) ss << "left";
|
|
|
|
|
if (opt.find('r') != string::npos) ss << "right";
|
|
|
|
|
if (opt.find('c') != string::npos) ss << "center";
|
|
|
|
|
if (opt.find('t') != string::npos) ss << "Top";
|
|
|
|
|
if (opt.find('b') != string::npos) ss << "Bottom";
|
|
|
|
|
if (opt.find('B') != string::npos) ss << "Baseline";
|
2003-11-19 10:35:50 +00:00
|
|
|
|
if (!ss.str().empty())
|
2003-10-23 11:46:33 +00:00
|
|
|
|
os << "\trotateOrigin " << ss.str() << '\n';
|
|
|
|
|
else
|
|
|
|
|
cerr << "Warning: Ignoring unknown includegraphics origin argument '" << opt << "'\n";
|
|
|
|
|
}
|
|
|
|
|
if (opts.find("keepaspectratio") != opts.end())
|
|
|
|
|
os << "\tkeepAspectRatio\n";
|
|
|
|
|
if (opts.find("clip") != opts.end())
|
|
|
|
|
os << "\tclip\n";
|
|
|
|
|
if (opts.find("draft") != opts.end())
|
|
|
|
|
os << "\tdraft\n";
|
|
|
|
|
if (opts.find("bb") != opts.end())
|
|
|
|
|
os << "\tBoundingBox "
|
|
|
|
|
<< opts["bb"] << '\n';
|
|
|
|
|
int numberOfbbOptions = 0;
|
|
|
|
|
if (opts.find("bbllx") != opts.end())
|
|
|
|
|
numberOfbbOptions++;
|
|
|
|
|
if (opts.find("bblly") != opts.end())
|
|
|
|
|
numberOfbbOptions++;
|
|
|
|
|
if (opts.find("bburx") != opts.end())
|
|
|
|
|
numberOfbbOptions++;
|
|
|
|
|
if (opts.find("bbury") != opts.end())
|
|
|
|
|
numberOfbbOptions++;
|
|
|
|
|
if (numberOfbbOptions == 4)
|
|
|
|
|
os << "\tBoundingBox "
|
2007-12-11 17:14:01 +00:00
|
|
|
|
<< opts["bbllx"] << " " << opts["bblly"] << " "
|
|
|
|
|
<< opts["bburx"] << " " << opts["bbury"] << '\n';
|
2003-10-23 11:46:33 +00:00
|
|
|
|
else if (numberOfbbOptions > 0)
|
|
|
|
|
cerr << "Warning: Ignoring incomplete includegraphics boundingbox arguments.\n";
|
|
|
|
|
numberOfbbOptions = 0;
|
|
|
|
|
if (opts.find("natwidth") != opts.end())
|
|
|
|
|
numberOfbbOptions++;
|
|
|
|
|
if (opts.find("natheight") != opts.end())
|
|
|
|
|
numberOfbbOptions++;
|
|
|
|
|
if (numberOfbbOptions == 2)
|
|
|
|
|
os << "\tBoundingBox 0bp 0bp "
|
2007-12-11 17:14:01 +00:00
|
|
|
|
<< opts["natwidth"] << " " << opts["natheight"] << '\n';
|
2003-10-23 11:46:33 +00:00
|
|
|
|
else if (numberOfbbOptions > 0)
|
|
|
|
|
cerr << "Warning: Ignoring incomplete includegraphics boundingbox arguments.\n";
|
|
|
|
|
ostringstream special;
|
|
|
|
|
if (opts.find("hiresbb") != opts.end())
|
|
|
|
|
special << "hiresbb,";
|
|
|
|
|
if (opts.find("trim") != opts.end())
|
|
|
|
|
special << "trim,";
|
|
|
|
|
if (opts.find("viewport") != opts.end())
|
|
|
|
|
special << "viewport=" << opts["viewport"] << ',';
|
|
|
|
|
if (opts.find("totalheight") != opts.end())
|
|
|
|
|
special << "totalheight=" << opts["totalheight"] << ',';
|
|
|
|
|
if (opts.find("type") != opts.end())
|
|
|
|
|
special << "type=" << opts["type"] << ',';
|
|
|
|
|
if (opts.find("ext") != opts.end())
|
|
|
|
|
special << "ext=" << opts["ext"] << ',';
|
|
|
|
|
if (opts.find("read") != opts.end())
|
|
|
|
|
special << "read=" << opts["read"] << ',';
|
|
|
|
|
if (opts.find("command") != opts.end())
|
|
|
|
|
special << "command=" << opts["command"] << ',';
|
|
|
|
|
string s_special = special.str();
|
2003-11-19 10:35:50 +00:00
|
|
|
|
if (!s_special.empty()) {
|
2003-10-23 11:46:33 +00:00
|
|
|
|
// We had special arguments. Remove the trailing ','.
|
|
|
|
|
os << "\tspecial " << s_special.substr(0, s_special.size() - 1) << '\n';
|
|
|
|
|
}
|
|
|
|
|
// TODO: Handle the unknown settings better.
|
|
|
|
|
// Warn about invalid options.
|
2004-06-18 06:47:19 +00:00
|
|
|
|
// Check whether some option was given twice.
|
2003-04-17 09:47:21 +00:00
|
|
|
|
end_inset(os);
|
2011-12-13 19:40:05 +00:00
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("graphicx");
|
2003-04-17 09:47:21 +00:00
|
|
|
|
}
|
2003-09-09 18:27:24 +00:00
|
|
|
|
|
2005-03-07 12:30:44 +00:00
|
|
|
|
else if (t.cs() == "footnote" ||
|
2006-04-05 23:56:29 +00:00
|
|
|
|
(t.cs() == "thanks" && context.layout->intitle)) {
|
2003-11-05 10:14:13 +00:00
|
|
|
|
p.skip_spaces();
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2003-04-17 09:47:21 +00:00
|
|
|
|
begin_inset(os, "Foot\n");
|
2003-12-19 10:40:07 +00:00
|
|
|
|
os << "status collapsed\n\n";
|
2003-08-04 10:26:10 +00:00
|
|
|
|
parse_text_in_inset(p, os, FLAG_ITEM, false, context);
|
2003-04-17 09:47:21 +00:00
|
|
|
|
end_inset(os);
|
|
|
|
|
}
|
|
|
|
|
|
2003-04-23 15:14:43 +00:00
|
|
|
|
else if (t.cs() == "marginpar") {
|
2003-11-05 10:14:13 +00:00
|
|
|
|
p.skip_spaces();
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2003-04-23 15:14:43 +00:00
|
|
|
|
begin_inset(os, "Marginal\n");
|
2003-12-19 10:40:07 +00:00
|
|
|
|
os << "status collapsed\n\n";
|
2003-08-04 10:26:10 +00:00
|
|
|
|
parse_text_in_inset(p, os, FLAG_ITEM, false, context);
|
2003-04-23 15:14:43 +00:00
|
|
|
|
end_inset(os);
|
2003-08-04 10:26:10 +00:00
|
|
|
|
}
|
|
|
|
|
|
2012-03-05 22:04:22 +00:00
|
|
|
|
else if (t.cs() == "lstinline") {
|
|
|
|
|
p.skip_spaces();
|
|
|
|
|
parse_listings(p, os, context, true);
|
|
|
|
|
}
|
|
|
|
|
|
2003-08-04 10:26:10 +00:00
|
|
|
|
else if (t.cs() == "ensuremath") {
|
2003-11-05 10:14:13 +00:00
|
|
|
|
p.skip_spaces();
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2005-07-26 11:58:43 +00:00
|
|
|
|
string const s = p.verbatim_item();
|
2008-11-16 21:51:18 +00:00
|
|
|
|
//FIXME: this never triggers in UTF8
|
2007-05-10 10:35:57 +00:00
|
|
|
|
if (s == "\xb1" || s == "\xb3" || s == "\xb2" || s == "\xb5")
|
2003-08-04 10:26:10 +00:00
|
|
|
|
os << s;
|
|
|
|
|
else
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "\\ensuremath{" + s + "}",
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context);
|
2003-04-23 15:14:43 +00:00
|
|
|
|
}
|
2003-04-17 09:47:21 +00:00
|
|
|
|
|
2003-11-05 10:14:13 +00:00
|
|
|
|
else if (t.cs() == "makeindex" || t.cs() == "maketitle") {
|
2012-04-19 18:46:08 +00:00
|
|
|
|
if (preamble.titleLayoutFound()) {
|
2011-10-30 13:57:49 +00:00
|
|
|
|
// swallow this
|
|
|
|
|
skip_spaces_braces(p);
|
|
|
|
|
} else
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, t.asInput(), context);
|
2003-11-05 10:14:13 +00:00
|
|
|
|
}
|
2003-04-17 09:47:21 +00:00
|
|
|
|
|
2012-04-16 19:40:59 +00:00
|
|
|
|
else if (t.cs() == "tableofcontents" || t.cs() == "lstlistoflistings") {
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2012-04-16 19:40:59 +00:00
|
|
|
|
begin_command_inset(os, "toc", t.cs());
|
2003-07-28 14:06:04 +00:00
|
|
|
|
end_inset(os);
|
2010-12-19 14:54:23 +00:00
|
|
|
|
skip_spaces_braces(p);
|
2012-04-16 19:40:59 +00:00
|
|
|
|
if (t.cs() == "lstlistoflistings")
|
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("listings");
|
2003-07-28 14:06:04 +00:00
|
|
|
|
}
|
|
|
|
|
|
2013-02-24 18:00:17 +00:00
|
|
|
|
else if (t.cs() == "listoffigures" || t.cs() == "listoftables") {
|
2003-08-07 22:59:53 +00:00
|
|
|
|
context.check_layout(os);
|
2013-02-24 18:00:17 +00:00
|
|
|
|
if (t.cs() == "listoffigures")
|
|
|
|
|
begin_inset(os, "FloatList figure\n");
|
|
|
|
|
else
|
|
|
|
|
begin_inset(os, "FloatList table\n");
|
2003-08-07 22:59:53 +00:00
|
|
|
|
end_inset(os);
|
2010-12-19 14:54:23 +00:00
|
|
|
|
skip_spaces_braces(p);
|
2003-08-07 22:59:53 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
else if (t.cs() == "listof") {
|
2003-11-05 10:14:13 +00:00
|
|
|
|
p.skip_spaces(true);
|
2009-12-09 11:53:16 +00:00
|
|
|
|
string const name = p.get_token().cs();
|
2003-08-07 22:59:53 +00:00
|
|
|
|
if (context.textclass.floats().typeExist(name)) {
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
begin_inset(os, "FloatList ");
|
|
|
|
|
os << name << "\n";
|
|
|
|
|
end_inset(os);
|
2007-11-28 00:03:11 +00:00
|
|
|
|
p.get_token(); // swallow second arg
|
2003-09-09 18:27:24 +00:00
|
|
|
|
} else
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "\\listof{" + name + "}", context);
|
2003-08-07 22:59:53 +00:00
|
|
|
|
}
|
2003-04-17 09:47:21 +00:00
|
|
|
|
|
2011-11-20 17:03:00 +00:00
|
|
|
|
else if ((where = is_known(t.cs(), known_text_font_families)))
|
2004-06-28 06:53:12 +00:00
|
|
|
|
parse_text_attributes(p, os, FLAG_ITEM, outer,
|
2011-11-20 17:03:00 +00:00
|
|
|
|
context, "\\family", context.font.family,
|
|
|
|
|
known_coded_font_families[where - known_text_font_families]);
|
2004-06-28 06:53:12 +00:00
|
|
|
|
|
2011-11-20 17:03:00 +00:00
|
|
|
|
else if ((where = is_known(t.cs(), known_text_font_series)))
|
2004-06-28 06:53:12 +00:00
|
|
|
|
parse_text_attributes(p, os, FLAG_ITEM, outer,
|
2011-11-20 17:03:00 +00:00
|
|
|
|
context, "\\series", context.font.series,
|
|
|
|
|
known_coded_font_series[where - known_text_font_series]);
|
2004-06-28 06:53:12 +00:00
|
|
|
|
|
2011-11-20 17:03:00 +00:00
|
|
|
|
else if ((where = is_known(t.cs(), known_text_font_shapes)))
|
2004-06-28 06:53:12 +00:00
|
|
|
|
parse_text_attributes(p, os, FLAG_ITEM, outer,
|
2011-11-20 17:03:00 +00:00
|
|
|
|
context, "\\shape", context.font.shape,
|
|
|
|
|
known_coded_font_shapes[where - known_text_font_shapes]);
|
2004-06-28 06:53:12 +00:00
|
|
|
|
|
|
|
|
|
else if (t.cs() == "textnormal" || t.cs() == "normalfont") {
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2007-04-29 18:17:15 +00:00
|
|
|
|
TeXFont oldFont = context.font;
|
2004-06-28 06:53:12 +00:00
|
|
|
|
context.font.init();
|
|
|
|
|
context.font.size = oldFont.size;
|
2004-08-16 11:27:51 +00:00
|
|
|
|
os << "\n\\family " << context.font.family << "\n";
|
|
|
|
|
os << "\n\\series " << context.font.series << "\n";
|
|
|
|
|
os << "\n\\shape " << context.font.shape << "\n";
|
2004-06-28 06:53:12 +00:00
|
|
|
|
if (t.cs() == "textnormal") {
|
|
|
|
|
parse_text_snippet(p, os, FLAG_ITEM, outer, context);
|
2005-07-13 11:38:55 +00:00
|
|
|
|
output_font_change(os, context.font, oldFont);
|
2004-06-28 06:53:12 +00:00
|
|
|
|
context.font = oldFont;
|
|
|
|
|
} else
|
|
|
|
|
eat_whitespace(p, os, context, false);
|
2003-04-17 09:47:21 +00:00
|
|
|
|
}
|
|
|
|
|
|
2008-04-20 23:16:55 +00:00
|
|
|
|
else if (t.cs() == "textcolor") {
|
|
|
|
|
// scheme is \textcolor{color name}{text}
|
|
|
|
|
string const color = p.verbatim_item();
|
|
|
|
|
// we only support the predefined colors of the color package
|
|
|
|
|
if (color == "black" || color == "blue" || color == "cyan"
|
|
|
|
|
|| color == "green" || color == "magenta" || color == "red"
|
|
|
|
|
|| color == "white" || color == "yellow") {
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
os << "\n\\color " << color << "\n";
|
|
|
|
|
parse_text_snippet(p, os, FLAG_ITEM, outer, context);
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
os << "\n\\color inherit\n";
|
2011-10-30 18:12:49 +00:00
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("color");
|
2008-04-20 23:16:55 +00:00
|
|
|
|
} else
|
|
|
|
|
// for custom defined colors
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, t.asInput() + "{" + color + "}", context);
|
2008-04-20 23:16:55 +00:00
|
|
|
|
}
|
|
|
|
|
|
2011-06-21 13:24:26 +00:00
|
|
|
|
else if (t.cs() == "underbar" || t.cs() == "uline") {
|
2011-10-21 19:57:03 +00:00
|
|
|
|
// \underbar is not 100% correct (LyX outputs \uline
|
|
|
|
|
// of ulem.sty). The difference is that \ulem allows
|
|
|
|
|
// line breaks, and \underbar does not.
|
2005-01-06 13:22:20 +00:00
|
|
|
|
// Do NOT handle \underline.
|
|
|
|
|
// \underbar cuts through y, g, q, p etc.,
|
|
|
|
|
// \underline does not.
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2004-08-16 11:27:51 +00:00
|
|
|
|
os << "\n\\bar under\n";
|
2003-08-04 10:26:10 +00:00
|
|
|
|
parse_text_snippet(p, os, FLAG_ITEM, outer, context);
|
2005-07-26 11:58:43 +00:00
|
|
|
|
context.check_layout(os);
|
2004-08-16 11:27:51 +00:00
|
|
|
|
os << "\n\\bar default\n";
|
2011-10-30 18:51:41 +00:00
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("ulem");
|
2003-04-17 09:47:21 +00:00
|
|
|
|
}
|
|
|
|
|
|
2011-10-23 10:58:05 +00:00
|
|
|
|
else if (t.cs() == "sout") {
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
os << "\n\\strikeout on\n";
|
|
|
|
|
parse_text_snippet(p, os, FLAG_ITEM, outer, context);
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
os << "\n\\strikeout default\n";
|
2011-10-30 18:51:41 +00:00
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("ulem");
|
2011-10-23 10:58:05 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
else if (t.cs() == "uuline" || t.cs() == "uwave" ||
|
|
|
|
|
t.cs() == "emph" || t.cs() == "noun") {
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2004-08-16 11:27:51 +00:00
|
|
|
|
os << "\n\\" << t.cs() << " on\n";
|
2003-08-04 10:26:10 +00:00
|
|
|
|
parse_text_snippet(p, os, FLAG_ITEM, outer, context);
|
2005-07-26 11:58:43 +00:00
|
|
|
|
context.check_layout(os);
|
2004-08-16 11:27:51 +00:00
|
|
|
|
os << "\n\\" << t.cs() << " default\n";
|
2011-10-30 18:51:41 +00:00
|
|
|
|
if (t.cs() == "uuline" || t.cs() == "uwave")
|
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("ulem");
|
2003-04-17 09:47:21 +00:00
|
|
|
|
}
|
|
|
|
|
|
2011-11-06 17:03:59 +00:00
|
|
|
|
else if (t.cs() == "lyxadded" || t.cs() == "lyxdeleted") {
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
string name = p.getArg('{', '}');
|
|
|
|
|
string localtime = p.getArg('{', '}');
|
|
|
|
|
preamble.registerAuthor(name);
|
|
|
|
|
Author const & author = preamble.getAuthor(name);
|
2013-01-09 12:45:58 +00:00
|
|
|
|
// from_asctime_utc() will fail if LyX decides to output the
|
2013-01-07 14:46:47 +00:00
|
|
|
|
// time in the text language.
|
2013-01-09 12:45:58 +00:00
|
|
|
|
time_t ptime = from_asctime_utc(localtime);
|
2011-11-06 17:03:59 +00:00
|
|
|
|
if (ptime == static_cast<time_t>(-1)) {
|
|
|
|
|
cerr << "Warning: Could not parse time `" << localtime
|
|
|
|
|
<< "´ for change tracking, using current time instead.\n";
|
|
|
|
|
ptime = current_time();
|
|
|
|
|
}
|
|
|
|
|
if (t.cs() == "lyxadded")
|
|
|
|
|
os << "\n\\change_inserted ";
|
|
|
|
|
else
|
|
|
|
|
os << "\n\\change_deleted ";
|
|
|
|
|
os << author.bufferId() << ' ' << ptime << '\n';
|
|
|
|
|
parse_text_snippet(p, os, FLAG_ITEM, outer, context);
|
|
|
|
|
bool dvipost = LaTeXPackages::isAvailable("dvipost");
|
|
|
|
|
bool xcolorulem = LaTeXPackages::isAvailable("ulem") &&
|
|
|
|
|
LaTeXPackages::isAvailable("xcolor");
|
|
|
|
|
// No need to test for luatex, since luatex comes in
|
|
|
|
|
// two flavours (dvi and pdf), like latex, and those
|
|
|
|
|
// are detected by pdflatex.
|
|
|
|
|
if (pdflatex || xetex) {
|
|
|
|
|
if (xcolorulem) {
|
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("ulem");
|
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("xcolor");
|
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("pdfcolmk");
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
if (dvipost) {
|
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("dvipost");
|
|
|
|
|
} else if (xcolorulem) {
|
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("ulem");
|
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("xcolor");
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2013-02-17 01:11:54 +00:00
|
|
|
|
else if (t.cs() == "textipa") {
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
begin_inset(os, "IPA\n");
|
2013-02-17 21:39:56 +00:00
|
|
|
|
parse_text_in_inset(p, os, FLAG_ITEM, outer, context);
|
2013-02-17 01:11:54 +00:00
|
|
|
|
end_inset(os);
|
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("tipa");
|
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("tipx");
|
|
|
|
|
}
|
|
|
|
|
|
2013-02-18 02:23:34 +00:00
|
|
|
|
else if (t.cs() == "texttoptiebar" || t.cs() == "textbottomtiebar") {
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
begin_inset(os, "IPADeco " + t.cs().substr(4) + "\n");
|
|
|
|
|
os << "status open\n";
|
|
|
|
|
parse_text_in_inset(p, os, FLAG_ITEM, outer, context);
|
|
|
|
|
end_inset(os);
|
|
|
|
|
p.skip_spaces();
|
|
|
|
|
}
|
|
|
|
|
|
2013-02-24 14:44:16 +00:00
|
|
|
|
else if (t.cs() == "textvertline") {
|
|
|
|
|
// FIXME: This is not correct, \textvertline is higher than |
|
|
|
|
|
os << "|";
|
|
|
|
|
skip_braces(p);
|
|
|
|
|
continue;
|
2013-02-18 03:50:18 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
else if (t.cs() == "tone" ) {
|
|
|
|
|
context.check_layout(os);
|
2013-02-18 13:07:13 +00:00
|
|
|
|
// register the tone package
|
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("tone");
|
2013-02-18 03:50:18 +00:00
|
|
|
|
string content = trimSpaceAndEol(p.verbatim_item());
|
|
|
|
|
string command = t.asInput() + "{" + content + "}";
|
|
|
|
|
// some tones can be detected by unicodesymbols, some need special code
|
|
|
|
|
if (is_known(content, known_tones)) {
|
|
|
|
|
os << "\\IPAChar " << command << "\n";
|
|
|
|
|
continue;
|
|
|
|
|
}
|
2013-02-18 13:07:13 +00:00
|
|
|
|
// try to see whether the string is in unicodesymbols
|
2013-02-18 03:50:18 +00:00
|
|
|
|
bool termination;
|
|
|
|
|
docstring rem;
|
|
|
|
|
set<string> req;
|
|
|
|
|
docstring s = encodings.fromLaTeXCommand(from_utf8(command),
|
|
|
|
|
Encodings::TEXT_CMD | Encodings::MATH_CMD,
|
|
|
|
|
termination, rem, &req);
|
|
|
|
|
if (!s.empty()) {
|
|
|
|
|
os << to_utf8(s);
|
2013-02-24 14:44:16 +00:00
|
|
|
|
if (!rem.empty())
|
|
|
|
|
output_ert_inset(os, to_utf8(rem), context);
|
2013-02-24 18:43:13 +00:00
|
|
|
|
for (set<string>::const_iterator it = req.begin();
|
|
|
|
|
it != req.end(); ++it)
|
|
|
|
|
preamble.registerAutomaticallyLoadedPackage(*it);
|
2013-02-18 02:23:34 +00:00
|
|
|
|
} else
|
|
|
|
|
// we did not find a non-ert version
|
|
|
|
|
output_ert_inset(os, command, context);
|
|
|
|
|
}
|
|
|
|
|
|
2011-10-24 22:39:54 +00:00
|
|
|
|
else if (t.cs() == "phantom" || t.cs() == "hphantom" ||
|
|
|
|
|
t.cs() == "vphantom") {
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
if (t.cs() == "phantom")
|
|
|
|
|
begin_inset(os, "Phantom Phantom\n");
|
|
|
|
|
if (t.cs() == "hphantom")
|
2011-10-26 13:37:32 +00:00
|
|
|
|
begin_inset(os, "Phantom HPhantom\n");
|
2011-10-24 22:39:54 +00:00
|
|
|
|
if (t.cs() == "vphantom")
|
2011-10-26 13:37:32 +00:00
|
|
|
|
begin_inset(os, "Phantom VPhantom\n");
|
2011-10-24 22:39:54 +00:00
|
|
|
|
os << "status open\n";
|
2011-10-31 21:31:32 +00:00
|
|
|
|
parse_text_in_inset(p, os, FLAG_ITEM, outer, context,
|
|
|
|
|
"Phantom");
|
2011-10-24 22:39:54 +00:00
|
|
|
|
end_inset(os);
|
|
|
|
|
}
|
2011-10-26 05:02:37 +00:00
|
|
|
|
|
|
|
|
|
else if (t.cs() == "href") {
|
|
|
|
|
context.check_layout(os);
|
2012-10-04 22:12:18 +00:00
|
|
|
|
string target = convert_command_inset_arg(p.verbatim_item());
|
|
|
|
|
string name = convert_command_inset_arg(p.verbatim_item());
|
2011-10-26 05:02:37 +00:00
|
|
|
|
string type;
|
|
|
|
|
size_t i = target.find(':');
|
|
|
|
|
if (i != string::npos) {
|
|
|
|
|
type = target.substr(0, i + 1);
|
|
|
|
|
if (type == "mailto:" || type == "file:")
|
|
|
|
|
target = target.substr(i + 1);
|
|
|
|
|
// handle the case that name is equal to target, except of "http://"
|
|
|
|
|
else if (target.substr(i + 3) == name && type == "http:")
|
|
|
|
|
target = name;
|
|
|
|
|
}
|
|
|
|
|
begin_command_inset(os, "href", "href");
|
|
|
|
|
if (name != target)
|
|
|
|
|
os << "name \"" << name << "\"\n";
|
|
|
|
|
os << "target \"" << target << "\"\n";
|
|
|
|
|
if (type == "mailto:" || type == "file:")
|
|
|
|
|
os << "type \"" << type << "\"\n";
|
|
|
|
|
end_inset(os);
|
|
|
|
|
skip_spaces_braces(p);
|
|
|
|
|
}
|
2011-12-14 01:08:54 +00:00
|
|
|
|
|
2008-04-27 10:54:06 +00:00
|
|
|
|
else if (t.cs() == "lyxline") {
|
2011-10-22 16:58:32 +00:00
|
|
|
|
// swallow size argument (it is not used anyway)
|
|
|
|
|
p.getArg('{', '}');
|
|
|
|
|
if (!context.atParagraphStart()) {
|
|
|
|
|
// so our line is in the middle of a paragraph
|
|
|
|
|
// we need to add a new line, lest this line
|
|
|
|
|
// follow the other content on that line and
|
|
|
|
|
// run off the side of the page
|
|
|
|
|
// FIXME: This may create an empty paragraph,
|
|
|
|
|
// but without that it would not be
|
|
|
|
|
// possible to set noindent below.
|
|
|
|
|
// Fortunately LaTeX does not care
|
|
|
|
|
// about the empty paragraph.
|
|
|
|
|
context.new_paragraph(os);
|
|
|
|
|
}
|
2011-10-30 12:47:45 +00:00
|
|
|
|
if (preamble.indentParagraphs()) {
|
2011-10-22 16:58:32 +00:00
|
|
|
|
// we need to unindent, lest the line be too long
|
|
|
|
|
context.add_par_extra_stuff("\\noindent\n");
|
|
|
|
|
}
|
2008-04-27 10:54:06 +00:00
|
|
|
|
context.check_layout(os);
|
2011-10-22 16:58:32 +00:00
|
|
|
|
begin_command_inset(os, "line", "rule");
|
|
|
|
|
os << "offset \"0.5ex\"\n"
|
|
|
|
|
"width \"100line%\"\n"
|
|
|
|
|
"height \"1pt\"\n";
|
|
|
|
|
end_inset(os);
|
2008-04-27 10:54:06 +00:00
|
|
|
|
}
|
|
|
|
|
|
2011-10-24 21:07:00 +00:00
|
|
|
|
else if (t.cs() == "rule") {
|
2011-10-26 14:04:22 +00:00
|
|
|
|
string const offset = (p.hasOpt() ? p.getArg('[', ']') : string());
|
|
|
|
|
string const width = p.getArg('{', '}');
|
|
|
|
|
string const thickness = p.getArg('{', '}');
|
2011-10-24 21:07:00 +00:00
|
|
|
|
context.check_layout(os);
|
|
|
|
|
begin_command_inset(os, "line", "rule");
|
|
|
|
|
if (!offset.empty())
|
|
|
|
|
os << "offset \"" << translate_len(offset) << "\"\n";
|
|
|
|
|
os << "width \"" << translate_len(width) << "\"\n"
|
|
|
|
|
"height \"" << translate_len(thickness) << "\"\n";
|
|
|
|
|
end_inset(os);
|
|
|
|
|
}
|
|
|
|
|
|
2011-01-14 20:06:15 +00:00
|
|
|
|
else if (is_known(t.cs(), known_phrases) ||
|
|
|
|
|
(t.cs() == "protect" &&
|
|
|
|
|
p.next_token().cat() == catEscape &&
|
|
|
|
|
is_known(p.next_token().cs(), known_phrases))) {
|
|
|
|
|
// LyX sometimes puts a \protect in front, so we have to ignore it
|
2010-12-31 11:59:33 +00:00
|
|
|
|
// FIXME: This needs to be changed when bug 4752 is fixed.
|
2011-11-20 17:03:00 +00:00
|
|
|
|
where = is_known(
|
2011-01-14 20:06:15 +00:00
|
|
|
|
t.cs() == "protect" ? p.get_token().cs() : t.cs(),
|
|
|
|
|
known_phrases);
|
2010-12-30 21:56:55 +00:00
|
|
|
|
context.check_layout(os);
|
|
|
|
|
os << known_coded_phrases[where - known_phrases];
|
|
|
|
|
skip_spaces_braces(p);
|
|
|
|
|
}
|
|
|
|
|
|
2013-02-22 20:59:23 +00:00
|
|
|
|
// handle refstyle first to catch \eqref which can also occur
|
|
|
|
|
// without refstyle. Only recognize these commands if
|
|
|
|
|
// refstyle.sty was found in the preamble (otherwise \eqref
|
|
|
|
|
// and user defined ref commands could be misdetected).
|
|
|
|
|
else if ((where = is_known(t.cs(), known_refstyle_commands)) &&
|
|
|
|
|
preamble.refstyle()) {
|
2013-02-04 00:41:56 +00:00
|
|
|
|
context.check_layout(os);
|
|
|
|
|
begin_command_inset(os, "ref", "formatted");
|
|
|
|
|
os << "reference \"";
|
|
|
|
|
os << known_refstyle_prefixes[where - known_refstyle_commands]
|
|
|
|
|
<< ":";
|
|
|
|
|
os << convert_command_inset_arg(p.verbatim_item())
|
|
|
|
|
<< "\"\n";
|
|
|
|
|
end_inset(os);
|
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("refstyle");
|
|
|
|
|
}
|
|
|
|
|
|
2013-02-22 20:59:23 +00:00
|
|
|
|
// if refstyle is used, we must not convert \prettyref to a
|
|
|
|
|
// formatted reference, since that would result in a refstyle command.
|
|
|
|
|
else if ((where = is_known(t.cs(), known_ref_commands)) &&
|
|
|
|
|
(t.cs() != "prettyref" || !preamble.refstyle())) {
|
|
|
|
|
string const opt = p.getOpt();
|
|
|
|
|
if (opt.empty()) {
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
begin_command_inset(os, "ref",
|
|
|
|
|
known_coded_ref_commands[where - known_ref_commands]);
|
|
|
|
|
os << "reference \""
|
|
|
|
|
<< convert_command_inset_arg(p.verbatim_item())
|
|
|
|
|
<< "\"\n";
|
|
|
|
|
end_inset(os);
|
|
|
|
|
if (t.cs() == "vref" || t.cs() == "vpageref")
|
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("varioref");
|
|
|
|
|
else if (t.cs() == "prettyref")
|
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("prettyref");
|
|
|
|
|
} else {
|
|
|
|
|
// LyX does not yet support optional arguments of ref commands
|
|
|
|
|
output_ert_inset(os, t.asInput() + '[' + opt + "]{" +
|
|
|
|
|
p.verbatim_item() + '}', context);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2004-08-10 09:40:53 +00:00
|
|
|
|
else if (use_natbib &&
|
2006-04-05 23:56:29 +00:00
|
|
|
|
is_known(t.cs(), known_natbib_commands) &&
|
|
|
|
|
((t.cs() != "citefullauthor" &&
|
|
|
|
|
t.cs() != "citeyear" &&
|
|
|
|
|
t.cs() != "citeyearpar") ||
|
|
|
|
|
p.next_token().asInput() != "*")) {
|
2004-08-10 09:40:53 +00:00
|
|
|
|
context.check_layout(os);
|
2009-06-11 23:46:41 +00:00
|
|
|
|
string command = t.cs();
|
2004-08-10 09:40:53 +00:00
|
|
|
|
if (p.next_token().asInput() == "*") {
|
|
|
|
|
command += '*';
|
|
|
|
|
p.get_token();
|
|
|
|
|
}
|
2009-06-11 23:46:41 +00:00
|
|
|
|
if (command == "citefullauthor")
|
2004-08-10 09:40:53 +00:00
|
|
|
|
// alternative name for "\\citeauthor*"
|
2009-06-11 23:46:41 +00:00
|
|
|
|
command = "citeauthor*";
|
2004-08-10 09:40:53 +00:00
|
|
|
|
|
|
|
|
|
// text before the citation
|
|
|
|
|
string before;
|
|
|
|
|
// text after the citation
|
|
|
|
|
string after;
|
2007-11-27 20:30:22 +00:00
|
|
|
|
get_cite_arguments(p, true, before, after);
|
2004-08-10 09:40:53 +00:00
|
|
|
|
|
2009-06-11 23:46:41 +00:00
|
|
|
|
if (command == "cite") {
|
2004-08-10 09:40:53 +00:00
|
|
|
|
// \cite without optional argument means
|
|
|
|
|
// \citet, \cite with at least one optional
|
|
|
|
|
// argument means \citep.
|
|
|
|
|
if (before.empty() && after.empty())
|
2009-06-11 23:46:41 +00:00
|
|
|
|
command = "citet";
|
2004-08-10 09:40:53 +00:00
|
|
|
|
else
|
2009-06-11 23:46:41 +00:00
|
|
|
|
command = "citep";
|
2004-08-10 09:40:53 +00:00
|
|
|
|
}
|
|
|
|
|
if (before.empty() && after == "[]")
|
|
|
|
|
// avoid \citet[]{a}
|
|
|
|
|
after.erase();
|
|
|
|
|
else if (before == "[]" && after == "[]") {
|
|
|
|
|
// avoid \citet[][]{a}
|
|
|
|
|
before.erase();
|
|
|
|
|
after.erase();
|
|
|
|
|
}
|
2009-06-11 23:46:41 +00:00
|
|
|
|
// remove the brackets around after and before
|
|
|
|
|
if (!after.empty()) {
|
|
|
|
|
after.erase(0, 1);
|
|
|
|
|
after.erase(after.length() - 1, 1);
|
2011-02-18 20:55:50 +00:00
|
|
|
|
after = convert_command_inset_arg(after);
|
2009-06-11 23:46:41 +00:00
|
|
|
|
}
|
|
|
|
|
if (!before.empty()) {
|
|
|
|
|
before.erase(0, 1);
|
|
|
|
|
before.erase(before.length() - 1, 1);
|
2011-02-18 20:55:50 +00:00
|
|
|
|
before = convert_command_inset_arg(before);
|
2009-06-11 23:46:41 +00:00
|
|
|
|
}
|
2010-12-17 21:02:39 +00:00
|
|
|
|
begin_command_inset(os, "citation", command);
|
2009-06-11 23:46:41 +00:00
|
|
|
|
os << "after " << '"' << after << '"' << "\n";
|
|
|
|
|
os << "before " << '"' << before << '"' << "\n";
|
2011-02-18 20:55:50 +00:00
|
|
|
|
os << "key \""
|
|
|
|
|
<< convert_command_inset_arg(p.verbatim_item())
|
|
|
|
|
<< "\"\n";
|
2004-08-10 09:40:53 +00:00
|
|
|
|
end_inset(os);
|
2012-12-28 13:29:46 +00:00
|
|
|
|
// Need to set the cite engine if natbib is loaded by
|
|
|
|
|
// the document class directly
|
|
|
|
|
if (preamble.citeEngine() == "basic")
|
|
|
|
|
preamble.citeEngine("natbib");
|
2004-08-10 09:40:53 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
else if (use_jurabib &&
|
2010-12-17 21:02:39 +00:00
|
|
|
|
is_known(t.cs(), known_jurabib_commands) &&
|
|
|
|
|
(t.cs() == "cite" || p.next_token().asInput() != "*")) {
|
2004-08-10 09:40:53 +00:00
|
|
|
|
context.check_layout(os);
|
2010-12-17 21:02:39 +00:00
|
|
|
|
string command = t.cs();
|
|
|
|
|
if (p.next_token().asInput() == "*") {
|
|
|
|
|
command += '*';
|
|
|
|
|
p.get_token();
|
|
|
|
|
}
|
2004-08-10 09:40:53 +00:00
|
|
|
|
char argumentOrder = '\0';
|
2011-10-30 12:47:45 +00:00
|
|
|
|
vector<string> const options =
|
|
|
|
|
preamble.getPackageOptions("jurabib");
|
2007-12-12 19:28:07 +00:00
|
|
|
|
if (find(options.begin(), options.end(),
|
2006-04-05 23:56:29 +00:00
|
|
|
|
"natbiborder") != options.end())
|
2004-08-10 09:40:53 +00:00
|
|
|
|
argumentOrder = 'n';
|
2007-12-12 19:28:07 +00:00
|
|
|
|
else if (find(options.begin(), options.end(),
|
2006-04-05 23:56:29 +00:00
|
|
|
|
"jurabiborder") != options.end())
|
2004-08-10 09:40:53 +00:00
|
|
|
|
argumentOrder = 'j';
|
|
|
|
|
|
|
|
|
|
// text before the citation
|
|
|
|
|
string before;
|
|
|
|
|
// text after the citation
|
|
|
|
|
string after;
|
2007-11-27 20:30:22 +00:00
|
|
|
|
get_cite_arguments(p, argumentOrder != 'j', before, after);
|
2004-08-10 09:40:53 +00:00
|
|
|
|
|
|
|
|
|
string const citation = p.verbatim_item();
|
|
|
|
|
if (!before.empty() && argumentOrder == '\0') {
|
|
|
|
|
cerr << "Warning: Assuming argument order "
|
2006-04-05 23:56:29 +00:00
|
|
|
|
"of jurabib version 0.6 for\n'"
|
2004-08-10 09:40:53 +00:00
|
|
|
|
<< command << before << after << '{'
|
|
|
|
|
<< citation << "}'.\n"
|
2006-04-05 23:56:29 +00:00
|
|
|
|
"Add 'jurabiborder' to the jurabib "
|
|
|
|
|
"package options if you used an\n"
|
|
|
|
|
"earlier jurabib version." << endl;
|
2004-08-10 09:40:53 +00:00
|
|
|
|
}
|
2009-06-11 23:46:41 +00:00
|
|
|
|
if (!after.empty()) {
|
|
|
|
|
after.erase(0, 1);
|
|
|
|
|
after.erase(after.length() - 1, 1);
|
|
|
|
|
}
|
|
|
|
|
if (!before.empty()) {
|
|
|
|
|
before.erase(0, 1);
|
|
|
|
|
before.erase(before.length() - 1, 1);
|
|
|
|
|
}
|
2010-12-17 21:02:39 +00:00
|
|
|
|
begin_command_inset(os, "citation", command);
|
2009-06-11 23:46:41 +00:00
|
|
|
|
os << "after " << '"' << after << '"' << "\n";
|
|
|
|
|
os << "before " << '"' << before << '"' << "\n";
|
|
|
|
|
os << "key " << '"' << citation << '"' << "\n";
|
|
|
|
|
end_inset(os);
|
2012-12-28 13:29:46 +00:00
|
|
|
|
// Need to set the cite engine if jurabib is loaded by
|
|
|
|
|
// the document class directly
|
|
|
|
|
if (preamble.citeEngine() == "basic")
|
|
|
|
|
preamble.citeEngine("jurabib");
|
2009-06-11 23:46:41 +00:00
|
|
|
|
}
|
|
|
|
|
|
2010-12-20 02:08:22 +00:00
|
|
|
|
else if (t.cs() == "cite"
|
|
|
|
|
|| t.cs() == "nocite") {
|
2009-06-11 23:46:41 +00:00
|
|
|
|
context.check_layout(os);
|
2011-02-18 20:55:50 +00:00
|
|
|
|
string after = convert_command_inset_arg(p.getArg('[', ']'));
|
|
|
|
|
string key = convert_command_inset_arg(p.verbatim_item());
|
2010-12-20 02:47:58 +00:00
|
|
|
|
// store the case that it is "\nocite{*}" to use it later for
|
|
|
|
|
// the BibTeX inset
|
|
|
|
|
if (key != "*") {
|
|
|
|
|
begin_command_inset(os, "citation", t.cs());
|
|
|
|
|
os << "after " << '"' << after << '"' << "\n";
|
|
|
|
|
os << "key " << '"' << key << '"' << "\n";
|
|
|
|
|
end_inset(os);
|
2011-01-02 18:16:23 +00:00
|
|
|
|
} else if (t.cs() == "nocite")
|
2010-12-20 02:47:58 +00:00
|
|
|
|
btprint = key;
|
2009-06-11 23:46:41 +00:00
|
|
|
|
}
|
|
|
|
|
|
2012-02-28 20:34:50 +00:00
|
|
|
|
else if (t.cs() == "index" ||
|
|
|
|
|
(t.cs() == "sindex" && preamble.use_indices() == "true")) {
|
2009-06-11 23:46:41 +00:00
|
|
|
|
context.check_layout(os);
|
2012-03-01 20:37:34 +00:00
|
|
|
|
string const arg = (t.cs() == "sindex" && p.hasOpt()) ?
|
|
|
|
|
p.getArg('[', ']') : "";
|
|
|
|
|
string const kind = arg.empty() ? "idx" : arg;
|
2012-02-28 20:34:50 +00:00
|
|
|
|
begin_inset(os, "Index ");
|
|
|
|
|
os << kind << "\nstatus collapsed\n";
|
2011-01-22 12:00:33 +00:00
|
|
|
|
parse_text_in_inset(p, os, FLAG_ITEM, false, context, "Index");
|
2009-06-11 23:46:41 +00:00
|
|
|
|
end_inset(os);
|
2012-03-01 20:37:34 +00:00
|
|
|
|
if (kind != "idx")
|
2012-02-28 20:34:50 +00:00
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("splitidx");
|
2009-06-11 23:46:41 +00:00
|
|
|
|
}
|
2009-06-14 02:16:51 +00:00
|
|
|
|
|
|
|
|
|
else if (t.cs() == "nomenclature") {
|
|
|
|
|
context.check_layout(os);
|
2010-12-17 21:02:39 +00:00
|
|
|
|
begin_command_inset(os, "nomenclature", "nomenclature");
|
2011-02-18 20:55:50 +00:00
|
|
|
|
string prefix = convert_command_inset_arg(p.getArg('[', ']'));
|
2009-06-14 02:16:51 +00:00
|
|
|
|
if (!prefix.empty())
|
|
|
|
|
os << "prefix " << '"' << prefix << '"' << "\n";
|
2011-02-18 20:55:50 +00:00
|
|
|
|
os << "symbol " << '"'
|
|
|
|
|
<< convert_command_inset_arg(p.verbatim_item());
|
|
|
|
|
os << "\"\ndescription \""
|
|
|
|
|
<< convert_command_inset_arg(p.verbatim_item())
|
|
|
|
|
<< "\"\n";
|
2009-06-14 02:16:51 +00:00
|
|
|
|
end_inset(os);
|
2011-11-29 20:09:40 +00:00
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("nomencl");
|
2009-06-14 02:16:51 +00:00
|
|
|
|
}
|
2011-12-14 01:08:54 +00:00
|
|
|
|
|
2009-06-11 23:46:41 +00:00
|
|
|
|
else if (t.cs() == "label") {
|
|
|
|
|
context.check_layout(os);
|
2010-12-17 21:02:39 +00:00
|
|
|
|
begin_command_inset(os, "label", "label");
|
2011-02-18 20:55:50 +00:00
|
|
|
|
os << "name \""
|
|
|
|
|
<< convert_command_inset_arg(p.verbatim_item())
|
|
|
|
|
<< "\"\n";
|
2009-06-11 23:46:41 +00:00
|
|
|
|
end_inset(os);
|
|
|
|
|
}
|
|
|
|
|
|
2013-02-17 05:18:42 +00:00
|
|
|
|
else if (t.cs() == "printindex" || t.cs() == "printsubindex") {
|
2009-06-11 23:46:41 +00:00
|
|
|
|
context.check_layout(os);
|
2013-02-17 05:18:42 +00:00
|
|
|
|
string commandname = t.cs();
|
|
|
|
|
bool star = false;
|
|
|
|
|
if (p.next_token().asInput() == "*") {
|
|
|
|
|
commandname += "*";
|
|
|
|
|
star = true;
|
|
|
|
|
p.get_token();
|
|
|
|
|
}
|
|
|
|
|
begin_command_inset(os, "index_print", commandname);
|
|
|
|
|
string const indexname = p.getArg('[', ']');
|
|
|
|
|
if (!star) {
|
|
|
|
|
if (indexname.empty())
|
|
|
|
|
os << "type \"idx\"\n";
|
|
|
|
|
else
|
|
|
|
|
os << "type \"" << indexname << "\"\n";
|
|
|
|
|
}
|
2009-06-14 02:16:51 +00:00
|
|
|
|
end_inset(os);
|
2010-12-19 14:54:23 +00:00
|
|
|
|
skip_spaces_braces(p);
|
2011-11-29 20:09:40 +00:00
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("makeidx");
|
|
|
|
|
if (preamble.use_indices() == "true")
|
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("splitidx");
|
2009-06-14 02:16:51 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
else if (t.cs() == "printnomenclature") {
|
2011-10-25 00:11:02 +00:00
|
|
|
|
string width = "";
|
|
|
|
|
string width_type = "";
|
2009-06-14 02:16:51 +00:00
|
|
|
|
context.check_layout(os);
|
2010-12-17 21:02:39 +00:00
|
|
|
|
begin_command_inset(os, "nomencl_print", "printnomenclature");
|
2011-10-25 00:11:02 +00:00
|
|
|
|
// case of a custom width
|
|
|
|
|
if (p.hasOpt()) {
|
|
|
|
|
width = p.getArg('[', ']');
|
|
|
|
|
width = translate_len(width);
|
|
|
|
|
width_type = "custom";
|
|
|
|
|
}
|
|
|
|
|
// case of no custom width
|
|
|
|
|
// the case of no custom width but the width set
|
|
|
|
|
// via \settowidth{\nomlabelwidth}{***} cannot be supported
|
|
|
|
|
// because the user could have set anything, not only the width
|
|
|
|
|
// of the longest label (which would be width_type = "auto")
|
|
|
|
|
string label = convert_command_inset_arg(p.getArg('{', '}'));
|
|
|
|
|
if (label.empty() && width_type.empty())
|
|
|
|
|
width_type = "none";
|
|
|
|
|
os << "set_width \"" << width_type << "\"\n";
|
|
|
|
|
if (width_type == "custom")
|
|
|
|
|
os << "width \"" << width << '\"';
|
2004-08-10 09:40:53 +00:00
|
|
|
|
end_inset(os);
|
2010-12-19 14:54:23 +00:00
|
|
|
|
skip_spaces_braces(p);
|
2011-11-29 20:09:40 +00:00
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("nomencl");
|
2004-08-10 09:40:53 +00:00
|
|
|
|
}
|
|
|
|
|
|
2011-10-22 19:09:56 +00:00
|
|
|
|
else if ((t.cs() == "textsuperscript" || t.cs() == "textsubscript")) {
|
2010-12-12 12:16:18 +00:00
|
|
|
|
context.check_layout(os);
|
|
|
|
|
begin_inset(os, "script ");
|
|
|
|
|
os << t.cs().substr(4) << '\n';
|
|
|
|
|
parse_text_in_inset(p, os, FLAG_ITEM, false, context);
|
|
|
|
|
end_inset(os);
|
2011-10-30 18:51:41 +00:00
|
|
|
|
if (t.cs() == "textsubscript")
|
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("subscript");
|
2010-12-12 12:16:18 +00:00
|
|
|
|
}
|
|
|
|
|
|
2011-11-20 17:03:00 +00:00
|
|
|
|
else if ((where = is_known(t.cs(), known_quotes))) {
|
2003-11-05 10:14:13 +00:00
|
|
|
|
context.check_layout(os);
|
2003-04-17 09:47:21 +00:00
|
|
|
|
begin_inset(os, "Quotes ");
|
|
|
|
|
os << known_coded_quotes[where - known_quotes];
|
|
|
|
|
end_inset(os);
|
2004-06-18 06:47:19 +00:00
|
|
|
|
// LyX adds {} after the quote, so we have to eat
|
|
|
|
|
// spaces here if there are any before a possible
|
|
|
|
|
// {} pair.
|
|
|
|
|
eat_whitespace(p, os, context, false);
|
2003-04-23 15:14:43 +00:00
|
|
|
|
skip_braces(p);
|
|
|
|
|
}
|
|
|
|
|
|
2011-11-20 17:03:00 +00:00
|
|
|
|
else if ((where = is_known(t.cs(), known_sizes)) &&
|
2006-04-05 23:56:29 +00:00
|
|
|
|
context.new_layout_allowed) {
|
2004-06-28 06:53:12 +00:00
|
|
|
|
context.check_layout(os);
|
2007-04-29 18:17:15 +00:00
|
|
|
|
TeXFont const oldFont = context.font;
|
2011-01-07 19:58:31 +00:00
|
|
|
|
context.font.size = known_coded_sizes[where - known_sizes];
|
2005-07-13 11:38:55 +00:00
|
|
|
|
output_font_change(os, oldFont, context.font);
|
2004-06-28 06:53:12 +00:00
|
|
|
|
eat_whitespace(p, os, context, false);
|
|
|
|
|
}
|
|
|
|
|
|
2011-11-20 17:03:00 +00:00
|
|
|
|
else if ((where = is_known(t.cs(), known_font_families)) &&
|
2006-04-05 23:56:29 +00:00
|
|
|
|
context.new_layout_allowed) {
|
2004-06-28 06:53:12 +00:00
|
|
|
|
context.check_layout(os);
|
2007-04-29 18:17:15 +00:00
|
|
|
|
TeXFont const oldFont = context.font;
|
2004-06-28 06:53:12 +00:00
|
|
|
|
context.font.family =
|
|
|
|
|
known_coded_font_families[where - known_font_families];
|
2005-07-13 11:38:55 +00:00
|
|
|
|
output_font_change(os, oldFont, context.font);
|
2004-06-28 06:53:12 +00:00
|
|
|
|
eat_whitespace(p, os, context, false);
|
|
|
|
|
}
|
|
|
|
|
|
2011-11-20 17:03:00 +00:00
|
|
|
|
else if ((where = is_known(t.cs(), known_font_series)) &&
|
2006-04-05 23:56:29 +00:00
|
|
|
|
context.new_layout_allowed) {
|
2004-06-28 06:53:12 +00:00
|
|
|
|
context.check_layout(os);
|
2007-04-29 18:17:15 +00:00
|
|
|
|
TeXFont const oldFont = context.font;
|
2004-06-28 06:53:12 +00:00
|
|
|
|
context.font.series =
|
|
|
|
|
known_coded_font_series[where - known_font_series];
|
2005-07-13 11:38:55 +00:00
|
|
|
|
output_font_change(os, oldFont, context.font);
|
2004-06-28 06:53:12 +00:00
|
|
|
|
eat_whitespace(p, os, context, false);
|
|
|
|
|
}
|
|
|
|
|
|
2011-11-20 17:03:00 +00:00
|
|
|
|
else if ((where = is_known(t.cs(), known_font_shapes)) &&
|
2006-04-05 23:56:29 +00:00
|
|
|
|
context.new_layout_allowed) {
|
2004-06-28 06:53:12 +00:00
|
|
|
|
context.check_layout(os);
|
2007-04-29 18:17:15 +00:00
|
|
|
|
TeXFont const oldFont = context.font;
|
2004-06-28 06:53:12 +00:00
|
|
|
|
context.font.shape =
|
|
|
|
|
known_coded_font_shapes[where - known_font_shapes];
|
2005-07-13 11:38:55 +00:00
|
|
|
|
output_font_change(os, oldFont, context.font);
|
2004-06-28 06:53:12 +00:00
|
|
|
|
eat_whitespace(p, os, context, false);
|
|
|
|
|
}
|
2011-11-20 17:03:00 +00:00
|
|
|
|
else if ((where = is_known(t.cs(), known_old_font_families)) &&
|
2006-04-05 23:56:29 +00:00
|
|
|
|
context.new_layout_allowed) {
|
2004-06-28 06:53:12 +00:00
|
|
|
|
context.check_layout(os);
|
2007-04-29 18:17:15 +00:00
|
|
|
|
TeXFont const oldFont = context.font;
|
2004-06-28 06:53:12 +00:00
|
|
|
|
context.font.init();
|
2005-07-13 11:38:55 +00:00
|
|
|
|
context.font.size = oldFont.size;
|
2004-06-28 06:53:12 +00:00
|
|
|
|
context.font.family =
|
|
|
|
|
known_coded_font_families[where - known_old_font_families];
|
2005-07-13 11:38:55 +00:00
|
|
|
|
output_font_change(os, oldFont, context.font);
|
2004-06-28 06:53:12 +00:00
|
|
|
|
eat_whitespace(p, os, context, false);
|
|
|
|
|
}
|
|
|
|
|
|
2011-11-20 17:03:00 +00:00
|
|
|
|
else if ((where = is_known(t.cs(), known_old_font_series)) &&
|
2006-04-05 23:56:29 +00:00
|
|
|
|
context.new_layout_allowed) {
|
2004-06-28 06:53:12 +00:00
|
|
|
|
context.check_layout(os);
|
2007-04-29 18:17:15 +00:00
|
|
|
|
TeXFont const oldFont = context.font;
|
2004-06-28 06:53:12 +00:00
|
|
|
|
context.font.init();
|
2005-07-13 11:38:55 +00:00
|
|
|
|
context.font.size = oldFont.size;
|
2004-06-28 06:53:12 +00:00
|
|
|
|
context.font.series =
|
|
|
|
|
known_coded_font_series[where - known_old_font_series];
|
2005-07-13 11:38:55 +00:00
|
|
|
|
output_font_change(os, oldFont, context.font);
|
2004-06-28 06:53:12 +00:00
|
|
|
|
eat_whitespace(p, os, context, false);
|
|
|
|
|
}
|
|
|
|
|
|
2011-11-20 17:03:00 +00:00
|
|
|
|
else if ((where = is_known(t.cs(), known_old_font_shapes)) &&
|
2006-04-05 23:56:29 +00:00
|
|
|
|
context.new_layout_allowed) {
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2007-04-29 18:17:15 +00:00
|
|
|
|
TeXFont const oldFont = context.font;
|
2004-06-28 06:53:12 +00:00
|
|
|
|
context.font.init();
|
2005-07-13 11:38:55 +00:00
|
|
|
|
context.font.size = oldFont.size;
|
2004-06-28 06:53:12 +00:00
|
|
|
|
context.font.shape =
|
|
|
|
|
known_coded_font_shapes[where - known_old_font_shapes];
|
2005-07-13 11:38:55 +00:00
|
|
|
|
output_font_change(os, oldFont, context.font);
|
2004-06-18 06:47:19 +00:00
|
|
|
|
eat_whitespace(p, os, context, false);
|
2003-04-17 09:47:21 +00:00
|
|
|
|
}
|
|
|
|
|
|
2007-12-04 20:09:40 +00:00
|
|
|
|
else if (t.cs() == "selectlanguage") {
|
|
|
|
|
context.check_layout(os);
|
2008-11-27 14:16:02 +00:00
|
|
|
|
// save the language for the case that a
|
2011-12-14 01:08:54 +00:00
|
|
|
|
// \foreignlanguage is used
|
2011-01-12 21:04:39 +00:00
|
|
|
|
context.font.language = babel2lyx(p.verbatim_item());
|
|
|
|
|
os << "\n\\lang " << context.font.language << "\n";
|
2007-12-04 20:09:40 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
else if (t.cs() == "foreignlanguage") {
|
2011-01-12 21:04:39 +00:00
|
|
|
|
string const lang = babel2lyx(p.verbatim_item());
|
|
|
|
|
parse_text_attributes(p, os, FLAG_ITEM, outer,
|
|
|
|
|
context, "\\lang",
|
|
|
|
|
context.font.language, lang);
|
2007-12-04 20:09:40 +00:00
|
|
|
|
}
|
2012-12-14 11:30:08 +00:00
|
|
|
|
|
|
|
|
|
else if (prefixIs(t.cs(), "text")
|
2012-06-30 23:47:39 +00:00
|
|
|
|
&& is_known(t.cs().substr(4), preamble.polyglossia_languages)) {
|
2012-06-22 01:41:00 +00:00
|
|
|
|
// scheme is \textLANGUAGE{text} where LANGUAGE is in polyglossia_languages[]
|
2012-06-23 02:28:52 +00:00
|
|
|
|
string lang;
|
|
|
|
|
// We have to output the whole command if it has an option
|
|
|
|
|
// because LyX doesn't support this yet, see bug #8214,
|
|
|
|
|
// only if there is a single option specifying a variant, we can handle it.
|
|
|
|
|
if (p.hasOpt()) {
|
|
|
|
|
string langopts = p.getOpt();
|
|
|
|
|
// check if the option contains a variant, if yes, extract it
|
|
|
|
|
string::size_type pos_var = langopts.find("variant");
|
|
|
|
|
string::size_type i = langopts.find(',');
|
2012-06-25 22:46:02 +00:00
|
|
|
|
string::size_type k = langopts.find('=', pos_var);
|
|
|
|
|
if (pos_var != string::npos && i == string::npos) {
|
2012-06-23 02:28:52 +00:00
|
|
|
|
string variant;
|
2012-06-25 22:46:02 +00:00
|
|
|
|
variant = langopts.substr(k + 1, langopts.length() - k - 2);
|
2012-06-30 23:47:39 +00:00
|
|
|
|
lang = preamble.polyglossia2lyx(variant);
|
2012-06-25 22:46:02 +00:00
|
|
|
|
parse_text_attributes(p, os, FLAG_ITEM, outer,
|
|
|
|
|
context, "\\lang",
|
|
|
|
|
context.font.language, lang);
|
2012-06-23 02:28:52 +00:00
|
|
|
|
} else
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, t.asInput() + langopts, context);
|
2012-06-23 02:28:52 +00:00
|
|
|
|
} else {
|
2012-06-30 23:47:39 +00:00
|
|
|
|
lang = preamble.polyglossia2lyx(t.cs().substr(4, string::npos));
|
2012-06-22 15:19:46 +00:00
|
|
|
|
parse_text_attributes(p, os, FLAG_ITEM, outer,
|
2012-06-23 02:28:52 +00:00
|
|
|
|
context, "\\lang",
|
|
|
|
|
context.font.language, lang);
|
|
|
|
|
}
|
2012-06-22 01:41:00 +00:00
|
|
|
|
}
|
2007-12-04 20:09:40 +00:00
|
|
|
|
|
2008-11-27 14:16:02 +00:00
|
|
|
|
else if (t.cs() == "inputencoding") {
|
|
|
|
|
// nothing to write here
|
|
|
|
|
string const enc = subst(p.verbatim_item(), "\n", " ");
|
2013-01-19 18:47:15 +00:00
|
|
|
|
p.setEncoding(enc, Encoding::inputenc);
|
2008-11-27 14:16:02 +00:00
|
|
|
|
}
|
2009-06-14 21:29:24 +00:00
|
|
|
|
|
2011-11-20 17:03:00 +00:00
|
|
|
|
else if ((where = is_known(t.cs(), known_special_chars))) {
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2013-02-24 18:00:17 +00:00
|
|
|
|
os << known_coded_special_chars[where - known_special_chars];
|
2010-12-19 14:54:23 +00:00
|
|
|
|
skip_spaces_braces(p);
|
2010-12-12 13:23:05 +00:00
|
|
|
|
}
|
|
|
|
|
|
2013-02-24 18:00:17 +00:00
|
|
|
|
else if ((t.cs() == "nobreakdash" && p.next_token().asInput() == "-") ||
|
|
|
|
|
(t.cs() == "@" && p.next_token().asInput() == ".")) {
|
2010-12-12 13:23:05 +00:00
|
|
|
|
context.check_layout(os);
|
2013-02-24 18:00:17 +00:00
|
|
|
|
os << "\\SpecialChar \\" << t.cs()
|
|
|
|
|
<< p.get_token().asInput() << '\n';
|
2010-12-12 13:23:05 +00:00
|
|
|
|
}
|
|
|
|
|
|
2010-04-01 22:49:52 +00:00
|
|
|
|
else if (t.cs() == "textquotedbl") {
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
os << "\"";
|
|
|
|
|
skip_braces(p);
|
|
|
|
|
}
|
|
|
|
|
|
2003-09-09 18:27:24 +00:00
|
|
|
|
else if (t.cs() == "_" || t.cs() == "&" || t.cs() == "#"
|
|
|
|
|
|| t.cs() == "$" || t.cs() == "{" || t.cs() == "}"
|
2013-02-24 18:12:56 +00:00
|
|
|
|
|| t.cs() == "%" || t.cs() == "-") {
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2013-02-24 18:12:56 +00:00
|
|
|
|
if (t.cs() == "-")
|
|
|
|
|
os << "\\SpecialChar \\-\n";
|
|
|
|
|
else
|
|
|
|
|
os << t.cs();
|
2003-07-28 21:58:09 +00:00
|
|
|
|
}
|
2003-04-17 09:47:21 +00:00
|
|
|
|
|
2003-04-23 15:14:43 +00:00
|
|
|
|
else if (t.cs() == "char") {
|
2003-08-04 10:26:10 +00:00
|
|
|
|
context.check_layout(os);
|
2003-04-23 15:14:43 +00:00
|
|
|
|
if (p.next_token().character() == '`') {
|
|
|
|
|
p.get_token();
|
|
|
|
|
if (p.next_token().cs() == "\"") {
|
|
|
|
|
p.get_token();
|
|
|
|
|
os << '"';
|
|
|
|
|
skip_braces(p);
|
|
|
|
|
} else {
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "\\char`", context);
|
2003-04-23 15:14:43 +00:00
|
|
|
|
}
|
|
|
|
|
} else {
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "\\char", context);
|
2003-04-23 15:14:43 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2006-03-10 14:57:39 +00:00
|
|
|
|
else if (t.cs() == "verb") {
|
|
|
|
|
context.check_layout(os);
|
2013-01-25 11:48:52 +00:00
|
|
|
|
// set catcodes to verbatim early, just in case.
|
|
|
|
|
p.setCatcodes(VERBATIM_CATCODES);
|
|
|
|
|
string delim = p.get_token().asInput();
|
2013-02-22 14:49:18 +00:00
|
|
|
|
Parser::Arg arg = p.verbatimStuff(delim);
|
|
|
|
|
if (arg.first)
|
|
|
|
|
output_ert_inset(os, "\\verb" + delim
|
|
|
|
|
+ arg.second + delim, context);
|
|
|
|
|
else
|
|
|
|
|
cerr << "invalid \\verb command. Skipping" << endl;
|
2006-03-10 14:57:39 +00:00
|
|
|
|
}
|
|
|
|
|
|
2003-10-23 11:46:33 +00:00
|
|
|
|
// Problem: \= creates a tabstop inside the tabbing environment
|
|
|
|
|
// and else an accent. In the latter case we really would want
|
|
|
|
|
// \={o} instead of \= o.
|
2003-12-10 08:33:37 +00:00
|
|
|
|
else if (t.cs() == "=" && (flags & FLAG_TABBING))
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, t.asInput(), context);
|
2003-12-10 08:33:37 +00:00
|
|
|
|
|
2003-07-28 21:58:09 +00:00
|
|
|
|
else if (t.cs() == "\\") {
|
2003-10-23 11:46:33 +00:00
|
|
|
|
context.check_layout(os);
|
2011-01-02 15:39:48 +00:00
|
|
|
|
if (p.hasOpt())
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "\\\\" + p.getOpt(), context);
|
2011-01-02 15:39:48 +00:00
|
|
|
|
else if (p.next_token().asInput() == "*") {
|
2003-08-07 22:59:53 +00:00
|
|
|
|
p.get_token();
|
2011-01-02 15:39:48 +00:00
|
|
|
|
// getOpt() eats the following space if there
|
|
|
|
|
// is no optional argument, but that is OK
|
|
|
|
|
// here since it has no effect in the output.
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "\\\\*" + p.getOpt(), context);
|
2003-08-07 22:59:53 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2010-12-27 18:03:26 +00:00
|
|
|
|
begin_inset(os, "Newline newline");
|
|
|
|
|
end_inset(os);
|
2003-08-07 22:59:53 +00:00
|
|
|
|
}
|
2003-07-28 21:58:09 +00:00
|
|
|
|
}
|
2003-09-09 18:27:24 +00:00
|
|
|
|
|
2010-12-30 21:03:36 +00:00
|
|
|
|
else if (t.cs() == "newline" ||
|
2011-01-02 15:39:48 +00:00
|
|
|
|
(t.cs() == "linebreak" && !p.hasOpt())) {
|
2007-11-24 18:04:26 +00:00
|
|
|
|
context.check_layout(os);
|
2010-12-27 18:03:26 +00:00
|
|
|
|
begin_inset(os, "Newline ");
|
|
|
|
|
os << t.cs();
|
|
|
|
|
end_inset(os);
|
2010-12-19 14:54:23 +00:00
|
|
|
|
skip_spaces_braces(p);
|
2007-11-24 18:04:26 +00:00
|
|
|
|
}
|
|
|
|
|
|
2003-08-07 22:59:53 +00:00
|
|
|
|
else if (t.cs() == "input" || t.cs() == "include"
|
|
|
|
|
|| t.cs() == "verbatiminput") {
|
2010-12-19 15:08:35 +00:00
|
|
|
|
string name = t.cs();
|
2003-08-07 22:59:53 +00:00
|
|
|
|
if (t.cs() == "verbatiminput"
|
2003-09-09 18:27:24 +00:00
|
|
|
|
&& p.next_token().asInput() == "*")
|
2003-08-07 22:59:53 +00:00
|
|
|
|
name += p.get_token().asInput();
|
|
|
|
|
context.check_layout(os);
|
2005-04-15 14:04:13 +00:00
|
|
|
|
string filename(normalize_filename(p.getArg('{', '}')));
|
2012-10-03 11:23:27 +00:00
|
|
|
|
string const path = getMasterFilePath(true);
|
2005-04-15 14:04:13 +00:00
|
|
|
|
// We want to preserve relative / absolute filenames,
|
|
|
|
|
// therefore path is only used for testing
|
2007-05-23 07:27:13 +00:00
|
|
|
|
if ((t.cs() == "include" || t.cs() == "input") &&
|
2007-10-18 19:29:32 +00:00
|
|
|
|
!makeAbsPath(filename, path).exists()) {
|
2005-04-15 14:04:13 +00:00
|
|
|
|
// The file extension is probably missing.
|
|
|
|
|
// Now try to find it out.
|
|
|
|
|
string const tex_name =
|
|
|
|
|
find_file(filename, path,
|
2006-04-05 23:56:29 +00:00
|
|
|
|
known_tex_extensions);
|
2005-04-15 14:04:13 +00:00
|
|
|
|
if (!tex_name.empty())
|
|
|
|
|
filename = tex_name;
|
|
|
|
|
}
|
2010-12-20 21:35:08 +00:00
|
|
|
|
bool external = false;
|
2010-12-11 18:22:47 +00:00
|
|
|
|
string outname;
|
2007-10-18 19:29:32 +00:00
|
|
|
|
if (makeAbsPath(filename, path).exists()) {
|
2005-04-15 14:04:13 +00:00
|
|
|
|
string const abstexname =
|
2010-04-21 01:19:09 +00:00
|
|
|
|
makeAbsPath(filename, path).absFileName();
|
2010-12-11 18:22:47 +00:00
|
|
|
|
string const absfigname =
|
|
|
|
|
changeExtension(abstexname, ".fig");
|
2012-10-03 11:23:27 +00:00
|
|
|
|
fix_child_filename(filename);
|
2005-04-15 14:04:13 +00:00
|
|
|
|
string const lyxname =
|
2006-04-08 22:31:11 +00:00
|
|
|
|
changeExtension(filename, ".lyx");
|
2012-10-03 11:23:27 +00:00
|
|
|
|
string const abslyxname = makeAbsPath(
|
|
|
|
|
lyxname, getParentFilePath(false)).absFileName();
|
2010-12-20 21:35:08 +00:00
|
|
|
|
bool xfig = false;
|
2012-10-18 20:01:32 +00:00
|
|
|
|
if (!skipChildren())
|
|
|
|
|
external = FileName(absfigname).exists();
|
|
|
|
|
if (t.cs() == "input" && !skipChildren()) {
|
2010-12-20 21:35:08 +00:00
|
|
|
|
string const ext = getExtension(abstexname);
|
|
|
|
|
|
|
|
|
|
// Combined PS/LaTeX:
|
|
|
|
|
// x.eps, x.pstex_t (old xfig)
|
|
|
|
|
// x.pstex, x.pstex_t (new xfig, e.g. 3.2.5)
|
2010-12-11 18:22:47 +00:00
|
|
|
|
FileName const absepsname(
|
|
|
|
|
changeExtension(abstexname, ".eps"));
|
2010-12-20 21:35:08 +00:00
|
|
|
|
FileName const abspstexname(
|
|
|
|
|
changeExtension(abstexname, ".pstex"));
|
|
|
|
|
bool const xfigeps =
|
|
|
|
|
(absepsname.exists() ||
|
|
|
|
|
abspstexname.exists()) &&
|
|
|
|
|
ext == "pstex_t";
|
|
|
|
|
|
|
|
|
|
// Combined PDF/LaTeX:
|
|
|
|
|
// x.pdf, x.pdftex_t (old xfig)
|
|
|
|
|
// x.pdf, x.pdf_t (new xfig, e.g. 3.2.5)
|
2010-12-11 18:22:47 +00:00
|
|
|
|
FileName const abspdfname(
|
|
|
|
|
changeExtension(abstexname, ".pdf"));
|
|
|
|
|
bool const xfigpdf =
|
2010-12-20 21:35:08 +00:00
|
|
|
|
abspdfname.exists() &&
|
|
|
|
|
(ext == "pdftex_t" || ext == "pdf_t");
|
2010-12-30 20:29:33 +00:00
|
|
|
|
if (xfigpdf)
|
|
|
|
|
pdflatex = true;
|
2010-12-20 21:35:08 +00:00
|
|
|
|
|
|
|
|
|
// Combined PS/PDF/LaTeX:
|
|
|
|
|
// x_pspdftex.eps, x_pspdftex.pdf, x.pspdftex
|
|
|
|
|
string const absbase2(
|
|
|
|
|
removeExtension(abstexname) + "_pspdftex");
|
|
|
|
|
FileName const abseps2name(
|
|
|
|
|
addExtension(absbase2, ".eps"));
|
|
|
|
|
FileName const abspdf2name(
|
|
|
|
|
addExtension(absbase2, ".pdf"));
|
|
|
|
|
bool const xfigboth =
|
|
|
|
|
abspdf2name.exists() &&
|
|
|
|
|
abseps2name.exists() && ext == "pspdftex";
|
|
|
|
|
|
|
|
|
|
xfig = xfigpdf || xfigeps || xfigboth;
|
|
|
|
|
external = external && xfig;
|
2010-12-11 18:22:47 +00:00
|
|
|
|
}
|
2010-12-20 21:35:08 +00:00
|
|
|
|
if (external) {
|
2010-12-11 18:22:47 +00:00
|
|
|
|
outname = changeExtension(filename, ".fig");
|
2012-10-03 11:23:27 +00:00
|
|
|
|
FileName abssrc(changeExtension(abstexname, ".fig"));
|
|
|
|
|
copy_file(abssrc, outname);
|
2010-12-20 21:35:08 +00:00
|
|
|
|
} else if (xfig) {
|
|
|
|
|
// Don't try to convert, the result
|
|
|
|
|
// would be full of ERT.
|
|
|
|
|
outname = filename;
|
2012-10-03 11:23:27 +00:00
|
|
|
|
FileName abssrc(abstexname);
|
|
|
|
|
copy_file(abssrc, outname);
|
2010-12-11 18:22:47 +00:00
|
|
|
|
} else if (t.cs() != "verbatiminput" &&
|
2012-10-18 20:01:32 +00:00
|
|
|
|
!skipChildren() &&
|
2009-01-30 14:47:06 +00:00
|
|
|
|
tex2lyx(abstexname, FileName(abslyxname),
|
|
|
|
|
p.getEncoding())) {
|
2010-12-11 18:22:47 +00:00
|
|
|
|
outname = lyxname;
|
2012-10-03 11:23:27 +00:00
|
|
|
|
// no need to call copy_file
|
|
|
|
|
// tex2lyx creates the file
|
2005-04-15 14:04:13 +00:00
|
|
|
|
} else {
|
2010-12-11 18:22:47 +00:00
|
|
|
|
outname = filename;
|
2012-10-03 11:23:27 +00:00
|
|
|
|
FileName abssrc(abstexname);
|
|
|
|
|
copy_file(abssrc, outname);
|
2005-04-15 14:04:13 +00:00
|
|
|
|
}
|
2003-10-23 11:46:33 +00:00
|
|
|
|
} else {
|
2005-04-15 14:04:13 +00:00
|
|
|
|
cerr << "Warning: Could not find included file '"
|
|
|
|
|
<< filename << "'." << endl;
|
2010-12-11 18:22:47 +00:00
|
|
|
|
outname = filename;
|
|
|
|
|
}
|
2010-12-20 21:35:08 +00:00
|
|
|
|
if (external) {
|
2010-12-11 18:22:47 +00:00
|
|
|
|
begin_inset(os, "External\n");
|
|
|
|
|
os << "\ttemplate XFig\n"
|
|
|
|
|
<< "\tfilename " << outname << '\n';
|
2011-12-13 19:40:05 +00:00
|
|
|
|
registerExternalTemplatePackages("XFig");
|
2010-12-11 18:22:47 +00:00
|
|
|
|
} else {
|
2010-12-19 15:08:35 +00:00
|
|
|
|
begin_command_inset(os, "include", name);
|
2012-12-29 12:16:22 +00:00
|
|
|
|
outname = subst(outname, "\"", "\\\"");
|
2010-12-19 15:08:35 +00:00
|
|
|
|
os << "preview false\n"
|
|
|
|
|
"filename \"" << outname << "\"\n";
|
2011-12-08 20:05:51 +00:00
|
|
|
|
if (t.cs() == "verbatiminput")
|
|
|
|
|
preamble.registerAutomaticallyLoadedPackage("verbatim");
|
2003-10-23 11:46:33 +00:00
|
|
|
|
}
|
2003-08-07 22:59:53 +00:00
|
|
|
|
end_inset(os);
|
2003-07-28 21:58:09 +00:00
|
|
|
|
}
|
2003-10-23 11:46:33 +00:00
|
|
|
|
|
|
|
|
|
else if (t.cs() == "bibliographystyle") {
|
|
|
|
|
// store new bibliographystyle
|
|
|
|
|
bibliographystyle = p.verbatim_item();
|
2013-02-16 04:51:23 +00:00
|
|
|
|
// If any other command than \bibliography, \addcontentsline
|
|
|
|
|
// and \nocite{*} follows, we need to output the style
|
2011-01-02 18:16:23 +00:00
|
|
|
|
// (because it might be used by that command).
|
|
|
|
|
// Otherwise, it will automatically be output by LyX.
|
|
|
|
|
p.pushPosition();
|
|
|
|
|
bool output = true;
|
|
|
|
|
for (Token t2 = p.get_token(); p.good(); t2 = p.get_token()) {
|
|
|
|
|
if (t2.cat() == catBegin)
|
|
|
|
|
break;
|
|
|
|
|
if (t2.cat() != catEscape)
|
|
|
|
|
continue;
|
|
|
|
|
if (t2.cs() == "nocite") {
|
|
|
|
|
if (p.getArg('{', '}') == "*")
|
|
|
|
|
continue;
|
|
|
|
|
} else if (t2.cs() == "bibliography")
|
|
|
|
|
output = false;
|
2013-02-17 03:17:02 +00:00
|
|
|
|
else if (t2.cs() == "phantomsection") {
|
|
|
|
|
output = false;
|
|
|
|
|
continue;
|
|
|
|
|
}
|
2013-02-16 04:51:23 +00:00
|
|
|
|
else if (t2.cs() == "addcontentsline") {
|
|
|
|
|
// get the 3 arguments of \addcontentsline
|
|
|
|
|
p.getArg('{', '}');
|
|
|
|
|
p.getArg('{', '}');
|
|
|
|
|
contentslineContent = p.getArg('{', '}');
|
|
|
|
|
// if the last argument is not \refname we must output
|
|
|
|
|
if (contentslineContent == "\\refname")
|
|
|
|
|
output = false;
|
|
|
|
|
}
|
2011-01-02 18:16:23 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
p.popPosition();
|
|
|
|
|
if (output) {
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os,
|
2011-01-02 18:16:23 +00:00
|
|
|
|
"\\bibliographystyle{" + bibliographystyle + '}',
|
|
|
|
|
context);
|
|
|
|
|
}
|
2003-10-23 11:46:33 +00:00
|
|
|
|
}
|
|
|
|
|
|
2013-02-17 03:17:02 +00:00
|
|
|
|
else if (t.cs() == "phantomsection") {
|
|
|
|
|
// we only support this if it occurs between
|
|
|
|
|
// \bibliographystyle and \bibliography
|
|
|
|
|
if (bibliographystyle.empty())
|
|
|
|
|
output_ert_inset(os, "\\phantomsection", context);
|
|
|
|
|
}
|
|
|
|
|
|
2013-02-16 04:51:23 +00:00
|
|
|
|
else if (t.cs() == "addcontentsline") {
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
// get the 3 arguments of \addcontentsline
|
|
|
|
|
string const one = p.getArg('{', '}');
|
|
|
|
|
string const two = p.getArg('{', '}');
|
|
|
|
|
string const three = p.getArg('{', '}');
|
|
|
|
|
// only if it is a \refname, we support if for the bibtex inset
|
|
|
|
|
if (contentslineContent != "\\refname") {
|
|
|
|
|
output_ert_inset(os,
|
|
|
|
|
"\\addcontentsline{" + one + "}{" + two + "}{"+ three + '}',
|
|
|
|
|
context);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2003-10-23 11:46:33 +00:00
|
|
|
|
else if (t.cs() == "bibliography") {
|
|
|
|
|
context.check_layout(os);
|
2013-02-16 04:51:23 +00:00
|
|
|
|
string BibOpts;
|
2010-12-17 21:35:27 +00:00
|
|
|
|
begin_command_inset(os, "bibtex", "bibtex");
|
2010-12-20 02:47:58 +00:00
|
|
|
|
if (!btprint.empty()) {
|
|
|
|
|
os << "btprint " << '"' << "btPrintAll" << '"' << "\n";
|
|
|
|
|
// clear the string because the next BibTeX inset can be without the
|
|
|
|
|
// \nocite{*} option
|
|
|
|
|
btprint.clear();
|
|
|
|
|
}
|
2009-06-11 23:46:41 +00:00
|
|
|
|
os << "bibfiles " << '"' << p.verbatim_item() << '"' << "\n";
|
2013-02-16 04:51:23 +00:00
|
|
|
|
// Do we have addcontentsline?
|
|
|
|
|
if (contentslineContent == "\\refname") {
|
|
|
|
|
BibOpts = "bibtotoc";
|
|
|
|
|
// clear string because next BibTeX inset can be without addcontentsline
|
|
|
|
|
contentslineContent.clear();
|
|
|
|
|
}
|
2003-10-23 11:46:33 +00:00
|
|
|
|
// Do we have a bibliographystyle set?
|
2013-02-16 04:51:23 +00:00
|
|
|
|
if (!bibliographystyle.empty()) {
|
|
|
|
|
if (BibOpts.empty())
|
|
|
|
|
BibOpts = bibliographystyle;
|
|
|
|
|
else
|
|
|
|
|
BibOpts = BibOpts + ',' + bibliographystyle;
|
2013-02-17 03:17:02 +00:00
|
|
|
|
// clear it because each bibtex entry has its style
|
|
|
|
|
// and we need an empty string to handle \phantomsection
|
|
|
|
|
bibliographystyle.clear();
|
2013-02-16 04:51:23 +00:00
|
|
|
|
}
|
|
|
|
|
os << "options " << '"' << BibOpts << '"' << "\n";
|
2007-11-27 01:38:50 +00:00
|
|
|
|
end_inset(os);
|
2003-10-23 11:46:33 +00:00
|
|
|
|
}
|
|
|
|
|
|
2011-11-06 15:06:19 +00:00
|
|
|
|
else if (t.cs() == "parbox") {
|
|
|
|
|
// Test whether this is an outer box of a shaded box
|
|
|
|
|
p.pushPosition();
|
|
|
|
|
// swallow arguments
|
|
|
|
|
while (p.hasOpt()) {
|
|
|
|
|
p.getArg('[', ']');
|
|
|
|
|
p.skip_spaces(true);
|
|
|
|
|
}
|
|
|
|
|
p.getArg('{', '}');
|
|
|
|
|
p.skip_spaces(true);
|
|
|
|
|
// eat the '{'
|
|
|
|
|
if (p.next_token().cat() == catBegin)
|
|
|
|
|
p.get_token();
|
|
|
|
|
p.skip_spaces(true);
|
|
|
|
|
Token to = p.get_token();
|
|
|
|
|
bool shaded = false;
|
|
|
|
|
if (to.asInput() == "\\begin") {
|
|
|
|
|
p.skip_spaces(true);
|
|
|
|
|
if (p.getArg('{', '}') == "shaded")
|
|
|
|
|
shaded = true;
|
|
|
|
|
}
|
|
|
|
|
p.popPosition();
|
|
|
|
|
if (shaded) {
|
|
|
|
|
parse_outer_box(p, os, FLAG_ITEM, outer,
|
|
|
|
|
context, "parbox", "shaded");
|
|
|
|
|
} else
|
|
|
|
|
parse_box(p, os, 0, FLAG_ITEM, outer, context,
|
|
|
|
|
"", "", t.cs());
|
|
|
|
|
}
|
2011-01-21 18:29:10 +00:00
|
|
|
|
|
2013-03-22 00:33:58 +00:00
|
|
|
|
else if (t.cs() == "fbox" || t.cs() == "mbox" ||
|
|
|
|
|
t.cs() == "ovalbox" || t.cs() == "Ovalbox" ||
|
2011-01-21 18:29:10 +00:00
|
|
|
|
t.cs() == "shadowbox" || t.cs() == "doublebox")
|
|
|
|
|
parse_outer_box(p, os, FLAG_ITEM, outer, context, t.cs(), "");
|
|
|
|
|
|
|
|
|
|
else if (t.cs() == "framebox") {
|
2011-11-27 11:57:47 +00:00
|
|
|
|
if (p.next_token().character() == '(') {
|
|
|
|
|
//the syntax is: \framebox(x,y)[position]{content}
|
|
|
|
|
string arg = t.asInput();
|
|
|
|
|
arg += p.getFullParentheseArg();
|
|
|
|
|
arg += p.getFullOpt();
|
|
|
|
|
eat_whitespace(p, os, context, false);
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, arg + '{', context);
|
2011-11-27 11:57:47 +00:00
|
|
|
|
parse_text(p, os, FLAG_ITEM, outer, context);
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "}", context);
|
2011-11-27 11:57:47 +00:00
|
|
|
|
} else {
|
2013-03-22 00:33:58 +00:00
|
|
|
|
//the syntax is: \framebox[width][position]{content}
|
2011-11-27 11:57:47 +00:00
|
|
|
|
string special = p.getFullOpt();
|
|
|
|
|
special += p.getOpt();
|
2013-03-22 00:33:58 +00:00
|
|
|
|
parse_outer_box(p, os, FLAG_ITEM, outer,
|
|
|
|
|
context, t.cs(), special);
|
2011-11-27 11:57:47 +00:00
|
|
|
|
}
|
2011-01-21 18:29:10 +00:00
|
|
|
|
}
|
|
|
|
|
|
2008-04-12 12:50:04 +00:00
|
|
|
|
//\makebox() is part of the picture environment and different from \makebox{}
|
2011-11-12 18:41:44 +00:00
|
|
|
|
//\makebox{} will be parsed by parse_box
|
2008-04-12 12:50:04 +00:00
|
|
|
|
else if (t.cs() == "makebox") {
|
2011-11-06 22:42:05 +00:00
|
|
|
|
if (p.next_token().character() == '(') {
|
2008-04-12 12:50:04 +00:00
|
|
|
|
//the syntax is: \makebox(x,y)[position]{content}
|
2011-11-27 11:57:47 +00:00
|
|
|
|
string arg = t.asInput();
|
2008-04-17 00:22:16 +00:00
|
|
|
|
arg += p.getFullParentheseArg();
|
2008-04-12 12:50:04 +00:00
|
|
|
|
arg += p.getFullOpt();
|
2011-11-12 18:41:44 +00:00
|
|
|
|
eat_whitespace(p, os, context, false);
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, arg + '{', context);
|
2011-11-12 18:41:44 +00:00
|
|
|
|
parse_text(p, os, FLAG_ITEM, outer, context);
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, "}", context);
|
2011-11-06 22:42:05 +00:00
|
|
|
|
} else
|
|
|
|
|
//the syntax is: \makebox[width][position]{content}
|
|
|
|
|
parse_box(p, os, 0, FLAG_ITEM, outer, context,
|
|
|
|
|
"", "", t.cs());
|
2008-04-12 12:50:04 +00:00
|
|
|
|
}
|
2003-12-19 10:40:07 +00:00
|
|
|
|
|
2003-12-10 08:33:37 +00:00
|
|
|
|
else if (t.cs() == "smallskip" ||
|
2006-04-05 23:56:29 +00:00
|
|
|
|
t.cs() == "medskip" ||
|
2003-12-10 08:33:37 +00:00
|
|
|
|
t.cs() == "bigskip" ||
|
|
|
|
|
t.cs() == "vfill") {
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
begin_inset(os, "VSpace ");
|
|
|
|
|
os << t.cs();
|
|
|
|
|
end_inset(os);
|
2010-12-19 14:54:23 +00:00
|
|
|
|
skip_spaces_braces(p);
|
2003-12-10 08:33:37 +00:00
|
|
|
|
}
|
|
|
|
|
|
2011-11-20 17:03:00 +00:00
|
|
|
|
else if ((where = is_known(t.cs(), known_spaces))) {
|
2006-08-24 16:34:47 +00:00
|
|
|
|
context.check_layout(os);
|
2010-12-27 18:14:55 +00:00
|
|
|
|
begin_inset(os, "space ");
|
2006-08-24 16:34:47 +00:00
|
|
|
|
os << '\\' << known_coded_spaces[where - known_spaces]
|
|
|
|
|
<< '\n';
|
2010-12-19 20:23:55 +00:00
|
|
|
|
end_inset(os);
|
2006-08-24 16:34:47 +00:00
|
|
|
|
// LaTeX swallows whitespace after all spaces except
|
|
|
|
|
// "\\,". We have to do that here, too, because LyX
|
|
|
|
|
// adds "{}" which would make the spaces significant.
|
|
|
|
|
if (t.cs() != ",")
|
|
|
|
|
eat_whitespace(p, os, context, false);
|
|
|
|
|
// LyX adds "{}" after all spaces except "\\ " and
|
|
|
|
|
// "\\,", so we have to remove "{}".
|
|
|
|
|
// "\\,{}" is equivalent to "\\," in LaTeX, so we
|
|
|
|
|
// remove the braces after "\\,", too.
|
|
|
|
|
if (t.cs() != " ")
|
|
|
|
|
skip_braces(p);
|
|
|
|
|
}
|
|
|
|
|
|
2006-11-25 15:09:01 +00:00
|
|
|
|
else if (t.cs() == "newpage" ||
|
2011-01-02 15:39:48 +00:00
|
|
|
|
(t.cs() == "pagebreak" && !p.hasOpt()) ||
|
2010-12-30 21:03:36 +00:00
|
|
|
|
t.cs() == "clearpage" ||
|
|
|
|
|
t.cs() == "cleardoublepage") {
|
2005-07-26 11:58:43 +00:00
|
|
|
|
context.check_layout(os);
|
2010-12-27 17:58:36 +00:00
|
|
|
|
begin_inset(os, "Newpage ");
|
|
|
|
|
os << t.cs();
|
|
|
|
|
end_inset(os);
|
2010-12-19 14:54:23 +00:00
|
|
|
|
skip_spaces_braces(p);
|
2005-07-26 11:58:43 +00:00
|
|
|
|
}
|
|
|
|
|
|
2011-01-21 21:21:58 +00:00
|
|
|
|
else if (t.cs() == "DeclareRobustCommand" ||
|
|
|
|
|
t.cs() == "DeclareRobustCommandx" ||
|
|
|
|
|
t.cs() == "newcommand" ||
|
|
|
|
|
t.cs() == "newcommandx" ||
|
2011-11-25 01:01:45 +00:00
|
|
|
|
t.cs() == "providecommand" ||
|
2011-01-21 21:21:58 +00:00
|
|
|
|
t.cs() == "providecommandx" ||
|
|
|
|
|
t.cs() == "renewcommand" ||
|
|
|
|
|
t.cs() == "renewcommandx") {
|
|
|
|
|
// DeclareRobustCommand, DeclareRobustCommandx,
|
|
|
|
|
// providecommand and providecommandx could be handled
|
|
|
|
|
// by parse_command(), but we need to call
|
|
|
|
|
// add_known_command() here.
|
2005-01-06 13:22:20 +00:00
|
|
|
|
string name = t.asInput();
|
|
|
|
|
if (p.next_token().asInput() == "*") {
|
|
|
|
|
// Starred form. Eat '*'
|
|
|
|
|
p.get_token();
|
|
|
|
|
name += '*';
|
|
|
|
|
}
|
|
|
|
|
string const command = p.verbatim_item();
|
2011-01-28 20:29:06 +00:00
|
|
|
|
string const opt1 = p.getFullOpt();
|
2008-04-29 18:11:46 +00:00
|
|
|
|
string const opt2 = p.getFullOpt();
|
|
|
|
|
add_known_command(command, opt1, !opt2.empty());
|
|
|
|
|
string const ert = name + '{' + command + '}' +
|
|
|
|
|
opt1 + opt2 +
|
|
|
|
|
'{' + p.verbatim_item() + '}';
|
2007-05-28 22:27:45 +00:00
|
|
|
|
|
2011-01-21 21:21:58 +00:00
|
|
|
|
if (t.cs() == "DeclareRobustCommand" ||
|
|
|
|
|
t.cs() == "DeclareRobustCommandx" ||
|
|
|
|
|
t.cs() == "providecommand" ||
|
|
|
|
|
t.cs() == "providecommandx" ||
|
2011-01-07 20:21:10 +00:00
|
|
|
|
name[name.length()-1] == '*')
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, ert, context);
|
2010-12-19 19:21:53 +00:00
|
|
|
|
else {
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
begin_inset(os, "FormulaMacro");
|
|
|
|
|
os << "\n" << ert;
|
|
|
|
|
end_inset(os);
|
|
|
|
|
}
|
2005-01-06 13:22:20 +00:00
|
|
|
|
}
|
2011-01-02 15:39:48 +00:00
|
|
|
|
|
|
|
|
|
else if (t.cs() == "let" && p.next_token().asInput() != "*") {
|
|
|
|
|
// let could be handled by parse_command(),
|
|
|
|
|
// but we need to call add_known_command() here.
|
|
|
|
|
string ert = t.asInput();
|
|
|
|
|
string name;
|
|
|
|
|
p.skip_spaces();
|
|
|
|
|
if (p.next_token().cat() == catBegin) {
|
|
|
|
|
name = p.verbatim_item();
|
|
|
|
|
ert += '{' + name + '}';
|
|
|
|
|
} else {
|
|
|
|
|
name = p.verbatim_item();
|
|
|
|
|
ert += name;
|
|
|
|
|
}
|
|
|
|
|
string command;
|
|
|
|
|
p.skip_spaces();
|
|
|
|
|
if (p.next_token().cat() == catBegin) {
|
|
|
|
|
command = p.verbatim_item();
|
|
|
|
|
ert += '{' + command + '}';
|
|
|
|
|
} else {
|
|
|
|
|
command = p.verbatim_item();
|
|
|
|
|
ert += command;
|
|
|
|
|
}
|
|
|
|
|
// If command is known, make name known too, to parse
|
|
|
|
|
// its arguments correctly. For this reason we also
|
|
|
|
|
// have commands in syntax.default that are hardcoded.
|
|
|
|
|
CommandMap::iterator it = known_commands.find(command);
|
|
|
|
|
if (it != known_commands.end())
|
|
|
|
|
known_commands[t.asInput()] = it->second;
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, ert, context);
|
2011-01-02 15:39:48 +00:00
|
|
|
|
}
|
|
|
|
|
|
2010-12-19 21:24:24 +00:00
|
|
|
|
else if (t.cs() == "hspace" || t.cs() == "vspace") {
|
2003-12-10 08:33:37 +00:00
|
|
|
|
bool starred = false;
|
|
|
|
|
if (p.next_token().asInput() == "*") {
|
|
|
|
|
p.get_token();
|
|
|
|
|
starred = true;
|
|
|
|
|
}
|
2010-12-19 21:24:24 +00:00
|
|
|
|
string name = t.asInput();
|
2003-12-10 08:33:37 +00:00
|
|
|
|
string const length = p.verbatim_item();
|
|
|
|
|
string unit;
|
|
|
|
|
string valstring;
|
|
|
|
|
bool valid = splitLatexLength(length, valstring, unit);
|
2010-12-19 21:24:24 +00:00
|
|
|
|
bool known_hspace = false;
|
2003-12-10 08:33:37 +00:00
|
|
|
|
bool known_vspace = false;
|
|
|
|
|
bool known_unit = false;
|
|
|
|
|
double value;
|
|
|
|
|
if (valid) {
|
|
|
|
|
istringstream iss(valstring);
|
|
|
|
|
iss >> value;
|
|
|
|
|
if (value == 1.0) {
|
2010-12-19 21:24:24 +00:00
|
|
|
|
if (t.cs()[0] == 'h') {
|
|
|
|
|
if (unit == "\\fill") {
|
|
|
|
|
if (!starred) {
|
|
|
|
|
unit = "";
|
2010-12-20 20:45:18 +00:00
|
|
|
|
name = "\\hfill";
|
2010-12-19 21:24:24 +00:00
|
|
|
|
}
|
|
|
|
|
known_hspace = true;
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
if (unit == "\\smallskipamount") {
|
|
|
|
|
unit = "smallskip";
|
|
|
|
|
known_vspace = true;
|
|
|
|
|
} else if (unit == "\\medskipamount") {
|
|
|
|
|
unit = "medskip";
|
|
|
|
|
known_vspace = true;
|
|
|
|
|
} else if (unit == "\\bigskipamount") {
|
|
|
|
|
unit = "bigskip";
|
|
|
|
|
known_vspace = true;
|
|
|
|
|
} else if (unit == "\\fill") {
|
|
|
|
|
unit = "vfill";
|
|
|
|
|
known_vspace = true;
|
|
|
|
|
}
|
2003-12-10 08:33:37 +00:00
|
|
|
|
}
|
2004-03-27 16:51:21 +00:00
|
|
|
|
}
|
2010-12-19 21:24:24 +00:00
|
|
|
|
if (!known_hspace && !known_vspace) {
|
2003-12-10 08:33:37 +00:00
|
|
|
|
switch (unitFromString(unit)) {
|
2007-04-28 12:58:49 +00:00
|
|
|
|
case Length::SP:
|
|
|
|
|
case Length::PT:
|
|
|
|
|
case Length::BP:
|
|
|
|
|
case Length::DD:
|
|
|
|
|
case Length::MM:
|
|
|
|
|
case Length::PC:
|
|
|
|
|
case Length::CC:
|
|
|
|
|
case Length::CM:
|
|
|
|
|
case Length::IN:
|
|
|
|
|
case Length::EX:
|
|
|
|
|
case Length::EM:
|
|
|
|
|
case Length::MU:
|
2003-12-10 08:33:37 +00:00
|
|
|
|
known_unit = true;
|
|
|
|
|
break;
|
|
|
|
|
default:
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2008-04-29 18:11:46 +00:00
|
|
|
|
|
2010-12-19 21:24:24 +00:00
|
|
|
|
if (t.cs()[0] == 'h' && (known_unit || known_hspace)) {
|
|
|
|
|
// Literal horizontal length or known variable
|
|
|
|
|
context.check_layout(os);
|
2010-12-27 18:14:55 +00:00
|
|
|
|
begin_inset(os, "space ");
|
2010-12-19 21:24:24 +00:00
|
|
|
|
os << name;
|
|
|
|
|
if (starred)
|
|
|
|
|
os << '*';
|
|
|
|
|
os << '{';
|
|
|
|
|
if (known_hspace)
|
|
|
|
|
os << unit;
|
2010-12-20 20:45:18 +00:00
|
|
|
|
os << "}";
|
2010-12-19 21:24:24 +00:00
|
|
|
|
if (known_unit && !known_hspace)
|
2010-12-20 20:45:18 +00:00
|
|
|
|
os << "\n\\length "
|
|
|
|
|
<< translate_len(length);
|
2010-12-19 21:24:24 +00:00
|
|
|
|
end_inset(os);
|
|
|
|
|
} else if (known_unit || known_vspace) {
|
|
|
|
|
// Literal vertical length or known variable
|
2003-12-10 08:33:37 +00:00
|
|
|
|
context.check_layout(os);
|
|
|
|
|
begin_inset(os, "VSpace ");
|
|
|
|
|
if (known_unit)
|
|
|
|
|
os << value;
|
|
|
|
|
os << unit;
|
|
|
|
|
if (starred)
|
|
|
|
|
os << '*';
|
|
|
|
|
end_inset(os);
|
2003-11-05 10:14:13 +00:00
|
|
|
|
} else {
|
2010-12-27 18:14:55 +00:00
|
|
|
|
// LyX can't handle other length variables in Inset VSpace/space
|
2003-12-10 08:33:37 +00:00
|
|
|
|
if (starred)
|
|
|
|
|
name += '*';
|
|
|
|
|
if (valid) {
|
|
|
|
|
if (value == 1.0)
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, name + '{' + unit + '}', context);
|
2003-12-10 08:33:37 +00:00
|
|
|
|
else if (value == -1.0)
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, name + "{-" + unit + '}', context);
|
2003-12-10 08:33:37 +00:00
|
|
|
|
else
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, name + '{' + valstring + unit + '}', context);
|
2003-12-10 08:33:37 +00:00
|
|
|
|
} else
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, name + '{' + length + '}', context);
|
2003-11-05 10:14:13 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2011-01-07 21:04:16 +00:00
|
|
|
|
// The single '=' is meant here.
|
|
|
|
|
else if ((newinsetlayout = findInsetLayout(context.textclass, t.cs(), true))) {
|
|
|
|
|
p.skip_spaces();
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
begin_inset(os, "Flex ");
|
|
|
|
|
os << to_utf8(newinsetlayout->name()) << '\n'
|
|
|
|
|
<< "status collapsed\n";
|
2013-02-12 17:36:25 +00:00
|
|
|
|
if (newinsetlayout->isPassThru()) {
|
|
|
|
|
// set catcodes to verbatim early, just in case.
|
|
|
|
|
p.setCatcodes(VERBATIM_CATCODES);
|
|
|
|
|
string delim = p.get_token().asInput();
|
|
|
|
|
if (delim != "{")
|
|
|
|
|
cerr << "Warning: bad delimiter for command " << t.asInput() << endl;
|
2013-02-22 14:35:38 +00:00
|
|
|
|
//FIXME: handle error condition
|
|
|
|
|
string const arg = p.verbatimStuff("}").second;
|
2013-02-12 17:36:25 +00:00
|
|
|
|
Context newcontext(true, context.textclass);
|
|
|
|
|
if (newinsetlayout->forcePlainLayout())
|
|
|
|
|
newcontext.layout = &context.textclass.plainLayout();
|
|
|
|
|
output_ert(os, arg, newcontext);
|
|
|
|
|
} else
|
|
|
|
|
|
|
|
|
|
parse_text_in_inset(p, os, FLAG_ITEM, false, context, newinsetlayout);
|
2011-01-07 21:04:16 +00:00
|
|
|
|
end_inset(os);
|
|
|
|
|
}
|
|
|
|
|
|
2011-12-05 00:57:42 +00:00
|
|
|
|
else if (t.cs() == "includepdf") {
|
|
|
|
|
p.skip_spaces();
|
|
|
|
|
string const arg = p.getArg('[', ']');
|
|
|
|
|
map<string, string> opts;
|
|
|
|
|
vector<string> keys;
|
|
|
|
|
split_map(arg, opts, keys);
|
|
|
|
|
string name = normalize_filename(p.verbatim_item());
|
2012-10-03 11:23:27 +00:00
|
|
|
|
string const path = getMasterFilePath(true);
|
2011-12-05 00:57:42 +00:00
|
|
|
|
// We want to preserve relative / absolute filenames,
|
|
|
|
|
// therefore path is only used for testing
|
|
|
|
|
if (!makeAbsPath(name, path).exists()) {
|
|
|
|
|
// The file extension is probably missing.
|
|
|
|
|
// Now try to find it out.
|
|
|
|
|
char const * const pdfpages_format[] = {"pdf", 0};
|
|
|
|
|
string const pdftex_name =
|
|
|
|
|
find_file(name, path, pdfpages_format);
|
|
|
|
|
if (!pdftex_name.empty()) {
|
|
|
|
|
name = pdftex_name;
|
|
|
|
|
pdflatex = true;
|
|
|
|
|
}
|
|
|
|
|
}
|
2012-10-03 11:23:27 +00:00
|
|
|
|
FileName const absname = makeAbsPath(name, path);
|
|
|
|
|
if (absname.exists())
|
|
|
|
|
{
|
|
|
|
|
fix_child_filename(name);
|
|
|
|
|
copy_file(absname, name);
|
|
|
|
|
} else
|
2011-12-05 00:57:42 +00:00
|
|
|
|
cerr << "Warning: Could not find file '"
|
|
|
|
|
<< name << "'." << endl;
|
|
|
|
|
// write output
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
begin_inset(os, "External\n\ttemplate ");
|
|
|
|
|
os << "PDFPages\n\tfilename "
|
|
|
|
|
<< name << "\n";
|
|
|
|
|
// parse the options
|
|
|
|
|
if (opts.find("pages") != opts.end())
|
|
|
|
|
os << "\textra LaTeX \"pages="
|
|
|
|
|
<< opts["pages"] << "\"\n";
|
|
|
|
|
if (opts.find("angle") != opts.end())
|
|
|
|
|
os << "\trotateAngle "
|
|
|
|
|
<< opts["angle"] << '\n';
|
|
|
|
|
if (opts.find("origin") != opts.end()) {
|
|
|
|
|
ostringstream ss;
|
|
|
|
|
string const opt = opts["origin"];
|
|
|
|
|
if (opt == "tl") ss << "topleft";
|
|
|
|
|
if (opt == "bl") ss << "bottomleft";
|
|
|
|
|
if (opt == "Bl") ss << "baselineleft";
|
|
|
|
|
if (opt == "c") ss << "center";
|
|
|
|
|
if (opt == "tc") ss << "topcenter";
|
|
|
|
|
if (opt == "bc") ss << "bottomcenter";
|
|
|
|
|
if (opt == "Bc") ss << "baselinecenter";
|
|
|
|
|
if (opt == "tr") ss << "topright";
|
|
|
|
|
if (opt == "br") ss << "bottomright";
|
|
|
|
|
if (opt == "Br") ss << "baselineright";
|
|
|
|
|
if (!ss.str().empty())
|
|
|
|
|
os << "\trotateOrigin " << ss.str() << '\n';
|
|
|
|
|
else
|
|
|
|
|
cerr << "Warning: Ignoring unknown includegraphics origin argument '" << opt << "'\n";
|
|
|
|
|
}
|
|
|
|
|
if (opts.find("width") != opts.end())
|
|
|
|
|
os << "\twidth "
|
|
|
|
|
<< translate_len(opts["width"]) << '\n';
|
|
|
|
|
if (opts.find("height") != opts.end())
|
|
|
|
|
os << "\theight "
|
|
|
|
|
<< translate_len(opts["height"]) << '\n';
|
|
|
|
|
if (opts.find("keepaspectratio") != opts.end())
|
|
|
|
|
os << "\tkeepAspectRatio\n";
|
|
|
|
|
end_inset(os);
|
|
|
|
|
context.check_layout(os);
|
2011-12-13 19:40:05 +00:00
|
|
|
|
registerExternalTemplatePackages("PDFPages");
|
2011-12-05 00:57:42 +00:00
|
|
|
|
}
|
|
|
|
|
|
2011-11-25 01:01:45 +00:00
|
|
|
|
else if (t.cs() == "loadgame") {
|
|
|
|
|
p.skip_spaces();
|
|
|
|
|
string name = normalize_filename(p.verbatim_item());
|
2012-10-03 11:23:27 +00:00
|
|
|
|
string const path = getMasterFilePath(true);
|
2011-11-25 01:01:45 +00:00
|
|
|
|
// We want to preserve relative / absolute filenames,
|
|
|
|
|
// therefore path is only used for testing
|
|
|
|
|
if (!makeAbsPath(name, path).exists()) {
|
|
|
|
|
// The file extension is probably missing.
|
|
|
|
|
// Now try to find it out.
|
|
|
|
|
char const * const lyxskak_format[] = {"fen", 0};
|
|
|
|
|
string const lyxskak_name =
|
|
|
|
|
find_file(name, path, lyxskak_format);
|
|
|
|
|
if (!lyxskak_name.empty())
|
|
|
|
|
name = lyxskak_name;
|
|
|
|
|
}
|
2012-10-03 11:23:27 +00:00
|
|
|
|
FileName const absname = makeAbsPath(name, path);
|
|
|
|
|
if (absname.exists())
|
|
|
|
|
{
|
|
|
|
|
fix_child_filename(name);
|
|
|
|
|
copy_file(absname, name);
|
|
|
|
|
} else
|
2011-11-25 01:01:45 +00:00
|
|
|
|
cerr << "Warning: Could not find file '"
|
|
|
|
|
<< name << "'." << endl;
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
begin_inset(os, "External\n\ttemplate ");
|
|
|
|
|
os << "ChessDiagram\n\tfilename "
|
|
|
|
|
<< name << "\n";
|
|
|
|
|
end_inset(os);
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
// after a \loadgame follows a \showboard
|
|
|
|
|
if (p.get_token().asInput() == "showboard")
|
|
|
|
|
p.get_token();
|
2011-12-13 19:40:05 +00:00
|
|
|
|
registerExternalTemplatePackages("ChessDiagram");
|
2011-11-25 01:01:45 +00:00
|
|
|
|
}
|
|
|
|
|
|
2003-04-17 09:47:21 +00:00
|
|
|
|
else {
|
2008-11-16 23:24:56 +00:00
|
|
|
|
// try to see whether the string is in unicodesymbols
|
2010-12-12 13:23:05 +00:00
|
|
|
|
// Only use text mode commands, since we are in text mode here,
|
|
|
|
|
// and math commands may be invalid (bug 6797)
|
2013-02-18 23:07:24 +00:00
|
|
|
|
string name = t.asInput();
|
2013-02-22 00:11:30 +00:00
|
|
|
|
// handle the dingbats and Cyrillic
|
|
|
|
|
if (name == "\\ding" || name == "\\textcyr")
|
|
|
|
|
name = name + '{' + p.getArg('{', '}') + '}';
|
|
|
|
|
// handle the ifsym characters
|
2013-02-24 14:44:16 +00:00
|
|
|
|
else if (name == "\\textifsymbol") {
|
2013-02-22 00:11:30 +00:00
|
|
|
|
string const optif = p.getFullOpt();
|
|
|
|
|
string const argif = p.getArg('{', '}');
|
|
|
|
|
name = name + optif + '{' + argif + '}';
|
|
|
|
|
}
|
|
|
|
|
// handle the \ascii characters
|
|
|
|
|
// the case of \ascii within braces, as LyX outputs it, is already
|
|
|
|
|
// handled for t.cat() == catBegin
|
2013-02-24 14:44:16 +00:00
|
|
|
|
else if (name == "\\ascii") {
|
2013-02-22 00:11:30 +00:00
|
|
|
|
// the code is "\ascii\xxx"
|
|
|
|
|
name = "{" + name + p.get_token().asInput() + "}";
|
|
|
|
|
skip_braces(p);
|
|
|
|
|
}
|
2013-02-18 23:07:24 +00:00
|
|
|
|
// handle some TIPA special characters
|
2013-02-24 14:44:16 +00:00
|
|
|
|
else if (preamble.isPackageUsed("tipa")) {
|
|
|
|
|
if (name == "\\textglobfall") {
|
|
|
|
|
name = "End";
|
2013-02-18 23:07:24 +00:00
|
|
|
|
skip_braces(p);
|
2013-02-24 14:44:16 +00:00
|
|
|
|
} else if (name == "\\s") {
|
|
|
|
|
// fromLaTeXCommand() does not yet
|
|
|
|
|
// recognize tipa short cuts
|
|
|
|
|
name = "\\textsyllabic";
|
|
|
|
|
} else if (name == "\\=" &&
|
|
|
|
|
p.next_token().asInput() == "*") {
|
|
|
|
|
// fromLaTeXCommand() does not yet
|
|
|
|
|
// recognize tipa short cuts
|
2013-02-18 23:07:24 +00:00
|
|
|
|
p.get_token();
|
2013-02-24 14:44:16 +00:00
|
|
|
|
name = "\\b";
|
|
|
|
|
} else if (name == "\\textdoublevertline") {
|
|
|
|
|
// FIXME: This is not correct,
|
|
|
|
|
// \textvertline is higher than \textbardbl
|
|
|
|
|
name = "\\textbardbl";
|
2013-02-18 23:07:24 +00:00
|
|
|
|
skip_braces(p);
|
2013-02-24 14:44:16 +00:00
|
|
|
|
} else if (name == "\\!" ) {
|
|
|
|
|
if (p.next_token().asInput() == "b") {
|
|
|
|
|
p.get_token(); // eat 'b'
|
|
|
|
|
name = "\\texthtb";
|
|
|
|
|
skip_braces(p);
|
|
|
|
|
} else if (p.next_token().asInput() == "d") {
|
|
|
|
|
p.get_token();
|
|
|
|
|
name = "\\texthtd";
|
|
|
|
|
skip_braces(p);
|
|
|
|
|
} else if (p.next_token().asInput() == "g") {
|
|
|
|
|
p.get_token();
|
|
|
|
|
name = "\\texthtg";
|
|
|
|
|
skip_braces(p);
|
|
|
|
|
} else if (p.next_token().asInput() == "G") {
|
|
|
|
|
p.get_token();
|
|
|
|
|
name = "\\texthtscg";
|
|
|
|
|
skip_braces(p);
|
|
|
|
|
} else if (p.next_token().asInput() == "j") {
|
|
|
|
|
p.get_token();
|
|
|
|
|
name = "\\texthtbardotlessj";
|
|
|
|
|
skip_braces(p);
|
|
|
|
|
} else if (p.next_token().asInput() == "o") {
|
|
|
|
|
p.get_token();
|
|
|
|
|
name = "\\textbullseye";
|
|
|
|
|
skip_braces(p);
|
|
|
|
|
}
|
|
|
|
|
} else if (name == "\\*" ) {
|
|
|
|
|
if (p.next_token().asInput() == "k") {
|
|
|
|
|
p.get_token();
|
|
|
|
|
name = "\\textturnk";
|
|
|
|
|
skip_braces(p);
|
|
|
|
|
} else if (p.next_token().asInput() == "r") {
|
|
|
|
|
p.get_token(); // eat 'r'
|
|
|
|
|
name = "\\textturnr";
|
|
|
|
|
skip_braces(p);
|
|
|
|
|
} else if (p.next_token().asInput() == "t") {
|
|
|
|
|
p.get_token();
|
|
|
|
|
name = "\\textturnt";
|
|
|
|
|
skip_braces(p);
|
|
|
|
|
} else if (p.next_token().asInput() == "w") {
|
|
|
|
|
p.get_token();
|
|
|
|
|
name = "\\textturnw";
|
|
|
|
|
skip_braces(p);
|
|
|
|
|
}
|
2013-02-18 23:07:24 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2013-02-24 14:44:16 +00:00
|
|
|
|
if ((name.size() == 2 &&
|
|
|
|
|
contains("\"'.=^`bcdHkrtuv~", name[1]) &&
|
|
|
|
|
p.next_token().asInput() != "*") ||
|
|
|
|
|
is_known(name.substr(1), known_tipa_marks)) {
|
|
|
|
|
// name is a command that corresponds to a
|
|
|
|
|
// combining character in unicodesymbols.
|
|
|
|
|
// Append the argument, fromLaTeXCommand()
|
|
|
|
|
// will either convert it to a single
|
|
|
|
|
// character or a combining sequence.
|
|
|
|
|
name += '{' + p.verbatim_item() + '}';
|
2013-02-18 23:07:24 +00:00
|
|
|
|
}
|
|
|
|
|
// now get the character from unicodesymbols
|
2013-02-24 14:44:16 +00:00
|
|
|
|
bool termination;
|
|
|
|
|
docstring rem;
|
|
|
|
|
set<string> req;
|
2013-02-18 23:07:24 +00:00
|
|
|
|
docstring s = encodings.fromLaTeXCommand(from_utf8(name),
|
2012-03-25 13:36:00 +00:00
|
|
|
|
Encodings::TEXT_CMD, termination, rem, &req);
|
2008-11-16 23:24:56 +00:00
|
|
|
|
if (!s.empty()) {
|
|
|
|
|
context.check_layout(os);
|
|
|
|
|
os << to_utf8(s);
|
2013-02-24 14:44:16 +00:00
|
|
|
|
if (!rem.empty())
|
|
|
|
|
output_ert_inset(os, to_utf8(rem), context);
|
2012-03-25 13:36:00 +00:00
|
|
|
|
if (termination)
|
|
|
|
|
skip_spaces_braces(p);
|
2012-05-28 20:41:32 +00:00
|
|
|
|
for (set<string>::const_iterator it = req.begin(); it != req.end(); ++it)
|
2011-12-08 20:05:51 +00:00
|
|
|
|
preamble.registerAutomaticallyLoadedPackage(*it);
|
2008-11-16 23:24:56 +00:00
|
|
|
|
}
|
2003-04-17 09:47:21 +00:00
|
|
|
|
//cerr << "#: " << t << " mode: " << mode << endl;
|
|
|
|
|
// heuristic: read up to next non-nested space
|
|
|
|
|
/*
|
|
|
|
|
string s = t.asInput();
|
2003-04-23 15:14:43 +00:00
|
|
|
|
string z = p.verbatim_item();
|
2012-10-21 19:14:16 +00:00
|
|
|
|
while (p.good() && z != " " && !z.empty()) {
|
2003-04-17 09:47:21 +00:00
|
|
|
|
//cerr << "read: " << z << endl;
|
|
|
|
|
s += z;
|
2003-04-23 15:14:43 +00:00
|
|
|
|
z = p.verbatim_item();
|
2003-04-17 09:47:21 +00:00
|
|
|
|
}
|
|
|
|
|
cerr << "found ERT: " << s << endl;
|
2013-02-04 14:14:30 +00:00
|
|
|
|
output_ert_inset(os, s + ' ', context);
|
2003-04-17 09:47:21 +00:00
|
|
|
|
*/
|
2008-11-16 23:24:56 +00:00
|
|
|
|
else {
|
2013-02-24 14:44:16 +00:00
|
|
|
|
if (t.asInput() == name &&
|
|
|
|
|
p.next_token().asInput() == "*") {
|
2008-11-16 23:24:56 +00:00
|
|
|
|
// Starred commands like \vspace*{}
|
|
|
|
|
p.get_token(); // Eat '*'
|
2013-02-24 14:44:16 +00:00
|
|
|
|
name += '*';
|
2008-11-16 23:24:56 +00:00
|
|
|
|
}
|
2013-02-24 14:44:16 +00:00
|
|
|
|
if (!parse_command(name, p, os, outer, context))
|
|
|
|
|
output_ert_inset(os, name, context);
|
2003-11-05 10:14:13 +00:00
|
|
|
|
}
|
2003-04-17 09:47:21 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (flags & FLAG_LEAVE) {
|
|
|
|
|
flags &= ~FLAG_LEAVE;
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2012-10-06 07:38:14 +00:00
|
|
|
|
|
|
|
|
|
// Guess the dominant language of the remaining input by weighing the
// amount of main-language text against the text found inside CJK
// environments. Returns either \p lang or one of the supported CJK
// language names — whichever accounts for the most characters.
string guessLanguage(Parser & p, string const & lang)
{
	typedef std::map<std::string, size_t> LangMap;
	// number of characters attributed to each candidate language
	LangMap counts;
	counts[lang] = 0;
	for (char const * const * l = supported_CJK_languages; *l; ++l)
		counts[string(*l)] = 0;

	while (p.good()) {
		Token const tok = p.get_token();
		// comments do not contribute to any language
		if (tok.cat() == catComment)
			continue;
		if (tok.cat() != catEscape) {
			// Non-CJK content is counted for lang.
			// We do not care about the real language here:
			// If we have more non-CJK contents than CJK contents,
			// we simply use the language that was specified as
			// babel main language.
			counts[lang] += tok.asInput().length();
			continue;
		}
		// Commands are not counted either, but we must watch for
		// \inputencoding (encoding switch) and \begin{CJK}.
		if (tok.cs() == "inputencoding") {
			string const enc = subst(p.verbatim_item(), "\n", " ");
			p.setEncoding(enc, Encoding::inputenc);
			continue;
		}
		if (tok.cs() != "begin")
			continue;
		// An environment starts here: peek at its name without
		// consuming it, then rewind.
		p.pushPosition();
		bool const isCJK = p.getArg('{', '}') == "CJK";
		p.popPosition();
		if (!isCJK)
			continue;
		// It is a CJK environment: re-read the arguments for real.
		/* name = */ p.getArg('{', '}');
		string const encoding = p.getArg('{', '}');
		/* mapping = */ p.getArg('{', '}');
		string const encoding_old = p.getEncoding();
		char const * const * const where =
			is_known(encoding, supported_CJK_encodings);
		if (where)
			p.setEncoding(encoding, Encoding::CJK);
		else
			p.setEncoding("UTF-8");
		string const text = p.ertEnvironment("CJK");
		p.setEncoding(encoding_old);
		p.skip_spaces();
		// contents in an unknown CJK encoding are ignored
		if (!where)
			continue;
		// the language matching the recognized encoding (the two
		// arrays are kept in parallel)
		string const cjk =
			supported_CJK_languages[where - supported_CJK_encodings];
		counts[cjk] += text.length();
	}
	// pick the (first) language with the highest character count
	LangMap::const_iterator best = counts.begin();
	for (LangMap::const_iterator it = counts.begin(); it != counts.end(); ++it) {
		if (it->second > best->second)
			best = it;
	}
	return best->first;
}
|
|
|
|
|
|
2003-04-17 09:47:21 +00:00
|
|
|
|
// }])
|
2006-10-21 00:16:43 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
} // namespace lyx
|