/**
 * \file lyxfind.cpp
 * This file is part of LyX, the document processor.
 * License details can be found in the file COPYING.
 *
 * \author Lars Gullik Bjønnes
 * \author John Levon
 * \author Jürgen Vigna
 * \author Alfredo Braunstein
 * \author Tommaso Cucinotta
 *
 * Full author contact details are available in file CREDITS.
 */

#include <config.h>

#include "lyxfind.h"

#include "Buffer.h"
#include "BufferList.h"
#include "BufferParams.h"
#include "BufferView.h"
#include "Changes.h"
#include "Cursor.h"
#include "CutAndPaste.h"
#include "FuncRequest.h"
#include "LyX.h"
#include "output_latex.h"
#include "OutputParams.h"
#include "Paragraph.h"
#include "Text.h"
#include "Encoding.h"

#include "frontends/Application.h"
#include "frontends/alert.h"

#include "mathed/InsetMath.h"
#include "mathed/InsetMathHull.h"
#include "mathed/MathData.h"
#include "mathed/MathStream.h"
#include "mathed/MathSupport.h"

#include "support/debug.h"
#include "support/docstream.h"
#include "support/FileName.h"
#include "support/gettext.h"
#include "support/lassert.h"
#include "support/lstrings.h"
#include "support/regex.h"
#include "support/textutils.h"

#include <map>

using namespace std;
using namespace lyx::support;

namespace lyx {

// Helper class for deciding what should be ignored
class IgnoreFormats {
public:
    ///
    IgnoreFormats() = default;
    ///
    bool getFamily() const { return ignoreFamily_; }
    ///
    bool getSeries() const { return ignoreSeries_; }
    ///
    bool getShape() const { return ignoreShape_; }
    ///
    bool getUnderline() const { return ignoreUnderline_; }
    ///
    bool getMarkUp() const { return ignoreMarkUp_; }
    ///
    bool getStrikeOut() const { return ignoreStrikeOut_; }
    ///
    bool getSectioning() const { return ignoreSectioning_; }
    ///
    bool getFrontMatter() const { return ignoreFrontMatter_; }
    ///
    bool getColor() const { return ignoreColor_; }
    ///
    bool getLanguage() const { return ignoreLanguage_; }
    ///
    void setIgnoreFormat(string const & type, bool value);

private:
    ///
    bool ignoreFamily_ = false;
    ///
    bool ignoreSeries_ = false;
    ///
    bool ignoreShape_ = false;
    ///
    bool ignoreUnderline_ = false;
    ///
    bool ignoreMarkUp_ = false;
    ///
    bool ignoreStrikeOut_ = false;
    ///
    bool ignoreSectioning_ = false;
    ///
    bool ignoreFrontMatter_ = false;
    ///
    bool ignoreColor_ = false;
    ///
    bool ignoreLanguage_ = false;
};


void IgnoreFormats::setIgnoreFormat(string const & type, bool value)
{
    if (type == "color") {
        ignoreColor_ = value;
    }
    else if (type == "language") {
        ignoreLanguage_ = value;
    }
    else if (type == "sectioning") {
        ignoreSectioning_ = value;
        ignoreFrontMatter_ = value;
    }
    else if (type == "font") {
        ignoreSeries_ = value;
        ignoreShape_ = value;
        ignoreFamily_ = value;
    }
    else if (type == "series") {
        ignoreSeries_ = value;
    }
    else if (type == "shape") {
        ignoreShape_ = value;
    }
    else if (type == "family") {
        ignoreFamily_ = value;
    }
    else if (type == "markup") {
        ignoreMarkUp_ = value;
    }
    else if (type == "underline") {
        ignoreUnderline_ = value;
    }
    else if (type == "strike") {
        ignoreStrikeOut_ = value;
    }
}

// The global variable that can be changed from outside
IgnoreFormats ignoreFormats;


void setIgnoreFormat(string const & type, bool value)
{
    ignoreFormats.setIgnoreFormat(type, value);
}


namespace {

bool parse_bool(docstring & howto)
{
    if (howto.empty())
        return false;
    docstring var;
    howto = split(howto, var, ' ');
    return var == "1";
}

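// Functor checking whether the search string occurs in a given paragraph
// at a given position.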
class MatchString
{
public:
    MatchString(docstring const & s, bool cs, bool mw)
        : str(s), case_sens(cs), whole_words(mw)
    {}

    // Returns the length of the match if the specified string is at the
    // specified position, and 0 otherwise.
    // del specifies whether deleted strings in ct mode will be considered.
    int operator()(Paragraph const & par, pos_type pos, bool del = true) const
    {
        return par.find(str, case_sens, whole_words, pos, del);
    }

private:
    // search string
    docstring str;
    // case sensitive
    bool case_sens;
    // match whole words only
    bool whole_words;
};

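// Advance cur forward (through texted insets only) until the search string
// matches; returns the match length, or 0 if no match was found.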
int findForward(DocIterator & cur, MatchString const & match,
                bool find_del = true)
{
    for (; cur; cur.forwardChar())
        if (cur.inTexted()) {
            int len = match(cur.paragraph(), cur.pos(), find_del);
            if (len > 0)
                return len;
        }
    return 0;
}

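// Move cur backwards until the search string matches;
// returns the match length, or 0 if no match was found.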
int findBackwards(DocIterator & cur, MatchString const & match,
                  bool find_del = true)
{
    while (cur) {
        cur.backwardChar();
        if (cur.inTexted()) {
            int len = match(cur.paragraph(), cur.pos(), find_del);
            if (len > 0)
                return len;
        }
    }
    return 0;
}

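// Reject an empty search string and alert the user about it.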
bool searchAllowed(docstring const & str)
{
    if (str.empty()) {
        frontend::Alert::error(_("Search error"), _("Search string is empty"));
        return false;
    }
    return true;
}

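// Search for the next occurrence of searchstr starting from the current
// selection and select it if found. With check_wrap, offer to continue
// the search from the other end of the document when nothing is found.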
bool findOne(BufferView * bv, docstring const & searchstr,
             bool case_sens, bool whole, bool forward,
             bool find_del = true, bool check_wrap = false)
{
    if (!searchAllowed(searchstr))
        return false;

    DocIterator cur = forward
        ? bv->cursor().selectionEnd()
        : bv->cursor().selectionBegin();

    MatchString const match(searchstr, case_sens, whole);

    int match_len = forward
        ? findForward(cur, match, find_del)
        : findBackwards(cur, match, find_del);

    if (match_len > 0)
        bv->putSelectionAt(cur, match_len, !forward);
    else if (check_wrap) {
        DocIterator cur_orig(bv->cursor());
        docstring q;
        if (forward)
            q = _("End of file reached while searching forward.\n"
                  "Continue searching from the beginning?");
        else
            q = _("Beginning of file reached while searching backward.\n"
                  "Continue searching from the end?");
        int wrap_answer = frontend::Alert::prompt(_("Wrap search?"),
            q, 0, 1, _("&Yes"), _("&No"));
        if (wrap_answer == 0) {
            if (forward) {
                bv->cursor().clear();
                bv->cursor().push_back(CursorSlice(bv->buffer().inset()));
            } else {
                bv->cursor().setCursor(doc_iterator_end(&bv->buffer()));
                bv->cursor().backwardPos();
            }
            bv->clearSelection();
            if (findOne(bv, searchstr, case_sens, whole, forward, find_del, false))
                return true;
        }
        bv->cursor().setCursor(cur_orig);
        return false;
    }

    return match_len > 0;
}

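// Replace every occurrence of searchstr in the buffer by replacestr and
// return the number of replacements; the original cursor position is restored.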
int replaceAll(BufferView * bv,
               docstring const & searchstr, docstring const & replacestr,
               bool case_sens, bool whole)
{
    Buffer & buf = bv->buffer();

    if (!searchAllowed(searchstr) || buf.isReadonly())
        return 0;

    DocIterator cur_orig(bv->cursor());

    MatchString const match(searchstr, case_sens, whole);
    int num = 0;

    int const rsize = replacestr.size();
    int const ssize = searchstr.size();

    Cursor cur(*bv);
    cur.setCursor(doc_iterator_begin(&buf));
    int match_len = findForward(cur, match, false);
    while (match_len > 0) {
        // Backup current cursor position and font.
        pos_type const pos = cur.pos();
        Font const font = cur.paragraph().getFontSettings(buf.params(), pos);
        cur.recordUndo();
        int striked = ssize -
            cur.paragraph().eraseChars(pos, pos + match_len,
                                       buf.params().track_changes);
        cur.paragraph().insert(pos, replacestr, font,
                               Change(buf.params().track_changes
                                      ? Change::INSERTED
                                      : Change::UNCHANGED));
        for (int i = 0; i < rsize + striked; ++i)
            cur.forwardChar();
        ++num;
        match_len = findForward(cur, match, false);
    }

    bv->putSelectionAt(doc_iterator_begin(&buf), 0, false);

    cur_orig.fixIfBroken();
    bv->setCursor(cur_orig);

    return num;
}


// the idea here is that we are going to replace the string that
// is selected IF it is the search string.
// if there is a selection, but it is not the search string, then
// we basically ignore it. (FIXME We ought to replace only within
// the selection.)
// if there is no selection, then:
//  (i) if some search string has been provided, then we find it.
//      (think of how the dialog works when you hit "replace" the
//      first time.)
// (ii) if no search string has been provided, then we treat the
//      word the cursor is in as the search string. (why? i have no
//      idea.) but this only works in text?
//
// returns the number of replacements made (one, if any) and
// whether anything at all was done.
pair<bool, int> replaceOne(BufferView * bv, docstring searchstr,
                           docstring const & replacestr, bool case_sens,
                           bool whole, bool forward, bool findnext)
{
    Cursor & cur = bv->cursor();
    if (!cur.selection()) {
        // no selection, non-empty search string: find it
        if (!searchstr.empty()) {
            bool const found = findOne(bv, searchstr, case_sens, whole, forward, true, findnext);
            return make_pair(found, 0);
        }
        // empty search string
        if (!cur.inTexted())
            // bail in math
            return make_pair(false, 0);
        // select current word and treat it as the search string.
        // This causes a minor bug as undo will restore this selection,
        // which the user did not create (#8986).
        cur.innerText()->selectWord(cur, WHOLE_WORD);
        searchstr = cur.selectionAsString(false);
    }

    // if we still don't have a search string, report the error
    // and abort.
    if (!searchAllowed(searchstr))
        return make_pair(false, 0);

    bool have_selection = cur.selection();
    docstring const selected = cur.selectionAsString(false);
    bool match =
        case_sens
        ? searchstr == selected
        : compare_no_case(searchstr, selected) == 0;

    // no selection or current selection is not search word:
    // just find the search word
    if (!have_selection || !match) {
        bool const found = findOne(bv, searchstr, case_sens, whole, forward, true, findnext);
        return make_pair(found, 0);
    }

    // we're now actually ready to replace. if the buffer is
    // read-only, we can't, though.
    if (bv->buffer().isReadonly())
        return make_pair(false, 0);

    cap::replaceSelectionWithString(cur, replacestr);
    if (forward) {
        cur.pos() += replacestr.length();
        LASSERT(cur.pos() <= cur.lastpos(),
                cur.pos() = cur.lastpos());
    }
    if (findnext)
        findOne(bv, searchstr, case_sens, whole, forward, false, findnext);

    return make_pair(true, 1);
}

} // namespace

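// Encode the find parameters into the single-string form expected by lyxfind().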
docstring const find2string(docstring const & search,
                            bool casesensitive, bool matchword, bool forward)
{
    odocstringstream ss;
    ss << search << '\n'
       << int(casesensitive) << ' '
       << int(matchword) << ' '
       << int(forward);
    return ss.str();
}

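// Encode the replace parameters into the single-string form expected by lyxreplace().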
docstring const replace2string(docstring const & replace,
                               docstring const & search,
                               bool casesensitive, bool matchword,
                               bool all, bool forward, bool findnext)
{
    odocstringstream ss;
    ss << replace << '\n'
       << search << '\n'
       << int(casesensitive) << ' '
       << int(matchword) << ' '
       << int(all) << ' '
       << int(forward) << ' '
       << int(findnext);
    return ss.str();
}

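// Handler for LFUN_WORD_FIND: decode the request argument and run the search.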
bool lyxfind(BufferView * bv, FuncRequest const & ev)
{
    if (!bv || ev.action() != LFUN_WORD_FIND)
        return false;

    //lyxerr << "find called, cmd: " << ev << endl;

    // data is of the form
    // "<search>
    //  <casesensitive> <matchword> <forward>"
    docstring search;
    docstring howto = split(ev.argument(), search, '\n');

    bool casesensitive = parse_bool(howto);
    bool matchword     = parse_bool(howto);
    bool forward       = parse_bool(howto);

    return findOne(bv, search, casesensitive, matchword, forward, true, true);
}

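// Handler for LFUN_WORD_REPLACE: decode the request argument, perform a single
// or global replacement, and report the outcome via buffer messages.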
bool lyxreplace(BufferView * bv,
                FuncRequest const & ev, bool has_deleted)
{
    if (!bv || ev.action() != LFUN_WORD_REPLACE)
        return false;

    // data is of the form
    // "<search>
    //  <replace>
    //  <casesensitive> <matchword> <all> <forward> <findnext>"
    docstring search;
    docstring rplc;
    docstring howto = split(ev.argument(), rplc, '\n');
    howto = split(howto, search, '\n');

    bool casesensitive = parse_bool(howto);
    bool matchword     = parse_bool(howto);
    bool all           = parse_bool(howto);
    bool forward       = parse_bool(howto);
    bool findnext      = howto.empty() ? true : parse_bool(howto);

    bool update = false;

    if (!has_deleted) {
        int replace_count = 0;
        if (all) {
            replace_count = replaceAll(bv, search, rplc, casesensitive, matchword);
            update = replace_count > 0;
        } else {
            pair<bool, int> rv =
                replaceOne(bv, search, rplc, casesensitive, matchword, forward, findnext);
            update = rv.first;
            replace_count = rv.second;
        }

        Buffer const & buf = bv->buffer();
        if (!update) {
            // emit message signal.
            buf.message(_("String not found."));
        } else {
            if (replace_count == 0) {
                buf.message(_("String found."));
            } else if (replace_count == 1) {
                buf.message(_("String has been replaced."));
            } else {
                docstring const str =
                    bformat(_("%1$d strings have been replaced."), replace_count);
                buf.message(str);
            }
        }
    } else if (findnext) {
        // if we have deleted characters, we do not replace at all, but
        // rather search for the next occurrence
        if (findOne(bv, search, casesensitive, matchword, forward, true, findnext))
            update = true;
        else
            bv->message(_("String not found."));
    }
    return update;
}

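// Move cur forward to the next tracked change; with check_wrap, offer to
// restart the search from the beginning of the document.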
bool findNextChange(BufferView * bv, Cursor & cur, bool const check_wrap)
{
    for (; cur; cur.forwardPos())
        if (cur.inTexted() && cur.paragraph().isChanged(cur.pos()))
            return true;

    if (check_wrap) {
        DocIterator cur_orig(bv->cursor());
        docstring q = _("End of file reached while searching forward.\n"
                        "Continue searching from the beginning?");
        int wrap_answer = frontend::Alert::prompt(_("Wrap search?"),
            q, 0, 1, _("&Yes"), _("&No"));
        if (wrap_answer == 0) {
            bv->cursor().clear();
            bv->cursor().push_back(CursorSlice(bv->buffer().inset()));
            bv->clearSelection();
            cur.setCursor(bv->cursor().selectionBegin());
            if (findNextChange(bv, cur, false))
                return true;
        }
        bv->cursor().setCursor(cur_orig);
    }

    return false;
}

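// Move cur backwards to the previous tracked change; with check_wrap, offer to
// restart the search from the end of the document.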
bool findPreviousChange(BufferView * bv, Cursor & cur, bool const check_wrap)
{
    for (cur.backwardPos(); cur; cur.backwardPos()) {
        if (cur.inTexted() && cur.paragraph().isChanged(cur.pos()))
            return true;
    }

    if (check_wrap) {
        DocIterator cur_orig(bv->cursor());
        docstring q = _("Beginning of file reached while searching backward.\n"
                        "Continue searching from the end?");
        int wrap_answer = frontend::Alert::prompt(_("Wrap search?"),
            q, 0, 1, _("&Yes"), _("&No"));
        if (wrap_answer == 0) {
            bv->cursor().setCursor(doc_iterator_end(&bv->buffer()));
            bv->cursor().backwardPos();
            bv->clearSelection();
            cur.setCursor(bv->cursor().selectionBegin());
            if (findPreviousChange(bv, cur, false))
                return true;
        }
        bv->cursor().setCursor(cur_orig);
    }

    return false;
}

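// Select the whole run of similar change marks around the current cursor position.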
bool selectChange(Cursor & cur, bool forward)
{
    if (!cur.inTexted() || !cur.paragraph().isChanged(cur.pos()))
        return false;
    Change ch = cur.paragraph().lookupChange(cur.pos());

    CursorSlice tip1 = cur.top();
    for (; tip1.pit() < tip1.lastpit() || tip1.pos() < tip1.lastpos(); tip1.forwardPos()) {
        Change ch2 = tip1.paragraph().lookupChange(tip1.pos());
        if (!ch2.isSimilarTo(ch))
            break;
    }
    CursorSlice tip2 = cur.top();
    for (; tip2.pit() > 0 || tip2.pos() > 0;) {
        tip2.backwardPos();
        Change ch2 = tip2.paragraph().lookupChange(tip2.pos());
        if (!ch2.isSimilarTo(ch)) {
            // take a step forward to correctly set the selection
            tip2.forwardPos();
            break;
        }
    }
    if (forward)
        swap(tip1, tip2);
    cur.top() = tip1;
    cur.bv().mouseSetCursor(cur, false);
    cur.top() = tip2;
    cur.bv().mouseSetCursor(cur, true);
    return true;
}


namespace {

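// Locate the next (or previous) tracked change relative to the current
// selection and select it.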
bool findChange(BufferView * bv, bool forward)
{
    Cursor cur(*bv);
    cur.setCursor(forward ? bv->cursor().selectionEnd()
                  : bv->cursor().selectionBegin());
    forward ? findNextChange(bv, cur, true) : findPreviousChange(bv, cur, true);
    return selectChange(cur, forward);
}

} // namespace


bool findNextChange(BufferView * bv)
{
    return findChange(bv, true);
}


bool findPreviousChange(BufferView * bv)
{
    return findChange(bv, false);
}


namespace {

typedef vector<pair<string, string> > Escapes;

/// A map of symbols and their escaped equivalent needed within a regex.
/// @note Beware of order
Escapes const & get_regexp_escapes()
{
    typedef std::pair<std::string, std::string> P;

    static Escapes escape_map;
    if (escape_map.empty()) {
        escape_map.push_back(P("$", "_x_$"));
        escape_map.push_back(P("{", "_x_{"));
        escape_map.push_back(P("}", "_x_}"));
        escape_map.push_back(P("[", "_x_["));
        escape_map.push_back(P("]", "_x_]"));
        escape_map.push_back(P("(", "_x_("));
        escape_map.push_back(P(")", "_x_)"));
        escape_map.push_back(P("+", "_x_+"));
        escape_map.push_back(P("*", "_x_*"));
        escape_map.push_back(P(".", "_x_."));
        escape_map.push_back(P("\\", "(?:\\\\|\\\\backslash)"));
        escape_map.push_back(P("~", "(?:\\\\textasciitilde|\\\\sim)"));
        escape_map.push_back(P("^", "(?:\\^|\\\\textasciicircum\\{\\}|\\\\textasciicircum|\\\\mathcircumflex)"));
        escape_map.push_back(P("_x_", "\\"));
    }
    return escape_map;
}


/// A map of lyx escaped strings and their unescaped equivalent.
Escapes const & get_lyx_unescapes()
{
    typedef std::pair<std::string, std::string> P;

    static Escapes escape_map;
    if (escape_map.empty()) {
        escape_map.push_back(P("\\%", "%"));
        escape_map.push_back(P("\\mathcircumflex ", "^"));
        escape_map.push_back(P("\\mathcircumflex", "^"));
        escape_map.push_back(P("\\backslash ", "\\"));
        escape_map.push_back(P("\\backslash", "\\"));
        escape_map.push_back(P("\\\\{", "_x_<"));
        escape_map.push_back(P("\\\\}", "_x_>"));
        escape_map.push_back(P("\\sim ", "~"));
        escape_map.push_back(P("\\sim", "~"));
    }
    return escape_map;
}


/// A map of escapes turning a regexp matching text to one matching latex.
Escapes const & get_regexp_latex_escapes()
{
    typedef std::pair<std::string, std::string> P;

    static Escapes escape_map;
    if (escape_map.empty()) {
        escape_map.push_back(P("\\\\", "(?:\\\\\\\\|\\\\backslash|\\\\textbackslash\\{\\}|\\\\textbackslash)"));
        escape_map.push_back(P("(<?!\\\\\\\\textbackslash)\\{", "\\\\\\{"));
        escape_map.push_back(P("(<?!\\\\\\\\textbackslash\\\\\\{)\\}", "\\\\\\}"));
        escape_map.push_back(P("\\[", "\\{\\[\\}"));
        escape_map.push_back(P("\\]", "\\{\\]\\}"));
        escape_map.push_back(P("\\^", "(?:\\^|\\\\textasciicircum\\{\\}|\\\\textasciicircum|\\\\mathcircumflex)"));
        escape_map.push_back(P("%", "\\\\\\%"));
        escape_map.push_back(P("#", "\\\\#"));
    }
    return escape_map;
}

/** @todo Probably the maps need to be migrated to regexps, in order to distinguish if
 ** the found occurrence was escaped.
 **/
string apply_escapes(string s, Escapes const & escape_map)
{
    LYXERR(Debug::FIND, "Escaping: '" << s << "'");
    Escapes::const_iterator it;
    for (it = escape_map.begin(); it != escape_map.end(); ++it) {
        // LYXERR(Debug::FIND, "Escaping " << it->first << " as " << it->second);
        unsigned int pos = 0;
        while (pos < s.length() && (pos = s.find(it->first, pos)) < s.length()) {
            s.replace(pos, it->first.length(), it->second);
            LYXERR(Debug::FIND, "After escape: " << s);
            pos += it->second.length();
            // LYXERR(Debug::FIND, "pos: " << pos);
        }
    }
    LYXERR(Debug::FIND, "Escaped : '" << s << "'");
    return s;
}


/// Within \regexp{} apply get_lyx_unescapes() only (i.e., preserve regexp semantics of the string),
/// while outside apply get_lyx_unescapes()+get_regexp_escapes().
/// If match_latex is true, then apply regexp_latex_escapes() to \regexp{} contents as well.
string escape_for_regex(string s, bool match_latex)
{
    size_t pos = 0;
    while (pos < s.size()) {
        size_t new_pos = s.find("\\regexp{", pos);
        if (new_pos == string::npos)
            new_pos = s.size();
        string t;
        if (new_pos > pos) {
            LYXERR(Debug::FIND, "new_pos: " << new_pos);
            t = apply_escapes(s.substr(pos, new_pos - pos), get_lyx_unescapes());
            LYXERR(Debug::FIND, "t [lyx]: " << t);
            t = apply_escapes(t, get_regexp_escapes());
            LYXERR(Debug::FIND, "t [rxp]: " << t);
            s.replace(pos, new_pos - pos, t);
            new_pos = pos + t.size();
            LYXERR(Debug::FIND, "Regexp after escaping: " << s);
            LYXERR(Debug::FIND, "new_pos: " << new_pos);
            if (new_pos == s.size())
                break;
        }
        // Might fail if \\endregexp{} is preceded by unexpected stuff (weird escapes)
        size_t end_pos = s.find("\\endregexp{}}", new_pos + 8);
        LYXERR(Debug::FIND, "end_pos: " << end_pos);
        t = s.substr(new_pos + 8, end_pos - (new_pos + 8));
        LYXERR(Debug::FIND, "t in regexp : " << t);
        t = apply_escapes(t, get_lyx_unescapes());
        LYXERR(Debug::FIND, "t in regexp [lyx]: " << t);
        if (match_latex) {
            t = apply_escapes(t, get_regexp_latex_escapes());
            LYXERR(Debug::FIND, "t in regexp [ltx]: " << t);
        }
        if (end_pos == s.size()) {
            s.replace(new_pos, end_pos - new_pos, t);
            LYXERR(Debug::FIND, "Regexp after \\regexp{} removal: " << s);
            break;
        }
        s.replace(new_pos, end_pos + 13 - new_pos, t);
        LYXERR(Debug::FIND, "Regexp after \\regexp{...\\endregexp{}} removal: " << s);
        pos = new_pos + t.size();
        LYXERR(Debug::FIND, "pos: " << pos);
    }
    return s;
}


/// Wrapper for lyx::regex_replace with simpler interface
bool regex_replace(string const & s, string & t, string const & searchstr,
                   string const & replacestr)
{
    lyx::regex e(searchstr, regex_constants::ECMAScript);
    ostringstream oss;
    ostream_iterator<char, char> it(oss);
    lyx::regex_replace(it, s.begin(), s.end(), e, replacestr);
    // tolerate t and s being references to the same variable
    bool rv = (s != oss.str());
    t = oss.str();
    return rv;
}


/** Checks if supplied string segment is well-formed from the standpoint of matching open-closed braces.
 **
 ** Verify that closed braces exactly match open braces. This avoids that, for example,
 ** \frac{.*}{x} matches \frac{x+\frac{y}{x}}{z} with .* being 'x+\frac{y'.
 **
 ** @param unmatched
 ** Number of open braces that must remain open at the end for the verification to succeed.
 **/
bool braces_match(string::const_iterator const & beg,
                  string::const_iterator const & end,
                  int unmatched = 0)
{
    int open_pars = 0;
    string::const_iterator it = beg;
    LYXERR(Debug::FIND, "Checking " << unmatched << " unmatched braces in '" << string(beg, end) << "'");
    for (; it != end; ++it) {
        // Skip escaped braces in the count
        if (*it == '\\') {
            ++it;
            if (it == end)
                break;
        } else if (*it == '{') {
            ++open_pars;
        } else if (*it == '}') {
            if (open_pars == 0) {
                LYXERR(Debug::FIND, "Found unmatched closed brace");
                return false;
            } else
                --open_pars;
        }
    }
    if (open_pars != unmatched) {
        LYXERR(Debug::FIND, "Found " << open_pars
               << " instead of " << unmatched
               << " unmatched open braces at the end of count");
        return false;
    }
    LYXERR(Debug::FIND, "Braces match as expected");
    return true;
}

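// Outcome of an advanced match (match length, distance to end, and position).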
class MatchResult {
public:
    int match_len;
    int match2end;
    int pos;
    MatchResult() : match_len(0), match2end(0), pos(0) {};
};


/** The class performing a match between a position in the document and the FindAdvOptions.
 **/
class MatchStringAdv {
public:
    MatchStringAdv(lyx::Buffer & buf, FindAndReplaceOptions const & opt);

    /** Tests if text starting at the supplied position matches with the one provided to the MatchStringAdv
     ** constructor as opt.search, under the opt.* options settings.
     **
     ** @param at_begin
     ** If set, then match is searched only against beginning of text starting at cur.
     ** If unset, then match is searched anywhere in text starting at cur.
     **
     ** @return
     ** The length of the matching text, or zero if no match was found.
     **/
    MatchResult operator()(DocIterator const & cur, int len = -1, bool at_begin = true) const;

public:
    /// buffer
    lyx::Buffer * p_buf;
    /// first buffer on which search was started
    lyx::Buffer * const p_first_buf;
    /// options
    FindAndReplaceOptions const & opt;

private:
    /// Auxiliary find method (does not account for opt.matchword)
    MatchResult findAux(DocIterator const & cur, int len = -1, bool at_begin = true) const;

    /** Normalize a stringified or latexified LyX paragraph.
     **
     ** Normalize means:
     ** <ul>
     ** <li>if search is not casesensitive, then lowercase the string;
     ** <li>remove any newline at begin or end of the string;
     ** <li>replace any newline in the middle of the string with a simple space;
     ** <li>remove stale empty styles and environments, like \emph{} and \textbf{}.
     ** </ul>
     **
     ** @todo Normalization should also expand macros, if the corresponding
     ** search option was checked.
     **/
    string normalize(docstring const & s, bool hack_braces) const;
    // normalized string to search
    string par_as_string;
    // regular expression to use for searching
    lyx::regex regexp;
    // same as regexp, but prefixed with a ".*?"
    lyx::regex regexp2;
    // leading format material as string
    string lead_as_string;
    // par_as_string after removal of lead_as_string
    string par_as_string_nolead;
    // unmatched open braces in the search string/regexp
    int open_braces;
    // number of (.*?) subexpressions added at end of search regexp for closing
    // environments, math mode, styles, etc...
    int close_wildcards;
    // Are we searching with regular expressions ?
    bool use_regexp;
};

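// Export the whole (search) buffer as a single LaTeX string, as used by the
// advanced find and replace machinery.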
static docstring buffer_to_latex(Buffer & buffer)
{
    //OutputParams runparams(&buffer.params().encoding());
    OutputParams runparams(encodings.fromLyXName("utf8"));
    odocstringstream ods;
    otexstream os(ods);
    runparams.nice = true;
    runparams.flavor = OutputParams::XETEX;
    runparams.linelen = 10000; //lyxrc.plaintext_linelen;
    // No side effect of file copying and image conversion
    runparams.dryrun = true;
    runparams.for_search = true;
    pit_type const endpit = buffer.paragraphs().size();
    for (pit_type pit = 0; pit != endpit; ++pit) {
        TeXOnePar(buffer, buffer.text(), pit, os, runparams);
        LYXERR(Debug::FIND, "searchString up to here: " << ods.str());
    }
    return ods.str();
}

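// Turn the search buffer into a single string: latexified when formats are
// relevant, plain text when opt.ignoreformat is set.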
static docstring stringifySearchBuffer(Buffer & buffer, FindAndReplaceOptions const & opt)
{
    docstring str;
    if (!opt.ignoreformat) {
        str = buffer_to_latex(buffer);
    } else {
        // OutputParams runparams(&buffer.params().encoding());
        OutputParams runparams(encodings.fromLyXName("utf8"));
        runparams.nice = true;
        runparams.flavor = OutputParams::XETEX;
        runparams.linelen = 10000; //lyxrc.plaintext_linelen;
        runparams.dryrun = true;
        runparams.for_search = true;
        for (pos_type pit = pos_type(0); pit < (pos_type)buffer.paragraphs().size(); ++pit) {
            Paragraph const & par = buffer.paragraphs().at(pit);
            LYXERR(Debug::FIND, "Adding to search string: '"
                   << par.asString(pos_type(0), par.size(),
                                   AS_STR_INSETS | AS_STR_SKIPDELETE | AS_STR_PLAINTEXT,
                                   &runparams)
                   << "'");
            str += par.asString(pos_type(0), par.size(),
                                AS_STR_INSETS | AS_STR_SKIPDELETE | AS_STR_PLAINTEXT,
                                &runparams);
        }
    }
    return str;
}


/// Return separation pos between the leading material and the rest
static size_t identifyLeading(string const & s)
{
    string t = s;
    // @TODO Support \item[text]
    // Kornel: Added textsl, textsf, textit, texttt and noun
    // + allow to search for colored text too
    while (regex_replace(t, t, REGEX_BOS "\\\\(((footnotesize|tiny|scriptsize|small|large|Large|LARGE|huge|Huge|emph|noun|minisec|text(bf|md|sl|sf|it|tt))|((textcolor|foreignlanguage|latexenvironment)\\{[a-z]+\\*?\\})|(u|uu)line|(s|x)out|uwave)|((sub)?(((sub)?section)|paragraph)|part|chapter)\\*?)\\{", "")
           || regex_replace(t, t, REGEX_BOS "\\$", "")
           || regex_replace(t, t, REGEX_BOS "\\\\\\[", "")
           || regex_replace(t, t, REGEX_BOS " ?\\\\item\\{[a-z]+\\}", "")
           || regex_replace(t, t, REGEX_BOS "\\\\begin\\{[a-zA-Z_]*\\*?\\}", ""))
        ;
    LYXERR(Debug::FIND, " after removing leading $, \\[ , \\emph{, \\textbf{, etc.: '" << t << "'");
    return s.find(t);
}


/*
 * Given a latexified string, retrieve some handled features.
 * The features of the regex will later be compared with the features
 * of the searched text. If the regex features are not a subset of the
 * analyzed ones, then, in a search that does not ignore format,
 * we can stop the search early in the relevant inset.
 */
typedef map<string, bool> Features;

static Features identifyFeatures(string const & s)
{
    static regex const feature("\\\\(([a-zA-Z]+(\\{([a-z]+\\*?)\\}|\\*)?))\\{");
    static regex const valid("^("
        "("
        "(footnotesize|tiny|scriptsize|small|large|Large|LARGE|huge|Huge|"
        "emph|noun|text(bf|md|sl|sf|it|tt)|"
        "(textcolor|foreignlanguage|item|listitem|latexenvironment)\\{[a-z]+\\*?\\})|"
        "(u|uu)line|(s|x)out|uwave|"
        "(sub|extra)?title|author|subject|publishers|dedication|(upper|lower)titleback|lyx(right)?address)|"
        "((sub)?(((sub)?section)|paragraph)|part|chapter|lyxslide)\\*?)$");
    smatch sub;
    bool displ = true;
    Features info;

    for (sregex_iterator it(s.begin(), s.end(), feature), end; it != end; ++it) {
        sub = *it;
        if (displ) {
            if (sub.str(1).compare("regexp") == 0) {
                displ = false;
                continue;
            }
            string token = sub.str(1);
            smatch sub2;
            if (regex_match(token, sub2, valid)) {
                info[token] = true;
            }
            else {
                // ignore
            }
        }
        else {
            if (sub.str(1).compare("endregexp") == 0) {
                displ = true;
                continue;
            }
        }
    }
    return info;
}

/*
 * Defines the features and values of a key "\\[a-z]+{"
 */
class KeyInfo {
public:
    enum KeyType {
        /* Char type with content discarded
         * like \hspace{1cm} */
        noContent,
        /* Char, like \backslash */
        isChar,
        /* \part, \section*, ... */
        isSectioning,
        /* title, author etc */
        isTitle,
        /* \foreignlanguage{ngerman}, ... */
        isMain,
        /* inside \code{}
         * to discard language in content */
        noMain,
        isRegex,
        /* \begin{eqnarray}...\end{eqnarray}, ... $...$ */
        isMath,
        /* fonts, colors, markups, ... */
        isStandard,
        /* footnotesize, ... large, ...
         * Ignore all of them */
        isSize,
        invalid,
        /* inputencoding, ...
         * Discard also content, because they do not help in search */
        doRemove,
        /* twocolumns, ...
         * like remove, but also all arguments */
        removeWithArg,
        /* item, listitem */
        isList,
        /* tex, latex, ... like isChar */
        isIgnored,
        /* like \lettrine[lines=5]{}{} */
        cleanToStart,
        /* End of arguments marker for lettrine,
         * so that they can be ignored */
        endArguments
    };
    KeyInfo() = default;
    KeyInfo(KeyType type, int parcount, bool disable)
        : keytype(type),
          parenthesiscount(parcount),
          disabled(disable) {}
    KeyType keytype = invalid;
    string head;
    int _tokensize = -1;
    int _tokenstart = -1;
    int _dataStart = -1;
    int _dataEnd = -1;
    int parenthesiscount = 1;
    bool disabled = false;
    bool used = false; /* by pattern */
};

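// A lower/upper index pair delimiting a region of the paragraph string.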
class Border {
public:
    Border(int l=0, int u=0) : low(l), upper(u) {};
    int low;
    int upper;
};


#define MAXOPENED 30

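// Holds a (latexified) paragraph string together with the regions
// ("intervals") in it that are to be ignored when producing the output
// used for matching.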
class Intervall {
    bool isPatternString_;
public:
    explicit Intervall(bool isPattern, string const & p) :
        isPatternString_(isPattern), par(p), ignoreidx(-1), actualdeptindex(0),
        hasTitle(false), langcount(0)
    {
        depts[0] = 0;
        closes[0] = 0;
    }

    string par;
    int ignoreidx;
    static vector<Border> borders;
    int depts[MAXOPENED];
    int closes[MAXOPENED];
    int actualdeptindex;
    int previousNotIgnored(int) const;
    int nextNotIgnored(int) const;
    void handleOpenP(int i);
    void handleCloseP(int i, bool closingAllowed);
    void resetOpenedP(int openPos);
    void addIntervall(int upper);
    void addIntervall(int low, int upper); /* if explicit */
    void removeAccents();
    void setForDefaultLang(KeyInfo const & defLang) const;
    int findclosing(int start, int end, char up, char down, int repeat);
    void handleParentheses(int lastpos, bool closingAllowed);
    bool hasTitle;
    int langcount;  // Number of disabled language specs up to current position in actual interval
    int isOpeningPar(int pos) const;
    string titleValue;
    void output(ostringstream &os, int lastpos);
    // string show(int lastpos);
};


vector<Border> Intervall::borders = vector<Border>(30);

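// Classify the brace at pos: 0 if it is not an opening '{', 3 if it opens a
// literal-bracket group like "{[}" or "{]}", 1 otherwise.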
int Intervall::isOpeningPar(int pos) const
{
    if ((pos < 0) || (size_t(pos) >= par.size()))
        return 0;
    if (par[pos] != '{')
        return 0;
    if (size_t(pos) + 2 >= par.size())
        return 1;
    if (par[pos+2] != '}')
        return 1;
    if (par[pos+1] == '[' || par[pos+1] == ']')
        return 3;
    return 1;
}


void Intervall::setForDefaultLang(KeyInfo const & defLang) const
{
    // Enable the use of first token again
    if (ignoreidx >= 0) {
        int value = defLang._tokenstart + defLang._tokensize;
        if (value > 0) {
            if (borders[0].low < value)
                borders[0].low = value;
            if (borders[0].upper < value)
                borders[0].upper = value;
        }
    }
}

static void checkDepthIndex(int val)
|
2018-10-13 19:02:53 +00:00
|
|
|
|
{
|
2018-10-18 15:37:15 +00:00
|
|
|
|
static int maxdepthidx = MAXOPENED-2;
|
2019-03-18 17:19:44 +00:00
|
|
|
|
static int lastmaxdepth = 0;
|
|
|
|
|
if (val > lastmaxdepth) {
|
2020-05-29 06:44:56 +00:00
|
|
|
|
LYXERR(Debug::INFO, "Depth reached " << val);
|
2019-03-18 17:19:44 +00:00
|
|
|
|
lastmaxdepth = val;
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
if (val > maxdepthidx) {
|
|
|
|
|
maxdepthidx = val;
|
2020-05-29 06:44:56 +00:00
|
|
|
|
LYXERR(Debug::INFO, "maxdepthidx now " << val);
|
2018-10-13 19:02:53 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-03-18 17:19:44 +00:00
|
|
|
|
#if 0
|
|
|
|
|
// Not needed, because borders are now dynamically expanded
|
2018-10-18 15:37:15 +00:00
|
|
|
|
static void checkIgnoreIdx(int val)
|
2018-10-12 14:47:07 +00:00
|
|
|
|
{
|
2019-03-18 17:19:44 +00:00
|
|
|
|
static int lastmaxignore = -1;
|
|
|
|
|
if ((lastmaxignore < val) && (size_t(val+1) >= borders.size())) {
|
2020-05-29 06:44:56 +00:00
|
|
|
|
LYXERR(Debug::INFO, "IgnoreIdx reached " << val);
|
2019-03-18 17:19:44 +00:00
|
|
|
|
lastmaxignore = val;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2019-03-18 17:19:44 +00:00
|
|
|
|
#endif
|
2018-10-12 14:47:07 +00:00
|
|
|
|
|
2018-10-13 19:02:53 +00:00
|
|
|
|
/*
|
|
|
|
|
* Expand the region of ignored parts of the input latex string
|
|
|
|
|
* The region is only relevant in output()
|
|
|
|
|
*/
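// Example of the merge logic below: with borders = {[2,5), [9,12)} a call
// addIntervall(4, 10) overlaps both entries, the new border is widened to
// [2,12) and replaces them, leaving borders = {[2,12)}.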
|
2018-10-18 15:37:15 +00:00
|
|
|
|
void Intervall::addIntervall(int low, int upper)
|
2018-10-12 14:47:07 +00:00
|
|
|
|
{
|
|
|
|
|
int idx;
|
|
|
|
|
if (low == upper) return;
|
|
|
|
|
for (idx = ignoreidx+1; idx > 0; --idx) {
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (low > borders[idx-1].upper) {
|
2018-10-12 14:47:07 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
2018-10-19 17:11:20 +00:00
|
|
|
|
Border br(low, upper);
|
2018-10-12 14:47:07 +00:00
|
|
|
|
if (idx > ignoreidx) {
|
2019-03-18 17:19:44 +00:00
|
|
|
|
if (borders.size() <= size_t(idx)) {
|
|
|
|
|
borders.push_back(br);
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
borders[idx] = br;
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
ignoreidx = idx;
|
2019-03-18 17:19:44 +00:00
|
|
|
|
// checkIgnoreIdx(ignoreidx);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
return;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
// Expand only if one of the new bounds lies inside an existing interval
|
2018-10-19 17:11:20 +00:00
|
|
|
|
// We know here that br.low > borders[idx-1].upper
|
|
|
|
|
if (br.upper < borders[idx].low) {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
// We have to insert at this pos
|
2019-03-18 17:19:44 +00:00
|
|
|
|
if (size_t(ignoreidx+1) >= borders.size()) {
|
|
|
|
|
borders.push_back(borders[ignoreidx]);
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
borders[ignoreidx+1] = borders[ignoreidx];
|
|
|
|
|
}
|
|
|
|
|
for (int i = ignoreidx; i > idx; --i) {
|
2018-10-19 17:11:20 +00:00
|
|
|
|
borders[i] = borders[i-1];
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
2018-10-19 17:11:20 +00:00
|
|
|
|
borders[idx] = br;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
ignoreidx += 1;
|
2019-03-18 17:19:44 +00:00
|
|
|
|
// checkIgnoreIdx(ignoreidx);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
// Here we know that we are overlapping
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (br.low > borders[idx].low)
|
|
|
|
|
br.low = borders[idx].low;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
// check what has to be concatenated
|
|
|
|
|
int count = 0;
|
|
|
|
|
for (int i = idx; i <= ignoreidx; i++) {
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (br.upper >= borders[i].low) {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
count++;
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (br.upper < borders[i].upper)
|
|
|
|
|
br.upper = borders[i].upper;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
// count should be >= 1 here
|
2018-10-19 17:11:20 +00:00
|
|
|
|
borders[idx] = br;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
if (count > 1) {
|
|
|
|
|
for (int i = idx + count; i <= ignoreidx; i++) {
|
2018-10-19 17:11:20 +00:00
|
|
|
|
borders[i-count+1] = borders[i];
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
ignoreidx -= count - 1;
|
|
|
|
|
return;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-03-02 14:42:38 +00:00
|
|
|
|
typedef map<string, string> AccentsMap;
|
|
|
|
|
static AccentsMap accents = map<string, string>();
|
|
|
|
|
|
2019-03-02 21:00:20 +00:00
|
|
|
|
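// Build the accent map entries for one family of macros: every name in the
// '|'-separated list n is combined with each character of param, and values
// is consumed as the matching sequence of utf8 replacements. For example
//   buildaccent("ddot", "aA", "äÄ")
// would yield accents["ddot{a}"] = "ä" and accents["ddot{A}"] = "Ä".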
static void buildaccent(string n, string param, string values)
|
2019-03-02 14:42:38 +00:00
|
|
|
|
{
|
2019-03-02 21:00:20 +00:00
|
|
|
|
stringstream s(n);
|
|
|
|
|
string name;
|
|
|
|
|
const char delim = '|';
|
|
|
|
|
while (getline(s, name, delim)) {
|
|
|
|
|
size_t start = 0;
|
2020-10-09 06:04:20 +00:00
|
|
|
|
for (char c : param) {
|
|
|
|
|
string key = name + "{" + c + "}";
|
2019-03-02 21:00:20 +00:00
|
|
|
|
// get the corresponding utf8-value
|
|
|
|
|
if ((values[start] & 0xc0) != 0xc0) {
|
2019-03-04 13:05:44 +00:00
|
|
|
|
// should not happen, since a multi-byte utf8 sequence starts with 11xxxxxx
|
2019-03-18 17:19:44 +00:00
|
|
|
|
// but value for '\dot{i}' is 'i', which is ascii
|
|
|
|
|
if ((values[start] & 0x80) == 0) {
|
|
|
|
|
// is ascii
|
|
|
|
|
accents[key] = values.substr(start, 1);
|
2020-05-29 06:44:56 +00:00
|
|
|
|
// LYXERR(Debug::INFO, "" << key << "=" << accents[key]);
|
2019-03-18 17:19:44 +00:00
|
|
|
|
}
|
|
|
|
|
start++;
|
|
|
|
|
continue;
|
2019-03-02 21:00:20 +00:00
|
|
|
|
}
|
|
|
|
|
for (int j = 1; ;j++) {
|
2019-03-04 13:05:44 +00:00
|
|
|
|
if (start + j >= values.size()) {
|
|
|
|
|
accents[key] = values.substr(start, j);
|
|
|
|
|
start = values.size() - 1;
|
|
|
|
|
break;
|
|
|
|
|
}
|
2019-03-09 23:29:56 +00:00
|
|
|
|
else if ((values[start+j] & 0xc0) != 0x80) {
|
2019-03-04 13:05:44 +00:00
|
|
|
|
// This is the first byte of following utf8 char
|
|
|
|
|
accents[key] = values.substr(start, j);
|
|
|
|
|
start += j;
|
2020-05-29 06:44:56 +00:00
|
|
|
|
// LYXERR(Debug::INFO, "" << key << "=" << accents[key]);
|
2019-03-04 13:05:44 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
2019-03-02 14:42:38 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static void buildAccentsMap()
|
|
|
|
|
{
|
|
|
|
|
accents["imath"] = "ı";
|
2019-03-04 13:05:44 +00:00
|
|
|
|
accents["i"] = "ı";
|
2019-03-02 21:00:20 +00:00
|
|
|
|
accents["jmath"] = "ȷ";
|
2020-11-23 20:55:13 +00:00
|
|
|
|
accents["cdot"] = "·";
|
2019-03-02 14:42:38 +00:00
|
|
|
|
accents["lyxmathsym{ß}"] = "ß";
|
2019-03-09 23:29:56 +00:00
|
|
|
|
accents["text{ß}"] = "ß";
|
2019-03-03 13:08:27 +00:00
|
|
|
|
accents["ddot{\\imath}"] = "ï";
|
2019-07-30 13:21:56 +00:00
|
|
|
|
buildaccent("ddot", "aAeEhHiIioOtuUwWxXyY",
|
|
|
|
|
"äÄëËḧḦïÏïöÖẗüÜẅẄẍẌÿŸ"); // umlaut
|
|
|
|
|
buildaccent("dot|.", "aAbBcCdDeEfFGghHIimMnNoOpPrRsStTwWxXyYzZ",
|
|
|
|
|
"ȧȦḃḂċĊḋḊėĖḟḞĠġḣḢİİṁṀṅṄȯȮṗṖṙṘṡṠṫṪẇẆẋẊẏẎżŻ"); // dot{i} can only happen if ignoring case, but there is no lowercase of 'İ'
|
2019-03-03 13:08:27 +00:00
|
|
|
|
accents["acute{\\imath}"] = "í";
|
2019-07-30 13:21:56 +00:00
|
|
|
|
buildaccent("acute", "aAcCeEgGkKlLmMoOnNpPrRsSuUwWyYzZiI",
|
|
|
|
|
"áÁćĆéÉǵǴḱḰĺĹḿḾóÓńŃṕṔŕŔśŚúÚẃẂýÝźŹíÍ");
|
2019-03-03 13:08:27 +00:00
|
|
|
|
buildaccent("dacute|H|h", "oOuU", "őŐűŰ"); // double acute
|
2019-03-08 21:44:00 +00:00
|
|
|
|
buildaccent("mathring|r", "aAuUwy",
|
2019-03-18 17:19:44 +00:00
|
|
|
|
"åÅůŮẘẙ"); // ring
|
2019-03-03 13:08:27 +00:00
|
|
|
|
accents["check{\\imath}"] = "ǐ";
|
|
|
|
|
accents["check{\\jmath}"] = "ǰ";
|
2019-03-09 23:29:56 +00:00
|
|
|
|
buildaccent("check|v", "cCdDaAeEiIoOuUgGkKhHlLnNrRsSTtzZ",
|
|
|
|
|
"čČďĎǎǍěĚǐǏǒǑǔǓǧǦǩǨȟȞľĽňŇřŘšŠŤťžŽ"); // caron
|
2019-03-03 13:08:27 +00:00
|
|
|
|
accents["hat{\\imath}"] = "î";
|
|
|
|
|
accents["hat{\\jmath}"] = "ĵ";
|
2019-07-30 13:21:56 +00:00
|
|
|
|
buildaccent("hat|^", "aAcCeEgGhHiIjJoOsSuUwWyYzZ",
|
|
|
|
|
"âÂĉĈêÊĝĜĥĤîÎĵĴôÔŝŜûÛŵŴŷŶẑẐ"); // circ
|
2019-03-03 13:08:27 +00:00
|
|
|
|
accents["bar{\\imath}"] = "ī";
|
2019-03-04 13:05:44 +00:00
|
|
|
|
buildaccent("bar|=", "aAeEiIoOuUyY",
|
|
|
|
|
"āĀēĒīĪōŌūŪȳȲ"); // macron
|
2019-03-03 13:08:27 +00:00
|
|
|
|
accents["tilde{\\imath}"] = "ĩ";
|
2019-07-30 13:21:56 +00:00
|
|
|
|
buildaccent("tilde", "aAeEiInNoOuUvVyY",
|
|
|
|
|
"ãÃẽẼĩĨñÑõÕũŨṽṼỹỸ"); // tilde
|
2019-03-04 13:05:44 +00:00
|
|
|
|
accents["breve{\\imath}"] = "ĭ";
|
|
|
|
|
buildaccent("breve|u", "aAeEgGiIoOuU",
|
2019-03-18 17:19:44 +00:00
|
|
|
|
"ăĂĕĔğĞĭĬŏŎŭŬ"); // breve
|
2019-03-04 13:05:44 +00:00
|
|
|
|
accents["grave{\\imath}"] = "ì";
|
|
|
|
|
buildaccent("grave|`", "aAeEiIoOuUnNwWyY",
|
2019-03-18 17:19:44 +00:00
|
|
|
|
"àÀèÈìÌòÒùÙǹǸẁẀỳỲ"); // grave
|
2019-03-08 21:44:00 +00:00
|
|
|
|
buildaccent("subdot|d", "BbDdHhKkLlMmNnRrSsTtVvWwZzAaEeIiOoUuYy",
|
2019-03-18 17:19:44 +00:00
|
|
|
|
"ḄḅḌḍḤḥḲḳḶḷṂṃṆṇṚṛṢṣṬṭṾṿẈẉẒẓẠạẸẹỊịỌọỤụỴỵ"); // dot below
|
2019-03-17 12:06:56 +00:00
|
|
|
|
buildaccent("ogonek|k", "AaEeIiUuOo",
|
2019-03-18 17:19:44 +00:00
|
|
|
|
"ĄąĘęĮįŲųǪǫ"); // ogonek
|
|
|
|
|
buildaccent("cedilla|c", "CcGgKkLlNnRrSsTtEeDdHh",
|
|
|
|
|
"ÇçĢĢĶķĻļŅņŖŗŞşŢţȨȩḐḑḨḩ"); // cedilla
|
2019-03-18 11:59:40 +00:00
|
|
|
|
buildaccent("subring|textsubring", "Aa",
|
2019-03-18 17:19:44 +00:00
|
|
|
|
"Ḁḁ"); // subring
|
2019-03-18 11:59:40 +00:00
|
|
|
|
buildaccent("subhat|textsubcircum", "DdEeLlNnTtUu",
|
2019-03-18 17:19:44 +00:00
|
|
|
|
"ḒḓḘḙḼḽṊṋṰṱṶṷ"); // subcircum
|
2019-03-18 11:59:40 +00:00
|
|
|
|
buildaccent("subtilde|textsubtilde", "EeIiUu",
|
2019-03-18 17:19:44 +00:00
|
|
|
|
"ḚḛḬḭṴṵ"); // subtilde
|
2019-03-20 22:20:13 +00:00
|
|
|
|
accents["dgrave{\\imath}"] = "ȉ";
|
|
|
|
|
accents["textdoublegrave{\\i}"] = "ȉ";
|
|
|
|
|
buildaccent("dgrave|textdoublegrave", "AaEeIiOoRrUu",
|
|
|
|
|
"ȀȁȄȅȈȉȌȍȐȑȔȕ"); // double grave
|
|
|
|
|
accents["rcap{\\imath}"] = "ȉ";
|
|
|
|
|
accents["textroundcap{\\i}"] = "ȉ";
|
|
|
|
|
buildaccent("rcap|textroundcap", "AaEeIiOoRrUu",
|
|
|
|
|
"ȂȃȆȇȊȋȎȏȒȓȖȗ"); // inverted breve
|
2019-07-30 13:21:56 +00:00
|
|
|
|
buildaccent("slashed", "oO",
|
|
|
|
|
"øØ"); // slashed
|
2019-03-02 14:42:38 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/*
|
|
|
|
|
* Accents created in a math or regexp environment
|
|
|
|
|
* are macros, but we need the utf8 equivalent
|
|
|
|
|
*/
|
|
|
|
|
void Intervall::removeAccents()
|
|
|
|
|
{
|
|
|
|
|
if (accents.empty())
|
|
|
|
|
buildAccentsMap();
|
2020-11-23 20:55:13 +00:00
|
|
|
|
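// The following regex matches accent macros with a braced argument, like
// "\ddot{a}" or "\"{a}" (a single non-space character or one of the listed
// names), as well as the bare macros \i, \imath, \jmath and \cdot when they
// are not followed by a letter.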
static regex const accre("\\\\(([\\S]|grave|breve|lyxmathsym|text|ddot|dot|acute|dacute|mathring|check|hat|bar|tilde|subdot|ogonek|cedilla|subring|textsubring|subhat|textsubcircum|subtilde|textsubtilde|dgrave|textdoublegrave|rcap|textroundcap|slashed)\\{[^\\{\\}]+\\}|(i|imath|jmath|cdot)(?![a-zA-Z]))");
|
2019-03-02 14:42:38 +00:00
|
|
|
|
smatch sub;
|
|
|
|
|
for (sregex_iterator itacc(par.begin(), par.end(), accre), end; itacc != end; ++itacc) {
|
|
|
|
|
sub = *itacc;
|
|
|
|
|
string key = sub.str(1);
|
|
|
|
|
if (accents.find(key) != accents.end()) {
|
|
|
|
|
string val = accents[key];
|
2019-03-18 08:38:34 +00:00
|
|
|
|
size_t pos = sub.position(size_t(0));
|
2019-03-02 14:42:38 +00:00
|
|
|
|
for (size_t i = 0; i < val.size(); i++) {
|
|
|
|
|
par[pos+i] = val[i];
|
|
|
|
|
}
|
2020-11-23 20:55:13 +00:00
|
|
|
|
// Remove possibly following space too
|
|
|
|
|
if (par[pos+sub.str(0).size()] == ' ')
|
|
|
|
|
addIntervall(pos+val.size(), pos + sub.str(0).size()+1);
|
|
|
|
|
else
|
|
|
|
|
addIntervall(pos+val.size(), pos + sub.str(0).size());
|
2019-03-03 13:08:27 +00:00
|
|
|
|
for (size_t i = pos+val.size(); i < pos + sub.str(0).size(); i++) {
|
2019-03-08 21:44:00 +00:00
|
|
|
|
// remove traces of any remaining chars
|
2019-03-03 13:08:27 +00:00
|
|
|
|
par[i] = ' ';
|
|
|
|
|
}
|
2019-03-02 14:42:38 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2020-05-29 06:44:56 +00:00
|
|
|
|
LYXERR(Debug::INFO, "Not added accent for \"" << key << "\"");
|
2019-03-02 14:42:38 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2018-10-18 15:37:15 +00:00
|
|
|
|
void Intervall::handleOpenP(int i)
|
2018-10-12 14:47:07 +00:00
|
|
|
|
{
|
2018-10-18 15:37:15 +00:00
|
|
|
|
actualdeptindex++;
|
|
|
|
|
depts[actualdeptindex] = i+1;
|
|
|
|
|
closes[actualdeptindex] = -1;
|
|
|
|
|
checkDepthIndex(actualdeptindex);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
void Intervall::handleCloseP(int i, bool closingAllowed)
|
|
|
|
|
{
|
|
|
|
|
if (actualdeptindex <= 0) {
|
2018-10-22 18:19:36 +00:00
|
|
|
|
if (! closingAllowed)
|
|
|
|
|
LYXERR(Debug::FIND, "Bad closing parenthesis in latex"); /* should not happen, but the latex input may be wrong */
|
|
|
|
|
// if we are at the very end
|
|
|
|
|
addIntervall(i, i+1);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
2018-10-12 14:47:07 +00:00
|
|
|
|
else {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
closes[actualdeptindex] = i+1;
|
|
|
|
|
actualdeptindex--;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2018-10-18 15:37:15 +00:00
|
|
|
|
void Intervall::resetOpenedP(int openPos)
|
|
|
|
|
{
|
2018-10-22 18:19:36 +00:00
|
|
|
|
// Used as initializer for foreignlanguage entry
|
2018-10-18 15:37:15 +00:00
|
|
|
|
actualdeptindex = 1;
|
|
|
|
|
depts[1] = openPos+1;
|
|
|
|
|
closes[1] = -1;
|
|
|
|
|
}
|
|
|
|
|
|
2020-11-25 23:11:07 +00:00
|
|
|
|
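// Return the nearest position at or before 'start' that does not lie inside
// an ignored region; nextNotIgnored() below does the same in forward
// direction.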
int Intervall::previousNotIgnored(int start) const
|
2018-10-12 14:47:07 +00:00
|
|
|
|
{
|
2018-10-14 18:39:13 +00:00
|
|
|
|
int idx = 0; /* index into borders[] */
|
|
|
|
|
for (idx = ignoreidx; idx >= 0; --idx) {
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (start > borders[idx].upper)
|
2018-11-04 20:41:04 +00:00
|
|
|
|
return start;
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (start >= borders[idx].low)
|
|
|
|
|
start = borders[idx].low-1;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
2018-10-14 18:39:13 +00:00
|
|
|
|
return start;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
|
2020-11-25 23:11:07 +00:00
|
|
|
|
int Intervall::nextNotIgnored(int start) const
|
2018-10-13 19:02:53 +00:00
|
|
|
|
{
|
|
|
|
|
int idx = 0; /* index into borders[] */
|
|
|
|
|
for (idx = 0; idx <= ignoreidx; idx++) {
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (start < borders[idx].low)
|
2018-11-04 20:41:04 +00:00
|
|
|
|
return start;
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (start < borders[idx].upper)
|
|
|
|
|
start = borders[idx].upper;
|
2018-10-13 19:02:53 +00:00
|
|
|
|
}
|
|
|
|
|
return start;
|
|
|
|
|
}
|
|
|
|
|
|
2018-10-18 15:37:15 +00:00
|
|
|
|
typedef map<string, KeyInfo> KeysMap;
|
|
|
|
|
typedef vector< KeyInfo> Entries;
|
|
|
|
|
static KeysMap keys = map<string, KeyInfo>();
|
|
|
|
|
|
|
|
|
|
class LatexInfo {
|
|
|
|
|
private:
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int entidx_;
|
|
|
|
|
Entries entries_;
|
|
|
|
|
Intervall interval_;
|
2018-10-27 14:57:42 +00:00
|
|
|
|
void buildKeys(bool);
|
|
|
|
|
void buildEntries(bool);
|
|
|
|
|
void makeKey(const string &, KeyInfo, bool isPatternString);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
void processRegion(int start, int region_end); /* remove {} parts */
|
2020-10-09 15:50:24 +00:00
|
|
|
|
void removeHead(KeyInfo const &, int count=0);
|
2018-10-27 14:57:42 +00:00
|
|
|
|
|
2018-10-18 15:37:15 +00:00
|
|
|
|
public:
|
2019-09-13 14:23:49 +00:00
|
|
|
|
LatexInfo(string const & par, bool isPatternString)
|
|
|
|
|
: entidx_(-1), interval_(isPatternString, par)
|
2019-03-20 16:25:25 +00:00
|
|
|
|
{
|
2018-10-27 14:57:42 +00:00
|
|
|
|
buildKeys(isPatternString);
|
2019-03-21 11:53:41 +00:00
|
|
|
|
entries_ = vector<KeyInfo>();
|
2018-10-27 14:57:42 +00:00
|
|
|
|
buildEntries(isPatternString);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
};
|
|
|
|
|
int getFirstKey() {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
entidx_ = 0;
|
|
|
|
|
if (entries_.empty()) {
|
|
|
|
|
return -1;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (entries_[0].keytype == KeyInfo::isTitle) {
|
|
|
|
|
if (! entries_[0].disabled) {
|
|
|
|
|
interval_.hasTitle = true;
|
|
|
|
|
interval_.titleValue = entries_[0].head;
|
2019-02-19 22:11:09 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.hasTitle = false;
|
|
|
|
|
interval_.titleValue = "";
|
2019-02-19 22:11:09 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
removeHead(entries_[0]);
|
|
|
|
|
if (entries_.size() > 1)
|
|
|
|
|
return 1;
|
2019-02-19 22:11:09 +00:00
|
|
|
|
else
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return -1;
|
2019-02-19 22:11:09 +00:00
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
return 0;
|
|
|
|
|
};
|
|
|
|
|
int getNextKey() {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
entidx_++;
|
|
|
|
|
if (int(entries_.size()) > entidx_) {
|
|
|
|
|
return entidx_;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return -1;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
bool setNextKey(int idx) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if ((idx == entidx_) && (entidx_ >= 0)) {
|
|
|
|
|
entidx_--;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
else
|
2018-11-04 20:41:04 +00:00
|
|
|
|
return false;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
};
|
2020-11-25 23:11:07 +00:00
|
|
|
|
int find(int start, KeyInfo::KeyType keytype) const {
|
2019-02-07 12:35:47 +00:00
|
|
|
|
if (start < 0)
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return -1;
|
2019-02-07 12:35:47 +00:00
|
|
|
|
int tmpIdx = start;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
while (tmpIdx < int(entries_.size())) {
|
|
|
|
|
if (entries_[tmpIdx].keytype == keytype)
|
2019-02-07 12:35:47 +00:00
|
|
|
|
return tmpIdx;
|
|
|
|
|
tmpIdx++;
|
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return -1;
|
2019-02-07 12:35:47 +00:00
|
|
|
|
};
|
2020-10-09 15:50:24 +00:00
|
|
|
|
int process(ostringstream & os, KeyInfo const & actual);
|
|
|
|
|
int dispatch(ostringstream & os, int previousStart, KeyInfo & actual);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
// string show(int lastpos) { return interval.show(lastpos);};
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int nextNotIgnored(int start) { return interval_.nextNotIgnored(start);};
|
2018-10-18 15:37:15 +00:00
|
|
|
|
KeyInfo &getKeyInfo(int keyinfo) {
|
|
|
|
|
static KeyInfo invalidInfo = KeyInfo();
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if ((keyinfo < 0) || ( keyinfo >= int(entries_.size())))
|
2018-10-18 15:37:15 +00:00
|
|
|
|
return invalidInfo;
|
|
|
|
|
else
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return entries_[keyinfo];
|
2018-10-18 15:37:15 +00:00
|
|
|
|
};
|
2020-11-25 23:11:07 +00:00
|
|
|
|
void setForDefaultLang(KeyInfo const & defLang) {interval_.setForDefaultLang(defLang);};
|
2019-03-21 11:53:41 +00:00
|
|
|
|
void addIntervall(int low, int up) { interval_.addIntervall(low, up); };
|
2018-10-18 15:37:15 +00:00
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
2019-02-17 23:40:55 +00:00
|
|
|
|
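// Scan par[start..end) for the 'down' character closing an already opened
// group, honouring nested up/down pairs and skipping characters escaped with
// a backslash. With repeat > 1, directly adjacent groups like "{...}{...}"
// are skipped so that the position of the repeat-th closing character is
// returned. Returns 'end' if no closing character is found.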
int Intervall::findclosing(int start, int end, char up = '{', char down = '}', int repeat = 1)
|
2018-10-12 14:47:07 +00:00
|
|
|
|
{
|
2018-10-18 15:37:15 +00:00
|
|
|
|
int skip = 0;
|
|
|
|
|
int depth = 0;
|
|
|
|
|
for (int i = start; i < end; i += 1 + skip) {
|
|
|
|
|
char c;
|
|
|
|
|
c = par[i];
|
|
|
|
|
skip = 0;
|
|
|
|
|
if (c == '\\') skip = 1;
|
2018-11-03 10:15:12 +00:00
|
|
|
|
else if (c == up) {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
depth++;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
2018-11-03 10:15:12 +00:00
|
|
|
|
else if (c == down) {
|
2019-02-17 23:40:55 +00:00
|
|
|
|
if (depth == 0) {
|
2020-01-03 12:08:32 +00:00
|
|
|
|
repeat--;
|
2019-02-17 23:40:55 +00:00
|
|
|
|
if ((repeat <= 0) || (par[i+1] != up))
|
|
|
|
|
return i;
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
--depth;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2018-11-04 20:41:04 +00:00
|
|
|
|
return end;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
|
2018-10-30 19:52:29 +00:00
|
|
|
|
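// Bookkeeping for the math regions found in the paragraph: buildEntries()
// first collects their start/end positions here and later uses them to treat
// each region as a single math token and to skip keys found inside it.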
class MathInfo {
|
|
|
|
|
class MathEntry {
|
|
|
|
|
public:
|
|
|
|
|
string wait;
|
|
|
|
|
size_t mathEnd;
|
|
|
|
|
size_t mathStart;
|
|
|
|
|
size_t mathSize;
|
|
|
|
|
};
|
2019-03-21 11:53:41 +00:00
|
|
|
|
size_t actualIdx_;
|
|
|
|
|
vector<MathEntry> entries_;
|
2018-10-30 19:52:29 +00:00
|
|
|
|
public:
|
|
|
|
|
MathInfo() {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
actualIdx_ = 0;
|
2018-10-30 19:52:29 +00:00
|
|
|
|
}
|
2019-09-13 14:23:49 +00:00
|
|
|
|
void insert(string const & wait, size_t start, size_t end) {
|
2018-10-30 19:52:29 +00:00
|
|
|
|
MathEntry m = MathEntry();
|
|
|
|
|
m.wait = wait;
|
|
|
|
|
m.mathStart = start;
|
|
|
|
|
m.mathEnd = end;
|
|
|
|
|
m.mathSize = end - start;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
entries_.push_back(m);
|
2018-10-30 19:52:29 +00:00
|
|
|
|
}
|
2020-11-25 23:11:07 +00:00
|
|
|
|
bool empty() const { return entries_.empty(); };
|
|
|
|
|
size_t getEndPos() const {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (entries_.empty() || (actualIdx_ >= entries_.size())) {
|
2018-10-30 19:52:29 +00:00
|
|
|
|
return 0;
|
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return entries_[actualIdx_].mathEnd;
|
2018-10-30 19:52:29 +00:00
|
|
|
|
}
|
2020-11-25 23:11:07 +00:00
|
|
|
|
size_t getStartPos() const {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (entries_.empty() || (actualIdx_ >= entries_.size())) {
|
2018-10-30 19:52:29 +00:00
|
|
|
|
return 100000; /* definitely enough? */
|
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return entries_[actualIdx_].mathStart;
|
2018-10-30 19:52:29 +00:00
|
|
|
|
}
|
|
|
|
|
size_t getFirstPos() {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
actualIdx_ = 0;
|
2018-10-30 19:52:29 +00:00
|
|
|
|
return getStartPos();
|
|
|
|
|
}
|
2020-11-25 23:11:07 +00:00
|
|
|
|
size_t getSize() const {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (entries_.empty() || (actualIdx_ >= entries_.size())) {
|
2018-10-30 19:52:29 +00:00
|
|
|
|
return size_t(0);
|
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return entries_[actualIdx_].mathSize;
|
2018-10-30 19:52:29 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
void incrEntry() { actualIdx_++; };
|
2018-10-30 19:52:29 +00:00
|
|
|
|
};
|
|
|
|
|
|
2018-10-27 14:57:42 +00:00
|
|
|
|
void LatexInfo::buildEntries(bool isPatternString)
|
2018-10-18 15:37:15 +00:00
|
|
|
|
{
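// rmath matches the math delimiters ($, \[, \] and \begin/\end of the known
// math environments); rkeys additionally matches every latex macro name,
// optionally followed by a braced argument or an assignment like "=1000pt".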
|
2018-11-12 11:17:16 +00:00
|
|
|
|
static regex const rmath("\\$|\\\\\\[|\\\\\\]|\\\\(begin|end)\\{((eqnarray|equation|flalign|gather|multline|align|alignat)\\*?)\\}");
|
2018-10-28 18:40:14 +00:00
|
|
|
|
static regex const rkeys("\\$|\\\\\\[|\\\\\\]|\\\\((([a-zA-Z]+\\*?)(\\{([a-z]+\\*?)\\}|=[0-9]+[a-z]+)?))");
|
|
|
|
|
static bool disableLanguageOverride = false;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
smatch sub, submath;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
bool evaluatingRegexp = false;
|
2018-10-30 19:52:29 +00:00
|
|
|
|
MathInfo mi;
|
2018-10-29 12:17:54 +00:00
|
|
|
|
bool evaluatingMath = false;
|
2018-12-11 16:27:50 +00:00
|
|
|
|
bool evaluatingCode = false;
|
|
|
|
|
size_t codeEnd = 0;
|
2019-02-07 12:35:47 +00:00
|
|
|
|
bool evaluatingOptional = false;
|
|
|
|
|
size_t optionalEnd = 0;
|
2018-12-11 16:27:50 +00:00
|
|
|
|
int codeStart = -1;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
KeyInfo found;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
bool math_end_waiting = false;
|
|
|
|
|
size_t math_pos = 10000;
|
|
|
|
|
string math_end;
|
|
|
|
|
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.removeAccents();
|
2019-03-02 14:42:38 +00:00
|
|
|
|
|
2019-03-21 11:53:41 +00:00
|
|
|
|
for (sregex_iterator itmath(interval_.par.begin(), interval_.par.end(), rmath), end; itmath != end; ++itmath) {
|
2018-10-22 18:19:36 +00:00
|
|
|
|
submath = *itmath;
|
|
|
|
|
if (math_end_waiting) {
|
2018-10-28 18:40:14 +00:00
|
|
|
|
size_t pos = submath.position(size_t(0));
|
2018-11-09 12:36:47 +00:00
|
|
|
|
if ((math_end == "$") &&
|
|
|
|
|
(submath.str(0) == "$") &&
|
2019-03-21 11:53:41 +00:00
|
|
|
|
(interval_.par[pos-1] != '\\')) {
|
2018-11-09 12:36:47 +00:00
|
|
|
|
mi.insert("$", math_pos, pos + 1);
|
|
|
|
|
math_end_waiting = false;
|
2018-10-28 18:40:14 +00:00
|
|
|
|
}
|
2018-11-09 12:36:47 +00:00
|
|
|
|
else if ((math_end == "\\]") &&
|
|
|
|
|
(submath.str(0) == "\\]")) {
|
|
|
|
|
mi.insert("\\]", math_pos, pos + 2);
|
|
|
|
|
math_end_waiting = false;
|
2018-10-28 18:40:14 +00:00
|
|
|
|
}
|
|
|
|
|
else if ((submath.str(1).compare("end") == 0) &&
|
2018-10-22 18:19:36 +00:00
|
|
|
|
(submath.str(2).compare(math_end) == 0)) {
|
2018-10-30 19:52:29 +00:00
|
|
|
|
mi.insert(math_end, math_pos, pos + submath.str(0).length());
|
2018-10-22 18:19:36 +00:00
|
|
|
|
math_end_waiting = false;
|
|
|
|
|
}
|
2018-11-09 12:36:47 +00:00
|
|
|
|
else
|
|
|
|
|
continue;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
if (submath.str(1).compare("begin") == 0) {
|
|
|
|
|
math_end_waiting = true;
|
|
|
|
|
math_end = submath.str(2);
|
2018-10-27 14:57:42 +00:00
|
|
|
|
math_pos = submath.position(size_t(0));
|
2018-10-22 18:19:36 +00:00
|
|
|
|
}
|
2018-10-28 18:40:14 +00:00
|
|
|
|
else if (submath.str(0).compare("\\[") == 0) {
|
|
|
|
|
math_end_waiting = true;
|
|
|
|
|
math_end = "\\]";
|
|
|
|
|
math_pos = submath.position(size_t(0));
|
|
|
|
|
}
|
|
|
|
|
else if (submath.str(0) == "$") {
|
|
|
|
|
size_t pos = submath.position(size_t(0));
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if ((pos == 0) || (interval_.par[pos-1] != '\\')) {
|
2018-10-28 18:40:14 +00:00
|
|
|
|
math_end_waiting = true;
|
|
|
|
|
math_end = "$";
|
|
|
|
|
math_pos = pos;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2018-11-02 09:32:28 +00:00
|
|
|
|
// Ignore language if there is math somewhere in pattern-string
|
2018-10-28 18:40:14 +00:00
|
|
|
|
if (isPatternString) {
|
2018-10-30 19:52:29 +00:00
|
|
|
|
if (! mi.empty()) {
|
2018-10-28 18:40:14 +00:00
|
|
|
|
// Disable language
|
|
|
|
|
keys["foreignlanguage"].disabled = true;
|
|
|
|
|
disableLanguageOverride = true;
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
disableLanguageOverride = false;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
if (disableLanguageOverride) {
|
|
|
|
|
keys["foreignlanguage"].disabled = true;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2018-10-30 19:52:29 +00:00
|
|
|
|
math_pos = mi.getFirstPos();
|
2019-03-21 11:53:41 +00:00
|
|
|
|
for (sregex_iterator it(interval_.par.begin(), interval_.par.end(), rkeys), end; it != end; ++it) {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
sub = *it;
|
2018-10-28 18:40:14 +00:00
|
|
|
|
string key = sub.str(3);
|
|
|
|
|
if (key == "") {
|
|
|
|
|
if (sub.str(0)[0] == '\\')
|
|
|
|
|
key = sub.str(0)[1];
|
2018-12-05 12:36:43 +00:00
|
|
|
|
else {
|
2018-10-28 18:40:14 +00:00
|
|
|
|
key = sub.str(0);
|
2018-12-05 12:36:43 +00:00
|
|
|
|
if (key == "$") {
|
|
|
|
|
size_t k_pos = sub.position(size_t(0));
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if ((k_pos > 0) && (interval_.par[k_pos - 1] == '\\')) {
|
2018-12-05 12:36:43 +00:00
|
|
|
|
// Escaped '$', ignoring
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2018-10-28 18:40:14 +00:00
|
|
|
|
};
|
2018-10-18 15:37:15 +00:00
|
|
|
|
if (evaluatingRegexp) {
|
|
|
|
|
if (sub.str(1).compare("endregexp") == 0) {
|
|
|
|
|
evaluatingRegexp = false;
|
|
|
|
|
// found._tokenstart already set
|
2018-10-27 14:57:42 +00:00
|
|
|
|
found._dataEnd = sub.position(size_t(0)) + 13;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
found._dataStart = found._dataEnd;
|
|
|
|
|
found._tokensize = found._dataEnd - found._tokenstart;
|
|
|
|
|
found.parenthesiscount = 0;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
found.head = interval_.par.substr(found._tokenstart, found._tokensize);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
2019-03-02 14:42:38 +00:00
|
|
|
|
else {
|
2019-02-22 12:21:23 +00:00
|
|
|
|
continue;
|
2019-03-02 14:42:38 +00:00
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2018-10-29 12:17:54 +00:00
|
|
|
|
if (evaluatingMath) {
|
2018-10-30 19:52:29 +00:00
|
|
|
|
if (size_t(sub.position(size_t(0))) < mi.getEndPos())
|
2018-10-29 12:17:54 +00:00
|
|
|
|
continue;
|
|
|
|
|
evaluatingMath = false;
|
2018-10-30 19:52:29 +00:00
|
|
|
|
mi.incrEntry();
|
|
|
|
|
math_pos = mi.getStartPos();
|
2018-10-29 12:17:54 +00:00
|
|
|
|
}
|
2018-10-28 18:40:14 +00:00
|
|
|
|
if (keys.find(key) == keys.end()) {
|
2019-02-12 13:21:14 +00:00
|
|
|
|
found = KeyInfo(KeyInfo::isStandard, 0, true);
|
|
|
|
|
if (isPatternString) {
|
|
|
|
|
found.keytype = KeyInfo::isChar;
|
|
|
|
|
found.disabled = false;
|
|
|
|
|
found.used = true;
|
|
|
|
|
}
|
|
|
|
|
keys[key] = found;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
2019-02-12 13:21:14 +00:00
|
|
|
|
else
|
|
|
|
|
found = keys[key];
|
2018-10-28 18:40:14 +00:00
|
|
|
|
if (key.compare("regexp") == 0) {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
evaluatingRegexp = true;
|
2018-10-27 14:57:42 +00:00
|
|
|
|
found._tokenstart = sub.position(size_t(0));
|
2018-10-18 15:37:15 +00:00
|
|
|
|
found._tokensize = 0;
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
// Handle the other params of key
|
|
|
|
|
if (found.keytype == KeyInfo::isIgnored)
|
|
|
|
|
continue;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
else if (found.keytype == KeyInfo::isMath) {
|
2018-10-27 14:57:42 +00:00
|
|
|
|
if (size_t(sub.position(size_t(0))) == math_pos) {
|
2018-10-28 18:40:14 +00:00
|
|
|
|
found = keys[key];
|
2018-10-27 14:57:42 +00:00
|
|
|
|
found._tokenstart = sub.position(size_t(0));
|
2018-10-30 19:52:29 +00:00
|
|
|
|
found._tokensize = mi.getSize();
|
2018-10-22 18:19:36 +00:00
|
|
|
|
found._dataEnd = found._tokenstart + found._tokensize;
|
|
|
|
|
found._dataStart = found._dataEnd;
|
|
|
|
|
found.parenthesiscount = 0;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
found.head = interval_.par.substr(found._tokenstart, found._tokensize);
|
2018-10-29 12:17:54 +00:00
|
|
|
|
evaluatingMath = true;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
}
|
2018-11-07 08:35:16 +00:00
|
|
|
|
else {
|
|
|
|
|
// begin|end of unknown env, discard
|
2018-11-12 11:17:16 +00:00
|
|
|
|
// First handle tables
|
|
|
|
|
// longtable|tabular
|
|
|
|
|
bool discardComment;
|
2019-02-10 17:00:55 +00:00
|
|
|
|
found = keys[key];
|
|
|
|
|
found.keytype = KeyInfo::doRemove;
|
2018-11-12 11:17:16 +00:00
|
|
|
|
if ((sub.str(5).compare("longtable") == 0) ||
|
|
|
|
|
(sub.str(5).compare("tabular") == 0)) {
|
|
|
|
|
discardComment = true; /* '%' */
|
|
|
|
|
}
|
2019-02-10 17:00:55 +00:00
|
|
|
|
else {
|
2018-11-12 11:17:16 +00:00
|
|
|
|
discardComment = false;
|
2019-02-16 17:39:10 +00:00
|
|
|
|
static regex const removeArgs("^(multicols|multipar|sectionbox|subsectionbox|tcolorbox)$");
|
|
|
|
|
smatch sub2;
|
|
|
|
|
string token = sub.str(5);
|
|
|
|
|
if (regex_match(token, sub2, removeArgs)) {
|
2019-02-10 17:00:55 +00:00
|
|
|
|
found.keytype = KeyInfo::removeWithArg;
|
2019-02-16 17:39:10 +00:00
|
|
|
|
}
|
2019-02-10 17:00:55 +00:00
|
|
|
|
}
|
2018-11-08 08:59:51 +00:00
|
|
|
|
// discard spaces before pos(0)
|
2018-11-07 08:35:16 +00:00
|
|
|
|
int pos = sub.position(size_t(0));
|
|
|
|
|
int count;
|
|
|
|
|
for (count = 0; pos - count > 0; count++) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
char c = interval_.par[pos-count-1];
|
2018-11-12 11:17:16 +00:00
|
|
|
|
if (discardComment) {
|
|
|
|
|
if ((c != ' ') && (c != '%'))
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
else if (c != ' ')
|
2018-11-07 08:35:16 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
found._tokenstart = pos - count;
|
|
|
|
|
if (sub.str(1).compare(0, 5, "begin") == 0) {
|
|
|
|
|
size_t pos1 = pos + sub.str(0).length();
|
2018-11-18 09:37:12 +00:00
|
|
|
|
if (sub.str(5).compare("cjk") == 0) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
pos1 = interval_.findclosing(pos1+1, interval_.par.length()) + 1;
|
|
|
|
|
if ((interval_.par[pos1] == '{') && (interval_.par[pos1+1] == '}'))
|
2018-11-18 09:37:12 +00:00
|
|
|
|
pos1 += 2;
|
|
|
|
|
found.keytype = KeyInfo::isMain;
|
|
|
|
|
found._dataStart = pos1;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
found._dataEnd = interval_.par.length();
|
2018-11-18 09:37:12 +00:00
|
|
|
|
found.disabled = keys["foreignlanguage"].disabled;
|
|
|
|
|
found.used = keys["foreignlanguage"].used;
|
|
|
|
|
found._tokensize = pos1 - found._tokenstart;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
found.head = interval_.par.substr(found._tokenstart, found._tokensize);
|
2018-11-07 12:14:50 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2019-02-22 12:21:23 +00:00
|
|
|
|
// Swallow possible optional params
|
2019-03-21 11:53:41 +00:00
|
|
|
|
while (interval_.par[pos1] == '[') {
|
|
|
|
|
pos1 = interval_.findclosing(pos1+1, interval_.par.length(), '[', ']')+1;
|
2018-11-18 09:37:12 +00:00
|
|
|
|
}
|
2019-02-22 12:21:23 +00:00
|
|
|
|
// Also swallow the parameter, if there is one
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (interval_.par[pos1] == '{') {
|
|
|
|
|
found._dataEnd = interval_.findclosing(pos1+1, interval_.par.length()) + 1;
|
2018-11-18 09:37:12 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
found._dataEnd = pos1;
|
|
|
|
|
}
|
|
|
|
|
found._dataStart = found._dataEnd;
|
|
|
|
|
found._tokensize = count + found._dataEnd - pos;
|
|
|
|
|
found.parenthesiscount = 0;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
found.head = interval_.par.substr(found._tokenstart, found._tokensize);
|
2018-11-18 09:37:12 +00:00
|
|
|
|
found.disabled = true;
|
2018-11-07 12:14:50 +00:00
|
|
|
|
}
|
2018-11-07 08:35:16 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2018-11-18 09:37:12 +00:00
|
|
|
|
// Handle "\end{...}"
|
2018-11-07 08:35:16 +00:00
|
|
|
|
found._dataStart = pos + sub.str(0).length();
|
|
|
|
|
found._dataEnd = found._dataStart;
|
2018-11-18 09:37:12 +00:00
|
|
|
|
found._tokensize = count + found._dataEnd - pos;
|
|
|
|
|
found.parenthesiscount = 0;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
found.head = interval_.par.substr(found._tokenstart, found._tokensize);
|
2018-11-18 09:37:12 +00:00
|
|
|
|
found.disabled = true;
|
2018-11-07 08:35:16 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
else if (found.keytype != KeyInfo::isRegex) {
|
2018-10-27 14:57:42 +00:00
|
|
|
|
found._tokenstart = sub.position(size_t(0));
|
2018-10-18 15:37:15 +00:00
|
|
|
|
if (found.parenthesiscount == 0) {
|
|
|
|
|
// Probably to be discarded
|
2018-12-14 15:02:33 +00:00
|
|
|
|
size_t following_pos = sub.position(size_t(0)) + sub.str(3).length() + 1;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
char following = interval_.par[following_pos];
|
2018-10-24 09:07:11 +00:00
|
|
|
|
if (following == ' ')
|
2018-10-19 17:11:20 +00:00
|
|
|
|
found.head = "\\" + sub.str(3) + " ";
|
2018-10-24 09:07:11 +00:00
|
|
|
|
else if (following == '=') {
|
|
|
|
|
// like \uldepth=1000pt
|
|
|
|
|
found.head = sub.str(0);
|
|
|
|
|
}
|
2018-10-19 17:11:20 +00:00
|
|
|
|
else
|
2018-10-28 18:40:14 +00:00
|
|
|
|
found.head = "\\" + key;
|
2018-10-19 17:11:20 +00:00
|
|
|
|
found._tokensize = found.head.length();
|
2018-10-22 18:19:36 +00:00
|
|
|
|
found._dataEnd = found._tokenstart + found._tokensize;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
found._dataStart = found._dataEnd;
|
|
|
|
|
}
|
|
|
|
|
else {
|
2019-02-07 12:35:47 +00:00
|
|
|
|
int params = found._tokenstart + key.length() + 1;
|
|
|
|
|
if (evaluatingOptional) {
|
|
|
|
|
if (size_t(found._tokenstart) > optionalEnd) {
|
|
|
|
|
evaluatingOptional = false;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
found.disabled = true;
|
|
|
|
|
}
|
|
|
|
|
}
|
2019-02-17 23:40:55 +00:00
|
|
|
|
int optend = params;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
while (interval_.par[optend] == '[') {
|
2019-02-07 12:35:47 +00:00
|
|
|
|
// discard optional parameters
|
2019-03-21 11:53:41 +00:00
|
|
|
|
optend = interval_.findclosing(optend+1, interval_.par.length(), '[', ']') + 1;
|
2019-02-17 23:40:55 +00:00
|
|
|
|
}
|
|
|
|
|
if (optend > params) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
key += interval_.par.substr(params, optend-params);
|
2019-02-07 12:35:47 +00:00
|
|
|
|
evaluatingOptional = true;
|
|
|
|
|
optionalEnd = optend;
|
|
|
|
|
}
|
2019-02-17 23:40:55 +00:00
|
|
|
|
string token = sub.str(5);
|
|
|
|
|
int closings = found.parenthesiscount;
|
2018-10-20 10:47:37 +00:00
|
|
|
|
if (found.parenthesiscount == 1) {
|
2018-10-28 18:40:14 +00:00
|
|
|
|
found.head = "\\" + key + "{";
|
2018-10-20 10:47:37 +00:00
|
|
|
|
}
|
2019-02-17 23:40:55 +00:00
|
|
|
|
else if (found.parenthesiscount > 1) {
|
|
|
|
|
if (token != "") {
|
|
|
|
|
found.head = sub.str(0) + "{";
|
|
|
|
|
closings = found.parenthesiscount - 1;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
found.head = "\\" + key + "{";
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
2019-02-21 13:45:41 +00:00
|
|
|
|
found._tokensize = found.head.length();
|
2018-10-18 15:37:15 +00:00
|
|
|
|
found._dataStart = found._tokenstart + found.head.length();
|
2019-12-29 16:40:13 +00:00
|
|
|
|
if (found.keytype == KeyInfo::doRemove) {
|
|
|
|
|
int endpar = 2 + interval_.findclosing(found._dataStart, interval_.par.length(), '{', '}', closings);
|
|
|
|
|
found._dataStart = endpar;
|
|
|
|
|
found._tokensize = found._dataStart - found._tokenstart;
|
2020-01-01 13:03:21 +00:00
|
|
|
|
closings = 0;
|
2019-12-29 16:40:13 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (interval_.par.substr(found._dataStart-1, 15).compare("\\endarguments{}") == 0) {
|
2019-02-17 23:40:55 +00:00
|
|
|
|
found._dataStart += 15;
|
|
|
|
|
}
|
2020-01-01 13:03:21 +00:00
|
|
|
|
size_t endpos;
|
|
|
|
|
if (closings < 1)
|
|
|
|
|
endpos = found._dataStart - 1;
|
|
|
|
|
else
|
|
|
|
|
endpos = interval_.findclosing(found._dataStart, interval_.par.length(), '{', '}', closings);
|
2019-02-17 23:40:55 +00:00
|
|
|
|
if (found.keytype == KeyInfo::isList) {
|
|
|
|
|
// Check if it really is list env
|
|
|
|
|
static regex const listre("^([a-z]+)$");
|
|
|
|
|
smatch sub2;
|
|
|
|
|
if (!regex_match(token, sub2, listre)) {
|
|
|
|
|
// Change the key of this entry. It is not in a list/item environment
|
|
|
|
|
found.keytype = KeyInfo::endArguments;
|
|
|
|
|
}
|
|
|
|
|
}
|
2018-12-11 16:27:50 +00:00
|
|
|
|
if (found.keytype == KeyInfo::noMain) {
|
|
|
|
|
evaluatingCode = true;
|
|
|
|
|
codeEnd = endpos;
|
|
|
|
|
codeStart = found._dataStart;
|
|
|
|
|
}
|
|
|
|
|
else if (evaluatingCode) {
|
|
|
|
|
if (size_t(found._dataStart) > codeEnd)
|
|
|
|
|
evaluatingCode = false;
|
|
|
|
|
else if (found.keytype == KeyInfo::isMain) {
|
|
|
|
|
// Disable this key, treat it as standard
|
|
|
|
|
found.keytype = KeyInfo::isStandard;
|
|
|
|
|
found.disabled = true;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if ((codeEnd == interval_.par.length()) &&
|
2018-12-11 16:27:50 +00:00
|
|
|
|
(found._tokenstart == codeStart)) {
|
|
|
|
|
// trickery, because the code inset starts
|
|
|
|
|
// with \selectlanguage ...
|
|
|
|
|
codeEnd = endpos;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (entries_.size() > 1) {
|
|
|
|
|
entries_[entries_.size()-1]._dataEnd = codeEnd;
|
2018-12-11 16:27:50 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if ((endpos == interval_.par.length()) &&
|
2018-11-08 08:59:51 +00:00
|
|
|
|
(found.keytype == KeyInfo::doRemove)) {
|
|
|
|
|
// Missing closing => error in latex-input?
|
|
|
|
|
// therefore do not delete remaining data
|
|
|
|
|
found._dataStart -= 1;
|
|
|
|
|
found._dataEnd = found._dataStart;
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
found._dataEnd = endpos;
|
2018-12-16 13:50:38 +00:00
|
|
|
|
}
|
|
|
|
|
if (isPatternString) {
|
|
|
|
|
keys[key].used = true;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
entries_.push_back(found);
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2018-10-27 14:57:42 +00:00
|
|
|
|
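// Register the same KeyInfo under each of the '|'-separated names in
// keysstring, e.g.
//   makeKey("textbf|textmd", someKeyInfo, isPatternString);
// creates (or updates) keys["textbf"] and keys["textmd"].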
void LatexInfo::makeKey(const string &keysstring, KeyInfo keyI, bool isPatternString)
|
2018-10-12 14:47:07 +00:00
|
|
|
|
{
|
2018-10-19 17:11:20 +00:00
|
|
|
|
stringstream s(keysstring);
|
|
|
|
|
string key;
|
|
|
|
|
const char delim = '|';
|
|
|
|
|
while (getline(s, key, delim)) {
|
2018-10-27 14:57:42 +00:00
|
|
|
|
KeyInfo keyII(keyI);
|
|
|
|
|
if (isPatternString) {
|
|
|
|
|
keyII.used = false;
|
|
|
|
|
}
|
|
|
|
|
else if ( !keys[key].used)
|
|
|
|
|
keyII.disabled = true;
|
2018-10-19 17:11:20 +00:00
|
|
|
|
keys[key] = keyII;
|
|
|
|
|
}
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
|
2018-10-27 14:57:42 +00:00
|
|
|
|
void LatexInfo::buildKeys(bool isPatternString)
|
2018-10-12 14:47:07 +00:00
|
|
|
|
{
|
2018-10-27 14:57:42 +00:00
|
|
|
|
|
|
|
|
|
static bool keysBuilt = false;
|
|
|
|
|
if (keysBuilt && !isPatternString) return;
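// Note on the KeyInfo(type, count, flag) calls below: judging from their use
// in this file, 'count' is the number of braced parameters expected after the
// macro and 'flag' initializes KeyInfo::disabled; the constructor itself is
// defined further up in this file.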
|
2018-10-18 15:37:15 +00:00
|
|
|
|
|
2018-12-11 16:27:50 +00:00
|
|
|
|
// Known standard keys with 1 parameter.
|
2018-10-19 17:11:20 +00:00
|
|
|
|
// Split is done, if not at start of region
|
2018-11-09 05:07:17 +00:00
|
|
|
|
makeKey("textsf|textss|texttt", KeyInfo(KeyInfo::isStandard, 1, ignoreFormats.getFamily()), isPatternString);
|
|
|
|
|
makeKey("textbf", KeyInfo(KeyInfo::isStandard, 1, ignoreFormats.getSeries()), isPatternString);
|
|
|
|
|
makeKey("textit|textsc|textsl", KeyInfo(KeyInfo::isStandard, 1, ignoreFormats.getShape()), isPatternString);
|
|
|
|
|
makeKey("uuline|uline|uwave", KeyInfo(KeyInfo::isStandard, 1, ignoreFormats.getUnderline()), isPatternString);
|
|
|
|
|
makeKey("emph|noun", KeyInfo(KeyInfo::isStandard, 1, ignoreFormats.getMarkUp()), isPatternString);
|
|
|
|
|
makeKey("sout|xout", KeyInfo(KeyInfo::isStandard, 1, ignoreFormats.getStrikeOut()), isPatternString);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
|
2018-10-23 19:12:22 +00:00
|
|
|
|
makeKey("section|subsection|subsubsection|paragraph|subparagraph|minisec",
|
2018-11-09 05:07:17 +00:00
|
|
|
|
KeyInfo(KeyInfo::isSectioning, 1, ignoreFormats.getSectioning()), isPatternString);
|
2018-10-23 19:12:22 +00:00
|
|
|
|
makeKey("section*|subsection*|subsubsection*|paragraph*",
|
2018-11-09 05:07:17 +00:00
|
|
|
|
KeyInfo(KeyInfo::isSectioning, 1, ignoreFormats.getSectioning()), isPatternString);
|
|
|
|
|
makeKey("part|part*|chapter|chapter*", KeyInfo(KeyInfo::isSectioning, 1, ignoreFormats.getSectioning()), isPatternString);
|
2019-02-19 22:11:09 +00:00
|
|
|
|
makeKey("title|subtitle|author|subject|publishers|dedication|uppertitleback|lowertitleback|extratitle|lyxaddress|lyxrightaddress", KeyInfo(KeyInfo::isTitle, 1, ignoreFormats.getFrontMatter()), isPatternString);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
// Regex
|
2018-10-27 14:57:42 +00:00
|
|
|
|
makeKey("regexp", KeyInfo(KeyInfo::isRegex, 1, false), isPatternString);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
|
|
|
|
|
// Split is done, if not at start of region
|
2018-11-09 05:07:17 +00:00
|
|
|
|
makeKey("textcolor", KeyInfo(KeyInfo::isStandard, 2, ignoreFormats.getColor()), isPatternString);
|
2019-02-20 13:14:50 +00:00
|
|
|
|
makeKey("latexenvironment", KeyInfo(KeyInfo::isStandard, 2, false), isPatternString);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
|
|
|
|
|
// Split is done always.
|
2018-11-09 05:07:17 +00:00
|
|
|
|
makeKey("foreignlanguage", KeyInfo(KeyInfo::isMain, 2, ignoreFormats.getLanguage()), isPatternString);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
|
2019-02-19 22:11:09 +00:00
|
|
|
|
// Known characters
|
2018-10-19 17:11:20 +00:00
|
|
|
|
// No split
|
2018-12-13 16:12:57 +00:00
|
|
|
|
makeKey("backslash|textbackslash|slash", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
|
|
|
|
makeKey("textasciicircum|textasciitilde", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
2019-02-10 17:00:55 +00:00
|
|
|
|
makeKey("textasciiacute|texemdash", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
2018-12-13 16:12:57 +00:00
|
|
|
|
makeKey("dots|ldots", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
|
|
|
|
// Spaces
|
|
|
|
|
makeKey("quad|qquad|hfill|dotfill", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
|
|
|
|
makeKey("textvisiblespace|nobreakspace", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
|
|
|
|
makeKey("negthickspace|negmedspace|negthinspace", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
|
|
|
|
// Skip
|
2019-02-12 13:21:14 +00:00
|
|
|
|
// makeKey("enskip|smallskip|medskip|bigskip|vfill", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
2018-12-13 16:12:57 +00:00
|
|
|
|
// Custom space/skip, remove the content (== length value)
|
2019-02-25 10:59:54 +00:00
|
|
|
|
makeKey("vspace|vspace*|hspace|hspace*|mspace", KeyInfo(KeyInfo::noContent, 1, false), isPatternString);
|
2018-11-04 20:41:04 +00:00
|
|
|
|
// Found in fr/UserGuide.lyx
|
2018-12-13 16:12:57 +00:00
|
|
|
|
makeKey("og|fg", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
|
|
|
|
// quotes
|
|
|
|
|
makeKey("textquotedbl|quotesinglbase|lyxarrow", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
|
|
|
|
makeKey("textquotedblleft|textquotedblright", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
// Known macros to remove (including their parameter)
|
|
|
|
|
// No split
|
2019-02-21 19:32:08 +00:00
|
|
|
|
makeKey("input|inputencoding|label|ref|index|bibitem", KeyInfo(KeyInfo::doRemove, 1, false), isPatternString);
|
2019-02-16 17:39:10 +00:00
|
|
|
|
makeKey("addtocounter|setlength", KeyInfo(KeyInfo::noContent, 2, true), isPatternString);
|
2018-12-11 16:27:50 +00:00
|
|
|
|
// handle like standard keys with 1 parameter.
|
2018-12-13 16:12:57 +00:00
|
|
|
|
makeKey("url|href|vref|thanks", KeyInfo(KeyInfo::isStandard, 1, false), isPatternString);
|
2018-12-11 16:27:50 +00:00
|
|
|
|
|
2019-12-29 16:40:13 +00:00
|
|
|
|
// Ignore deleted text
|
2020-01-01 13:03:21 +00:00
|
|
|
|
makeKey("lyxdeleted", KeyInfo(KeyInfo::doRemove, 3, false), isPatternString);
|
2019-12-29 16:40:13 +00:00
|
|
|
|
// but preserve added text
|
2020-01-01 13:03:21 +00:00
|
|
|
|
makeKey("lyxadded", KeyInfo(KeyInfo::doRemove, 2, false), isPatternString);
|
2019-12-29 16:40:13 +00:00
|
|
|
|
|
2018-10-19 17:11:20 +00:00
|
|
|
|
// Macros to remove, but let the parameter survive
|
|
|
|
|
// No split
|
2018-12-13 16:12:57 +00:00
|
|
|
|
makeKey("menuitem|textmd|textrm", KeyInfo(KeyInfo::isStandard, 1, true), isPatternString);
|
2018-12-11 16:27:50 +00:00
|
|
|
|
|
|
|
|
|
// Remove language spec from content of these insets
|
2019-02-10 17:00:55 +00:00
|
|
|
|
makeKey("code", KeyInfo(KeyInfo::noMain, 1, false), isPatternString);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
|
|
|
|
|
// Same effect as previous, the parameter survives (because there is none anyway)
|
|
|
|
|
// No split
|
2019-02-19 22:11:09 +00:00
|
|
|
|
makeKey("noindent|textcompwordmark|maketitle", KeyInfo(KeyInfo::isStandard, 0, true), isPatternString);
|
2018-11-08 08:59:51 +00:00
|
|
|
|
// Remove table decorations
|
2018-11-07 08:35:16 +00:00
|
|
|
|
makeKey("hline|tabularnewline|toprule|bottomrule|midrule", KeyInfo(KeyInfo::doRemove, 0, true), isPatternString);
|
2019-02-19 22:11:09 +00:00
|
|
|
|
// Discard shape-header.
|
2019-02-21 13:45:41 +00:00
|
|
|
|
// For footnote or shortcut too, because of lang settings
|
2019-02-19 22:11:09 +00:00
|
|
|
|
// and wrong handling if 'KeyInfo::noMain' were used
|
2018-11-08 08:59:51 +00:00
|
|
|
|
makeKey("circlepar|diamondpar|heartpar|nutpar", KeyInfo(KeyInfo::isStandard, 1, true), isPatternString);
|
|
|
|
|
makeKey("trianglerightpar|hexagonpar|starpar", KeyInfo(KeyInfo::isStandard, 1, true), isPatternString);
|
|
|
|
|
makeKey("triangleuppar|triangledownpar|droppar", KeyInfo(KeyInfo::isStandard, 1, true), isPatternString);
|
|
|
|
|
makeKey("triangleleftpar|shapepar|dropuppar", KeyInfo(KeyInfo::isStandard, 1, true), isPatternString);
|
2019-02-25 10:59:54 +00:00
|
|
|
|
makeKey("hphantom|vphantom|footnote|shortcut|include|includegraphics", KeyInfo(KeyInfo::isStandard, 1, true), isPatternString);
|
2019-02-21 19:32:08 +00:00
|
|
|
|
makeKey("parbox", KeyInfo(KeyInfo::doRemove, 1, true), isPatternString);
|
2018-11-12 11:17:16 +00:00
|
|
|
|
// like ('\tiny{}' or '\tiny ' ...)
|
2018-12-16 13:50:38 +00:00
|
|
|
|
makeKey("footnotesize|tiny|scriptsize|small|large|Large|LARGE|huge|Huge", KeyInfo(KeyInfo::isSize, 0, false), isPatternString);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
|
|
|
|
|
// Survives, like known character
|
2019-02-17 23:40:55 +00:00
|
|
|
|
makeKey("lyx|LyX|latex|LaTeX|latexe|LaTeXe|tex|TeX", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
|
|
|
|
makeKey("item|listitem", KeyInfo(KeyInfo::isList, 1, false), isPatternString);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
|
2018-10-28 18:40:14 +00:00
|
|
|
|
makeKey("begin|end", KeyInfo(KeyInfo::isMath, 1, false), isPatternString);
|
|
|
|
|
makeKey("[|]", KeyInfo(KeyInfo::isMath, 1, false), isPatternString);
|
|
|
|
|
makeKey("$", KeyInfo(KeyInfo::isMath, 1, false), isPatternString);
|
2018-10-23 17:59:08 +00:00
|
|
|
|
|
2019-02-17 23:40:55 +00:00
|
|
|
|
makeKey("par|uldepth|ULdepth|protect|nobreakdash|medskip|relax", KeyInfo(KeyInfo::isStandard, 0, true), isPatternString);
|
2018-11-20 13:36:11 +00:00
|
|
|
|
// Remove RTL/LTR marker
|
2018-12-13 16:12:57 +00:00
|
|
|
|
makeKey("l|r|textlr|textfr|textar|beginl|endl", KeyInfo(KeyInfo::isStandard, 0, true), isPatternString);
|
2019-02-05 07:04:47 +00:00
|
|
|
|
makeKey("lettrine", KeyInfo(KeyInfo::cleanToStart, 0, true), isPatternString);
|
2019-02-07 12:35:47 +00:00
|
|
|
|
makeKey("lyxslide", KeyInfo(KeyInfo::isSectioning, 1, true), isPatternString);
|
2019-02-05 07:04:47 +00:00
|
|
|
|
makeKey("endarguments", KeyInfo(KeyInfo::endArguments, 0, true), isPatternString);
|
2019-02-07 12:35:47 +00:00
|
|
|
|
makeKey("twocolumn", KeyInfo(KeyInfo::removeWithArg, 2, true), isPatternString);
|
2019-02-16 17:39:10 +00:00
|
|
|
|
makeKey("tnotetext|ead|fntext|cortext|address", KeyInfo(KeyInfo::removeWithArg, 0, true), isPatternString);
|
2019-02-07 12:35:47 +00:00
|
|
|
|
makeKey("lyxend", KeyInfo(KeyInfo::isStandard, 0, true), isPatternString);
|
2018-10-27 14:57:42 +00:00
|
|
|
|
if (isPatternString) {
|
|
|
|
|
// Allow the first searched string to rebuild the keys too
|
|
|
|
|
keysBuilt = false;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
// no need to rebuild again
|
|
|
|
|
keysBuilt = true;
|
|
|
|
|
}
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
|
2018-10-14 18:39:13 +00:00
|
|
|
|
/*
|
2018-10-18 15:37:15 +00:00
|
|
|
|
* Keep the list of currently opened parentheses up to date
|
|
|
|
|
* (e.g. depth == 4 means there are 4 '{' not processed yet)
|
2018-10-14 18:39:13 +00:00
|
|
|
|
*/
|
2018-10-18 15:37:15 +00:00
|
|
|
|
void Intervall::handleParentheses(int lastpos, bool closingAllowed)
|
2018-10-13 19:02:53 +00:00
|
|
|
|
{
|
2018-10-18 15:37:15 +00:00
|
|
|
|
int skip = 0;
|
|
|
|
|
for (int i = depts[actualdeptindex]; i < lastpos; i+= 1 + skip) {
|
|
|
|
|
char c;
|
|
|
|
|
c = par[i];
|
|
|
|
|
skip = 0;
|
|
|
|
|
if (c == '\\') skip = 1;
|
|
|
|
|
else if (c == '{') {
|
|
|
|
|
handleOpenP(i);
|
2018-10-14 18:39:13 +00:00
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
else if (c == '}') {
|
|
|
|
|
handleCloseP(i, closingAllowed);
|
2018-10-14 18:39:13 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2018-10-18 15:37:15 +00:00
|
|
|
|
#if (0)
|
|
|
|
|
string Intervall::show(int lastpos)
|
2018-10-14 18:39:13 +00:00
|
|
|
|
{
|
2018-10-18 15:37:15 +00:00
|
|
|
|
int idx = 0; /* index into borders[] */
|
|
|
|
|
string s;
|
|
|
|
|
int i = 0;
|
|
|
|
|
for (idx = 0; idx <= ignoreidx; idx++) {
|
|
|
|
|
while (i < lastpos) {
|
|
|
|
|
int printsize;
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (i <= borders[idx].low) {
|
|
|
|
|
if (borders[idx].low > lastpos)
|
2018-10-18 15:37:15 +00:00
|
|
|
|
printsize = lastpos - i;
|
2018-10-14 18:39:13 +00:00
|
|
|
|
else
|
2018-10-19 17:11:20 +00:00
|
|
|
|
printsize = borders[idx].low - i;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
s += par.substr(i, printsize);
|
|
|
|
|
i += printsize;
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (i >= borders[idx].low)
|
|
|
|
|
i = borders[idx].upper;
|
2018-10-14 18:39:13 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2018-10-19 17:11:20 +00:00
|
|
|
|
i = borders[idx].upper;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
break;
|
2018-10-14 18:39:13 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
if (lastpos > i) {
|
|
|
|
|
s += par.substr(i, lastpos-i);
|
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return s;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
#endif
|
|
|
|
|
|
|
|
|
|
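// Write par[0..lastpos) to os while skipping all ignored regions; the stored
// title value (if any) is emitted before the first printed character and any
// braces still open at lastpos are closed at the end.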
void Intervall::output(ostringstream &os, int lastpos)
|
|
|
|
|
{
|
|
|
|
|
// get number of chars to output
|
|
|
|
|
int idx = 0; /* index into borders[] */
|
|
|
|
|
int i = 0;
|
2019-02-19 22:11:09 +00:00
|
|
|
|
int printed = 0;
|
|
|
|
|
string startTitle = titleValue;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
for (idx = 0; idx <= ignoreidx; idx++) {
|
|
|
|
|
if (i < lastpos) {
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (i <= borders[idx].low) {
|
2018-12-18 05:53:58 +00:00
|
|
|
|
int printsize;
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (borders[idx].low > lastpos)
|
2018-10-18 15:37:15 +00:00
|
|
|
|
printsize = lastpos - i;
|
2018-10-14 18:39:13 +00:00
|
|
|
|
else
|
2018-10-19 17:11:20 +00:00
|
|
|
|
printsize = borders[idx].low - i;
|
2019-02-19 22:11:09 +00:00
|
|
|
|
if (printsize > 0) {
|
|
|
|
|
os << startTitle << par.substr(i, printsize);
|
|
|
|
|
i += printsize;
|
|
|
|
|
printed += printsize;
|
|
|
|
|
startTitle = "";
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
handleParentheses(i, false);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (i >= borders[idx].low)
|
|
|
|
|
i = borders[idx].upper;
|
2018-10-14 18:39:13 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2018-10-19 17:11:20 +00:00
|
|
|
|
i = borders[idx].upper;
|
2018-10-14 18:39:13 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
else
|
|
|
|
|
break;
|
2018-10-14 18:39:13 +00:00
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
if (lastpos > i) {
|
2019-02-19 22:11:09 +00:00
|
|
|
|
os << startTitle << par.substr(i, lastpos-i);
|
|
|
|
|
printed += lastpos-i;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
handleParentheses(lastpos, false);
|
2019-12-29 16:40:13 +00:00
|
|
|
|
int startindex;
|
|
|
|
|
if (keys["foreignlanguage"].disabled)
|
|
|
|
|
startindex = actualdeptindex-langcount;
|
|
|
|
|
else
|
|
|
|
|
startindex = actualdeptindex;
|
|
|
|
|
for (int i = startindex; i > 0; --i) {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
os << "}";
|
|
|
|
|
}
|
2019-02-19 22:11:09 +00:00
|
|
|
|
if (hasTitle && (printed > 0))
|
|
|
|
|
os << "}";
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (! isPatternString_)
|
2018-11-02 09:32:28 +00:00
|
|
|
|
os << "\n";
|
2018-10-18 15:37:15 +00:00
|
|
|
|
handleParentheses(lastpos, true); /* extra closings '}' allowed here */
|
2018-10-14 18:39:13 +00:00
|
|
|
|
}
|
2018-10-13 19:02:53 +00:00
|
|
|
|
|
2018-10-22 18:19:36 +00:00
|
|
|
|
void LatexInfo::processRegion(int start, int region_end)
|
2018-10-14 18:39:13 +00:00
|
|
|
|
{
|
2018-10-29 06:37:32 +00:00
|
|
|
|
while (start < region_end) { /* Let {[} and {]} survive */
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int cnt = interval_.isOpeningPar(start);
|
2019-02-23 12:11:34 +00:00
|
|
|
|
if (cnt == 1) {
|
2018-10-23 17:59:08 +00:00
|
|
|
|
// Closing is allowed past the region
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int closing = interval_.findclosing(start+1, interval_.par.length());
|
|
|
|
|
interval_.addIntervall(start, start+1);
|
|
|
|
|
interval_.addIntervall(closing, closing+1);
|
2018-10-13 19:02:53 +00:00
|
|
|
|
}
|
2019-02-23 12:11:34 +00:00
|
|
|
|
else if (cnt == 3)
|
|
|
|
|
start += 2;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
start = interval_.nextNotIgnored(start+1);
|
2018-10-13 19:02:53 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2020-10-09 15:50:24 +00:00
|
|
|
|
void LatexInfo::removeHead(KeyInfo const & actual, int count)
|
2018-10-12 14:47:07 +00:00
|
|
|
|
{
|
2018-10-22 18:19:36 +00:00
|
|
|
|
if (actual.parenthesiscount == 0) {
|
|
|
|
|
// "{\tiny{} ...}" ==> "{{} ...}"
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._tokenstart-count, actual._tokenstart + actual._tokensize);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
else {
|
|
|
|
|
// Remove header hull, that is "\url{abcd}" ==> "abcd"
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._tokenstart - count, actual._dataStart);
|
|
|
|
|
interval_.addIntervall(actual._dataEnd, actual._dataEnd+1);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
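// Handle one entry according to its keytype: adjust the ignore intervalls,
// write already processed regions to os where needed, and return the index
// of the next entry to handle (or -1 when there is none).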
int LatexInfo::dispatch(ostringstream &os, int previousStart, KeyInfo &actual)
|
|
|
|
|
{
|
2018-11-04 13:54:06 +00:00
|
|
|
|
int nextKeyIdx = 0;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
switch (actual.keytype)
|
2018-11-04 13:54:06 +00:00
|
|
|
|
{
|
2019-02-19 22:11:09 +00:00
|
|
|
|
case KeyInfo::isTitle: {
|
|
|
|
|
removeHead(actual);
|
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
break;
|
|
|
|
|
}
|
2019-02-05 07:04:47 +00:00
|
|
|
|
case KeyInfo::cleanToStart: {
|
|
|
|
|
actual._dataEnd = actual._dataStart;
|
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
// Search for end of arguments
|
2019-02-07 12:35:47 +00:00
|
|
|
|
int tmpIdx = find(nextKeyIdx, KeyInfo::endArguments);
|
|
|
|
|
if (tmpIdx > 0) {
|
|
|
|
|
for (int i = nextKeyIdx; i <= tmpIdx; i++) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
entries_[i].disabled = true;
|
2019-02-05 07:04:47 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
actual._dataEnd = entries_[tmpIdx]._dataEnd;
|
2019-02-05 07:04:47 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
while (interval_.par[actual._dataEnd] == ' ')
|
2019-02-05 07:04:47 +00:00
|
|
|
|
actual._dataEnd++;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(0, actual._dataEnd+1);
|
|
|
|
|
interval_.actualdeptindex = 0;
|
|
|
|
|
interval_.depts[0] = actual._dataEnd+1;
|
|
|
|
|
interval_.closes[0] = -1;
|
2019-02-05 07:04:47 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
2018-12-14 15:02:33 +00:00
|
|
|
|
case KeyInfo::noContent: { /* char like "\hspace{2cm}" */
|
2019-02-12 13:21:14 +00:00
|
|
|
|
if (actual.disabled)
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._tokenstart, actual._dataEnd);
|
2019-02-12 13:21:14 +00:00
|
|
|
|
else
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._dataStart, actual._dataEnd);
|
2018-12-14 15:02:33 +00:00
|
|
|
|
}
|
|
|
|
|
// fall through
|
2018-10-22 18:19:36 +00:00
|
|
|
|
case KeyInfo::isChar: {
|
|
|
|
|
nextKeyIdx = getNextKey();
|
2018-10-18 15:37:15 +00:00
|
|
|
|
break;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
case KeyInfo::isSize: {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (actual.disabled || (interval_.par[actual._dataStart] != '{') || (interval_.par[actual._dataStart-1] == ' ')) {
|
2018-10-22 18:19:36 +00:00
|
|
|
|
processRegion(actual._dataEnd, actual._dataEnd+1); /* remove possibly following {} */
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._tokenstart, actual._dataEnd+1);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
} else {
|
2018-12-18 05:53:58 +00:00
|
|
|
|
// Here _dataStart points to '{', so correct it
|
|
|
|
|
actual._dataStart += 1;
|
|
|
|
|
actual._tokensize += 1;
|
|
|
|
|
actual.parenthesiscount = 1;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (interval_.par[actual._dataStart] == '}') {
|
2018-12-18 05:53:58 +00:00
|
|
|
|
// Determine the end if used like '{\tiny{}...}'
|
2019-03-21 11:53:41 +00:00
|
|
|
|
actual._dataEnd = interval_.findclosing(actual._dataStart+1, interval_.par.length()) + 1;
|
|
|
|
|
interval_.addIntervall(actual._dataStart, actual._dataStart+1);
|
2018-12-18 05:53:58 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
// Determine the end if used like '\tiny{...}'
|
2019-03-21 11:53:41 +00:00
|
|
|
|
actual._dataEnd = interval_.findclosing(actual._dataStart, interval_.par.length()) + 1;
|
2018-12-16 13:50:38 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
// Split on this key if not at start
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int start = interval_.nextNotIgnored(previousStart);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
if (start < actual._tokenstart) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.output(os, actual._tokenstart);
|
|
|
|
|
interval_.addIntervall(start, actual._tokenstart);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
}
|
|
|
|
|
// discard entry if at end of actual
|
|
|
|
|
nextKeyIdx = process(os, actual);
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
2019-02-05 07:04:47 +00:00
|
|
|
|
case KeyInfo::endArguments:
|
2019-02-17 23:40:55 +00:00
|
|
|
|
// Remove trailing '{}' too
|
2019-02-28 12:00:12 +00:00
|
|
|
|
actual._dataStart += 1;
|
|
|
|
|
actual._dataEnd += 1;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._tokenstart, actual._dataEnd+1);
|
2019-02-05 07:04:47 +00:00
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
break;
|
2018-12-11 16:27:50 +00:00
|
|
|
|
case KeyInfo::noMain:
|
|
|
|
|
// fall through
|
2018-10-22 18:19:36 +00:00
|
|
|
|
case KeyInfo::isStandard: {
|
|
|
|
|
if (actual.disabled) {
|
|
|
|
|
removeHead(actual);
|
|
|
|
|
processRegion(actual._dataStart, actual._dataStart+1);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
nextKeyIdx = getNextKey();
|
2018-10-22 18:19:36 +00:00
|
|
|
|
} else {
|
2019-02-10 17:00:55 +00:00
|
|
|
|
// Split on this key if not at datastart of calling entry
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int start = interval_.nextNotIgnored(previousStart);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
if (start < actual._tokenstart) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.output(os, actual._tokenstart);
|
|
|
|
|
interval_.addIntervall(start, actual._tokenstart);
|
2018-10-20 10:47:37 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
// discard entry if at end of actual
|
|
|
|
|
nextKeyIdx = process(os, actual);
|
2018-10-20 10:47:37 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
2019-02-07 12:35:47 +00:00
|
|
|
|
case KeyInfo::removeWithArg: {
|
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
// Search for end of arguments
|
|
|
|
|
int tmpIdx = find(nextKeyIdx, KeyInfo::endArguments);
|
|
|
|
|
if (tmpIdx > 0) {
|
|
|
|
|
for (int i = nextKeyIdx; i <= tmpIdx; i++) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
entries_[i].disabled = true;
|
2019-02-07 12:35:47 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
actual._dataEnd = entries_[tmpIdx]._dataEnd;
|
2019-02-07 12:35:47 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._tokenstart, actual._dataEnd+1);
|
2019-02-07 12:35:47 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
case KeyInfo::doRemove: {
|
2018-11-07 08:35:16 +00:00
|
|
|
|
// Remove the key with all parameters and following spaces
|
|
|
|
|
size_t pos;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
for (pos = actual._dataEnd+1; pos < interval_.par.length(); pos++) {
|
|
|
|
|
if ((interval_.par[pos] != ' ') && (interval_.par[pos] != '%'))
|
2018-11-07 08:35:16 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
2019-12-29 16:40:13 +00:00
|
|
|
|
// Remove also enclosing parentheses [] and {}
|
|
|
|
|
int numpars = 0;
|
|
|
|
|
int spaces = 0;
|
|
|
|
|
while (actual._tokenstart > numpars) {
|
2020-05-26 16:49:50 +00:00
|
|
|
|
if (pos+numpars >= interval_.par.size())
|
|
|
|
|
break;
|
|
|
|
|
else if (interval_.par[pos+numpars] == ']' && interval_.par[actual._tokenstart-numpars-1] == '[')
|
2019-12-29 16:40:13 +00:00
|
|
|
|
numpars++;
|
|
|
|
|
else if (interval_.par[pos+numpars] == '}' && interval_.par[actual._tokenstart-numpars-1] == '{')
|
|
|
|
|
numpars++;
|
|
|
|
|
else
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
if (numpars > 0) {
|
|
|
|
|
if (interval_.par[pos+numpars] == ' ')
|
|
|
|
|
spaces++;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
interval_.addIntervall(actual._tokenstart-numpars, pos+numpars+spaces);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
break;
|
|
|
|
|
}
|
2018-11-03 10:15:12 +00:00
|
|
|
|
case KeyInfo::isList: {
|
|
|
|
|
// Discard space before _tokenstart
|
|
|
|
|
int count;
|
|
|
|
|
for (count = 0; count < actual._tokenstart; count++) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (interval_.par[actual._tokenstart-count-1] != ' ')
|
2018-11-03 10:15:12 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
2019-02-16 17:39:10 +00:00
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
int tmpIdx = find(nextKeyIdx, KeyInfo::endArguments);
|
|
|
|
|
if (tmpIdx > 0) {
|
2019-02-17 23:40:55 +00:00
|
|
|
|
// Special case: \item is not a list, but a command (like in Style Author_Biography in maa-monthly.layout)
|
|
|
|
|
// with arguments
|
|
|
|
|
// How else can we catch this one?
|
2019-02-16 17:39:10 +00:00
|
|
|
|
for (int i = nextKeyIdx; i <= tmpIdx; i++) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
entries_[i].disabled = true;
|
2019-02-16 17:39:10 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
actual._dataEnd = entries_[tmpIdx]._dataEnd;
|
2019-02-16 17:39:10 +00:00
|
|
|
|
}
|
2019-02-17 23:40:55 +00:00
|
|
|
|
else if (nextKeyIdx > 0) {
|
|
|
|
|
// Ignore any lang entries inside data region
|
2019-03-21 11:53:41 +00:00
|
|
|
|
for (int i = nextKeyIdx; i < int(entries_.size()) && entries_[i]._tokenstart < actual._dataEnd; i++) {
|
|
|
|
|
if (entries_[i].keytype == KeyInfo::isMain)
|
|
|
|
|
entries_[i].disabled = true;
|
2019-02-17 23:40:55 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2018-11-03 10:15:12 +00:00
|
|
|
|
if (actual.disabled) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._tokenstart-count, actual._dataEnd+1);
|
2018-11-03 10:15:12 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._tokenstart-count, actual._tokenstart);
|
2018-11-03 10:15:12 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (interval_.par[actual._dataEnd+1] == '[') {
|
|
|
|
|
int posdown = interval_.findclosing(actual._dataEnd+2, interval_.par.length(), '[', ']');
|
|
|
|
|
if ((interval_.par[actual._dataEnd+2] == '{') &&
|
|
|
|
|
(interval_.par[posdown-1] == '}')) {
|
|
|
|
|
interval_.addIntervall(actual._dataEnd+1,actual._dataEnd+3);
|
|
|
|
|
interval_.addIntervall(posdown-1, posdown+1);
|
2018-11-05 11:58:45 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._dataEnd+1, actual._dataEnd+2);
|
|
|
|
|
interval_.addIntervall(posdown, posdown+1);
|
2018-11-05 11:58:45 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int blk = interval_.nextNotIgnored(actual._dataEnd+1);
|
2018-11-04 20:41:04 +00:00
|
|
|
|
if (blk > posdown) {
|
2018-11-06 14:28:43 +00:00
|
|
|
|
// Discard at most 1 space after empty item
|
2018-11-04 20:41:04 +00:00
|
|
|
|
int count;
|
2018-11-06 14:28:43 +00:00
|
|
|
|
for (count = 0; count < 1; count++) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (interval_.par[blk+count] != ' ')
|
2018-11-04 20:41:04 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
if (count > 0)
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(blk, blk+count);
|
2018-11-04 20:41:04 +00:00
|
|
|
|
}
|
2018-11-03 10:15:12 +00:00
|
|
|
|
}
|
|
|
|
|
break;
|
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
case KeyInfo::isSectioning: {
|
2018-11-08 08:59:51 +00:00
|
|
|
|
// Discard spaces before _tokenstart
|
2018-10-22 18:19:36 +00:00
|
|
|
|
int count;
|
2019-02-07 12:35:47 +00:00
|
|
|
|
int val = actual._tokenstart;
|
|
|
|
|
for (count = 0; count < actual._tokenstart;) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
val = interval_.previousNotIgnored(val-1);
|
2020-05-26 13:58:23 +00:00
|
|
|
|
if (val < 0 || interval_.par[val] != ' ')
|
2018-10-22 18:19:36 +00:00
|
|
|
|
break;
|
2019-02-07 12:35:47 +00:00
|
|
|
|
else {
|
|
|
|
|
count = actual._tokenstart - val;
|
|
|
|
|
}
|
2018-10-19 17:11:20 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
if (actual.disabled) {
|
|
|
|
|
removeHead(actual, count);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
nextKeyIdx = getNextKey();
|
2018-10-22 18:19:36 +00:00
|
|
|
|
} else {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._tokenstart-count, actual._tokenstart);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
nextKeyIdx = process(os, actual);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
case KeyInfo::isMath: {
|
|
|
|
|
// Same as regex, use the content unchanged
|
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
case KeyInfo::isRegex: {
|
|
|
|
|
// DO NOT SPLIT ON REGEX
|
|
|
|
|
// Do not disable
|
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
case KeyInfo::isIgnored: {
|
|
|
|
|
// Treat like a character for now
|
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
case KeyInfo::isMain: {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (interval_.par.substr(actual._dataStart, 2) == "% ")
|
|
|
|
|
interval_.addIntervall(actual._dataStart, actual._dataStart+2);
|
2019-02-21 19:32:08 +00:00
|
|
|
|
if (actual._tokenstart > 0) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int prev = interval_.previousNotIgnored(actual._tokenstart - 1);
|
|
|
|
|
if ((prev >= 0) && interval_.par[prev] == '%')
|
|
|
|
|
interval_.addIntervall(prev, prev+1);
|
2019-02-21 19:32:08 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
if (actual.disabled) {
|
|
|
|
|
removeHead(actual);
|
2019-12-29 16:40:13 +00:00
|
|
|
|
interval_.langcount++;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if ((interval_.par.substr(actual._dataStart, 3) == " \\[") ||
|
|
|
|
|
(interval_.par.substr(actual._dataStart, 8) == " \\begin{")) {
|
2018-10-28 18:40:14 +00:00
|
|
|
|
// Discard also the space before math-equation
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._dataStart, actual._dataStart+1);
|
2018-10-28 18:40:14 +00:00
|
|
|
|
}
|
2019-02-07 12:35:47 +00:00
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
// interval.resetOpenedP(actual._dataStart-1);
|
2018-10-20 10:47:37 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
else {
|
2019-02-21 13:45:41 +00:00
|
|
|
|
if (actual._tokenstart < 26) {
|
2018-10-22 18:19:36 +00:00
|
|
|
|
// for the first (and maybe dummy) language
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.setForDefaultLang(actual);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.resetOpenedP(actual._dataStart-1);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
case KeyInfo::invalid:
|
|
|
|
|
// This cannot happen, already handled
|
|
|
|
|
// fall through
|
|
|
|
|
default: {
|
2020-05-29 06:44:56 +00:00
|
|
|
|
// LYXERR(Debug::INFO, "Unhandled keytype");
|
2018-10-22 18:19:36 +00:00
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
break;
|
|
|
|
|
}
|
2018-11-04 13:54:06 +00:00
|
|
|
|
}
|
2018-11-04 20:41:04 +00:00
|
|
|
|
return nextKeyIdx;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
}
|
|
|
|
|
|
2020-10-09 15:50:24 +00:00
|
|
|
|
int LatexInfo::process(ostringstream & os, KeyInfo const & actual )
|
2018-10-22 18:19:36 +00:00
|
|
|
|
{
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int end = interval_.nextNotIgnored(actual._dataEnd);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
int oldStart = actual._dataStart;
|
|
|
|
|
int nextKeyIdx = getNextKey();
|
|
|
|
|
while (true) {
|
|
|
|
|
if ((nextKeyIdx < 0) ||
|
2019-03-21 11:53:41 +00:00
|
|
|
|
(entries_[nextKeyIdx]._tokenstart >= actual._dataEnd) ||
|
|
|
|
|
(entries_[nextKeyIdx].keytype == KeyInfo::invalid)) {
|
2018-10-22 18:19:36 +00:00
|
|
|
|
if (oldStart <= end) {
|
|
|
|
|
processRegion(oldStart, end);
|
|
|
|
|
oldStart = end+1;
|
2018-10-19 17:11:20 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
break;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
KeyInfo &nextKey = getKeyInfo(nextKeyIdx);
|
|
|
|
|
|
2019-02-07 12:35:47 +00:00
|
|
|
|
if ((nextKey.keytype == KeyInfo::isMain) && !nextKey.disabled) {
|
2018-10-22 18:19:36 +00:00
|
|
|
|
(void) dispatch(os, actual._dataStart, nextKey);
|
|
|
|
|
end = nextKey._tokenstart;
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
processRegion(oldStart, nextKey._tokenstart);
|
|
|
|
|
nextKeyIdx = dispatch(os, actual._dataStart, nextKey);
|
|
|
|
|
|
|
|
|
|
oldStart = nextKey._dataEnd+1;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
// now nextKey is either invalid or is outside of actual._dataEnd
|
2018-10-22 18:19:36 +00:00
|
|
|
|
// output the remaining and discard myself
|
|
|
|
|
if (oldStart <= end) {
|
|
|
|
|
processRegion(oldStart, end);
|
|
|
|
|
}
|
2020-05-26 16:49:50 +00:00
|
|
|
|
if (interval_.par.size() > (size_t) end && interval_.par[end] == '}') {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
end += 1;
|
|
|
|
|
// This is the normal case.
|
|
|
|
|
// But if using the firstlanguage, the closing may be missing
|
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
// get minimum of 'end' and 'actual._dataEnd' in case that the nextKey.keytype was 'KeyInfo::isMain'
|
|
|
|
|
int output_end;
|
|
|
|
|
if (actual._dataEnd < end)
|
2019-03-21 11:53:41 +00:00
|
|
|
|
output_end = interval_.nextNotIgnored(actual._dataEnd);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
else
|
2019-03-21 11:53:41 +00:00
|
|
|
|
output_end = interval_.nextNotIgnored(end);
|
FindAdv: Added lyx-function search-ignore
Enable/disable ignoring the specified type
language: e.g. british, slovak, latin, ...
color: blue, red, ...
sectioning: part, chapter, ..
font:
series: bold, ...
shape: upright, italic, slanted
family: serif, monospace ...
markup: enphasize, noun
underline:
strike:
Examples:
search-ignore language true
search-ignore shape true
2018-11-15 13:20:50 +00:00
|
|
|
|
if ((actual.keytype == KeyInfo::isMain) && actual.disabled) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._tokenstart, actual._tokenstart+actual._tokensize);
|
FindAdv: Added lyx-function search-ignore
Enable/disable ignoring the specified type
language: e.g. british, slovak, latin, ...
color: blue, red, ...
sectioning: part, chapter, ..
font:
series: bold, ...
shape: upright, italic, slanted
family: serif, monospace ...
markup: enphasize, noun
underline:
strike:
Examples:
search-ignore language true
search-ignore shape true
2018-11-15 13:20:50 +00:00
|
|
|
|
}
|
2018-12-16 13:50:38 +00:00
|
|
|
|
// Remove possible empty data
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int dstart = interval_.nextNotIgnored(actual._dataStart);
|
|
|
|
|
while (interval_.isOpeningPar(dstart) == 1) {
|
|
|
|
|
interval_.addIntervall(dstart, dstart+1);
|
|
|
|
|
int dend = interval_.findclosing(dstart+1, output_end);
|
|
|
|
|
interval_.addIntervall(dend, dend+1);
|
|
|
|
|
dstart = interval_.nextNotIgnored(dstart+1);
|
2018-12-16 13:50:38 +00:00
|
|
|
|
}
|
|
|
|
|
if (dstart < output_end)
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.output(os, output_end);
|
|
|
|
|
interval_.addIntervall(actual._tokenstart, end);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
return nextKeyIdx;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-02-10 17:00:55 +00:00
|
|
|
|
string splitOnKnownMacros(string par, bool isPatternString)
|
|
|
|
|
{
|
2018-10-12 14:47:07 +00:00
|
|
|
|
ostringstream os;
|
2018-10-27 14:57:42 +00:00
|
|
|
|
LatexInfo li(par, isPatternString);
|
2020-05-29 06:44:56 +00:00
|
|
|
|
// LYXERR(Debug::INFO, "Berfore split: " << par);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
KeyInfo DummyKey = KeyInfo(KeyInfo::KeyType::isMain, 2, true);
|
2018-10-23 17:59:08 +00:00
|
|
|
|
DummyKey.head = "";
|
2018-10-22 18:19:36 +00:00
|
|
|
|
DummyKey._tokensize = 0;
|
|
|
|
|
DummyKey._dataStart = 0;
|
|
|
|
|
DummyKey._dataEnd = par.length();
|
|
|
|
|
DummyKey.disabled = true;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
int firstkeyIdx = li.getFirstKey();
|
|
|
|
|
string s;
|
|
|
|
|
if (firstkeyIdx >= 0) {
|
2018-10-22 18:19:36 +00:00
|
|
|
|
KeyInfo firstKey = li.getKeyInfo(firstkeyIdx);
|
2019-02-19 22:11:09 +00:00
|
|
|
|
DummyKey._tokenstart = firstKey._tokenstart;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
int nextkeyIdx;
|
|
|
|
|
if ((firstKey.keytype != KeyInfo::isMain) || firstKey.disabled) {
|
2018-10-30 19:52:29 +00:00
|
|
|
|
// Use dummy firstKey
|
2018-10-22 18:19:36 +00:00
|
|
|
|
firstKey = DummyKey;
|
|
|
|
|
(void) li.setNextKey(firstkeyIdx);
|
|
|
|
|
}
|
2018-11-16 11:12:06 +00:00
|
|
|
|
else {
|
|
|
|
|
if (par.substr(firstKey._dataStart, 2) == "% ")
|
|
|
|
|
li.addIntervall(firstKey._dataStart, firstKey._dataStart+2);
|
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
nextkeyIdx = li.process(os, firstKey);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
while (nextkeyIdx >= 0) {
|
|
|
|
|
// Check for a possible gap between the last
|
|
|
|
|
// entry and this one
|
|
|
|
|
int datastart = li.nextNotIgnored(firstKey._dataStart);
|
|
|
|
|
KeyInfo &nextKey = li.getKeyInfo(nextkeyIdx);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
if ((nextKey._tokenstart > datastart)) {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
// Handle the gap
|
|
|
|
|
firstKey._dataStart = datastart;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
firstKey._dataEnd = par.length();
|
2018-10-18 15:37:15 +00:00
|
|
|
|
(void) li.setNextKey(nextkeyIdx);
|
2019-02-21 13:45:41 +00:00
|
|
|
|
// Fake the last opened parenthesis
|
|
|
|
|
li.setForDefaultLang(firstKey);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
nextkeyIdx = li.process(os, firstKey);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
if (nextKey.keytype != KeyInfo::isMain) {
|
|
|
|
|
firstKey._dataStart = datastart;
|
|
|
|
|
firstKey._dataEnd = nextKey._dataEnd+1;
|
|
|
|
|
(void) li.setNextKey(nextkeyIdx);
|
2019-02-21 13:45:41 +00:00
|
|
|
|
li.setForDefaultLang(firstKey);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
nextkeyIdx = li.process(os, firstKey);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2018-10-22 18:19:36 +00:00
|
|
|
|
nextkeyIdx = li.process(os, nextKey);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
// Handle the remaining
|
|
|
|
|
firstKey._dataStart = li.nextNotIgnored(firstKey._dataStart);
|
|
|
|
|
firstKey._dataEnd = par.length();
|
2018-11-05 11:58:45 +00:00
|
|
|
|
// Check if ! empty
|
|
|
|
|
if ((firstKey._dataStart < firstKey._dataEnd) &&
|
|
|
|
|
(par[firstKey._dataStart] != '}')) {
|
2019-02-21 13:45:41 +00:00
|
|
|
|
li.setForDefaultLang(firstKey);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
(void) li.process(os, firstKey);
|
2018-11-04 20:41:04 +00:00
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
s = os.str();
|
2018-11-08 08:59:51 +00:00
|
|
|
|
if (s.empty()) {
|
|
|
|
|
// return a string that is definitely impossible to match
|
|
|
|
|
s = "\\foreignlanguage{ignore}{ }";
|
|
|
|
|
}
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
else
|
2018-10-28 18:40:14 +00:00
|
|
|
|
s = par; /* no known macros found */
|
2020-05-29 06:44:56 +00:00
|
|
|
|
// LYXERR(Debug::INFO, "After split: " << s);
|
2018-10-12 14:47:07 +00:00
|
|
|
|
return s;
|
|
|
|
|
}
|
|
|
|
|
|
2018-10-06 21:50:50 +00:00
|
|
|
|
/*
|
|
|
|
|
* Try to unify the language specs in the latexified text.
|
|
|
|
|
* The resulting modified string is set to "" if
|
|
|
|
|
* the searched TeX does not contain all the features in the search pattern
|
|
|
|
|
*/
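// Illustrative sketch of the "set to ''" case (the feature name is an
// assumption, identifyFeatures() is defined elsewhere): if the search
// pattern was found to carry a format feature, e.g. one coming from a
// \textbf{...} wrapper, and the latexified text of the region being
// searched lacks that feature, "" is returned so the region can be
// skipped without invoking the regex engine at all.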
|
2018-10-27 14:57:42 +00:00
|
|
|
|
static string correctlanguagesetting(string par, bool isPatternString, bool withformat)
|
2018-10-05 18:26:44 +00:00
|
|
|
|
{
|
|
|
|
|
static Features regex_f;
|
|
|
|
|
static int missed = 0;
|
|
|
|
|
static bool regex_with_format = false;
|
|
|
|
|
|
|
|
|
|
int parlen = par.length();
|
|
|
|
|
|
|
|
|
|
while ((parlen > 0) && (par[parlen-1] == '\n')) {
|
|
|
|
|
parlen--;
|
|
|
|
|
}
|
2019-02-17 23:40:55 +00:00
|
|
|
|
if (isPatternString && (parlen > 0) && (par[parlen-1] == '~')) {
|
|
|
|
|
// Happens to be there in case of description or labeling environment
|
|
|
|
|
parlen--;
|
|
|
|
|
}
|
2018-10-15 06:09:19 +00:00
|
|
|
|
string result;
|
|
|
|
|
if (withformat) {
|
|
|
|
|
// Split the latex input into pieces which
|
|
|
|
|
// can be digested by our search engine
|
2018-10-19 17:11:20 +00:00
|
|
|
|
LYXERR(Debug::FIND, "input: \"" << par << "\"");
|
2019-02-17 23:40:55 +00:00
|
|
|
|
result = splitOnKnownMacros(par.substr(0,parlen), isPatternString);
|
2018-10-15 06:09:19 +00:00
|
|
|
|
LYXERR(Debug::FIND, "After split: \"" << result << "\"");
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
result = par.substr(0, parlen);
|
2018-10-27 14:57:42 +00:00
|
|
|
|
if (isPatternString) {
|
2018-10-05 18:26:44 +00:00
|
|
|
|
missed = 0;
|
|
|
|
|
if (withformat) {
|
2018-10-12 14:47:07 +00:00
|
|
|
|
regex_f = identifyFeatures(result);
|
2019-03-04 13:05:44 +00:00
|
|
|
|
string features = "";
|
2018-10-05 18:26:44 +00:00
|
|
|
|
for (auto it = regex_f.cbegin(); it != regex_f.cend(); ++it) {
|
|
|
|
|
string a = it->first;
|
|
|
|
|
regex_with_format = true;
|
2019-03-04 13:05:44 +00:00
|
|
|
|
features += " " + a;
|
2020-05-29 06:44:56 +00:00
|
|
|
|
// LYXERR(Debug::INFO, "Identified regex format:" << a);
|
2018-10-05 18:26:44 +00:00
|
|
|
|
}
|
2019-03-04 13:05:44 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Identified Features" << features);
|
2018-10-05 18:26:44 +00:00
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
} else if (regex_with_format) {
|
2018-10-12 14:47:07 +00:00
|
|
|
|
Features info = identifyFeatures(result);
|
2018-10-05 18:26:44 +00:00
|
|
|
|
for (auto it = regex_f.cbegin(); it != regex_f.cend(); ++it) {
|
|
|
|
|
string a = it->first;
|
|
|
|
|
bool b = it->second;
|
|
|
|
|
if (b && ! info[a]) {
|
|
|
|
|
missed++;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Missed(" << missed << " " << a <<", srclen = " << parlen );
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return "";
|
2018-10-05 18:26:44 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
else {
|
2020-05-29 06:44:56 +00:00
|
|
|
|
// LYXERR(Debug::INFO, "No regex formats");
|
2018-10-05 18:26:44 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return result;
|
2018-10-05 18:26:44 +00:00
|
|
|
|
}
|
|
|
|
|
|
2011-02-07 20:36:40 +00:00
|
|
|
|
|
2011-06-19 16:41:23 +00:00
|
|
|
|
// Remove trailing closure of math, macros and environments, so to catch parts of them.
|
2012-10-27 13:45:27 +00:00
|
|
|
|
static int identifyClosing(string & t)
|
|
|
|
|
{
|
2011-06-19 16:41:23 +00:00
|
|
|
|
int open_braces = 0;
|
|
|
|
|
do {
|
|
|
|
|
LYXERR(Debug::FIND, "identifyClosing(): t now is '" << t << "'");
|
2017-04-16 21:10:17 +00:00
|
|
|
|
if (regex_replace(t, t, "(.*[^\\\\])\\$" REGEX_EOS, "$1"))
|
2011-06-19 16:41:23 +00:00
|
|
|
|
continue;
|
2020-05-29 12:22:34 +00:00
|
|
|
|
if (regex_replace(t, t, "(.*[^\\\\])\\\\\\]" REGEX_EOS, "$1"))
|
2011-06-19 16:41:23 +00:00
|
|
|
|
continue;
|
2020-05-29 18:04:57 +00:00
|
|
|
|
if (regex_replace(t, t, "(.*[^\\\\])\\\\end\\{[a-zA-Z_]*\\*?\\}" REGEX_EOS, "$1"))
|
2011-06-19 16:41:23 +00:00
|
|
|
|
continue;
|
2017-04-16 21:10:17 +00:00
|
|
|
|
if (regex_replace(t, t, "(.*[^\\\\])\\}" REGEX_EOS, "$1")) {
|
2011-06-19 16:41:23 +00:00
|
|
|
|
++open_braces;
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
break;
|
|
|
|
|
} while (true);
|
|
|
|
|
return open_braces;
|
|
|
|
|
}
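// Illustrative examples (assumed inputs):
//   "abc$"             ==> "abc"  (trailing inline-math closure stripped)
//   "abc\end{itemize}" ==> "abc"  (trailing environment closure stripped)
//   "abc}"             ==> "abc"  and open_braces is incremented by one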
|
|
|
|
|
|
2019-03-13 13:06:18 +00:00
|
|
|
|
static int num_replaced = 0;
|
2019-03-16 07:17:09 +00:00
|
|
|
|
static bool previous_single_replace = true;
|
2011-06-19 16:41:23 +00:00
|
|
|
|
|
2009-12-30 18:40:18 +00:00
|
|
|
|
MatchStringAdv::MatchStringAdv(lyx::Buffer & buf, FindAndReplaceOptions const & opt)
|
|
|
|
|
: p_buf(&buf), p_first_buf(&buf), opt(opt)
|
2008-11-15 23:30:27 +00:00
|
|
|
|
{
|
2011-02-07 20:36:40 +00:00
|
|
|
|
Buffer & find_buf = *theBufferList().getBuffer(FileName(to_utf8(opt.find_buf_name)), true);
|
2011-08-25 19:16:14 +00:00
|
|
|
|
docstring const & ds = stringifySearchBuffer(find_buf, opt);
|
|
|
|
|
use_regexp = lyx::to_utf8(ds).find("\\regexp{") != std::string::npos;
|
2019-03-16 07:17:09 +00:00
|
|
|
|
if (opt.replace_all && previous_single_replace) {
|
|
|
|
|
previous_single_replace = false;
|
|
|
|
|
num_replaced = 0;
|
|
|
|
|
}
|
|
|
|
|
else if (!opt.replace_all) {
|
2019-03-13 13:06:18 +00:00
|
|
|
|
num_replaced = 0; // count number of replaced strings
|
2019-03-16 07:17:09 +00:00
|
|
|
|
previous_single_replace = true;
|
|
|
|
|
}
|
2011-08-25 19:16:14 +00:00
|
|
|
|
// When using regexp, braces are hacked already by escape_for_regex()
|
|
|
|
|
par_as_string = normalize(ds, !use_regexp);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
open_braces = 0;
|
|
|
|
|
close_wildcards = 0;
|
|
|
|
|
|
2011-06-19 16:41:23 +00:00
|
|
|
|
size_t lead_size = 0;
|
2018-10-05 18:26:44 +00:00
|
|
|
|
// correct the language settings
|
|
|
|
|
par_as_string = correctlanguagesetting(par_as_string, true, !opt.ignoreformat);
|
2013-04-01 08:16:45 +00:00
|
|
|
|
if (opt.ignoreformat) {
|
2015-05-17 15:27:12 +00:00
|
|
|
|
if (!use_regexp) {
|
|
|
|
|
// if par_as_string_nolead were empty,
|
|
|
|
|
// the following call to findAux will always *find* the string
|
|
|
|
|
// in the checked data, and thus would always use the slow
|
|
|
|
|
// examination of the current text part.
|
|
|
|
|
par_as_string_nolead = par_as_string;
|
|
|
|
|
}
|
|
|
|
|
} else {
|
2011-06-19 16:41:23 +00:00
|
|
|
|
lead_size = identifyLeading(par_as_string);
|
2018-10-05 18:26:44 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Lead_size: " << lead_size);
|
2011-06-19 16:41:23 +00:00
|
|
|
|
lead_as_string = par_as_string.substr(0, lead_size);
|
|
|
|
|
par_as_string_nolead = par_as_string.substr(lead_size, par_as_string.size() - lead_size);
|
|
|
|
|
}
|
2011-02-07 20:36:40 +00:00
|
|
|
|
|
|
|
|
|
if (!use_regexp) {
|
2011-06-19 16:41:23 +00:00
|
|
|
|
open_braces = identifyClosing(par_as_string);
|
|
|
|
|
identifyClosing(par_as_string_nolead);
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Open braces: " << open_braces);
|
|
|
|
|
LYXERR(Debug::FIND, "Built MatchStringAdv object: par_as_string = '" << par_as_string << "'");
|
2008-11-15 23:30:27 +00:00
|
|
|
|
} else {
|
2011-02-07 20:36:40 +00:00
|
|
|
|
string lead_as_regexp;
|
|
|
|
|
if (lead_size > 0) {
|
2011-06-19 16:41:23 +00:00
|
|
|
|
// @todo No need to search for \regexp{} insets in leading material
|
|
|
|
|
lead_as_regexp = escape_for_regex(par_as_string.substr(0, lead_size), !opt.ignoreformat);
|
|
|
|
|
par_as_string = par_as_string_nolead;
|
2011-02-07 20:36:40 +00:00
|
|
|
|
LYXERR(Debug::FIND, "lead_as_regexp is '" << lead_as_regexp << "'");
|
|
|
|
|
LYXERR(Debug::FIND, "par_as_string now is '" << par_as_string << "'");
|
|
|
|
|
}
|
2011-06-19 16:41:23 +00:00
|
|
|
|
par_as_string = escape_for_regex(par_as_string, !opt.ignoreformat);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
// Insert (.*?) before trailing closure of math, macros and environments, so to catch parts of them.
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "par_as_string now is '" << par_as_string << "'");
|
2008-11-15 23:30:27 +00:00
|
|
|
|
if (
|
|
|
|
|
// Insert .* before trailing '\$' ('$' has been escaped by escape_for_regex)
|
|
|
|
|
regex_replace(par_as_string, par_as_string, "(.*[^\\\\])(\\\\\\$)\\'", "$1(.*?)$2")
|
2012-10-23 20:58:10 +00:00
|
|
|
|
// Insert .* before trailing '\\\]' ('\]' has been escaped by escape_for_regex)
|
|
|
|
|
|| regex_replace(par_as_string, par_as_string, "(.*[^\\\\])( \\\\\\\\\\\\\\])\\'", "$1(.*?)$2")
|
|
|
|
|
// Insert .* before trailing '\\end\{...}' ('\end{...}' has been escaped by escape_for_regex)
|
|
|
|
|
|| regex_replace(par_as_string, par_as_string,
|
|
|
|
|
"(.*[^\\\\])( \\\\\\\\end\\\\\\{[a-zA-Z_]*)(\\\\\\*)?(\\\\\\})\\'", "$1(.*?)$2$3$4")
|
|
|
|
|
// Insert .* before trailing '\}' ('}' has been escaped by escape_for_regex)
|
|
|
|
|
|| regex_replace(par_as_string, par_as_string, "(.*[^\\\\])(\\\\\\})\\'", "$1(.*?)$2")
|
|
|
|
|
) {
|
2008-11-15 23:30:27 +00:00
|
|
|
|
++close_wildcards;
|
|
|
|
|
}
|
2018-09-29 07:23:30 +00:00
|
|
|
|
if (!opt.ignoreformat) {
|
2019-02-23 12:11:34 +00:00
|
|
|
|
// Remove extra '\}' at end if not part of \{\.\}
|
|
|
|
|
size_t lng = par_as_string.size();
|
|
|
|
|
while(lng > 2) {
|
|
|
|
|
if (par_as_string.substr(lng-2, 2).compare("\\}") == 0) {
|
|
|
|
|
if (lng >= 6) {
|
|
|
|
|
if (par_as_string.substr(lng-6,3).compare("\\{\\") == 0)
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
lng -= 2;
|
|
|
|
|
open_braces++;
|
|
|
|
|
}
|
2020-05-26 16:49:50 +00:00
|
|
|
|
else
|
2019-02-23 12:11:34 +00:00
|
|
|
|
break;
|
2020-05-26 16:49:50 +00:00
|
|
|
|
}
|
2019-02-23 12:11:34 +00:00
|
|
|
|
if (lng < par_as_string.size())
|
|
|
|
|
par_as_string = par_as_string.substr(0,lng);
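// Illustrative examples for the cleanup above (assumed escaped patterns):
//   "abc\}\}"  ==> "abc"  with open_braces increased by 2
//   "ab\{\.\}" ==> kept as is (the trailing "\}" belongs to "\{\.\}")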
|
2018-11-02 09:32:28 +00:00
|
|
|
|
/*
|
2018-09-29 07:23:30 +00:00
|
|
|
|
// save '\.'
|
|
|
|
|
regex_replace(par_as_string, par_as_string, "\\\\\\.", "_xxbdotxx_");
|
2018-09-29 16:46:21 +00:00
|
|
|
|
// handle '.' -> '[^]', replace later as '[^\}\{\\]'
|
2018-09-29 07:23:30 +00:00
|
|
|
|
regex_replace(par_as_string, par_as_string, "\\.", "[^]");
|
2018-09-29 16:46:21 +00:00
|
|
|
|
// replace '[^...]' with '[^...\}\{\\]'
|
|
|
|
|
regex_replace(par_as_string, par_as_string, "\\[\\^([^\\\\\\]]*)\\]", "_xxbrlxx_$1\\}\\{\\\\_xxbrrxx_");
|
2018-09-29 07:23:30 +00:00
|
|
|
|
regex_replace(par_as_string, par_as_string, "_xxbrlxx_", "[^");
|
|
|
|
|
regex_replace(par_as_string, par_as_string, "_xxbrrxx_", "]");
|
|
|
|
|
// restore '\.'
|
|
|
|
|
regex_replace(par_as_string, par_as_string, "_xxbdotxx_", "\\.");
|
2018-11-02 09:32:28 +00:00
|
|
|
|
*/
|
2018-09-29 07:23:30 +00:00
|
|
|
|
}
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "par_as_string now is '" << par_as_string << "'");
|
|
|
|
|
LYXERR(Debug::FIND, "Open braces: " << open_braces);
|
|
|
|
|
LYXERR(Debug::FIND, "Close .*? : " << close_wildcards);
|
|
|
|
|
LYXERR(Debug::FIND, "Replaced text (to be used as regex): " << par_as_string);
|
2017-04-16 21:10:17 +00:00
|
|
|
|
|
2009-08-17 07:06:01 +00:00
|
|
|
|
// If entered regexp must match at begin of searched string buffer
|
2018-09-30 14:08:47 +00:00
|
|
|
|
// Kornel: Added parentheses to use $1 for size of the leading string
|
2018-09-30 16:37:55 +00:00
|
|
|
|
string regexp_str;
|
|
|
|
|
string regexp2_str;
|
|
|
|
|
{
|
|
|
|
|
// TODO: Adapt '\[12345678]' in par_as_string to account for the first '('
|
|
|
|
|
// Unfortunately '\1', '\2', etc. do not work for strings with extra format,
|
|
|
|
|
// so the conversion has no effect in that case
|
|
|
|
|
for (int i = 8; i > 0; --i) {
|
|
|
|
|
string orig = "\\\\" + std::to_string(i);
|
|
|
|
|
string dest = "\\" + std::to_string(i+1);
|
|
|
|
|
while (regex_replace(par_as_string, par_as_string, orig, dest));
|
|
|
|
|
}
|
|
|
|
|
regexp_str = "(" + lead_as_regexp + ")" + par_as_string;
|
2019-02-10 17:00:55 +00:00
|
|
|
|
regexp2_str = "(" + lead_as_regexp + ").*?" + par_as_string;
|
2018-09-30 16:37:55 +00:00
|
|
|
|
}
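// Illustrative shapes (the escaped values are assumptions): with
// lead_as_regexp == "\\emph\{" and par_as_string == "word", regexp_str
// becomes "(\\emph\{)word" and regexp2_str "(\\emph\{).*?word"; the extra
// capture group $1 later yields the size of the leading part (see
// leadingsize in findAux()).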
|
2011-06-19 16:41:23 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Setting regexp to : '" << regexp_str << "'");
|
2011-02-07 20:36:40 +00:00
|
|
|
|
regexp = lyx::regex(regexp_str);
|
|
|
|
|
|
2011-06-19 16:41:23 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Setting regexp2 to: '" << regexp2_str << "'");
|
2011-02-07 20:36:40 +00:00
|
|
|
|
regexp2 = lyx::regex(regexp2_str);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2008-12-20 16:00:47 +00:00
|
|
|
|
|
2018-11-12 11:17:16 +00:00
|
|
|
|
// Count number of characters in string
|
|
|
|
|
// {]} ==> 1
|
|
|
|
|
// \& ==> 1
|
|
|
|
|
// --- ==> 1
|
|
|
|
|
// \\[a-zA-Z]+ ==> 1
|
|
|
|
|
static int computeSize(string s, int len)
|
|
|
|
|
{
|
|
|
|
|
if (len == 0)
|
|
|
|
|
return 0;
|
|
|
|
|
int skip = 1;
|
|
|
|
|
int count = 0;
|
|
|
|
|
for (int i = 0; i < len; i += skip, count++) {
|
|
|
|
|
if (s[i] == '\\') {
|
|
|
|
|
skip = 2;
|
|
|
|
|
if (isalpha(s[i+1])) {
|
|
|
|
|
for (int j = 2; i+j < len; j++) {
|
|
|
|
|
if (! isalpha(s[i+j])) {
|
|
|
|
|
if (s[i+j] == ' ')
|
|
|
|
|
skip++;
|
|
|
|
|
else if ((s[i+j] == '{') && s[i+j+1] == '}')
|
|
|
|
|
skip += 2;
|
2018-11-13 11:11:33 +00:00
|
|
|
|
else if ((s[i+j] == '{') && (i + j + 1 >= len))
|
|
|
|
|
skip++;
|
2018-11-12 11:17:16 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
skip++;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
else if (s[i] == '{') {
|
|
|
|
|
if (s[i+1] == '}')
|
|
|
|
|
skip = 2;
|
|
|
|
|
else
|
|
|
|
|
skip = 3;
|
|
|
|
|
}
|
|
|
|
|
else if (s[i] == '-') {
|
|
|
|
|
if (s[i+1] == '-') {
|
|
|
|
|
if (s[i+2] == '-')
|
|
|
|
|
skip = 3;
|
|
|
|
|
else
|
|
|
|
|
skip = 2;
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
skip = 1;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
skip = 1;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return count;
|
|
|
|
|
}
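// Worked examples of the rules above (illustrative inputs, len = full size):
//   {]}        ==> 1
//   a--b       ==> 3   ("--" counts as one character)
//   \alpha{} x ==> 3   (the macro with its "{}", the space, and 'x')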
|
|
|
|
|
|
2019-02-26 22:00:31 +00:00
|
|
|
|
MatchResult MatchStringAdv::findAux(DocIterator const & cur, int len, bool at_begin) const
|
2008-11-15 23:30:27 +00:00
|
|
|
|
{
|
2019-02-26 22:00:31 +00:00
|
|
|
|
MatchResult mres;
|
|
|
|
|
|
2013-08-23 19:36:50 +00:00
|
|
|
|
if (at_begin &&
|
|
|
|
|
(opt.restr == FindAndReplaceOptions::R_ONLY_MATHS && !cur.inMathed()) )
|
2019-02-26 22:00:31 +00:00
|
|
|
|
return mres;
|
2016-12-18 03:12:27 +00:00
|
|
|
|
|
2008-12-20 16:00:47 +00:00
|
|
|
|
docstring docstr = stringifyFromForSearch(opt, cur, len);
|
2011-08-25 19:16:14 +00:00
|
|
|
|
string str = normalize(docstr, true);
|
2018-10-14 18:39:13 +00:00
|
|
|
|
if (!opt.ignoreformat) {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
str = correctlanguagesetting(str, false, !opt.ignoreformat);
|
2018-10-14 18:39:13 +00:00
|
|
|
|
}
|
2019-02-26 22:00:31 +00:00
|
|
|
|
if (str.empty()) {
|
|
|
|
|
mres.match_len = -1;
|
|
|
|
|
return mres;
|
|
|
|
|
}
|
2016-12-18 03:12:27 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Matching against '" << lyx::to_utf8(docstr) << "'");
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "After normalization: '" << str << "'");
|
2016-12-18 03:12:27 +00:00
|
|
|
|
|
2016-12-18 03:13:37 +00:00
|
|
|
|
if (use_regexp) {
|
2011-02-07 20:36:40 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Searching in regexp mode: at_begin=" << at_begin);
|
2017-04-16 21:10:17 +00:00
|
|
|
|
regex const *p_regexp;
|
|
|
|
|
regex_constants::match_flag_type flags;
|
|
|
|
|
if (at_begin) {
|
|
|
|
|
flags = regex_constants::match_continuous;
|
|
|
|
|
p_regexp = ®exp;
|
|
|
|
|
} else {
|
|
|
|
|
flags = regex_constants::match_default;
|
|
|
|
|
p_regexp = ®exp2;
|
|
|
|
|
}
|
|
|
|
|
sregex_iterator re_it(str.begin(), str.end(), *p_regexp, flags);
|
2017-04-16 17:43:54 +00:00
|
|
|
|
if (re_it == sregex_iterator())
|
2019-02-26 22:00:31 +00:00
|
|
|
|
return mres;
|
2016-12-18 03:09:03 +00:00
|
|
|
|
match_results<string::const_iterator> const & m = *re_it;
|
2016-12-18 03:12:27 +00:00
|
|
|
|
|
2018-09-29 07:23:30 +00:00
|
|
|
|
if (0) { // Kornel Benko: DO NOT CHECK
|
|
|
|
|
// Check braces on the segment that matched the entire regexp expression,
|
|
|
|
|
// plus the last subexpression, if a (.*?) was inserted in the constructor.
|
|
|
|
|
if (!braces_match(m[0].first, m[0].second, open_braces))
|
2019-02-26 22:00:31 +00:00
|
|
|
|
return mres;
|
2018-09-29 07:23:30 +00:00
|
|
|
|
}
|
2017-07-03 17:53:14 +00:00
|
|
|
|
|
2016-12-18 03:09:03 +00:00
|
|
|
|
// Check braces on segments that matched all (.*?) subexpressions,
|
|
|
|
|
// except the last "padding" one inserted by lyx.
|
|
|
|
|
for (size_t i = 1; i < m.size() - 1; ++i)
|
2018-10-01 10:06:42 +00:00
|
|
|
|
if (!braces_match(m[i].first, m[i].second, open_braces))
|
2019-02-26 22:00:31 +00:00
|
|
|
|
return mres;
|
2017-07-03 17:53:14 +00:00
|
|
|
|
|
2016-12-18 03:09:03 +00:00
|
|
|
|
// Exclude from the returned match length any length
|
|
|
|
|
// due to close wildcards added at end of regexp
|
2019-03-13 13:06:18 +00:00
|
|
|
|
// and also the length of the leading (e.g. '\emph{}')
|
2018-09-30 14:08:47 +00:00
|
|
|
|
//
|
|
|
|
|
// Whole found string, including the leading: m[0].second - m[0].first
|
|
|
|
|
// Size of the leading string: m[1].second - m[1].first
|
|
|
|
|
int leadingsize = 0;
|
|
|
|
|
if (m.size() > 1)
|
|
|
|
|
leadingsize = m[1].second - m[1].first;
|
|
|
|
|
int result;
|
2019-12-29 16:40:13 +00:00
|
|
|
|
for (size_t i = 0; i < m.size(); i++) {
|
|
|
|
|
LYXERR(Debug::FIND, "Match " << i << " is " << m[i].second - m[i].first << " long");
|
|
|
|
|
}
|
2016-12-18 03:09:03 +00:00
|
|
|
|
if (close_wildcards == 0)
|
2018-09-30 14:08:47 +00:00
|
|
|
|
result = m[0].second - m[0].first;
|
2016-12-18 03:16:49 +00:00
|
|
|
|
|
2018-09-30 14:08:47 +00:00
|
|
|
|
else
|
|
|
|
|
result = m[m.size() - close_wildcards].first - m[0].first;
|
|
|
|
|
|
2018-11-25 17:25:14 +00:00
|
|
|
|
size_t pos = m.position(size_t(0));
|
2018-11-12 11:17:16 +00:00
|
|
|
|
// Ignore last closing characters
|
|
|
|
|
while (result > 0) {
|
|
|
|
|
if (str[pos+result-1] == '}')
|
|
|
|
|
--result;
|
|
|
|
|
else
|
|
|
|
|
break;
|
|
|
|
|
}
|
2018-09-30 14:08:47 +00:00
|
|
|
|
if (result > leadingsize)
|
|
|
|
|
result -= leadingsize;
|
|
|
|
|
else
|
|
|
|
|
result = 0;
|
2019-02-26 22:00:31 +00:00
|
|
|
|
mres.match_len = computeSize(str.substr(pos+leadingsize,result), result);
|
|
|
|
|
mres.match2end = str.size() - pos - leadingsize;
|
|
|
|
|
mres.pos = pos+leadingsize;
|
|
|
|
|
return mres;
|
2016-12-18 03:16:49 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// else !use_regexp: but all code paths above return
|
|
|
|
|
LYXERR(Debug::FIND, "Searching in normal mode: par_as_string='"
|
|
|
|
|
<< par_as_string << "', str='" << str << "'");
|
|
|
|
|
LYXERR(Debug::FIND, "Searching in normal mode: lead_as_string='"
|
|
|
|
|
<< lead_as_string << "', par_as_string_nolead='"
|
|
|
|
|
<< par_as_string_nolead << "'");
|
|
|
|
|
|
|
|
|
|
if (at_begin) {
|
|
|
|
|
LYXERR(Debug::FIND, "size=" << par_as_string.size()
|
|
|
|
|
<< ", substr='" << str.substr(0, par_as_string.size()) << "'");
|
2019-02-26 22:00:31 +00:00
|
|
|
|
if (str.substr(0, par_as_string.size()) == par_as_string) {
|
|
|
|
|
mres.match_len = par_as_string.size();
|
|
|
|
|
mres.match2end = str.size();
|
|
|
|
|
mres.pos = 0;
|
|
|
|
|
return mres;
|
|
|
|
|
}
|
2016-12-18 03:13:37 +00:00
|
|
|
|
} else {
|
2019-03-16 10:26:20 +00:00
|
|
|
|
// Start the search _after_ the leading part
|
|
|
|
|
size_t pos = str.find(par_as_string_nolead, lead_as_string.size());
|
2019-02-26 22:00:31 +00:00
|
|
|
|
if (pos != string::npos) {
|
|
|
|
|
mres.match_len = par_as_string.size();
|
|
|
|
|
mres.match2end = str.size() - pos;
|
|
|
|
|
mres.pos = pos;
|
|
|
|
|
return mres;
|
2020-05-29 06:44:56 +00:00
|
|
|
|
}
|
2008-11-15 23:30:27 +00:00
|
|
|
|
}
|
2019-02-26 22:00:31 +00:00
|
|
|
|
return mres;
|
2008-11-15 23:30:27 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2019-02-26 22:00:31 +00:00
|
|
|
|
MatchResult MatchStringAdv::operator()(DocIterator const & cur, int len, bool at_begin) const
|
2010-01-09 12:39:29 +00:00
|
|
|
|
{
|
2019-02-26 22:00:31 +00:00
|
|
|
|
MatchResult mres = findAux(cur, len, at_begin);
|
|
|
|
|
int res = mres.match_len;
|
2011-02-07 20:36:40 +00:00
|
|
|
|
LYXERR(Debug::FIND,
|
2012-10-23 20:58:10 +00:00
|
|
|
|
"res=" << res << ", at_begin=" << at_begin
|
|
|
|
|
<< ", matchword=" << opt.matchword
|
|
|
|
|
<< ", inTexted=" << cur.inTexted());
|
2010-01-09 12:39:29 +00:00
|
|
|
|
if (res == 0 || !at_begin || !opt.matchword || !cur.inTexted())
|
2019-02-26 22:00:31 +00:00
|
|
|
|
return mres;
|
2020-04-07 09:47:08 +00:00
|
|
|
|
if ((len > 0) && (res < len)) {
|
2019-02-26 22:00:31 +00:00
|
|
|
|
mres.match_len = 0;
|
2020-04-07 09:47:08 +00:00
|
|
|
|
return mres;
|
2019-02-26 22:00:31 +00:00
|
|
|
|
}
|
2010-01-09 12:39:29 +00:00
|
|
|
|
Paragraph const & par = cur.paragraph();
|
2012-10-23 20:58:10 +00:00
|
|
|
|
bool ws_left = (cur.pos() > 0)
|
|
|
|
|
? par.isWordSeparator(cur.pos() - 1)
|
|
|
|
|
: true;
|
2020-05-29 12:22:34 +00:00
|
|
|
|
bool ws_right;
|
|
|
|
|
if (len < 0)
|
|
|
|
|
ws_right = true;
|
|
|
|
|
else {
|
|
|
|
|
ws_right = (cur.pos() + len < par.size())
|
2018-11-25 16:51:20 +00:00
|
|
|
|
? par.isWordSeparator(cur.pos() + len)
|
2012-10-23 20:58:10 +00:00
|
|
|
|
: true;
|
2020-05-29 12:22:34 +00:00
|
|
|
|
}
|
2010-01-09 12:39:29 +00:00
|
|
|
|
LYXERR(Debug::FIND,
|
|
|
|
|
"cur.pos()=" << cur.pos() << ", res=" << res
|
|
|
|
|
<< ", separ: " << ws_left << ", " << ws_right
|
2020-04-07 09:47:08 +00:00
|
|
|
|
<< ", len: " << len
|
2010-01-09 12:39:29 +00:00
|
|
|
|
<< endl);
|
2018-11-25 16:51:20 +00:00
|
|
|
|
if (ws_left && ws_right) {
|
2020-04-07 09:47:08 +00:00
|
|
|
|
// Check for word separators inside the found 'word'
|
|
|
|
|
for (int i = 0; i < len; i++) {
|
|
|
|
|
if (par.isWordSeparator(cur.pos() + i)) {
|
2019-02-26 22:00:31 +00:00
|
|
|
|
mres.match_len = 0;
|
2020-04-07 09:47:08 +00:00
|
|
|
|
return mres;
|
2019-02-26 22:00:31 +00:00
|
|
|
|
}
|
2020-04-07 09:47:08 +00:00
|
|
|
|
}
|
|
|
|
|
return mres;
|
|
|
|
|
}
|
2019-02-26 22:00:31 +00:00
|
|
|
|
mres.match_len = 0;
|
|
|
|
|
return mres;
|
2010-01-09 12:39:29 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2011-08-25 19:16:14 +00:00
|
|
|
|
string MatchStringAdv::normalize(docstring const & s, bool hack_braces) const
|
2008-11-15 23:30:27 +00:00
|
|
|
|
{
|
|
|
|
|
string t;
|
|
|
|
|
if (! opt.casesensitive)
|
|
|
|
|
t = lyx::to_utf8(lowercase(s));
|
|
|
|
|
else
|
|
|
|
|
t = lyx::to_utf8(s);
|
|
|
|
|
// Remove \n at begin
|
2012-10-21 19:14:16 +00:00
|
|
|
|
while (!t.empty() && t[0] == '\n')
|
2008-11-15 23:30:27 +00:00
|
|
|
|
t = t.substr(1);
|
|
|
|
|
// Remove \n at end
|
2012-10-21 19:14:16 +00:00
|
|
|
|
while (!t.empty() && t[t.size() - 1] == '\n')
|
2008-11-15 23:30:27 +00:00
|
|
|
|
t = t.substr(0, t.size() - 1);
|
|
|
|
|
size_t pos;
|
2019-02-10 17:00:55 +00:00
|
|
|
|
// Handle all other '\n'
|
2019-02-07 12:35:47 +00:00
|
|
|
|
while ((pos = t.find("\n")) != string::npos) {
|
2019-02-10 17:00:55 +00:00
|
|
|
|
if (pos > 1 && t[pos-1] == '\\' && t[pos-2] == '\\' ) {
|
|
|
|
|
// Handle '\\\n'
|
2019-02-28 12:00:12 +00:00
|
|
|
|
if (isAlnumASCII(t[pos+1])) {
|
2019-02-10 17:00:55 +00:00
|
|
|
|
t.replace(pos-2, 3, " ");
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
t.replace(pos-2, 3, "");
|
|
|
|
|
}
|
|
|
|
|
}
|
2019-02-28 12:00:12 +00:00
|
|
|
|
else if (!isAlnumASCII(t[pos+1]) || !isAlnumASCII(t[pos-1])) {
|
2019-02-10 17:00:55 +00:00
|
|
|
|
// '\n' adjacent to non-alpha-numerics, discard
|
2019-02-07 12:35:47 +00:00
|
|
|
|
t.replace(pos, 1, "");
|
2019-02-10 17:00:55 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
// Replace all other \n with spaces
|
2019-02-07 12:35:47 +00:00
|
|
|
|
t.replace(pos, 1, " ");
|
2019-02-10 17:00:55 +00:00
|
|
|
|
}
|
2019-02-07 12:35:47 +00:00
|
|
|
|
}
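// Illustrative effect of the loop above (assumed inputs; '\n' stands for a
// real newline, "\\" for a LaTeX forced break):
//   "one\\\ntwo"   ==> "one two"   (break followed by a letter => space)
//   "one\\\n, two" ==> "one, two"  (break before punctuation is dropped)
//   "one\ntwo"     ==> "one two"   (bare newline between letters => space)
//   "one.\ntwo"    ==> "one.two"   (newline next to punctuation is dropped)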
|
2008-11-15 23:30:27 +00:00
|
|
|
|
// Remove stale empty \emph{}, \textbf{} and similar blocks from latexify
|
2018-10-02 09:53:01 +00:00
|
|
|
|
// Kornel: Added textsl, textsf, textit, texttt and noun
|
|
|
|
|
// + allow searching for colored text too
|
2009-08-22 16:06:19 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Removing stale empty \\emph{}, \\textbf{}, \\*section{} macros from: " << t);
|
2018-10-13 19:02:53 +00:00
|
|
|
|
while (regex_replace(t, t, "\\\\(emph|noun|text(bf|sl|sf|it|tt)|(u|uu)line|(s|x)out|uwave)(\\{(\\{\\})?\\})+", ""))
|
|
|
|
|
LYXERR(Debug::FIND, " further removing stale empty \\emph{}, \\textbf{} macros from: " << t);
|
|
|
|
|
while (regex_replace(t, t, "\\\\((sub)?(((sub)?section)|paragraph)|part)\\*?(\\{(\\{\\})?\\})+", ""))
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, " further removing stale empty \\emph{}, \\textbf{} macros from: " << t);
|
2011-08-25 19:16:14 +00:00
|
|
|
|
|
2018-11-03 10:15:12 +00:00
|
|
|
|
while (regex_replace(t, t, "\\\\(foreignlanguage|textcolor|item)\\{[a-z]+\\}(\\{(\\{\\})?\\})+", ""));
|
2011-08-25 19:16:14 +00:00
|
|
|
|
// FIXME - check what precedes the brace
|
|
|
|
|
if (hack_braces) {
|
|
|
|
|
if (opt.ignoreformat)
|
|
|
|
|
while (regex_replace(t, t, "\\{", "_x_<")
|
|
|
|
|
|| regex_replace(t, t, "\\}", "_x_>"))
|
|
|
|
|
LYXERR(Debug::FIND, "After {} replacement: '" << t << "'");
|
|
|
|
|
else
|
|
|
|
|
while (regex_replace(t, t, "\\\\\\{", "_x_<")
|
|
|
|
|
|| regex_replace(t, t, "\\\\\\}", "_x_>"))
|
|
|
|
|
LYXERR(Debug::FIND, "After {} replacement: '" << t << "'");
|
|
|
|
|
}
|
|
|
|
|
|
2008-11-15 23:30:27 +00:00
|
|
|
|
return t;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
docstring stringifyFromCursor(DocIterator const & cur, int len)
|
|
|
|
|
{
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Stringifying with len=" << len << " from cursor at pos: " << cur);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
if (cur.inTexted()) {
|
2012-10-23 20:58:10 +00:00
|
|
|
|
Paragraph const & par = cur.paragraph();
|
|
|
|
|
// TODO what about searching beyond/across paragraph breaks ?
|
|
|
|
|
// TODO Try adding a AS_STR_INSERTS as last arg
|
|
|
|
|
pos_type end = ( len == -1 || cur.pos() + len > int(par.size()) ) ?
|
|
|
|
|
int(par.size()) : cur.pos() + len;
|
2019-03-03 13:08:27 +00:00
|
|
|
|
// OutputParams runparams(&cur.buffer()->params().encoding());
|
|
|
|
|
OutputParams runparams(encodings.fromLyXName("utf8"));
|
2012-10-23 20:58:10 +00:00
|
|
|
|
runparams.nice = true;
|
2019-03-02 14:42:38 +00:00
|
|
|
|
runparams.flavor = OutputParams::XETEX;
|
2019-02-26 22:00:31 +00:00
|
|
|
|
runparams.linelen = 10000; //lyxrc.plaintext_linelen;
|
2012-10-23 20:58:10 +00:00
|
|
|
|
// No side effect of file copying and image conversion
|
|
|
|
|
runparams.dryrun = true;
|
2019-02-27 09:17:56 +00:00
|
|
|
|
runparams.for_search = true;
|
2012-10-23 20:58:10 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Stringifying with cur: "
|
|
|
|
|
<< cur << ", from pos: " << cur.pos() << ", end: " << end);
|
2019-02-27 09:17:56 +00:00
|
|
|
|
return par.asString(cur.pos(), end,
|
2014-03-28 22:56:20 +00:00
|
|
|
|
AS_STR_INSETS | AS_STR_SKIPDELETE | AS_STR_PLAINTEXT,
|
2014-03-27 23:12:56 +00:00
|
|
|
|
&runparams);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
} else if (cur.inMathed()) {
|
2012-10-23 20:58:10 +00:00
|
|
|
|
CursorSlice cs = cur.top();
|
|
|
|
|
MathData md = cs.cell();
|
|
|
|
|
MathData::const_iterator it_end =
|
|
|
|
|
(( len == -1 || cs.pos() + len > int(md.size()))
|
|
|
|
|
? md.end()
|
|
|
|
|
: md.begin() + cs.pos() + len );
|
2018-11-02 09:32:28 +00:00
|
|
|
|
MathData md2;
|
2012-10-23 20:58:10 +00:00
|
|
|
|
for (MathData::const_iterator it = md.begin() + cs.pos();
|
|
|
|
|
it != it_end; ++it)
|
2018-11-02 09:32:28 +00:00
|
|
|
|
md2.push_back(*it);
|
|
|
|
|
docstring s = asString(md2);
|
2012-10-23 20:58:10 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Stringified math: '" << s << "'");
|
|
|
|
|
return s;
|
2008-11-15 23:30:27 +00:00
|
|
|
|
}
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Don't know how to stringify from here: " << cur);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
return docstring();
|
|
|
|
|
}
|
|
|
|
|
|
2009-08-19 15:14:28 +00:00
|
|
|
|
|
2008-11-17 11:46:07 +00:00
|
|
|
|
/** Computes the LaTeX export of buf starting from cur and ending len positions
|
|
|
|
|
* after cur, if len is positive, or at the paragraph or innermost inset end
|
|
|
|
|
* if len is -1.
|
|
|
|
|
*/
|
2008-12-20 16:00:47 +00:00
|
|
|
|
docstring latexifyFromCursor(DocIterator const & cur, int len)
|
2008-11-15 23:30:27 +00:00
|
|
|
|
{
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Latexifying with len=" << len << " from cursor at pos: " << cur);
|
|
|
|
|
LYXERR(Debug::FIND, " with cur.lastpost=" << cur.lastpos() << ", cur.lastrow="
|
2012-10-23 20:58:10 +00:00
|
|
|
|
<< cur.lastrow() << ", cur.lastcol=" << cur.lastcol());
|
2008-12-20 16:00:47 +00:00
|
|
|
|
Buffer const & buf = *cur.buffer();
|
2008-11-15 23:30:27 +00:00
|
|
|
|
|
|
|
|
|
odocstringstream ods;
|
2016-09-04 02:02:47 +00:00
|
|
|
|
otexstream os(ods);
|
2019-03-03 13:08:27 +00:00
|
|
|
|
//OutputParams runparams(&buf.params().encoding());
|
|
|
|
|
OutputParams runparams(encodings.fromLyXName("utf8"));
|
2008-11-15 23:30:27 +00:00
|
|
|
|
runparams.nice = false;
|
2019-03-02 14:42:38 +00:00
|
|
|
|
runparams.flavor = OutputParams::XETEX;
|
2008-11-15 23:30:27 +00:00
|
|
|
|
runparams.linelen = 8000; //lyxrc.plaintext_linelen;
|
|
|
|
|
// No side effect of file copying and image conversion
|
|
|
|
|
runparams.dryrun = true;
|
2018-10-05 18:26:44 +00:00
|
|
|
|
runparams.for_search = true;
|
2008-11-15 23:30:27 +00:00
|
|
|
|
|
|
|
|
|
if (cur.inTexted()) {
|
2010-12-18 15:57:27 +00:00
|
|
|
|
// @TODO what about searching beyond/across paragraph breaks ?
|
|
|
|
|
pos_type endpos = cur.paragraph().size();
|
|
|
|
|
if (len != -1 && endpos > cur.pos() + len)
|
|
|
|
|
endpos = cur.pos() + len;
|
2011-02-10 20:02:48 +00:00
|
|
|
|
TeXOnePar(buf, *cur.innerText(), cur.pit(), os, runparams,
|
2012-10-23 20:58:10 +00:00
|
|
|
|
string(), cur.pos(), endpos);
|
2018-10-14 18:39:13 +00:00
|
|
|
|
string s = lyx::to_utf8(ods.str());
|
2018-10-12 14:47:07 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Latexified +modified text: '" << s << "'");
|
2018-10-05 18:26:44 +00:00
|
|
|
|
return(lyx::from_utf8(s));
|
2008-11-15 23:30:27 +00:00
|
|
|
|
} else if (cur.inMathed()) {
|
|
|
|
|
// Retrieve the math environment type, and add '$' or '$[' or others (\begin{equation}) accordingly
|
|
|
|
|
for (int s = cur.depth() - 1; s >= 0; --s) {
|
2012-10-23 20:58:10 +00:00
|
|
|
|
CursorSlice const & cs = cur[s];
|
2015-10-10 19:23:52 +00:00
|
|
|
|
if (cs.asInsetMath() && cs.asInsetMath()->asHullInset()) {
|
2015-10-07 03:13:21 +00:00
|
|
|
|
WriteStream ws(os);
|
2012-10-23 20:58:10 +00:00
|
|
|
|
cs.asInsetMath()->asHullInset()->header_write(ws);
|
|
|
|
|
break;
|
|
|
|
|
}
|
2008-11-15 23:30:27 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
CursorSlice const & cs = cur.top();
|
|
|
|
|
MathData md = cs.cell();
|
2012-10-23 20:58:10 +00:00
|
|
|
|
MathData::const_iterator it_end =
|
|
|
|
|
((len == -1 || cs.pos() + len > int(md.size()))
|
|
|
|
|
? md.end()
|
|
|
|
|
: md.begin() + cs.pos() + len);
|
2018-11-02 09:32:28 +00:00
|
|
|
|
MathData md2;
|
2012-10-23 20:58:10 +00:00
|
|
|
|
for (MathData::const_iterator it = md.begin() + cs.pos();
|
|
|
|
|
it != it_end; ++it)
|
2018-11-02 09:32:28 +00:00
|
|
|
|
md2.push_back(*it);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
|
2018-11-02 09:32:28 +00:00
|
|
|
|
ods << asString(md2);
|
2008-11-17 11:46:07 +00:00
|
|
|
|
// Retrieve the math environment type, and add '$' or '$]'
|
|
|
|
|
// or others (\end{equation}) accordingly
|
2008-11-15 23:30:27 +00:00
|
|
|
|
for (int s = cur.depth() - 1; s >= 0; --s) {
|
2018-02-24 05:32:14 +00:00
|
|
|
|
CursorSlice const & cs2 = cur[s];
|
|
|
|
|
InsetMath * inset = cs2.asInsetMath();
|
2008-11-17 11:46:07 +00:00
|
|
|
|
if (inset && inset->asHullInset()) {
|
2015-10-07 03:13:21 +00:00
|
|
|
|
WriteStream ws(os);
|
2008-11-17 11:46:07 +00:00
|
|
|
|
inset->asHullInset()->footer_write(ws);
|
|
|
|
|
break;
|
2008-11-15 23:30:27 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Latexified math: '" << lyx::to_utf8(ods.str()) << "'");
|
2008-11-15 23:30:27 +00:00
|
|
|
|
} else {
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Don't know how to stringify from here: " << cur);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
}
|
|
|
|
|
return ods.str();
|
|
|
|
|
}
|
|
|
|
|
|
2009-12-30 18:40:18 +00:00
|
|
|
|
|
2008-11-15 23:30:27 +00:00
|
|
|
|
/** Finalize an advanced find operation, advancing the cursor to the innermost
|
|
|
|
|
** position that matches, plus computing the length of the matching text to
|
|
|
|
|
** be selected
|
|
|
|
|
**/
|
|
|
|
|
int findAdvFinalize(DocIterator & cur, MatchStringAdv const & match)
|
|
|
|
|
{
|
2008-11-17 11:46:07 +00:00
|
|
|
|
// Search the foremost position that matches (avoids finding the entire math
|
|
|
|
|
// inset when the match is at its start)
|
2008-11-15 23:30:27 +00:00
|
|
|
|
size_t d;
|
2008-11-17 11:46:07 +00:00
|
|
|
|
DocIterator old_cur(cur.buffer());
|
2008-11-15 23:30:27 +00:00
|
|
|
|
do {
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Forwarding one step (searching for innermost match)");
|
2008-11-15 23:30:27 +00:00
|
|
|
|
d = cur.depth();
|
|
|
|
|
old_cur = cur;
|
|
|
|
|
cur.forwardPos();
|
2019-02-26 22:00:31 +00:00
|
|
|
|
} while (cur && cur.depth() > d && match(cur).match_len > 0);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
cur = old_cur;
|
2019-02-26 22:00:31 +00:00
|
|
|
|
int max_match = match(cur).match_len; /* match valid only if not searching whole words */
|
2018-11-25 16:51:20 +00:00
|
|
|
|
if (max_match <= 0) return 0;
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Ok");
|
2008-11-15 23:30:27 +00:00
|
|
|
|
|
|
|
|
|
// Compute the match length
|
2018-11-25 16:51:20 +00:00
|
|
|
|
int len = 1;
|
2018-11-26 11:37:18 +00:00
|
|
|
|
if (cur.pos() + len > cur.lastpos())
|
|
|
|
|
return 0;
|
2018-11-25 16:51:20 +00:00
|
|
|
|
if (match.opt.matchword) {
|
|
|
|
|
LYXERR(Debug::FIND, "verifying unmatch with len = " << len);
|
2019-02-26 22:00:31 +00:00
|
|
|
|
while (cur.pos() + len <= cur.lastpos() && match(cur, len).match_len <= 0) {
|
2018-11-25 16:51:20 +00:00
|
|
|
|
++len;
|
|
|
|
|
LYXERR(Debug::FIND, "verifying unmatch with len = " << len);
|
|
|
|
|
}
|
|
|
|
|
// Length of matched text (different from len param)
|
2019-02-26 22:00:31 +00:00
|
|
|
|
int old_match = match(cur, len).match_len;
|
2018-11-25 16:51:20 +00:00
|
|
|
|
if (old_match < 0)
|
|
|
|
|
old_match = 0;
|
|
|
|
|
int new_match;
|
|
|
|
|
// Greedy behaviour while matching regexps
|
2019-02-26 22:00:31 +00:00
|
|
|
|
while ((new_match = match(cur, len + 1).match_len) > old_match) {
|
2018-11-25 16:51:20 +00:00
|
|
|
|
++len;
|
|
|
|
|
old_match = new_match;
|
|
|
|
|
LYXERR(Debug::FIND, "verifying match with len = " << len);
|
|
|
|
|
}
|
|
|
|
|
if (old_match == 0)
|
|
|
|
|
len = 0;
|
|
|
|
|
}
|
|
|
|
|
else {
|
2018-11-27 18:10:27 +00:00
|
|
|
|
int minl = 1;
|
|
|
|
|
int maxl = cur.lastpos() - cur.pos();
|
|
|
|
|
// Greedy behaviour while matching regexps
|
|
|
|
|
while (maxl > minl) {
|
2019-02-26 22:00:31 +00:00
|
|
|
|
int actual_match = match(cur, len).match_len;
|
2018-11-27 18:10:27 +00:00
|
|
|
|
if (actual_match >= max_match) {
|
|
|
|
|
// actual_match > max_match _can_ happen,
|
|
|
|
|
// if the search area splits
|
|
|
|
|
// some following word so that the regex
|
|
|
|
|
// (e.g. 'r.*r\b' matches 'r' from the middle of the
|
|
|
|
|
// split word)
|
|
|
|
|
// This means the len value is too big
|
|
|
|
|
maxl = len;
|
|
|
|
|
len = (int)((maxl + minl)/2);
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
// (actual_match < max_match)
|
|
|
|
|
minl = len + 1;
|
|
|
|
|
len = (int)((maxl + minl)/2);
|
|
|
|
|
}
|
|
|
|
|
}
|
2019-02-11 12:06:02 +00:00
|
|
|
|
old_cur = cur;
|
|
|
|
|
// Search for real start of matched characters
|
|
|
|
|
while (len > 1) {
|
|
|
|
|
int actual_match;
|
|
|
|
|
do {
|
|
|
|
|
cur.forwardPos();
|
|
|
|
|
} while (cur.depth() > old_cur.depth()); /* Skip inner insets */
|
|
|
|
|
if (cur.depth() < old_cur.depth()) {
|
|
|
|
|
// Outer inset?
|
2020-05-29 06:44:56 +00:00
|
|
|
|
LYXERR(Debug::INFO, "cur.depth() < old_cur.depth(), this should never happen");
|
2019-02-11 12:06:02 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
if (cur.pos() != old_cur.pos()) {
|
|
|
|
|
// OK, forwarded 1 pos in the current inset
|
2019-02-26 22:00:31 +00:00
|
|
|
|
actual_match = match(cur, len-1).match_len;
|
2019-02-11 12:06:02 +00:00
|
|
|
|
if (actual_match == max_match) {
|
|
|
|
|
// Ha, got it! The shorter selection has the same match length
|
|
|
|
|
len--;
|
|
|
|
|
old_cur = cur;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
// OK, the shorter selection matches fewer chars, revert to the previous value
|
|
|
|
|
cur = old_cur;
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
else {
|
2020-05-29 06:44:56 +00:00
|
|
|
|
LYXERR(Debug::INFO, "cur.pos() == old_cur.pos(), this should never happen");
|
2019-02-26 22:00:31 +00:00
|
|
|
|
actual_match = match(cur, len).match_len;
|
2019-02-11 12:06:02 +00:00
|
|
|
|
if (actual_match == max_match)
|
|
|
|
|
old_cur = cur;
|
|
|
|
|
}
|
|
|
|
|
}
|
2018-11-27 18:10:27 +00:00
|
|
|
|
}
|
2008-11-15 23:30:27 +00:00
|
|
|
|
return len;
|
|
|
|
|
}
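// Illustrative trace of the bisection over len in findAdvFinalize() above
// (hypothetical numbers, not taken from a real document): suppose
// max_match = 5, minl = 1 and maxl = 8, and the shortest selection still
// reaching max_match has length 6. The loop then probes len = 1 (too short:
// minl = 2, len = 5), len = 5 (too short: minl = 6, len = 7), len = 7 (long
// enough: maxl = 7, len = 6) and len = 6 (long enough: maxl = 6); now
// maxl == minl and the loop exits with len = 6, the minimal matching length.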
|
|
|
|
|
|
2008-12-20 16:00:47 +00:00
|
|
|
|
|
2008-11-15 23:30:27 +00:00
|
|
|
|
/// Finds forward
|
2020-10-09 15:50:24 +00:00
|
|
|
|
int findForwardAdv(DocIterator & cur, MatchStringAdv const & match)
|
2008-11-15 23:30:27 +00:00
|
|
|
|
{
|
2008-11-17 11:46:07 +00:00
|
|
|
|
if (!cur)
|
2008-11-15 23:30:27 +00:00
|
|
|
|
return 0;
|
2012-03-06 23:21:12 +00:00
|
|
|
|
while (!theApp()->longOperationCancelled() && cur) {
|
2011-02-07 20:36:40 +00:00
|
|
|
|
LYXERR(Debug::FIND, "findForwardAdv() cur: " << cur);
|
2019-02-26 22:00:31 +00:00
|
|
|
|
MatchResult mres = match(cur, -1, false);
|
|
|
|
|
int match_len = mres.match_len;
|
2011-05-27 20:39:58 +00:00
|
|
|
|
LYXERR(Debug::FIND, "match_len: " << match_len);
|
2019-02-27 09:17:56 +00:00
|
|
|
|
if ((mres.pos > 100000) || (mres.match2end > 100000) || (match_len > 100000)) {
|
2020-05-29 06:44:56 +00:00
|
|
|
|
LYXERR(Debug::INFO, "BIG LENGTHS: " << mres.pos << ", " << match_len << ", " << mres.match2end);
|
2019-02-26 22:00:31 +00:00
|
|
|
|
match_len = 0;
|
|
|
|
|
}
|
2018-10-05 18:26:44 +00:00
|
|
|
|
if (match_len > 0) {
|
2019-02-26 22:00:31 +00:00
|
|
|
|
// Try to find the beginning of the searched string
|
|
|
|
|
int increment = mres.pos/2;
|
|
|
|
|
while (mres.pos > 5 && (increment > 5)) {
|
|
|
|
|
DocIterator old_cur = cur;
|
|
|
|
|
for (int i = 0; i < increment && cur; cur.forwardPos(), i++) {
|
|
|
|
|
}
|
2019-03-03 13:08:27 +00:00
|
|
|
|
if (! cur || (cur.pit() > old_cur.pit())) {
|
|
|
|
|
// Are we outside of the paragraph?
|
|
|
|
|
// This can happen if moving past some UTF8-encoded chars
|
2019-02-26 22:00:31 +00:00
|
|
|
|
cur = old_cur;
|
|
|
|
|
increment /= 2;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
MatchResult mres2 = match(cur, -1, false);
|
|
|
|
|
if ((mres2.match2end < mres.match2end) ||
|
|
|
|
|
(mres2.match_len < mres.match_len)) {
|
|
|
|
|
cur = old_cur;
|
|
|
|
|
increment /= 2;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
mres = mres2;
|
|
|
|
|
increment -= 2;
|
|
|
|
|
if (increment > mres.pos/2)
|
|
|
|
|
increment = mres.pos/2;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2018-10-05 18:26:44 +00:00
|
|
|
|
int match_len_zero_count = 0;
|
2019-02-26 22:00:31 +00:00
|
|
|
|
for (int i = 0; !theApp()->longOperationCancelled() && cur; cur.forwardPos()) {
|
|
|
|
|
if (i++ > 10) {
|
|
|
|
|
int remaining_len = match(cur, -1, false).match_len;
|
|
|
|
|
if (remaining_len <= 0) {
|
|
|
|
|
// Apparently the searched string is not in the remaining part
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
i = 0;
|
|
|
|
|
}
|
|
|
|
|
}
|
2011-02-07 20:36:40 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Advancing cur: " << cur);
|
2019-02-26 22:00:31 +00:00
|
|
|
|
int match_len3 = match(cur, 1).match_len;
|
2018-11-13 11:11:33 +00:00
|
|
|
|
if (match_len3 < 0)
|
|
|
|
|
continue;
|
2019-02-26 22:00:31 +00:00
|
|
|
|
int match_len2 = match(cur).match_len;
|
2018-10-05 18:26:44 +00:00
|
|
|
|
LYXERR(Debug::FIND, "match_len2: " << match_len2);
|
|
|
|
|
if (match_len2 > 0) {
|
2011-02-07 20:36:40 +00:00
|
|
|
|
// Sometimes finalize discovers it was not a match after all
|
|
|
|
|
// and we need to continue the outermost loop
|
|
|
|
|
int len = findAdvFinalize(cur, match);
|
2018-10-05 18:26:44 +00:00
|
|
|
|
if (len > 0) {
|
2011-02-07 20:36:40 +00:00
|
|
|
|
return len;
|
2018-10-05 18:26:44 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if (match_len2 >= 0) {
|
2019-02-26 12:24:36 +00:00
|
|
|
|
if (match_len2 == 0)
|
|
|
|
|
match_len_zero_count++;
|
2018-10-05 18:26:44 +00:00
|
|
|
|
else
|
|
|
|
|
match_len_zero_count = 0;
|
|
|
|
|
}
|
|
|
|
|
else {
|
2020-04-07 09:47:08 +00:00
|
|
|
|
if (++match_len_zero_count > 3) {
|
|
|
|
|
LYXERR(Debug::FIND, "match_len2_zero_count: " << match_len_zero_count << ", match_len was " << match_len);
|
|
|
|
|
}
|
2018-10-06 07:58:29 +00:00
|
|
|
|
break;
|
2011-02-07 20:36:40 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if (!cur)
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
2018-10-05 18:26:44 +00:00
|
|
|
|
if (match_len >= 0 && cur.pit() < cur.lastpit()) {
|
2011-02-07 20:36:40 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Advancing par: cur=" << cur);
|
2010-01-04 12:29:38 +00:00
|
|
|
|
cur.forwardPar();
|
2011-02-07 20:36:40 +00:00
|
|
|
|
} else {
|
|
|
|
|
// This should exit nested insets, if any, or otherwise undefine the cursor.
|
|
|
|
|
cur.pos() = cur.lastpos();
|
|
|
|
|
LYXERR(Debug::FIND, "Advancing pos: cur=" << cur);
|
2010-01-04 12:29:38 +00:00
|
|
|
|
cur.forwardPos();
|
2008-12-20 16:00:47 +00:00
|
|
|
|
}
|
2010-01-04 12:29:38 +00:00
|
|
|
|
}
|
2008-11-15 23:30:27 +00:00
|
|
|
|
return 0;
|
|
|
|
|
}
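// A minimal sketch (disabled, purely illustrative) of how a caller can drive
// findForwardAdv(): findAdv() below does essentially this, plus replacement
// and cancellation handling. The helper name findNextForwardSketch is
// hypothetical and not used anywhere in this file.
#if 0
static bool findNextForwardSketch(BufferView * bv, MatchStringAdv const & match)
{
	DocIterator cur = bv->cursor();
	int const len = findForwardAdv(cur, match);
	if (len <= 0)
		return false;
	// Highlight the match just found (forward search selects forwards).
	bv->putSelectionAt(cur, len, false);
	return true;
}
#endif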
|
|
|
|
|
|
2009-12-30 21:50:55 +00:00
|
|
|
|
|
2009-08-17 08:08:21 +00:00
|
|
|
|
/// Find the most backward consecutive match within the same paragraph while searching backwards.
|
2010-05-02 22:33:36 +00:00
|
|
|
|
int findMostBackwards(DocIterator & cur, MatchStringAdv const & match)
|
2010-01-04 12:29:38 +00:00
|
|
|
|
{
|
2009-08-17 08:08:21 +00:00
|
|
|
|
DocIterator cur_begin = doc_iterator_begin(cur.buffer());
|
2010-03-20 13:59:46 +00:00
|
|
|
|
DocIterator tmp_cur = cur;
|
2010-05-02 22:33:36 +00:00
|
|
|
|
int len = findAdvFinalize(tmp_cur, match);
|
2010-03-20 13:59:46 +00:00
|
|
|
|
Inset & inset = cur.inset();
|
|
|
|
|
for (; cur != cur_begin; cur.backwardPos()) {
|
|
|
|
|
LYXERR(Debug::FIND, "findMostBackwards(): cur=" << cur);
|
|
|
|
|
DocIterator new_cur = cur;
|
|
|
|
|
new_cur.backwardPos();
|
2019-02-26 22:00:31 +00:00
|
|
|
|
if (new_cur == cur || &new_cur.inset() != &inset || !match(new_cur).match_len)
|
2010-03-20 13:59:46 +00:00
|
|
|
|
break;
|
|
|
|
|
int new_len = findAdvFinalize(new_cur, match);
|
|
|
|
|
if (new_len == len)
|
|
|
|
|
break;
|
|
|
|
|
len = new_len;
|
2009-08-17 08:08:21 +00:00
|
|
|
|
}
|
2010-03-20 13:59:46 +00:00
|
|
|
|
LYXERR(Debug::FIND, "findMostBackwards(): exiting with cur=" << cur);
|
2010-05-02 22:33:36 +00:00
|
|
|
|
return len;
|
2009-08-17 08:08:21 +00:00
|
|
|
|
}
|
2008-12-20 16:00:47 +00:00
|
|
|
|
|
2010-01-04 12:29:38 +00:00
|
|
|
|
|
2008-11-15 23:30:27 +00:00
|
|
|
|
/// Finds backwards
|
2012-10-27 13:45:27 +00:00
|
|
|
|
int findBackwardsAdv(DocIterator & cur, MatchStringAdv & match)
|
|
|
|
|
{
|
2009-08-17 08:08:21 +00:00
|
|
|
|
if (! cur)
|
|
|
|
|
return 0;
|
2010-01-04 12:29:38 +00:00
|
|
|
|
// Backup of original position
|
2009-08-17 08:08:21 +00:00
|
|
|
|
DocIterator cur_begin = doc_iterator_begin(cur.buffer());
|
2010-01-04 12:29:38 +00:00
|
|
|
|
if (cur == cur_begin)
|
|
|
|
|
return 0;
|
2010-03-20 13:59:46 +00:00
|
|
|
|
cur.backwardPos();
|
|
|
|
|
DocIterator cur_orig(cur);
|
2010-01-04 12:29:38 +00:00
|
|
|
|
bool pit_changed = false;
|
2008-12-20 16:00:47 +00:00
|
|
|
|
do {
|
2009-08-17 08:08:21 +00:00
|
|
|
|
cur.pos() = 0;
|
2019-02-26 22:00:31 +00:00
|
|
|
|
bool found_match = (match(cur, -1, false).match_len > 0);
|
2010-01-04 12:29:38 +00:00
|
|
|
|
|
2009-08-17 08:08:21 +00:00
|
|
|
|
if (found_match) {
|
2010-01-04 12:29:38 +00:00
|
|
|
|
if (pit_changed)
|
|
|
|
|
cur.pos() = cur.lastpos();
|
|
|
|
|
else
|
|
|
|
|
cur.pos() = cur_orig.pos();
|
|
|
|
|
LYXERR(Debug::FIND, "findBackAdv2: cur: " << cur);
|
|
|
|
|
DocIterator cur_prev_iter;
|
2010-03-20 13:59:46 +00:00
|
|
|
|
do {
|
2019-02-26 22:00:31 +00:00
|
|
|
|
found_match = (match(cur).match_len > 0);
|
2015-05-17 15:27:12 +00:00
|
|
|
|
LYXERR(Debug::FIND, "findBackAdv3: found_match="
|
2010-01-04 12:29:38 +00:00
|
|
|
|
<< found_match << ", cur: " << cur);
|
2010-05-02 22:33:36 +00:00
|
|
|
|
if (found_match)
|
|
|
|
|
return findMostBackwards(cur, match);
|
|
|
|
|
|
2010-03-20 13:59:46 +00:00
|
|
|
|
// Stop if the beginning of the document has been reached
|
|
|
|
|
if (cur == cur_begin)
|
2009-08-17 08:08:21 +00:00
|
|
|
|
break;
|
2009-12-26 22:10:14 +00:00
|
|
|
|
cur_prev_iter = cur;
|
2009-08-17 08:08:21 +00:00
|
|
|
|
cur.backwardPos();
|
2010-03-20 13:59:46 +00:00
|
|
|
|
} while (true);
|
2008-12-20 16:00:47 +00:00
|
|
|
|
}
|
2010-01-04 12:29:38 +00:00
|
|
|
|
if (cur == cur_begin)
|
2009-12-30 22:21:23 +00:00
|
|
|
|
break;
|
2010-01-04 12:29:38 +00:00
|
|
|
|
if (cur.pit() > 0)
|
|
|
|
|
--cur.pit();
|
|
|
|
|
else
|
|
|
|
|
cur.backwardPos();
|
|
|
|
|
pit_changed = true;
|
2012-03-06 23:21:12 +00:00
|
|
|
|
} while (!theApp()->longOperationCancelled());
|
2008-11-15 23:30:27 +00:00
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
2009-12-30 18:40:18 +00:00
|
|
|
|
|
2017-07-23 11:11:54 +00:00
|
|
|
|
} // namespace
|
2008-11-15 23:30:27 +00:00
|
|
|
|
|
|
|
|
|
|
2009-01-14 15:34:56 +00:00
|
|
|
|
docstring stringifyFromForSearch(FindAndReplaceOptions const & opt,
|
2012-10-23 20:58:10 +00:00
|
|
|
|
DocIterator const & cur, int len)
|
2008-11-15 23:30:27 +00:00
|
|
|
|
{
|
2018-10-05 18:26:44 +00:00
|
|
|
|
if (cur.pos() < 0 || cur.pos() > cur.lastpos())
|
|
|
|
|
return docstring();
|
2008-11-17 11:46:07 +00:00
|
|
|
|
if (!opt.ignoreformat)
|
2008-12-20 16:00:47 +00:00
|
|
|
|
return latexifyFromCursor(cur, len);
|
2019-02-27 09:33:25 +00:00
|
|
|
|
else
|
|
|
|
|
return stringifyFromCursor(cur, len);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
}
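// For example (hypothetical content): with opt.ignoreformat set, an emphasised
// "foo" near the cursor is reduced to the plain string "foo" by
// stringifyFromCursor(), whereas with formats respected latexifyFromCursor()
// would yield something like "\emph{foo}", so the markup can take part in the
// match.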
|
|
|
|
|
|
|
|
|
|
|
2012-10-23 20:58:10 +00:00
|
|
|
|
FindAndReplaceOptions::FindAndReplaceOptions(
|
2019-10-26 22:06:54 +00:00
|
|
|
|
docstring const & _find_buf_name, bool _casesensitive,
|
|
|
|
|
bool _matchword, bool _forward, bool _expandmacros, bool _ignoreformat,
|
|
|
|
|
docstring const & _repl_buf_name, bool _keep_case,
|
|
|
|
|
SearchScope _scope, SearchRestriction _restr, bool _replace_all)
|
|
|
|
|
: find_buf_name(_find_buf_name), casesensitive(_casesensitive), matchword(_matchword),
|
|
|
|
|
forward(_forward), expandmacros(_expandmacros), ignoreformat(_ignoreformat),
|
|
|
|
|
repl_buf_name(_repl_buf_name), keep_case(_keep_case), scope(_scope), restr(_restr), replace_all(_replace_all)
|
2008-11-15 23:30:27 +00:00
|
|
|
|
{
|
|
|
|
|
}
|
|
|
|
|
|
2009-08-19 22:55:38 +00:00
|
|
|
|
|
2010-10-13 18:30:37 +00:00
|
|
|
|
namespace {
|
2009-08-19 22:55:38 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/** Check if the 'len' letters following the cursor are all non-lowercase */
|
2013-07-22 09:39:11 +00:00
|
|
|
|
static bool allNonLowercase(Cursor const & cur, int len)
|
2012-10-27 13:45:27 +00:00
|
|
|
|
{
|
2013-07-22 09:39:11 +00:00
|
|
|
|
pos_type beg_pos = cur.selectionBegin().pos();
|
|
|
|
|
pos_type end_pos = cur.selectionBegin().pos() + len;
|
|
|
|
|
if (len > cur.lastpos() + 1 - beg_pos) {
|
|
|
|
|
LYXERR(Debug::FIND, "This should not happen, more debug needed");
|
|
|
|
|
len = cur.lastpos() + 1 - beg_pos;
|
2015-10-09 06:14:18 +00:00
|
|
|
|
end_pos = beg_pos + len;
|
2013-07-22 09:39:11 +00:00
|
|
|
|
}
|
|
|
|
|
for (pos_type pos = beg_pos; pos != end_pos; ++pos)
|
2009-08-19 22:55:38 +00:00
|
|
|
|
if (isLowerCase(cur.paragraph().getChar(pos)))
|
|
|
|
|
return false;
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/** Check if the first letter is upper case and the second one is lower case */
|
2013-07-22 09:39:11 +00:00
|
|
|
|
static bool firstUppercase(Cursor const & cur)
|
2012-10-27 13:45:27 +00:00
|
|
|
|
{
|
2009-08-19 22:55:38 +00:00
|
|
|
|
char_type ch1, ch2;
|
2013-07-22 09:39:11 +00:00
|
|
|
|
pos_type pos = cur.selectionBegin().pos();
|
|
|
|
|
if (pos >= cur.lastpos() - 1) {
|
2009-08-19 22:55:38 +00:00
|
|
|
|
LYXERR(Debug::FIND, "No upper-case at cur: " << cur);
|
|
|
|
|
return false;
|
|
|
|
|
}
|
2013-07-22 09:39:11 +00:00
|
|
|
|
ch1 = cur.paragraph().getChar(pos);
|
|
|
|
|
ch2 = cur.paragraph().getChar(pos + 1);
|
2009-08-19 22:55:38 +00:00
|
|
|
|
bool result = isUpperCase(ch1) && isLowerCase(ch2);
|
|
|
|
|
LYXERR(Debug::FIND, "firstUppercase(): "
|
2015-05-17 15:27:12 +00:00
|
|
|
|
<< "ch1=" << ch1 << "(" << char(ch1) << "), ch2="
|
2009-09-04 13:06:43 +00:00
|
|
|
|
<< ch2 << "(" << char(ch2) << ")"
|
2009-08-19 22:55:38 +00:00
|
|
|
|
<< ", result=" << result << ", cur=" << cur);
|
|
|
|
|
return result;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/** Make the first letter of the supplied buffer upper-case, and the rest lower-case.
|
|
|
|
|
**
|
|
|
|
|
 ** \fixme What to do with possible further paragraphs in the replace buffer?
|
|
|
|
|
**/
|
2012-10-27 13:45:27 +00:00
|
|
|
|
static void changeFirstCase(Buffer & buffer, TextCase first_case, TextCase others_case)
|
|
|
|
|
{
|
2009-08-19 22:55:38 +00:00
|
|
|
|
ParagraphList::iterator pit = buffer.paragraphs().begin();
|
2020-10-05 10:38:09 +00:00
|
|
|
|
LASSERT(!pit->empty(), /**/);
|
2009-08-19 22:55:38 +00:00
|
|
|
|
pos_type right = pos_type(1);
|
|
|
|
|
pit->changeCase(buffer.params(), pos_type(0), right, first_case);
|
2013-07-22 09:39:11 +00:00
|
|
|
|
right = pit->size();
|
|
|
|
|
pit->changeCase(buffer.params(), pos_type(1), right, others_case);
|
2009-08-19 22:55:38 +00:00
|
|
|
|
}
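// Keep-case in practice (hypothetical strings): when the matched text starts
// like "Word" (firstUppercase() above is true), findAdvReplace() below calls
// changeFirstCase(repl_buffer, text_uppercase, text_lowercase), so a
// replacement "other text" is pasted as "Other text"; when the match looks
// like "WORD" (allNonLowercase() is true), both arguments are text_uppercase
// and the replacement is pasted fully upper-cased.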
|
2012-10-27 13:45:27 +00:00
|
|
|
|
|
2017-07-23 11:11:54 +00:00
|
|
|
|
} // namespace
|
2009-08-19 22:55:38 +00:00
|
|
|
|
|
2010-01-30 10:11:24 +00:00
|
|
|
|
///
|
2019-03-13 13:06:18 +00:00
|
|
|
|
static int findAdvReplace(BufferView * bv, FindAndReplaceOptions const & opt, MatchStringAdv & matchAdv)
|
2010-01-30 10:11:24 +00:00
|
|
|
|
{
|
|
|
|
|
Cursor & cur = bv->cursor();
|
2020-10-05 10:38:09 +00:00
|
|
|
|
if (opt.repl_buf_name.empty()
|
2017-05-03 15:32:31 +00:00
|
|
|
|
|| theBufferList().getBuffer(FileName(to_utf8(opt.repl_buf_name)), true) == 0
|
|
|
|
|
|| theBufferList().getBuffer(FileName(to_utf8(opt.find_buf_name)), true) == 0)
|
2019-03-13 13:06:18 +00:00
|
|
|
|
return 0;
|
2011-02-07 20:36:40 +00:00
|
|
|
|
|
2010-01-30 10:11:24 +00:00
|
|
|
|
DocIterator sel_beg = cur.selectionBegin();
|
|
|
|
|
DocIterator sel_end = cur.selectionEnd();
|
2010-02-22 21:44:59 +00:00
|
|
|
|
if (&sel_beg.inset() != &sel_end.inset()
|
2014-03-31 16:33:53 +00:00
|
|
|
|
|| sel_beg.pit() != sel_end.pit()
|
|
|
|
|
|| sel_beg.idx() != sel_end.idx())
|
2019-03-13 13:06:18 +00:00
|
|
|
|
return 0;
|
2010-01-30 10:11:24 +00:00
|
|
|
|
int sel_len = sel_end.pos() - sel_beg.pos();
|
2010-02-22 21:44:59 +00:00
|
|
|
|
LYXERR(Debug::FIND, "sel_beg: " << sel_beg << ", sel_end: " << sel_end
|
|
|
|
|
<< ", sel_len: " << sel_len << endl);
|
2010-01-30 10:11:24 +00:00
|
|
|
|
if (sel_len == 0)
|
2019-03-13 13:06:18 +00:00
|
|
|
|
return 0;
|
|
|
|
|
LASSERT(sel_len > 0, return 0);
|
2010-01-30 10:11:24 +00:00
|
|
|
|
|
2019-02-26 22:00:31 +00:00
|
|
|
|
if (!matchAdv(sel_beg, sel_len).match_len)
|
2019-03-13 13:06:18 +00:00
|
|
|
|
return 0;
|
2010-01-30 10:11:24 +00:00
|
|
|
|
|
2011-02-07 20:36:40 +00:00
|
|
|
|
// Build a copy of the replace buffer, adapted to the KeepCase option
|
2020-10-09 15:50:24 +00:00
|
|
|
|
Buffer const & repl_buffer_orig = *theBufferList().getBuffer(FileName(to_utf8(opt.repl_buf_name)), true);
|
2011-02-07 20:36:40 +00:00
|
|
|
|
ostringstream oss;
|
|
|
|
|
repl_buffer_orig.write(oss);
|
|
|
|
|
string lyx = oss.str();
|
2010-01-30 10:11:24 +00:00
|
|
|
|
Buffer repl_buffer("", false);
|
|
|
|
|
repl_buffer.setUnnamed(true);
|
2019-03-13 13:06:18 +00:00
|
|
|
|
LASSERT(repl_buffer.readString(lyx), return 0);
|
2010-01-30 10:11:24 +00:00
|
|
|
|
if (opt.keep_case && sel_len >= 2) {
|
2013-07-22 09:39:11 +00:00
|
|
|
|
LYXERR(Debug::FIND, "keep_case true: cur.pos()=" << cur.pos() << ", sel_len=" << sel_len);
|
2010-01-30 10:11:24 +00:00
|
|
|
|
if (cur.inTexted()) {
|
|
|
|
|
if (firstUppercase(cur))
|
|
|
|
|
changeFirstCase(repl_buffer, text_uppercase, text_lowercase);
|
|
|
|
|
else if (allNonLowercase(cur, sel_len))
|
|
|
|
|
changeFirstCase(repl_buffer, text_uppercase, text_uppercase);
|
|
|
|
|
}
|
|
|
|
|
}
|
2018-07-22 20:18:50 +00:00
|
|
|
|
cap::cutSelection(cur, false);
|
2011-10-18 18:57:42 +00:00
|
|
|
|
if (cur.inTexted()) {
|
2010-12-30 17:59:59 +00:00
|
|
|
|
repl_buffer.changeLanguage(
|
|
|
|
|
repl_buffer.language(),
|
|
|
|
|
cur.getFont().language());
|
2010-01-30 10:11:24 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Replacing by pasteParagraphList()ing repl_buffer");
|
2010-12-29 19:59:41 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Before pasteParagraphList() cur=" << cur << endl);
|
2010-01-30 10:11:24 +00:00
|
|
|
|
cap::pasteParagraphList(cur, repl_buffer.paragraphs(),
|
|
|
|
|
repl_buffer.params().documentClassPtr(),
|
|
|
|
|
bv->buffer().errorList("Paste"));
|
2010-12-29 19:59:41 +00:00
|
|
|
|
LYXERR(Debug::FIND, "After pasteParagraphList() cur=" << cur << endl);
|
2011-01-26 23:54:12 +00:00
|
|
|
|
sel_len = repl_buffer.paragraphs().begin()->size();
|
2011-10-18 18:57:42 +00:00
|
|
|
|
} else if (cur.inMathed()) {
|
2010-01-30 10:11:24 +00:00
|
|
|
|
odocstringstream ods;
|
2016-09-04 02:02:47 +00:00
|
|
|
|
otexstream os(ods);
|
2019-03-03 13:08:27 +00:00
|
|
|
|
// OutputParams runparams(&repl_buffer.params().encoding());
|
|
|
|
|
OutputParams runparams(encodings.fromLyXName("utf8"));
|
2010-01-30 10:11:24 +00:00
|
|
|
|
runparams.nice = false;
|
2019-03-02 14:42:38 +00:00
|
|
|
|
runparams.flavor = OutputParams::XETEX;
|
2010-01-30 10:11:24 +00:00
|
|
|
|
runparams.linelen = 8000; //lyxrc.plaintext_linelen;
|
|
|
|
|
runparams.dryrun = true;
|
2011-02-10 20:02:48 +00:00
|
|
|
|
TeXOnePar(repl_buffer, repl_buffer.text(), 0, os, runparams);
|
2010-01-30 10:11:24 +00:00
|
|
|
|
//repl_buffer.getSourceCode(ods, 0, repl_buffer.paragraphs().size(), false);
|
|
|
|
|
docstring repl_latex = ods.str();
|
|
|
|
|
LYXERR(Debug::FIND, "Latexified replace_buffer: '" << repl_latex << "'");
|
|
|
|
|
string s;
|
2017-03-31 09:38:30 +00:00
|
|
|
|
(void)regex_replace(to_utf8(repl_latex), s, "\\$(.*)\\$", "$1");
|
|
|
|
|
(void)regex_replace(s, s, "\\\\\\[(.*)\\\\\\]", "$1");
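// E.g. (hypothetical input) a latexified replacement of "$x^{2}$" is reduced
// to "x^{2}" by the two substitutions above, so that asArray() below parses
// the bare math rather than a redundant pair of inline-math delimiters.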
|
2010-01-30 10:11:24 +00:00
|
|
|
|
repl_latex = from_utf8(s);
|
2011-07-10 00:19:11 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Replacing by insert()ing latex: '" << repl_latex << "' cur=" << cur << " with depth=" << cur.depth());
|
|
|
|
|
MathData ar(cur.buffer());
|
|
|
|
|
asArray(repl_latex, ar, Parse::NORMAL);
|
|
|
|
|
cur.insert(ar);
|
|
|
|
|
sel_len = ar.size();
|
|
|
|
|
LYXERR(Debug::FIND, "After insert() cur=" << cur << " with depth: " << cur.depth() << " and len: " << sel_len);
|
2010-01-30 10:11:24 +00:00
|
|
|
|
}
|
2011-04-16 10:48:55 +00:00
|
|
|
|
if (cur.pos() >= sel_len)
|
|
|
|
|
cur.pos() -= sel_len;
|
|
|
|
|
else
|
2011-01-26 23:54:12 +00:00
|
|
|
|
cur.pos() = 0;
|
2011-07-10 00:19:11 +00:00
|
|
|
|
LYXERR(Debug::FIND, "After pos adj cur=" << cur << " with depth: " << cur.depth() << " and len: " << sel_len);
|
2011-01-26 23:54:12 +00:00
|
|
|
|
bv->putSelectionAt(DocIterator(cur), sel_len, !opt.forward);
|
2011-05-21 10:44:27 +00:00
|
|
|
|
bv->processUpdateFlags(Update::Force);
|
2019-03-13 13:06:18 +00:00
|
|
|
|
return 1;
|
2010-01-30 10:11:24 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2008-11-15 23:30:27 +00:00
|
|
|
|
/// Perform a FindAdv operation.
|
2009-01-14 15:34:56 +00:00
|
|
|
|
bool findAdv(BufferView * bv, FindAndReplaceOptions const & opt)
|
2008-11-15 23:30:27 +00:00
|
|
|
|
{
|
2010-01-30 10:11:24 +00:00
|
|
|
|
DocIterator cur;
|
2010-12-29 19:59:41 +00:00
|
|
|
|
int match_len = 0;
|
2008-11-15 23:30:27 +00:00
|
|
|
|
|
2017-05-02 15:46:38 +00:00
|
|
|
|
// e.g., when invoking word-findadv from the mini-buffer, either with
|
|
|
|
|
// wrong options syntax or before ever opening advanced F&R pane
|
2017-05-03 15:32:31 +00:00
|
|
|
|
if (theBufferList().getBuffer(FileName(to_utf8(opt.find_buf_name)), true) == 0)
|
2017-05-02 15:46:38 +00:00
|
|
|
|
return false;
|
|
|
|
|
|
2008-11-15 23:30:27 +00:00
|
|
|
|
try {
|
2010-01-30 10:11:24 +00:00
|
|
|
|
MatchStringAdv matchAdv(bv->buffer(), opt);
|
2013-04-03 23:43:36 +00:00
|
|
|
|
int length = bv->cursor().selectionEnd().pos() - bv->cursor().selectionBegin().pos();
|
2013-04-03 23:50:02 +00:00
|
|
|
|
if (length > 0)
|
|
|
|
|
bv->putSelectionAt(bv->cursor().selectionBegin(), length, !opt.forward);
|
2019-03-13 13:06:18 +00:00
|
|
|
|
num_replaced += findAdvReplace(bv, opt, matchAdv);
|
2010-01-30 10:11:24 +00:00
|
|
|
|
cur = bv->cursor();
|
2008-11-15 23:30:27 +00:00
|
|
|
|
if (opt.forward)
|
2012-10-23 20:58:10 +00:00
|
|
|
|
match_len = findForwardAdv(cur, matchAdv);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
else
|
2012-10-23 20:58:10 +00:00
|
|
|
|
match_len = findBackwardsAdv(cur, matchAdv);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
} catch (...) {
|
2010-06-29 17:09:40 +00:00
|
|
|
|
// This may only be raised by lyx::regex()
|
2008-12-07 17:18:30 +00:00
|
|
|
|
bv->message(_("Invalid regular expression!"));
|
2008-11-15 23:30:27 +00:00
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (match_len == 0) {
|
2019-03-13 13:06:18 +00:00
|
|
|
|
if (num_replaced > 0) {
|
|
|
|
|
switch (num_replaced)
|
|
|
|
|
{
|
|
|
|
|
case 1:
|
2019-03-13 13:14:35 +00:00
|
|
|
|
bv->message(_("One match has been replaced."));
|
2019-03-13 13:06:18 +00:00
|
|
|
|
break;
|
|
|
|
|
case 2:
|
|
|
|
|
bv->message(_("Two matches have been replaced."));
|
|
|
|
|
break;
|
|
|
|
|
default:
|
|
|
|
|
bv->message(bformat(_("%1$d matches have been replaced."), num_replaced));
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
num_replaced = 0;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
bv->message(_("Match not found."));
|
|
|
|
|
}
|
2008-11-15 23:30:27 +00:00
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
2019-03-13 13:06:18 +00:00
|
|
|
|
if (num_replaced > 0)
|
|
|
|
|
bv->message(_("Match has been replaced."));
|
|
|
|
|
else
|
|
|
|
|
bv->message(_("Match found."));
|
2009-12-30 18:40:18 +00:00
|
|
|
|
|
2010-01-30 10:11:24 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Putting selection at cur=" << cur << " with len: " << match_len);
|
|
|
|
|
bv->putSelectionAt(cur, match_len, !opt.forward);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
|
|
|
|
|
return true;
|
|
|
|
|
}
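// Disabled sketch of a hypothetical caller: build the options by hand and run
// a forward, case-insensitive search. The buffer names and the numeric
// scope/restriction values (cast as in operator>> below) are placeholders,
// not the values used by the real Find & Replace pane.
#if 0
static bool findAdvExample(BufferView * bv)
{
	FindAndReplaceOptions opt(
		from_utf8("Find and Replace (search)"),      // find_buf_name (placeholder)
		false,                                       // casesensitive
		false,                                       // matchword
		true,                                        // forward
		false,                                       // expandmacros
		true,                                        // ignoreformat
		from_utf8("Find and Replace (replace)"),     // repl_buf_name (placeholder)
		false,                                       // keep_case
		FindAndReplaceOptions::SearchScope(0),       // scope (placeholder value)
		FindAndReplaceOptions::SearchRestriction(0), // restr (placeholder value)
		false);                                      // replace_all
	return findAdv(bv, opt);
}
#endif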
|
|
|
|
|
|
|
|
|
|
|
2010-01-10 12:37:50 +00:00
|
|
|
|
ostringstream & operator<<(ostringstream & os, FindAndReplaceOptions const & opt)
|
2008-11-15 23:30:27 +00:00
|
|
|
|
{
|
2011-02-07 20:36:40 +00:00
|
|
|
|
os << to_utf8(opt.find_buf_name) << "\nEOSS\n"
|
2008-11-15 23:30:27 +00:00
|
|
|
|
<< opt.casesensitive << ' '
|
|
|
|
|
<< opt.matchword << ' '
|
|
|
|
|
<< opt.forward << ' '
|
|
|
|
|
<< opt.expandmacros << ' '
|
|
|
|
|
<< opt.ignoreformat << ' '
|
2019-03-13 13:06:18 +00:00
|
|
|
|
<< opt.replace_all << ' '
|
2011-02-07 20:36:40 +00:00
|
|
|
|
<< to_utf8(opt.repl_buf_name) << "\nEOSS\n"
|
2009-12-30 18:40:18 +00:00
|
|
|
|
<< opt.keep_case << ' '
|
2013-08-23 19:36:50 +00:00
|
|
|
|
<< int(opt.scope) << ' '
|
|
|
|
|
<< int(opt.restr);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "built: " << os.str());
|
2008-11-15 23:30:27 +00:00
|
|
|
|
|
|
|
|
|
return os;
|
|
|
|
|
}
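// Example of the serialized form produced above and parsed back by operator>>
// below (the buffer names are hypothetical; the trailing three fields are
// keep_case, int(scope) and int(restr)):
//
//   Find and Replace (search)
//   EOSS
//   0 0 1 0 1 0 Find and Replace (replace)
//   EOSS
//   0 0 0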
|
|
|
|
|
|
2011-02-07 20:36:40 +00:00
|
|
|
|
|
2010-01-10 12:37:50 +00:00
|
|
|
|
istringstream & operator>>(istringstream & is, FindAndReplaceOptions & opt)
|
2008-11-15 23:30:27 +00:00
|
|
|
|
{
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "parsing");
|
2008-11-15 23:30:27 +00:00
|
|
|
|
string s;
|
|
|
|
|
string line;
|
|
|
|
|
getline(is, line);
|
|
|
|
|
while (line != "EOSS") {
|
|
|
|
|
if (! s.empty())
|
2012-10-23 20:58:10 +00:00
|
|
|
|
s = s + "\n";
|
2008-11-15 23:30:27 +00:00
|
|
|
|
s = s + line;
|
|
|
|
|
if (is.eof()) // Tolerate malformed request
|
2012-10-23 20:58:10 +00:00
|
|
|
|
break;
|
2008-11-15 23:30:27 +00:00
|
|
|
|
getline(is, line);
|
|
|
|
|
}
|
2011-02-07 20:36:40 +00:00
|
|
|
|
LYXERR(Debug::FIND, "file_buf_name: '" << s << "'");
|
|
|
|
|
opt.find_buf_name = from_utf8(s);
|
2019-03-13 13:06:18 +00:00
|
|
|
|
is >> opt.casesensitive >> opt.matchword >> opt.forward >> opt.expandmacros >> opt.ignoreformat >> opt.replace_all;
|
2009-01-14 15:34:56 +00:00
|
|
|
|
is.get(); // Consume the separating space before the replace string
|
|
|
|
|
s = "";
|
|
|
|
|
getline(is, line);
|
|
|
|
|
while (line != "EOSS") {
|
|
|
|
|
if (! s.empty())
|
2012-10-23 20:58:10 +00:00
|
|
|
|
s = s + "\n";
|
2009-01-14 15:34:56 +00:00
|
|
|
|
s = s + line;
|
|
|
|
|
if (is.eof()) // Tolerate malformed request
|
2012-10-23 20:58:10 +00:00
|
|
|
|
break;
|
2009-01-14 15:34:56 +00:00
|
|
|
|
getline(is, line);
|
|
|
|
|
}
|
2011-02-07 20:36:40 +00:00
|
|
|
|
LYXERR(Debug::FIND, "repl_buf_name: '" << s << "'");
|
|
|
|
|
opt.repl_buf_name = from_utf8(s);
|
2009-08-19 22:55:38 +00:00
|
|
|
|
is >> opt.keep_case;
|
2009-12-30 18:40:18 +00:00
|
|
|
|
int i;
|
|
|
|
|
is >> i;
|
|
|
|
|
opt.scope = FindAndReplaceOptions::SearchScope(i);
|
2013-08-23 19:36:50 +00:00
|
|
|
|
is >> i;
|
|
|
|
|
opt.restr = FindAndReplaceOptions::SearchRestriction(i);
|
|
|
|
|
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "parsed: " << opt.casesensitive << ' ' << opt.matchword << ' ' << opt.forward << ' '
|
2013-08-23 19:36:50 +00:00
|
|
|
|
<< opt.expandmacros << ' ' << opt.ignoreformat << ' ' << opt.keep_case << ' '
|
|
|
|
|
<< opt.scope << ' ' << opt.restr);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
return is;
|
|
|
|
|
}
|
|
|
|
|
|
2017-07-23 11:11:54 +00:00
|
|
|
|
} // namespace lyx
|