/**
 * \file lyxfind.cpp
 * This file is part of LyX, the document processor.
 * License details can be found in the file COPYING.
 *
 * \author Lars Gullik Bjønnes
 * \author John Levon
 * \author Jürgen Vigna
 * \author Alfredo Braunstein
 * \author Tommaso Cucinotta
 * \author Kornel Benko
 *
 * Full author contact details are available in file CREDITS.
 */

#include <config.h>

#include "lyxfind.h"

#include "Buffer.h"
#include "BufferList.h"
#include "BufferParams.h"
#include "BufferView.h"
#include "Changes.h"
#include "Cursor.h"
#include "CutAndPaste.h"
#include "FuncRequest.h"
#include "LyX.h"
#include "output_latex.h"
#include "OutputParams.h"
#include "Paragraph.h"
#include "Text.h"
#include "Encoding.h"
#include "Language.h"

#include "frontends/Application.h"
#include "frontends/alert.h"

#include "mathed/InsetMath.h"
#include "mathed/InsetMathHull.h"
#include "mathed/MathData.h"
#include "mathed/MathStream.h"
#include "mathed/MathSupport.h"

#include "support/debug.h"
#include "support/docstream.h"
#include "support/FileName.h"
#include "support/gettext.h"
#include "support/lassert.h"
#include "support/lstrings.h"
#include "support/textutils.h"

#include <unordered_map>
#include <regex>

//#define ResultsDebug
#define USE_QT_FOR_SEARCH
#if defined(USE_QT_FOR_SEARCH)
#include <QtCore>	// sets QT_VERSION
#if (QT_VERSION >= 0x050000)
#include <QRegularExpression>
#define QTSEARCH 1
#else
#define QTSEARCH 0
#endif
#else
#define QTSEARCH 0
#endif

using namespace std;
using namespace lyx::support;

namespace lyx {

typedef unordered_map<string, string> AccentsMap;
typedef unordered_map<string, string>::const_iterator AccentsIterator;
static AccentsMap accents = unordered_map<string, string>();

// Helper class for deciding what should be ignored
class IgnoreFormats {
public:
	///
	IgnoreFormats() = default;
	///
	bool getFamily() const { return ignoreFamily_; }
	///
	bool getSeries() const { return ignoreSeries_; }
	///
	bool getShape() const { return ignoreShape_; }
	///
	bool getUnderline() const { return ignoreUnderline_; }
	///
	bool getMarkUp() const { return ignoreMarkUp_; }
	///
	bool getStrikeOut() const { return ignoreStrikeOut_; }
	///
	bool getSectioning() const { return ignoreSectioning_; }
	///
	bool getFrontMatter() const { return ignoreFrontMatter_; }
	///
	bool getColor() const { return ignoreColor_; }
	///
	bool getLanguage() const { return ignoreLanguage_; }
	///
	bool getDeleted() const { return ignoreDeleted_; }
	///
	void setIgnoreDeleted(bool value);
	///
	void setIgnoreFormat(string const & type, bool value, bool fromUser = true);

private:
	///
	bool ignoreFamily_ = false;
	///
	bool ignoreSeries_ = false;
	///
	bool ignoreShape_ = false;
	///
	bool ignoreUnderline_ = false;
	///
	bool ignoreMarkUp_ = false;
	///
	bool ignoreStrikeOut_ = false;
	///
	bool ignoreSectioning_ = false;
	///
	bool ignoreFrontMatter_ = false;
	///
	bool ignoreColor_ = false;
	///
	bool ignoreLanguage_ = false;
	bool userSelectedIgnoreLanguage_ = false;
	///
	bool ignoreDeleted_ = true;
};


void IgnoreFormats::setIgnoreFormat(string const & type, bool value, bool fromUser)
{
	if (type == "color") {
		ignoreColor_ = value;
	}
	else if (type == "language") {
		if (fromUser) {
			userSelectedIgnoreLanguage_ = value;
			ignoreLanguage_ = value;
		}
		else
			ignoreLanguage_ = (value || userSelectedIgnoreLanguage_);
	}
	else if (type == "sectioning") {
		ignoreSectioning_ = value;
		ignoreFrontMatter_ = value;
	}
	else if (type == "font") {
		ignoreSeries_ = value;
		ignoreShape_ = value;
		ignoreFamily_ = value;
	}
	else if (type == "series") {
		ignoreSeries_ = value;
	}
	else if (type == "shape") {
		ignoreShape_ = value;
	}
	else if (type == "family") {
		ignoreFamily_ = value;
	}
	else if (type == "markup") {
		ignoreMarkUp_ = value;
	}
	else if (type == "underline") {
		ignoreUnderline_ = value;
	}
	else if (type == "strike") {
		ignoreStrikeOut_ = value;
	}
	else if (type == "deleted") {
		ignoreDeleted_ = value;
	}
}

// The global variable that can be changed from outside
IgnoreFormats ignoreFormats;


void setIgnoreFormat(string const & type, bool value, bool fromUser)
{
	ignoreFormats.setIgnoreFormat(type, value, fromUser);
}
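
// Example of the mapping implemented above: setIgnoreFormat("font", true)
// sets the family, series and shape ignore flags at once, whereas
// setIgnoreFormat("series", false) resets only the series flag.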

namespace {

bool parse_bool(docstring & howto, bool const defvalue = false)
{
	if (howto.empty())
		return defvalue;
	docstring var;
	howto = split(howto, var, ' ');
	return var == "1";
}
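
// Example of how an option string is consumed: with howto == "1 0 1",
// a first call to parse_bool(howto) returns true and leaves howto == "0 1";
// the next call returns false and leaves howto == "1".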


class MatchString
{
public:
	MatchString(docstring const & s, bool cs, bool mw)
		: str(s), case_sens(cs), whole_words(mw)
	{}

	// returns true if the specified string is at the specified position
	// del specifies whether deleted strings in ct mode will be considered
	int operator()(Paragraph const & par, pos_type pos, bool del = true) const
	{
		return par.find(str, case_sens, whole_words, pos, del);
	}

private:
	// search string
	docstring str;
	// case sensitive
	bool case_sens;
	// match whole words only
	bool whole_words;
};


int findForward(DocIterator & cur, DocIterator const endcur,
		MatchString const & match,
		bool find_del = true, bool onlysel = false)
{
	for (; cur; cur.forwardChar()) {
		if (onlysel && endcur.pit() == cur.pit()
		    && endcur.idx() == cur.idx() && endcur.pos() < cur.pos())
			break;
		if (cur.inTexted()) {
			int len = match(cur.paragraph(), cur.pos(), find_del);
			if (len > 0)
				return len;
		}
	}
	return 0;
}


int findBackwards(DocIterator & cur, DocIterator const endcur,
		MatchString const & match,
		bool find_del = true, bool onlysel = false)
{
	while (cur) {
		cur.backwardChar();
		if (onlysel && endcur.pit() == cur.pit()
		    && endcur.idx() == cur.idx() && endcur.pos() > cur.pos())
			break;
		if (cur.inTexted()) {
			int len = match(cur.paragraph(), cur.pos(), find_del);
			if (len > 0)
				return len;
		}
	}
	return 0;
}


bool searchAllowed(docstring const & str)
{
	if (str.empty()) {
		frontend::Alert::error(_("Search error"), _("Search string is empty"));
		return false;
	}
	return true;
}

} // namespace


bool findOne(BufferView * bv, docstring const & searchstr,
	     bool case_sens, bool whole, bool forward,
	     bool find_del, bool check_wrap, bool auto_wrap,
	     bool instant, bool onlysel)
{
	// Clean up previous selections with empty searchstr on instant
	if (searchstr.empty() && instant) {
		if (bv->cursor().selection()) {
			bv->setCursor(bv->cursor().selectionBegin());
			bv->clearSelection();
		}
		return true;
	}

	if (!searchAllowed(searchstr))
		return false;

	DocIterator const endcur = forward ? bv->cursor().selectionEnd() : bv->cursor().selectionBegin();

	if (onlysel && bv->cursor().selection()) {
		docstring const matchstring = bv->cursor().selectionAsString(false);
		docstring const lcmatchsting = support::lowercase(matchstring);
		if (matchstring == searchstr || (!case_sens && lcmatchsting == lowercase(searchstr))) {
			docstring q = _("The search string matches the selection, and search is limited to selection.\n"
					"Continue search outside?");
			int search_answer = frontend::Alert::prompt(_("Search outside selection?"),
								    q, 0, 1, _("&Yes"), _("&No"));
			if (search_answer == 0) {
				bv->clearSelection();
				if (findOne(bv, searchstr, case_sens, whole, forward,
					    find_del, check_wrap, auto_wrap, false, false))
					return true;
			}
			return false;
		}
	}

	DocIterator cur = forward
		? ((instant || onlysel) ? bv->cursor().selectionBegin() : bv->cursor().selectionEnd())
		: ((instant || onlysel) ? bv->cursor().selectionEnd() : bv->cursor().selectionBegin());

	MatchString const match(searchstr, case_sens, whole);

	int match_len = forward
		? findForward(cur, endcur, match, find_del, onlysel)
		: findBackwards(cur, endcur, match, find_del, onlysel);

	if (match_len > 0)
		bv->putSelectionAt(cur, match_len, !forward);
	else if (onlysel) {
		docstring q = _("The search string was not found within the selection.\n"
				"Continue search outside?");
		int search_answer = frontend::Alert::prompt(_("Search outside selection?"),
							    q, 0, 1, _("&Yes"), _("&No"));
		if (search_answer == 0) {
			bv->clearSelection();
			if (findOne(bv, searchstr, case_sens, whole, forward,
				    find_del, check_wrap, auto_wrap, false, false))
				return true;
		}
		return false;
	}
	else if (check_wrap) {
		DocIterator cur_orig(bv->cursor());
		if (!auto_wrap) {
			docstring q;
			if (forward)
				q = _("End of file reached while searching forward.\n"
				      "Continue searching from the beginning?");
			else
				q = _("Beginning of file reached while searching backward.\n"
				      "Continue searching from the end?");
			int wrap_answer = frontend::Alert::prompt(_("Wrap search?"),
								  q, 0, 1, _("&Yes"), _("&No"));
			auto_wrap = wrap_answer == 0;
		}
		if (auto_wrap) {
			if (forward) {
				bv->cursor().clear();
				bv->cursor().push_back(CursorSlice(bv->buffer().inset()));
			} else {
				bv->cursor().setCursor(doc_iterator_end(&bv->buffer()));
				bv->cursor().backwardPos();
			}
			bv->clearSelection();
			if (findOne(bv, searchstr, case_sens, whole, forward,
				    find_del, false, false, false, false))
				return true;
		}
		bv->cursor().setCursor(cur_orig);
		return false;
	}

	return match_len > 0;
}


namespace {

int replaceAll(BufferView * bv,
	       docstring const & searchstr, docstring const & replacestr,
	       bool case_sens, bool whole, bool onlysel)
{
	Buffer & buf = bv->buffer();

	if (!searchAllowed(searchstr) || buf.isReadonly())
		return 0;

	DocIterator startcur = bv->cursor().selectionBegin();
	DocIterator endcur = bv->cursor().selectionEnd();
	bool const had_selection = bv->cursor().selection();

	MatchString const match(searchstr, case_sens, whole);
	int num = 0;

	int const rsize = replacestr.size();
	int const ssize = searchstr.size();

	Cursor cur(*bv);
	cur.setCursor(doc_iterator_begin(&buf));
	int match_len = findForward(cur, endcur, match, false, onlysel);
	while (match_len > 0) {
		// Backup current cursor position and font.
		pos_type const pos = cur.pos();
		Font const font = cur.paragraph().getFontSettings(buf.params(), pos);
		cur.recordUndo();
		int ct_deleted_text = ssize -
			cur.paragraph().eraseChars(pos, pos + match_len,
						   buf.params().track_changes);
		cur.paragraph().insert(pos, replacestr, font,
				       Change(buf.params().track_changes
					      ? Change::INSERTED
					      : Change::UNCHANGED));
		for (int i = 0; i < rsize + ct_deleted_text
		     && cur.pos() < cur.lastpos(); ++i)
			cur.forwardPos();
		if (onlysel && cur.pit() == endcur.pit() && cur.idx() == endcur.idx()) {
			// Adjust end of selection for replace-all in selection
			if (rsize > ssize) {
				int const offset = rsize - ssize;
				for (int i = 0; i < offset + ct_deleted_text
				     && endcur.pos() < endcur.lastpos(); ++i)
					endcur.forwardPos();
			} else {
				int const offset = ssize - rsize;
				for (int i = 0; i < offset && endcur.pos() > 0; ++i)
					endcur.backwardPos();
				for (int i = 0; i < ct_deleted_text
				     && endcur.pos() < endcur.lastpos(); ++i)
					endcur.forwardPos();
			}
		}
		++num;
		match_len = findForward(cur, endcur, match, false, onlysel);
	}

	bv->putSelectionAt(doc_iterator_begin(&buf), 0, false);

	startcur.fixIfBroken();
	bv->setCursor(startcur);

	// Reset selection, accounting for changes in selection
	if (had_selection) {
		endcur.fixIfBroken();
		bv->cursor().resetAnchor();
		bv->setCursorSelectionTo(endcur);
	}

	return num;
}


// the idea here is that we are going to replace the string that
// is selected IF it is the search string.
// if there is a selection, but it is not the search string, then
// we basically ignore it. (FIXME We ought to replace only within
// the selection.)
// if there is no selection, then:
//  (i) if some search string has been provided, then we find it.
//      (think of how the dialog works when you hit "replace" the
//      first time.)
// (ii) if no search string has been provided, then we treat the
//      word the cursor is in as the search string. (why? i have no
//      idea.) but this only works in text?
//
// returns the number of replacements made (one, if any) and
// whether anything at all was done.
pair<bool, int> replaceOne(BufferView * bv, docstring searchstr,
			   docstring const & replacestr, bool case_sens,
			   bool whole, bool forward, bool findnext, bool wrap,
			   bool onlysel)
{
	Cursor & cur = bv->cursor();
	if (!cur.selection() || onlysel) {
		// no selection, non-empty search string: find it
		if (!searchstr.empty()) {
			bool const found = findOne(bv, searchstr, case_sens, whole,
						   forward, true, findnext, wrap, false, onlysel);
			return make_pair(found, 0);
		}
		// empty search string
		if (!cur.inTexted())
			// bail in math
			return make_pair(false, 0);
		// select current word and treat it as the search string.
		// This causes a minor bug as undo will restore this selection,
		// which the user did not create (#8986).
		cur.innerText()->selectWord(cur, WHOLE_WORD);
		searchstr = cur.selectionAsString(false, true);
	}

	// if we still don't have a search string, report the error
	// and abort.
	if (!searchAllowed(searchstr))
		return make_pair(false, 0);

	bool have_selection = cur.selection();
	docstring const selected = cur.selectionAsString(false, true);
	bool match =
		case_sens
		? searchstr == selected
		: compare_no_case(searchstr, selected) == 0;

	// no selection or current selection is not search word:
	// just find the search word
	if (!have_selection || !match) {
		bool const found = findOne(bv, searchstr, case_sens, whole, forward,
					   true, findnext, wrap, false, onlysel);
		return make_pair(found, 0);
	}

	// we're now actually ready to replace. if the buffer is
	// read-only, we can't, though.
	if (bv->buffer().isReadonly())
		return make_pair(false, 0);

	cap::replaceSelectionWithString(cur, replacestr);
	if (forward) {
		cur.pos() += replacestr.length();
		LASSERT(cur.pos() <= cur.lastpos(),
			cur.pos() = cur.lastpos());
	}
	if (findnext)
		findOne(bv, searchstr, case_sens, whole,
			forward, false, findnext, wrap, false, onlysel);

	return make_pair(true, 1);
}

} // namespace


docstring const find2string(docstring const & search,
			    bool casesensitive, bool matchword,
			    bool forward, bool wrap, bool instant,
			    bool onlysel)
{
	odocstringstream ss;
	ss << search << '\n'
	   << int(casesensitive) << ' '
	   << int(matchword) << ' '
	   << int(forward) << ' '
	   << int(wrap) << ' '
	   << int(instant) << ' '
	   << int(onlysel);
	return ss.str();
}
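
// The serialized form produced here is decoded again by string2find() below;
// for example, with search == "foo", find2string(search, true, false, true,
// false, false, false) yields the docstring "foo\n1 0 1 0 0 0".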


docstring const replace2string(docstring const & replace,
			       docstring const & search,
			       bool casesensitive, bool matchword,
			       bool all, bool forward, bool findnext,
			       bool wrap, bool onlysel)
{
	odocstringstream ss;
	ss << replace << '\n'
	   << search << '\n'
	   << int(casesensitive) << ' '
	   << int(matchword) << ' '
	   << int(all) << ' '
	   << int(forward) << ' '
	   << int(findnext) << ' '
	   << int(wrap) << ' '
	   << int(onlysel);
	return ss.str();
}
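
// Analogously, with replace == "bar" and search == "foo",
// replace2string(replace, search, true, false, false, true, true, false, false)
// yields "bar\nfoo\n1 0 0 1 1 0 0", which lyxreplace() below decodes again.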


docstring const string2find(docstring const & argument,
			    bool &casesensitive,
			    bool &matchword,
			    bool &forward,
			    bool &wrap,
			    bool &instant,
			    bool &onlysel)
{
	// data is of the form
	// "<search>
	//  <casesensitive> <matchword> <forward> <wrap> <instant> <onlysel>"
	docstring search;
	docstring howto = split(argument, search, '\n');

	casesensitive = parse_bool(howto);
	matchword = parse_bool(howto);
	forward = parse_bool(howto, true);
	wrap = parse_bool(howto);
	instant = parse_bool(howto);
	onlysel = parse_bool(howto);

	return search;
}


bool lyxfind(BufferView * bv, FuncRequest const & ev)
{
	if (!bv || ev.action() != LFUN_WORD_FIND)
		return false;

	//lyxerr << "find called, cmd: " << ev << endl;
	bool casesensitive;
	bool matchword;
	bool forward;
	bool wrap;
	bool instant;
	bool onlysel;

	docstring search = string2find(ev.argument(), casesensitive,
				       matchword, forward, wrap, instant, onlysel);

	return findOne(bv, search, casesensitive, matchword, forward,
		       false, true, wrap, instant, onlysel);
}


bool lyxreplace(BufferView * bv, FuncRequest const & ev)
{
	if (!bv || ev.action() != LFUN_WORD_REPLACE)
		return false;

	// data is of the form
	// "<replace>
	//  <search>
	//  <casesensitive> <matchword> <all> <forward> <findnext> <wrap> <onlysel>"
	docstring search;
	docstring rplc;
	docstring howto = split(ev.argument(), rplc, '\n');
	howto = split(howto, search, '\n');

	bool casesensitive = parse_bool(howto);
	bool matchword = parse_bool(howto);
	bool all = parse_bool(howto);
	bool forward = parse_bool(howto, true);
	bool findnext = parse_bool(howto, true);
	bool wrap = parse_bool(howto);
	bool onlysel = parse_bool(howto);

	if (!bv->cursor().selection())
		// replacing only within the selection makes sense only if there is one
		onlysel = false;

	bool update = false;

	int replace_count = 0;
	if (all) {
		replace_count = replaceAll(bv, search, rplc, casesensitive,
					   matchword, onlysel);
		update = replace_count > 0;
	} else {
		pair<bool, int> rv =
			replaceOne(bv, search, rplc, casesensitive, matchword,
				   forward, findnext, wrap, onlysel);
		update = rv.first;
		replace_count = rv.second;
	}

	Buffer const & buf = bv->buffer();
	if (!update) {
		// emit message signal.
		if (onlysel)
			buf.message(_("String not found in selection."));
		else
			buf.message(_("String not found."));
	} else {
		if (replace_count == 0) {
			buf.message(_("String found."));
		} else if (replace_count == 1) {
			buf.message(_("String has been replaced."));
		} else {
			docstring const str = onlysel
				? bformat(_("%1$d strings have been replaced in the selection."), replace_count)
				: bformat(_("%1$d strings have been replaced."), replace_count);
			buf.message(str);
		}
	}
	return update;
}


bool findNextChange(BufferView * bv, Cursor & cur, bool const check_wrap)
{
	for (; cur; cur.forwardPos())
		if (cur.inTexted() && cur.paragraph().isChanged(cur.pos()))
			return true;

	if (check_wrap) {
		DocIterator cur_orig(bv->cursor());
		docstring q = _("End of file reached while searching forward.\n"
				"Continue searching from the beginning?");
		int wrap_answer = frontend::Alert::prompt(_("Wrap search?"),
							  q, 0, 1, _("&Yes"), _("&No"));
		if (wrap_answer == 0) {
			bv->cursor().clear();
			bv->cursor().push_back(CursorSlice(bv->buffer().inset()));
			bv->clearSelection();
			cur.setCursor(bv->cursor().selectionBegin());
			if (findNextChange(bv, cur, false))
				return true;
		}
		bv->cursor().setCursor(cur_orig);
	}

	return false;
}


bool findPreviousChange(BufferView * bv, Cursor & cur, bool const check_wrap)
{
	for (cur.backwardPos(); cur; cur.backwardPos()) {
		if (cur.inTexted() && cur.paragraph().isChanged(cur.pos()))
			return true;
	}

	if (check_wrap) {
		DocIterator cur_orig(bv->cursor());
		docstring q = _("Beginning of file reached while searching backward.\n"
				"Continue searching from the end?");
		int wrap_answer = frontend::Alert::prompt(_("Wrap search?"),
							  q, 0, 1, _("&Yes"), _("&No"));
		if (wrap_answer == 0) {
			bv->cursor().setCursor(doc_iterator_end(&bv->buffer()));
			bv->cursor().backwardPos();
			bv->clearSelection();
			cur.setCursor(bv->cursor().selectionBegin());
			if (findPreviousChange(bv, cur, false))
				return true;
		}
		bv->cursor().setCursor(cur_orig);
	}

	return false;
}


bool selectChange(Cursor & cur, bool forward)
{
	if (!cur.inTexted() || !cur.paragraph().isChanged(cur.pos()))
		return false;
	Change ch = cur.paragraph().lookupChange(cur.pos());

	CursorSlice tip1 = cur.top();
	for (; tip1.pit() < tip1.lastpit() || tip1.pos() < tip1.lastpos(); tip1.forwardPos()) {
		Change ch2 = tip1.paragraph().lookupChange(tip1.pos());
		if (!ch2.isSimilarTo(ch))
			break;
	}
	CursorSlice tip2 = cur.top();
	for (; tip2.pit() > 0 || tip2.pos() > 0;) {
		tip2.backwardPos();
		Change ch2 = tip2.paragraph().lookupChange(tip2.pos());
		if (!ch2.isSimilarTo(ch)) {
			// take a step forward to correctly set the selection
			tip2.forwardPos();
			break;
		}
	}
	if (forward)
		swap(tip1, tip2);
	cur.top() = tip1;
	cur.bv().mouseSetCursor(cur, false);
	cur.top() = tip2;
	cur.bv().mouseSetCursor(cur, true);
	return true;
}


namespace {


bool findChange(BufferView * bv, bool forward)
{
	Cursor cur(*bv);
	cur.setCursor(forward ? bv->cursor().selectionEnd()
		      : bv->cursor().selectionBegin());
	forward ? findNextChange(bv, cur, true) : findPreviousChange(bv, cur, true);
	return selectChange(cur, forward);
}

} // namespace


bool findNextChange(BufferView * bv)
{
	return findChange(bv, true);
}


bool findPreviousChange(BufferView * bv)
{
	return findChange(bv, false);
}


namespace {

typedef vector<pair<string, string> > Escapes;


string string2regex(string in)
{
	static std::regex specialChars { R"([-[\]{}()*+?.,\^$|#\s\$\\])" };
	string temp = std::regex_replace(in, specialChars, R"(\$&)" );
	string temp2("");
	size_t lastpos = 0;
	size_t fl_pos = 0;
	int offset = 1;
	while (fl_pos < temp.size()) {
		fl_pos = temp.find("\\\\foreignlanguage", lastpos + offset);
		if (fl_pos == string::npos)
			break;
		offset = 16;
		temp2 += temp.substr(lastpos, fl_pos - lastpos);
		temp2 += "\\n";
		lastpos = fl_pos;
	}
	if (lastpos == 0)
		return temp;
	if (lastpos < temp.size()) {
		temp2 += temp.substr(lastpos, temp.size() - lastpos);
	}
	return temp2;
}
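
// For instance, the plain text a.b+c comes out as a\.b\+c, so that regex
// metacharacters typed by the user are matched literally; in addition, a
// "\n" pattern is inserted before each escaped \foreignlanguage token found.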


string correctRegex(string t, bool withformat)
{
	/* Convert \backslash => \
	 * and \{, \}, \[, \] => {, }, [, ]
	 */
	string s("");
	regex wordre("(\\\\)*(\\\\((backslash|mathcircumflex) ?|[\\[\\]\\{\\}]))");
	size_t lastpos = 0;
	smatch sub;
	bool backslashed = false;
	for (sregex_iterator it(t.begin(), t.end(), wordre), end; it != end; ++it) {
		sub = *it;
		string replace;
		if ((sub.position(2) - sub.position(0)) % 2 == 1) {
			continue;
		}
		else {
			if (sub.str(4) == "backslash") {
				replace = "\\";
				if (withformat) {
					// transforms '\backslash \{' into '\{'
					// and '\{' into '{'
					string next = t.substr(sub.position(2) + sub.str(2).length(), 2);
					if ((next == "\\{") || (next == "\\}")) {
						replace = "";
						backslashed = true;
					}
				}
			}
			else if (sub.str(4) == "mathcircumflex")
				replace = "^";
			else if (backslashed) {
				backslashed = false;
				if (withformat && (sub.str(3) == "{"))
					replace = accents["braceleft"];
				else if (withformat && (sub.str(3) == "}"))
					replace = accents["braceright"];
				else {
					// else part should not exist
					LASSERT(1, /**/);
				}
			}
			else
				replace = sub.str(3);
		}
		if (lastpos < (size_t) sub.position(2))
			s += t.substr(lastpos, sub.position(2) - lastpos);
		s += replace;
		lastpos = sub.position(2) + sub.length(2);
	}
	if (lastpos == 0)
		return t;
	else if (lastpos < t.length())
		s += t.substr(lastpos, t.length() - lastpos);
	return s;
}
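
// With withformat == false this turns, e.g., the latexified snippet
// a\backslash b into a\b and a\{b\} into a{b}, restoring the regex
// metacharacters the user wrote inside \regexp{}.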


/// Within \regexp{} apply get_lyx_unescapes() only (i.e., preserve regexp semantics of the string),
/// while outside apply get_lyx_unescapes()+get_regexp_escapes().
/// If match_latex is true, then apply regexp_latex_escapes() to \regexp{} contents as well.
string escape_for_regex(string s, bool withformat)
{
	size_t lastpos = 0;
	string result = "";
	while (lastpos < s.size()) {
		size_t regex_pos = s.find("\\regexp{", lastpos);
		if (regex_pos == string::npos) {
			regex_pos = s.size();
		}
		if (regex_pos > lastpos) {
			result += string2regex(s.substr(lastpos, regex_pos-lastpos));
			lastpos = regex_pos;
			if (lastpos == s.size())
				break;
		}
		size_t end_pos = s.find("\\endregexp{}}", regex_pos + 8);
		result += correctRegex(s.substr(regex_pos + 8, end_pos -(regex_pos + 8)), withformat);
		lastpos = end_pos + 13;
	}
	return result;
}
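
// In short: text outside \regexp{}...\endregexp{}} is escaped literally via
// string2regex(), while the contents of a \regexp{} inset are only passed
// through correctRegex(), so they keep their regular-expression meaning.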


/// Wrapper for lyx::regex_replace with simpler interface
bool regex_replace(string const & s, string & t, string const & searchstr,
		   string const & replacestr)
{
	regex e(searchstr, regex_constants::ECMAScript);
	ostringstream oss;
	ostream_iterator<char, char> it(oss);
	regex_replace(it, s.begin(), s.end(), e, replacestr);
	// tolerate t and s be references to the same variable
	bool rv = (s != oss.str());
	t = oss.str();
	return rv;
}
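
// Usage pattern: regex_replace("abc", t, "b", "x") stores "axc" in t and
// returns true; if the pattern does not match, t becomes a copy of s and
// the return value is false.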


class MatchResult {
public:
	enum range {
		newIsTooFar,
		newIsBetter,
		newIsInvalid
	};
	int match_len;
	int match_prefix;
	int match2end;
	int pos;
	int leadsize;
	int pos_len;
	int searched_size;
	vector <string> result = vector <string>();
	MatchResult(int len = 0) : match_len(len), match_prefix(0), match2end(0),
		pos(0), leadsize(0), pos_len(-1), searched_size(0) {}
};


static MatchResult::range interpretMatch(MatchResult &oldres, MatchResult &newres)
{
	if (newres.match2end < oldres.match2end)
		return MatchResult::newIsTooFar;
	if (newres.match_len < oldres.match_len)
		return MatchResult::newIsTooFar;

	if (newres.match_len == oldres.match_len) {
		if (newres.match2end == oldres.match2end)
			return MatchResult::newIsBetter;
	}
	return MatchResult::newIsInvalid;
}


/** The class performing a match between a position in the document and the FindAdvOptions.
 **/
class MatchStringAdv {
public:
	MatchStringAdv(lyx::Buffer & buf, FindAndReplaceOptions & opt);

	/** Tests if text starting at the supplied position matches with the one provided to the MatchStringAdv
	 ** constructor as opt.search, under the opt.* options settings.
	 **
	 ** @param at_begin
	 **	If set, then match is searched only against beginning of text starting at cur.
	 **	If unset, then match is searched anywhere in text starting at cur.
	 **
	 ** @return
	 ** The length of the matching text, or zero if no match was found.
	 **/
	MatchResult operator()(DocIterator const & cur, int len = -1, bool at_begin = true) const;
#if QTSEARCH
	bool regexIsValid;
	string regexError;
#endif

public:
	/// buffer
	lyx::Buffer * p_buf;
	/// first buffer on which search was started
	lyx::Buffer * const p_first_buf;
	/// options
	FindAndReplaceOptions const & opt;

private:
	/// Auxiliary find method (does not account for opt.matchword)
	MatchResult findAux(DocIterator const & cur, int len = -1, bool at_begin = true) const;
	void CreateRegexp(FindAndReplaceOptions const & opt, string regexp_str, string regexp2_str, string par_as_string = "");

	/** Normalize a stringified or latexified LyX paragraph.
	 **
	 ** Normalize means:
	 ** <ul>
	 **   <li>if search is not casesensitive, then lowercase the string;
	 **   <li>remove any newline at begin or end of the string;
	 **   <li>replace any newline in the middle of the string with a simple space;
	 **   <li>remove stale empty styles and environments, like \emph{} and \textbf{}.
	 ** </ul>
	 **
	 ** @todo Normalization should also expand macros, if the corresponding
	 ** search option was checked.
	 **/
	string normalize(docstring const & s) const;
	// normalized string to search
	string par_as_string;
	// regular expression to use for searching
	// regexp2 is same as regexp, but prefixed with a ".*?"
#if QTSEARCH
	QRegularExpression regexp;
	QRegularExpression regexp2;
#else
	regex regexp;
	regex regexp2;
#endif
	// leading format material as string
	string lead_as_string;
	// par_as_string after removal of lead_as_string
	string par_as_string_nolead;
	// unmatched open braces in the search string/regexp
	int open_braces;
	// number of (.*?) subexpressions added at end of search regexp for closing
	// environments, math mode, styles, etc...
	int close_wildcards;
public:
	// Are we searching with regular expressions ?
	bool use_regexp;
	static int valid_matches;
	static vector <string> matches;
	void FillResults(MatchResult &found_mr);
};

int MatchStringAdv::valid_matches = 0;
vector <string> MatchStringAdv::matches = vector <string>(10);

void MatchStringAdv::FillResults(MatchResult &found_mr)
{
	if (found_mr.match_len > 0) {
		valid_matches = found_mr.result.size();
		for (size_t i = 0; i < found_mr.result.size(); i++)
			matches[i] = found_mr.result[i];
	}
	else
		valid_matches = 0;
}


static docstring buffer_to_latex(Buffer & buffer)
{
	//OutputParams runparams(&buffer.params().encoding());
	OutputParams runparams(encodings.fromLyXName("utf8"));
	odocstringstream ods;
	otexstream os(ods);
	runparams.nice = true;
	runparams.flavor = Flavor::XeTeX;
	runparams.linelen = 10000; //lyxrc.plaintext_linelen;
	// No side effect of file copying and image conversion
	runparams.dryrun = true;
	if (ignoreFormats.getDeleted())
		runparams.for_searchAdv = OutputParams::SearchWithoutDeleted;
	else
		runparams.for_searchAdv = OutputParams::SearchWithDeleted;
	pit_type const endpit = buffer.paragraphs().size();
	for (pit_type pit = 0; pit != endpit; ++pit) {
		TeXOnePar(buffer, buffer.text(), pit, os, runparams);
		LYXERR(Debug::FIND, "searchString up to here: " << ods.str());
	}
	return ods.str();
}


static docstring stringifySearchBuffer(Buffer & buffer, FindAndReplaceOptions const & opt)
{
	docstring str;
	if (!opt.ignoreformat) {
		str = buffer_to_latex(buffer);
	} else {
		// OutputParams runparams(&buffer.params().encoding());
		OutputParams runparams(encodings.fromLyXName("utf8"));
		runparams.nice = true;
		runparams.flavor = Flavor::XeTeX;
		runparams.linelen = 10000; //lyxrc.plaintext_linelen;
		runparams.dryrun = true;
		int option = AS_STR_INSETS | AS_STR_PLAINTEXT;
		if (ignoreFormats.getDeleted()) {
			option |= AS_STR_SKIPDELETE;
			runparams.for_searchAdv = OutputParams::SearchWithoutDeleted;
		}
		else {
			runparams.for_searchAdv = OutputParams::SearchWithDeleted;
		}
		for (pos_type pit = pos_type(0); pit < (pos_type)buffer.paragraphs().size(); ++pit) {
			Paragraph const & par = buffer.paragraphs().at(pit);
			LYXERR(Debug::FIND, "Adding to search string: '"
			       << par.asString(pos_type(0), par.size(),
					       option,
					       &runparams)
			       << "'");
			str += par.asString(pos_type(0), par.size(),
					    option,
					    &runparams);
		}
		// Even in ignore-format we have to remove "\text{}, \lyxmathsym{}" parts
		string t = to_utf8(str);
		while (regex_replace(t, t, "\\\\(text|lyxmathsym|ensuremath)\\{([^\\}]*)\\}", "$2"));
		str = from_utf8(t);
	}
	return str;
}


/// Return separation pos between the leading material and the rest
static size_t identifyLeading(string const & s)
{
	string t = s;
	// @TODO Support \item[text]
	// Kornel: Added textsl, textsf, textit, texttt and noun
	// + allow to search for colored text too
	while (regex_replace(t, t, "^\\\\(("
			     "(author|title|subtitle|subject|publishers|dedication|uppertitleback|lowertitleback|extratitle|"
			     "lyxaddress|lyxrightaddress|"
			     "footnotesize|tiny|scriptsize|small|large|Large|LARGE|huge|Huge|"
			     "emph|noun|minisec|text(bf|md|sl|sf|it|tt))|"
			     "((textcolor|foreignlanguage|latexenvironment)\\{[a-z]+\\*?\\})|"
			     "(u|uu)line|(s|x)out|uwave)|((sub)?(((sub)?section)|paragraph)|part|chapter)\\*?)\\{", "")
	       || regex_replace(t, t, "^\\$", "")
	       || regex_replace(t, t, "^\\\\\\[", "")
	       || regex_replace(t, t, "^ ?\\\\item\\{[a-z]+\\}", "")
	       || regex_replace(t, t, "^\\\\begin\\{[a-zA-Z_]*\\*?\\}", ""))
		;
	LYXERR(Debug::FIND, "  after removing leading $, \\[ , \\emph{, \\textbf{, etc.: '" << t << "'");
	return s.find(t);
}
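
// For example, for the latexified string "\emph{foo" the loop strips the
// leading "\emph{" from the working copy, and the returned separation
// position is 6, i.e. the length of the leading format material.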
|
|
|
|
|
|
2018-10-06 21:50:50 +00:00
|
|
|
|
/*
|
|
|
|
|
* Given a latexified string, retrieve some handled features
|
|
|
|
|
* The features of the regex will later be compared with the features
|
|
|
|
|
* of the searched text. If the regex features are not a
|
|
|
|
|
* subset of the analized, then, in not format ignoring search
|
|
|
|
|
* we can early stop the search in the relevant inset.
|
|
|
|
|
*/
|
2018-10-05 18:26:44 +00:00
|
|
|
|
typedef map<string, bool> Features;
|
|
|
|
|
|
|
|
|
|
static Features identifyFeatures(string const & s)
|
|
|
|
|
{
|
2019-02-20 13:14:50 +00:00
|
|
|
|
static regex const feature("\\\\(([a-zA-Z]+(\\{([a-z]+\\*?)\\}|\\*)?))\\{");
|
2019-02-19 22:11:09 +00:00
|
|
|
|
static regex const valid("^("
|
|
|
|
|
"("
|
|
|
|
|
"(footnotesize|tiny|scriptsize|small|large|Large|LARGE|huge|Huge|"
|
|
|
|
|
"emph|noun|text(bf|md|sl|sf|it|tt)|"
|
2019-02-20 13:14:50 +00:00
|
|
|
|
"(textcolor|foreignlanguage|item|listitem|latexenvironment)\\{[a-z]+\\*?\\})|"
|
2019-02-19 22:11:09 +00:00
|
|
|
|
"(u|uu)line|(s|x)out|uwave|"
|
|
|
|
|
"(sub|extra)?title|author|subject|publishers|dedication|(upper|lower)titleback|lyx(right)?address)|"
|
|
|
|
|
"((sub)?(((sub)?section)|paragraph)|part|chapter|lyxslide)\\*?)$");
|
2018-10-05 18:26:44 +00:00
|
|
|
|
smatch sub;
|
|
|
|
|
bool displ = true;
|
|
|
|
|
Features info;
|
|
|
|
|
|
|
|
|
|
for (sregex_iterator it(s.begin(), s.end(), feature), end; it != end; ++it) {
|
|
|
|
|
sub = *it;
|
|
|
|
|
if (displ) {
|
|
|
|
|
if (sub.str(1).compare("regexp") == 0) {
|
|
|
|
|
displ = false;
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
string token = sub.str(1);
|
|
|
|
|
smatch sub2;
|
|
|
|
|
if (regex_match(token, sub2, valid)) {
|
|
|
|
|
info[token] = true;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
// ignore
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
if (sub.str(1).compare("endregexp") == 0) {
|
|
|
|
|
displ = true;
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return info;
|
2018-10-05 18:26:44 +00:00
|
|
|
|
}
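// Illustrative example (not from the original sources):
// identifyFeatures("\emph{Hello \textbf{World}}") returns a map with the
// entries "emph" and "textbf" set to true; tokens between \regexp{ and
// \endregexp{ are skipped.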
|
|
|
|
|
|
2018-10-18 15:37:15 +00:00
|
|
|
|
/*
|
|
|
|
|
 * Defines the values/features of a key matching "\\[a-z]+{"
|
|
|
|
|
*/
|
|
|
|
|
class KeyInfo {
|
2018-10-14 18:39:13 +00:00
|
|
|
|
public:
|
2018-10-18 15:37:15 +00:00
|
|
|
|
enum KeyType {
|
2018-12-14 15:02:33 +00:00
|
|
|
|
/* Char type with content discarded
|
|
|
|
|
* like \hspace{1cm} */
|
|
|
|
|
noContent,
|
|
|
|
|
/* Char, like \backslash */
|
2018-10-18 15:37:15 +00:00
|
|
|
|
isChar,
|
2021-01-02 17:37:14 +00:00
|
|
|
|
/* replace starting backslash with '#' */
|
2020-12-31 15:53:46 +00:00
|
|
|
|
isText,
|
2018-12-14 15:02:33 +00:00
|
|
|
|
/* \part, \section*, ... */
|
2018-10-20 10:47:37 +00:00
|
|
|
|
isSectioning,
|
2019-02-19 22:11:09 +00:00
|
|
|
|
/* title, author etc */
|
|
|
|
|
isTitle,
|
2018-12-14 15:02:33 +00:00
|
|
|
|
/* \foreignlanguage{ngerman}, ... */
|
|
|
|
|
isMain,
|
2019-02-19 22:11:09 +00:00
|
|
|
|
/* inside \code{}
|
2018-12-14 15:02:33 +00:00
|
|
|
|
* to discard language in content */
|
|
|
|
|
noMain,
|
2018-10-18 15:37:15 +00:00
|
|
|
|
isRegex,
|
2018-12-14 15:02:33 +00:00
|
|
|
|
/* \begin{eqnarray}...\end{eqnarray}, ... $...$ */
|
2018-10-22 18:19:36 +00:00
|
|
|
|
isMath,
|
2018-12-14 15:02:33 +00:00
|
|
|
|
/* fonts, colors, markups, ... */
|
2018-10-18 15:37:15 +00:00
|
|
|
|
isStandard,
|
2018-12-14 15:02:33 +00:00
|
|
|
|
/* footnotesize, ... large, ...
|
|
|
|
|
* Ignore all of them */
|
2018-10-22 18:19:36 +00:00
|
|
|
|
isSize,
|
2018-10-18 15:37:15 +00:00
|
|
|
|
invalid,
|
2019-02-21 13:45:41 +00:00
|
|
|
|
/* inputencoding, ...
|
2018-12-14 15:02:33 +00:00
|
|
|
|
 * Also discard the content, because it does not help in the search */
|
2018-10-18 15:37:15 +00:00
|
|
|
|
doRemove,
|
2019-02-07 12:35:47 +00:00
|
|
|
|
/* twocolumns, ...
|
|
|
|
|
 * like doRemove, but also remove all arguments */
|
|
|
|
|
removeWithArg,
|
2019-02-17 23:40:55 +00:00
|
|
|
|
/* item, listitem */
|
2018-11-03 10:15:12 +00:00
|
|
|
|
isList,
|
2018-12-14 15:02:33 +00:00
|
|
|
|
/* tex, latex, ... like isChar */
|
2019-02-05 07:04:47 +00:00
|
|
|
|
isIgnored,
|
|
|
|
|
/* like \lettrine[lines=5]{}{} */
|
|
|
|
|
cleanToStart,
|
2020-12-14 19:43:39 +00:00
|
|
|
|
// like isStandard, but always remove head
|
|
|
|
|
headRemove,
|
2019-02-07 12:35:47 +00:00
|
|
|
|
/* End of arguments marker for lettrine,
|
|
|
|
|
* so that they can be ignored */
|
2019-02-05 07:04:47 +00:00
|
|
|
|
endArguments
|
2018-10-18 15:37:15 +00:00
|
|
|
|
};
|
2020-11-01 11:34:49 +00:00
|
|
|
|
KeyInfo() = default;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
KeyInfo(KeyType type, int parcount, bool disable)
|
2018-10-18 15:37:15 +00:00
|
|
|
|
: keytype(type),
|
2018-10-22 18:19:36 +00:00
|
|
|
|
parenthesiscount(parcount),
|
2020-11-01 11:34:49 +00:00
|
|
|
|
disabled(disable) {}
|
|
|
|
|
KeyType keytype = invalid;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
string head;
|
2020-11-01 11:34:49 +00:00
|
|
|
|
int _tokensize = -1;
|
|
|
|
|
int _tokenstart = -1;
|
|
|
|
|
int _dataStart = -1;
|
|
|
|
|
int _dataEnd = -1;
|
|
|
|
|
int parenthesiscount = 1;
|
|
|
|
|
bool disabled = false;
|
|
|
|
|
bool used = false; /* by pattern */
|
2018-10-14 18:39:13 +00:00
|
|
|
|
};
|
2018-10-12 14:47:07 +00:00
|
|
|
|
|
2018-10-19 17:11:20 +00:00
|
|
|
|
class Border {
|
|
|
|
|
public:
|
|
|
|
|
Border(int l=0, int u=0) : low(l), upper(u) {};
|
|
|
|
|
int low;
|
|
|
|
|
int upper;
|
|
|
|
|
};
|
|
|
|
|
|
2018-10-18 15:37:15 +00:00
|
|
|
|
#define MAXOPENED 30
|
|
|
|
|
class Intervall {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
bool isPatternString_;
|
2019-03-20 16:25:25 +00:00
|
|
|
|
public:
|
|
|
|
|
explicit Intervall(bool isPattern, string const & p) :
|
2019-12-29 16:40:13 +00:00
|
|
|
|
isPatternString_(isPattern), par(p), ignoreidx(-1), actualdeptindex(0),
|
|
|
|
|
hasTitle(false), langcount(0)
|
2019-03-20 16:25:25 +00:00
|
|
|
|
{
|
|
|
|
|
depts[0] = 0;
|
|
|
|
|
closes[0] = 0;
|
|
|
|
|
}
|
|
|
|
|
|
2018-10-18 15:37:15 +00:00
|
|
|
|
string par;
|
|
|
|
|
int ignoreidx;
|
2019-03-20 22:20:13 +00:00
|
|
|
|
static vector<Border> borders;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
int depts[MAXOPENED];
|
|
|
|
|
int closes[MAXOPENED];
|
|
|
|
|
int actualdeptindex;
|
2020-11-25 23:11:07 +00:00
|
|
|
|
int previousNotIgnored(int) const;
|
|
|
|
|
int nextNotIgnored(int) const;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
void handleOpenP(int i);
|
|
|
|
|
void handleCloseP(int i, bool closingAllowed);
|
|
|
|
|
void resetOpenedP(int openPos);
|
|
|
|
|
void addIntervall(int upper);
|
|
|
|
|
void addIntervall(int low, int upper); /* if explicit */
|
2019-03-02 14:42:38 +00:00
|
|
|
|
void removeAccents();
|
2020-11-25 23:11:07 +00:00
|
|
|
|
void setForDefaultLang(KeyInfo const & defLang) const;
|
2019-02-17 23:40:55 +00:00
|
|
|
|
int findclosing(int start, int end, char up, char down, int repeat);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
void handleParentheses(int lastpos, bool closingAllowed);
|
2019-02-19 22:11:09 +00:00
|
|
|
|
bool hasTitle;
|
2019-12-29 16:40:13 +00:00
|
|
|
|
int langcount; // Number of disabled language specs up to current position in actual interval
|
2020-11-25 23:11:07 +00:00
|
|
|
|
int isOpeningPar(int pos) const;
|
2019-02-19 22:11:09 +00:00
|
|
|
|
string titleValue;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
void output(ostringstream &os, int lastpos);
|
|
|
|
|
// string show(int lastpos);
|
2018-10-12 14:47:07 +00:00
|
|
|
|
};
|
|
|
|
|
|
2019-03-20 22:20:13 +00:00
|
|
|
|
vector<Border> Intervall::borders = vector<Border>(30);
|
|
|
|
|
|
2020-11-25 23:11:07 +00:00
|
|
|
|
int Intervall::isOpeningPar(int pos) const
|
2019-02-23 12:11:34 +00:00
|
|
|
|
{
|
|
|
|
|
if ((pos < 0) || (size_t(pos) >= par.size()))
|
|
|
|
|
return 0;
|
|
|
|
|
if (par[pos] != '{')
|
|
|
|
|
return 0;
|
|
|
|
|
if (size_t(pos) + 2 >= par.size())
|
|
|
|
|
return 1;
|
|
|
|
|
if (par[pos+2] != '}')
|
|
|
|
|
return 1;
|
|
|
|
|
if (par[pos+1] == '[' || par[pos+1] == ']')
|
|
|
|
|
return 3;
|
|
|
|
|
return 1;
|
|
|
|
|
}
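// Illustrative example (not from the original sources): isOpeningPar()
// returns 3 for a brace protecting a single bracket, as in "{[}" or "{]}",
// telling the caller to skip all three characters; a plain '{' yields 1.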
|
|
|
|
|
|
2020-11-25 23:11:07 +00:00
|
|
|
|
void Intervall::setForDefaultLang(KeyInfo const & defLang) const
|
2018-10-12 14:47:07 +00:00
|
|
|
|
{
|
2018-10-18 15:37:15 +00:00
|
|
|
|
// Enable the use of first token again
|
|
|
|
|
if (ignoreidx >= 0) {
|
2019-02-21 13:45:41 +00:00
|
|
|
|
int value = defLang._tokenstart + defLang._tokensize;
|
2020-12-14 19:43:39 +00:00
|
|
|
|
int borderidx = 0;
|
|
|
|
|
if (hasTitle) {
|
|
|
|
|
borderidx = 1;
|
|
|
|
|
}
|
2019-02-21 13:45:41 +00:00
|
|
|
|
if (value > 0) {
|
2020-12-14 19:43:39 +00:00
|
|
|
|
if (borders[borderidx].low < value)
|
|
|
|
|
borders[borderidx].low = value;
|
|
|
|
|
if (borders[borderidx].upper < value)
|
|
|
|
|
borders[borderidx].upper = value;
|
2019-02-21 13:45:41 +00:00
|
|
|
|
}
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2018-10-18 15:37:15 +00:00
|
|
|
|
static void checkDepthIndex(int val)
|
2018-10-13 19:02:53 +00:00
|
|
|
|
{
|
2018-10-18 15:37:15 +00:00
|
|
|
|
static int maxdepthidx = MAXOPENED-2;
|
2019-03-18 17:19:44 +00:00
|
|
|
|
static int lastmaxdepth = 0;
|
|
|
|
|
if (val > lastmaxdepth) {
|
2020-05-29 06:44:56 +00:00
|
|
|
|
LYXERR(Debug::INFO, "Depth reached " << val);
|
2019-03-18 17:19:44 +00:00
|
|
|
|
lastmaxdepth = val;
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
if (val > maxdepthidx) {
|
|
|
|
|
maxdepthidx = val;
|
2020-05-29 06:44:56 +00:00
|
|
|
|
LYXERR(Debug::INFO, "maxdepthidx now " << val);
|
2018-10-13 19:02:53 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-03-18 17:19:44 +00:00
|
|
|
|
#if 0
|
|
|
|
|
// Not needed, because borders are now dynamically expanded
|
2018-10-18 15:37:15 +00:00
|
|
|
|
static void checkIgnoreIdx(int val)
|
2018-10-12 14:47:07 +00:00
|
|
|
|
{
|
2019-03-18 17:19:44 +00:00
|
|
|
|
static int lastmaxignore = -1;
|
|
|
|
|
if ((lastmaxignore < val) && (size_t(val+1) >= borders.size())) {
|
2020-05-29 06:44:56 +00:00
|
|
|
|
LYXERR(Debug::INFO, "IgnoreIdx reached " << val);
|
2019-03-18 17:19:44 +00:00
|
|
|
|
lastmaxignore = val;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2019-03-18 17:19:44 +00:00
|
|
|
|
#endif
|
2018-10-12 14:47:07 +00:00
|
|
|
|
|
2018-10-13 19:02:53 +00:00
|
|
|
|
/*
|
|
|
|
|
* Expand the region of ignored parts of the input latex string
|
|
|
|
|
* The region is only relevant in output()
|
|
|
|
|
*/
|
2018-10-18 15:37:15 +00:00
|
|
|
|
void Intervall::addIntervall(int low, int upper)
|
2018-10-12 14:47:07 +00:00
|
|
|
|
{
|
|
|
|
|
int idx;
|
|
|
|
|
if (low == upper) return;
|
|
|
|
|
for (idx = ignoreidx+1; idx > 0; --idx) {
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (low > borders[idx-1].upper) {
|
2018-10-12 14:47:07 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
2018-10-19 17:11:20 +00:00
|
|
|
|
Border br(low, upper);
|
2018-10-12 14:47:07 +00:00
|
|
|
|
if (idx > ignoreidx) {
|
2019-03-18 17:19:44 +00:00
|
|
|
|
if (borders.size() <= size_t(idx)) {
|
|
|
|
|
borders.push_back(br);
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
borders[idx] = br;
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
ignoreidx = idx;
|
2019-03-18 17:19:44 +00:00
|
|
|
|
// checkIgnoreIdx(ignoreidx);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
return;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
// Expand only if one of the new bounds is inside the interval
|
2018-10-19 17:11:20 +00:00
|
|
|
|
// We know here that br.low > borders[idx-1].upper
|
|
|
|
|
if (br.upper < borders[idx].low) {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
// We have to insert at this pos
|
2019-03-18 17:19:44 +00:00
|
|
|
|
if (size_t(ignoreidx+1) >= borders.size()) {
|
|
|
|
|
borders.push_back(borders[ignoreidx]);
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
borders[ignoreidx+1] = borders[ignoreidx];
|
|
|
|
|
}
|
|
|
|
|
for (int i = ignoreidx; i > idx; --i) {
|
2018-10-19 17:11:20 +00:00
|
|
|
|
borders[i] = borders[i-1];
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
2018-10-19 17:11:20 +00:00
|
|
|
|
borders[idx] = br;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
ignoreidx += 1;
|
2019-03-18 17:19:44 +00:00
|
|
|
|
// checkIgnoreIdx(ignoreidx);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
// Here we know that we are overlapping
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (br.low > borders[idx].low)
|
|
|
|
|
br.low = borders[idx].low;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
// check what has to be concatenated
|
|
|
|
|
int count = 0;
|
|
|
|
|
for (int i = idx; i <= ignoreidx; i++) {
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (br.upper >= borders[i].low) {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
count++;
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (br.upper < borders[i].upper)
|
|
|
|
|
br.upper = borders[i].upper;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
// count should be >= 1 here
|
2018-10-19 17:11:20 +00:00
|
|
|
|
borders[idx] = br;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
if (count > 1) {
|
|
|
|
|
for (int i = idx + count; i <= ignoreidx; i++) {
|
2018-10-19 17:11:20 +00:00
|
|
|
|
borders[i-count+1] = borders[i];
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
ignoreidx -= count - 1;
|
|
|
|
|
return;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
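// Illustrative example (not from the original sources): with existing ignore
// regions (5,8) and (12,15), calling addIntervall(7, 13) merges everything
// into the single region (5,15).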
|
|
|
|
|
|
2019-03-02 21:00:20 +00:00
|
|
|
|
static void buildaccent(string n, string param, string values)
|
2019-03-02 14:42:38 +00:00
|
|
|
|
{
|
2019-03-02 21:00:20 +00:00
|
|
|
|
stringstream s(n);
|
|
|
|
|
string name;
|
|
|
|
|
const char delim = '|';
|
|
|
|
|
while (getline(s, name, delim)) {
|
|
|
|
|
size_t start = 0;
|
2020-10-09 06:04:20 +00:00
|
|
|
|
for (char c : param) {
|
|
|
|
|
string key = name + "{" + c + "}";
|
2019-03-02 21:00:20 +00:00
|
|
|
|
// get the corresponding utf8-value
|
|
|
|
|
if ((values[start] & 0xc0) != 0xc0) {
|
2019-03-04 13:05:44 +00:00
|
|
|
|
// should not happen, utf8 multi-byte sequences start with 11xxxxxx
|
2019-03-18 17:19:44 +00:00
|
|
|
|
// but value for '\dot{i}' is 'i', which is ascii
|
|
|
|
|
if ((values[start] & 0x80) == 0) {
|
|
|
|
|
// is ascii
|
|
|
|
|
accents[key] = values.substr(start, 1);
|
2020-05-29 06:44:56 +00:00
|
|
|
|
// LYXERR(Debug::INFO, "" << key << "=" << accents[key]);
|
2019-03-18 17:19:44 +00:00
|
|
|
|
}
|
|
|
|
|
start++;
|
|
|
|
|
continue;
|
2019-03-02 21:00:20 +00:00
|
|
|
|
}
|
|
|
|
|
for (int j = 1; ;j++) {
|
2019-03-04 13:05:44 +00:00
|
|
|
|
if (start + j >= values.size()) {
|
|
|
|
|
accents[key] = values.substr(start, j);
|
|
|
|
|
start = values.size() - 1;
|
|
|
|
|
break;
|
|
|
|
|
}
|
2019-03-09 23:29:56 +00:00
|
|
|
|
else if ((values[start+j] & 0xc0) != 0x80) {
|
2019-03-04 13:05:44 +00:00
|
|
|
|
// This is the first byte of following utf8 char
|
|
|
|
|
accents[key] = values.substr(start, j);
|
|
|
|
|
start += j;
|
2020-05-29 06:44:56 +00:00
|
|
|
|
// LYXERR(Debug::INFO, "" << key << "=" << accents[key]);
|
2019-03-04 13:05:44 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
2019-03-02 14:42:38 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
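// Illustrative example (not from the original sources):
// buildaccent("dacute|H|h", "oO", "őŐ") would create the keys "dacute{o}",
// "H{o}" and "h{o}" mapped to "ő" (and likewise "...{O}" mapped to "Ő"),
// stepping through 'values' one utf8 character at a time.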
|
|
|
|
|
|
2020-12-28 15:42:54 +00:00
|
|
|
|
// Helper function
|
|
|
|
|
static string getutf8(unsigned uchar)
|
|
|
|
|
{
|
|
|
|
|
#define maxc 5
|
|
|
|
|
string ret = string();
|
|
|
|
|
char c[maxc] = {0};
|
|
|
|
|
if (uchar <= 0x7f) {
|
|
|
|
|
c[maxc-1] = uchar & 0x7f;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
unsigned char rest = 0x40;
|
|
|
|
|
unsigned char first = 0x80;
|
|
|
|
|
int start = maxc-1;
|
|
|
|
|
for (int i = start; i >=0; --i) {
|
|
|
|
|
if (uchar < rest) {
|
|
|
|
|
c[i] = first + uchar;
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
c[i] = 0x80 | (uchar & 0x3f);
|
|
|
|
|
uchar >>= 6;
|
|
|
|
|
rest >>= 1;
|
|
|
|
|
first >>= 1;
|
|
|
|
|
first |= 0x80;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
for (int i = 0; i < maxc; i++) {
|
|
|
|
|
if (c[i] == 0) continue;
|
|
|
|
|
ret += c[i];
|
|
|
|
|
}
|
|
|
|
|
return(ret);
|
|
|
|
|
}
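// Illustrative example (not from the original sources): getutf8(0x00e4)
// yields the two-byte sequence 0xc3 0xa4 ("ä") and getutf8(0x2026) yields
// 0xe2 0x80 0xa6 ("…").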
|
|
|
|
|
|
2021-02-27 13:14:28 +00:00
|
|
|
|
static void addAccents(string latex_in, string unicode_out)
|
|
|
|
|
{
|
|
|
|
|
latex_in = latex_in.substr(1);
|
|
|
|
|
AccentsIterator it_ac = accents.find(latex_in);
|
|
|
|
|
if (it_ac == accents.end()) {
|
|
|
|
|
accents[latex_in] = unicode_out;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
LYXERR0("Key " << latex_in << " already set");
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
void static fillMissingUnicodesymbols()
|
|
|
|
|
{
|
|
|
|
|
addAccents("\\textyen", getutf8(0x00a5));
|
|
|
|
|
addAccents("\\yen", getutf8(0x00a5));
|
|
|
|
|
addAccents("\\textsection", getutf8(0x00a7));
|
|
|
|
|
addAccents("\\mathsection", getutf8(0x00a7));
|
|
|
|
|
addAccents("\\textlnot", getutf8(0x00ac));
|
|
|
|
|
addAccents("\\neg", getutf8(0x00ac));
|
|
|
|
|
addAccents("\\textpm", getutf8(0x00b1));
|
|
|
|
|
addAccents("\\pm", getutf8(0x00b1));
|
|
|
|
|
addAccents("\\textparagraph", getutf8(0x00b6));
|
|
|
|
|
addAccents("\\mathparagraph", getutf8(0x00b6));
|
|
|
|
|
addAccents("\\textperiodcentered", getutf8(0x00b7));
|
|
|
|
|
addAccents("\\texttimes", getutf8(0x00d7));
|
|
|
|
|
addAccents("\\times", getutf8(0x00d7));
|
|
|
|
|
addAccents("\\dh", getutf8(0x00f0));
|
|
|
|
|
addAccents("\\eth", getutf8(0x00f0));
|
|
|
|
|
addAccents("\\textdiv", getutf8(0x00f7));
|
|
|
|
|
addAccents("\\div", getutf8(0x00f7));
|
|
|
|
|
addAccents("\\o", getutf8(0x00f8));
|
|
|
|
|
addAccents("\\j", getutf8(0x0237));
|
|
|
|
|
addAccents("\\textalpha", getutf8(0x03b1));
|
|
|
|
|
addAccents("\\alpha", getutf8(0x03b1));
|
|
|
|
|
addAccents("\\textbeta", getutf8(0x03b2));
|
|
|
|
|
addAccents("\\beta", getutf8(0x03b2));
|
|
|
|
|
addAccents("\\textgamma", getutf8(0x03b3));
|
|
|
|
|
addAccents("\\gamma", getutf8(0x03b3));
|
|
|
|
|
addAccents("\\textdelta", getutf8(0x03b4));
|
|
|
|
|
addAccents("\\delta", getutf8(0x03b4));
|
|
|
|
|
addAccents("\\textepsilon", getutf8(0x03b5));
|
|
|
|
|
addAccents("\\varepsilon", getutf8(0x03b5));
|
|
|
|
|
addAccents("\\textzeta", getutf8(0x03b6));
|
|
|
|
|
addAccents("\\zeta", getutf8(0x03b6));
|
|
|
|
|
addAccents("\\texteta", getutf8(0x03b7));
|
|
|
|
|
addAccents("\\eta", getutf8(0x03b7));
|
|
|
|
|
addAccents("\\texttheta", getutf8(0x03b8));
|
|
|
|
|
addAccents("\\theta", getutf8(0x03b8));
|
|
|
|
|
addAccents("\\textiota", getutf8(0x03b9));
|
|
|
|
|
addAccents("\\iota", getutf8(0x03b9));
|
|
|
|
|
addAccents("\\textkappa", getutf8(0x03ba));
|
|
|
|
|
addAccents("\\kappa", getutf8(0x03ba));
|
|
|
|
|
addAccents("\\textlambda", getutf8(0x03bb));
|
|
|
|
|
addAccents("\\lambda", getutf8(0x03bb));
|
|
|
|
|
addAccents("\\textmu", getutf8(0x03bc));
|
|
|
|
|
addAccents("\\mu", getutf8(0x03bc));
|
|
|
|
|
addAccents("\\textnu", getutf8(0x03bd));
|
|
|
|
|
addAccents("\\nu", getutf8(0x03bd));
|
|
|
|
|
addAccents("\\textxi", getutf8(0x03be));
|
|
|
|
|
addAccents("\\xi", getutf8(0x03be));
|
|
|
|
|
addAccents("\\textpi", getutf8(0x03c0));
|
|
|
|
|
addAccents("\\pi", getutf8(0x03c0));
|
|
|
|
|
addAccents("\\textrho", getutf8(0x03c1));
|
|
|
|
|
addAccents("\\rho", getutf8(0x03c1));
|
|
|
|
|
addAccents("\\textfinalsigma", getutf8(0x03c2));
|
|
|
|
|
addAccents("\\varsigma", getutf8(0x03c2));
|
|
|
|
|
addAccents("\\textsigma", getutf8(0x03c3));
|
|
|
|
|
addAccents("\\sigma", getutf8(0x03c3));
|
|
|
|
|
addAccents("\\texttau", getutf8(0x03c4));
|
|
|
|
|
addAccents("\\tau", getutf8(0x03c4));
|
|
|
|
|
addAccents("\\textupsilon", getutf8(0x03c5));
|
|
|
|
|
addAccents("\\upsilon", getutf8(0x03c5));
|
|
|
|
|
addAccents("\\textphi", getutf8(0x03c6));
|
|
|
|
|
addAccents("\\varphi", getutf8(0x03c6));
|
|
|
|
|
addAccents("\\textchi", getutf8(0x03c7));
|
|
|
|
|
addAccents("\\chi", getutf8(0x03c7));
|
|
|
|
|
addAccents("\\textpsi", getutf8(0x03c8));
|
|
|
|
|
addAccents("\\psi", getutf8(0x03c8));
|
|
|
|
|
addAccents("\\textomega", getutf8(0x03c9));
|
|
|
|
|
addAccents("\\omega", getutf8(0x03c9));
|
|
|
|
|
addAccents("\\textdigamma", getutf8(0x03dd));
|
|
|
|
|
addAccents("\\digamma", getutf8(0x03dd));
|
|
|
|
|
addAccents("\\hebalef", getutf8(0x05d0));
|
|
|
|
|
addAccents("\\aleph", getutf8(0x05d0));
|
|
|
|
|
addAccents("\\hebbet", getutf8(0x05d1));
|
|
|
|
|
addAccents("\\beth", getutf8(0x05d1));
|
|
|
|
|
addAccents("\\hebgimel", getutf8(0x05d2));
|
|
|
|
|
addAccents("\\gimel", getutf8(0x05d2));
|
|
|
|
|
addAccents("\\hebdalet", getutf8(0x05d3));
|
|
|
|
|
addAccents("\\daleth", getutf8(0x05d3));
|
2021-06-04 12:48:57 +00:00
|
|
|
|
// Thai characters
|
|
|
|
|
addAccents("\\thaiKoKai", getutf8(0x0e01));
|
|
|
|
|
addAccents("\\thaiKhoKhai", getutf8(0x0e02));
|
|
|
|
|
addAccents("\\thaiKhoKhuat", getutf8(0x0e03));
|
|
|
|
|
addAccents("\\thaiKhoKhwai", getutf8(0x0e04));
|
|
|
|
|
addAccents("\\thaiKhoKhon", getutf8(0x0e05));
|
|
|
|
|
addAccents("\\thaiKhoRakhang", getutf8(0x0e06));
|
|
|
|
|
addAccents("\\thaiNgoNgu", getutf8(0x0e07));
|
|
|
|
|
addAccents("\\thaiChoChan", getutf8(0x0e08));
|
|
|
|
|
addAccents("\\thaiChoChing", getutf8(0x0e09));
|
|
|
|
|
addAccents("\\thaiChoChang", getutf8(0x0e0a));
|
|
|
|
|
addAccents("\\thaiSoSo", getutf8(0x0e0b));
|
|
|
|
|
addAccents("\\thaiChoChoe", getutf8(0x0e0c));
|
|
|
|
|
addAccents("\\thaiYoYing", getutf8(0x0e0d));
|
|
|
|
|
addAccents("\\thaiDoChada", getutf8(0x0e0e));
|
|
|
|
|
addAccents("\\thaiToPatak", getutf8(0x0e0f));
|
|
|
|
|
addAccents("\\thaiThoThan", getutf8(0x0e10));
|
|
|
|
|
addAccents("\\thaiThoNangmontho", getutf8(0x0e11));
|
|
|
|
|
addAccents("\\thaiThoPhuthao", getutf8(0x0e12));
|
|
|
|
|
addAccents("\\thaiNoNen", getutf8(0x0e13));
|
|
|
|
|
addAccents("\\thaiDoDek", getutf8(0x0e14));
|
|
|
|
|
addAccents("\\thaiToTao", getutf8(0x0e15));
|
|
|
|
|
addAccents("\\thaiThoThung", getutf8(0x0e16));
|
|
|
|
|
addAccents("\\thaiThoThahan", getutf8(0x0e17));
|
|
|
|
|
addAccents("\\thaiThoThong", getutf8(0x0e18));
|
|
|
|
|
addAccents("\\thaiNoNu", getutf8(0x0e19));
|
|
|
|
|
addAccents("\\thaiBoBaimai", getutf8(0x0e1a));
|
|
|
|
|
addAccents("\\thaiPoPla", getutf8(0x0e1b));
|
|
|
|
|
addAccents("\\thaiPhoPhung", getutf8(0x0e1c));
|
|
|
|
|
addAccents("\\thaiFoFa", getutf8(0x0e1d));
|
|
|
|
|
addAccents("\\thaiPhoPhan", getutf8(0x0e1e));
|
|
|
|
|
addAccents("\\thaiFoFan", getutf8(0x0e1f));
|
|
|
|
|
addAccents("\\thaiPhoSamphao", getutf8(0x0e20));
|
|
|
|
|
addAccents("\\thaiMoMa", getutf8(0x0e21));
|
|
|
|
|
addAccents("\\thaiYoYak", getutf8(0x0e22));
|
|
|
|
|
addAccents("\\thaiRoRua", getutf8(0x0e23));
|
|
|
|
|
addAccents("\\thaiRu", getutf8(0x0e24));
|
|
|
|
|
addAccents("\\thaiLoLing", getutf8(0x0e25));
|
|
|
|
|
addAccents("\\thaiLu", getutf8(0x0e26));
|
|
|
|
|
addAccents("\\thaiWoWaen", getutf8(0x0e27));
|
|
|
|
|
addAccents("\\thaiSoSala", getutf8(0x0e28));
|
|
|
|
|
addAccents("\\thaiSoRusi", getutf8(0x0e29));
|
|
|
|
|
addAccents("\\thaiSoSua", getutf8(0x0e2a));
|
|
|
|
|
addAccents("\\thaiHoHip", getutf8(0x0e2b));
|
|
|
|
|
addAccents("\\thaiLoChula", getutf8(0x0e2c));
|
|
|
|
|
addAccents("\\thaiOAng", getutf8(0x0e2d));
|
|
|
|
|
addAccents("\\thaiHoNokhuk", getutf8(0x0e2e));
|
|
|
|
|
addAccents("\\thaiPaiyannoi", getutf8(0x0e2f));
|
|
|
|
|
addAccents("\\thaiSaraA", getutf8(0x0e30));
|
|
|
|
|
addAccents("\\thaiMaiHanakat", getutf8(0x0e31));
|
|
|
|
|
addAccents("\\thaiSaraAa", getutf8(0x0e32));
|
|
|
|
|
addAccents("\\thaiSaraAm", getutf8(0x0e33));
|
|
|
|
|
addAccents("\\thaiSaraI", getutf8(0x0e34));
|
|
|
|
|
addAccents("\\thaiSaraIi", getutf8(0x0e35));
|
|
|
|
|
addAccents("\\thaiSaraUe", getutf8(0x0e36));
|
|
|
|
|
addAccents("\\thaiSaraUee", getutf8(0x0e37));
|
|
|
|
|
addAccents("\\thaiSaraU", getutf8(0x0e38));
|
|
|
|
|
addAccents("\\thaiSaraUu", getutf8(0x0e39));
|
|
|
|
|
addAccents("\\thaiPhinthu", getutf8(0x0e3a));
|
|
|
|
|
addAccents("\\thaiSaraE", getutf8(0x0e40));
|
|
|
|
|
addAccents("\\thaiSaraAe", getutf8(0x0e41));
|
|
|
|
|
addAccents("\\thaiSaraO", getutf8(0x0e42));
|
|
|
|
|
addAccents("\\thaiSaraAiMaimuan", getutf8(0x0e43));
|
|
|
|
|
addAccents("\\thaiSaraAiMaimalai", getutf8(0x0e44));
|
|
|
|
|
addAccents("\\thaiLakkhangyao", getutf8(0x0e45));
|
|
|
|
|
addAccents("\\thaiMaiyamok", getutf8(0x0e46));
|
|
|
|
|
addAccents("\\thaiMaitaikhu", getutf8(0x0e47));
|
|
|
|
|
addAccents("\\thaiMaiEk", getutf8(0x0e48));
|
|
|
|
|
addAccents("\\thaiMaiTho", getutf8(0x0e49));
|
|
|
|
|
addAccents("\\thaiMaiTri", getutf8(0x0e4a));
|
|
|
|
|
addAccents("\\thaiMaiChattawa", getutf8(0x0e4b));
|
|
|
|
|
addAccents("\\thaiThanthakhat", getutf8(0x0e4c));
|
|
|
|
|
addAccents("\\thaiNikhahit", getutf8(0x0e4d));
|
|
|
|
|
addAccents("\\thaiYamakkan", getutf8(0x0e4e));
|
|
|
|
|
addAccents("\\thaiFongman", getutf8(0x0e4f));
|
|
|
|
|
addAccents("\\thaizero", getutf8(0x0e50));
|
|
|
|
|
addAccents("\\thaione", getutf8(0x0e51));
|
|
|
|
|
addAccents("\\thaitwo", getutf8(0x0e52));
|
|
|
|
|
addAccents("\\thaithree", getutf8(0x0e53));
|
|
|
|
|
addAccents("\\thaifour", getutf8(0x0e54));
|
|
|
|
|
addAccents("\\thaifive", getutf8(0x0e55));
|
|
|
|
|
addAccents("\\thaisix", getutf8(0x0e56));
|
|
|
|
|
addAccents("\\thaiseven", getutf8(0x0e57));
|
|
|
|
|
addAccents("\\thaieight", getutf8(0x0e58));
|
|
|
|
|
addAccents("\\thainine", getutf8(0x0e59));
|
|
|
|
|
addAccents("\\thaiAngkhankhu", getutf8(0x0e5a));
|
|
|
|
|
addAccents("\\thaiKhomut", getutf8(0x0e5b));
|
|
|
|
|
|
2021-02-27 13:14:28 +00:00
|
|
|
|
addAccents("\\dag", getutf8(0x2020));
|
|
|
|
|
addAccents("\\dagger", getutf8(0x2020));
|
|
|
|
|
addAccents("\\ddag", getutf8(0x2021));
|
|
|
|
|
addAccents("\\ddagger", getutf8(0x2021));
|
|
|
|
|
addAccents("\\textbullet", getutf8(0x2022));
|
|
|
|
|
addAccents("\\bullet", getutf8(0x2022));
|
|
|
|
|
addAccents("\\dots", getutf8(0x2026));
|
|
|
|
|
addAccents("\\ldots", getutf8(0x2026));
|
|
|
|
|
addAccents("\\textasciiacute", getutf8(0x2032));
|
|
|
|
|
addAccents("\\prime", getutf8(0x2032));
|
|
|
|
|
addAccents("\\textasciigrave", getutf8(0x2035));
|
|
|
|
|
addAccents("\\backprime", getutf8(0x2035));
|
|
|
|
|
addAccents("\\textasteriskcentered", getutf8(0x204e));
|
|
|
|
|
addAccents("\\ast", getutf8(0x204e));
|
|
|
|
|
addAccents("\\textmho", getutf8(0x2127));
|
|
|
|
|
addAccents("\\mho", getutf8(0x2127));
|
|
|
|
|
addAccents("\\textleftarrow", getutf8(0x2190));
|
|
|
|
|
addAccents("\\leftarrow", getutf8(0x2190));
|
|
|
|
|
addAccents("\\textuparrow", getutf8(0x2191));
|
|
|
|
|
addAccents("\\uparrow", getutf8(0x2191));
|
|
|
|
|
addAccents("\\textrightarrow", getutf8(0x2192));
|
|
|
|
|
addAccents("\\rightarrow", getutf8(0x2192));
|
|
|
|
|
addAccents("\\textdownarrow", getutf8(0x2193));
|
|
|
|
|
addAccents("\\downarrow", getutf8(0x2193));
|
|
|
|
|
addAccents("\\textglobrise", getutf8(0x2197));
|
|
|
|
|
addAccents("\\nearrow", getutf8(0x2197));
|
|
|
|
|
addAccents("\\textglobfall", getutf8(0x2198));
|
|
|
|
|
addAccents("\\searrow", getutf8(0x2198));
|
|
|
|
|
addAccents("\\textsurd", getutf8(0x221a));
|
|
|
|
|
addAccents("\\surd", getutf8(0x221a));
|
|
|
|
|
addAccents("\\textbigcircle", getutf8(0x25ef));
|
|
|
|
|
addAccents("\\bigcirc", getutf8(0x25ef));
|
|
|
|
|
addAccents("\\textlangle", getutf8(0x27e8));
|
|
|
|
|
addAccents("\\langle", getutf8(0x27e8));
|
|
|
|
|
addAccents("\\textrangle", getutf8(0x27e9));
|
|
|
|
|
addAccents("\\rangle", getutf8(0x27e9));
|
|
|
|
|
}
|
|
|
|
|
|
2019-03-02 14:42:38 +00:00
|
|
|
|
static void buildAccentsMap()
|
|
|
|
|
{
|
|
|
|
|
accents["imath"] = "ı";
|
2019-03-04 13:05:44 +00:00
|
|
|
|
accents["i"] = "ı";
|
2019-03-02 21:00:20 +00:00
|
|
|
|
accents["jmath"] = "ȷ";
|
2020-11-23 20:55:13 +00:00
|
|
|
|
accents["cdot"] = "·";
|
2021-01-12 18:33:29 +00:00
|
|
|
|
accents["textasciicircum"] = "^";
|
|
|
|
|
accents["mathcircumflex"] = "^";
|
|
|
|
|
accents["sim"] = "~";
|
2021-01-07 16:04:27 +00:00
|
|
|
|
accents["guillemotright"] = "»";
|
|
|
|
|
accents["guillemotleft"] = "«";
|
2020-12-28 15:42:54 +00:00
|
|
|
|
accents["hairspace"] = getutf8(0xf0000); // select from free unicode plane 15
|
|
|
|
|
accents["thinspace"] = getutf8(0xf0002); // and used _only_ by findadv
|
|
|
|
|
accents["negthinspace"] = getutf8(0xf0003); // to omit backslashed latex macros
|
|
|
|
|
accents["medspace"] = getutf8(0xf0004); // See https://en.wikipedia.org/wiki/Private_Use_Areas
|
|
|
|
|
accents["negmedspace"] = getutf8(0xf0005);
|
|
|
|
|
accents["thickspace"] = getutf8(0xf0006);
|
|
|
|
|
accents["negthickspace"] = getutf8(0xf0007);
|
|
|
|
|
accents["lyx"] = getutf8(0xf0010); // Used logos
|
|
|
|
|
accents["LyX"] = getutf8(0xf0010);
|
|
|
|
|
accents["tex"] = getutf8(0xf0011);
|
|
|
|
|
accents["TeX"] = getutf8(0xf0011);
|
|
|
|
|
accents["latex"] = getutf8(0xf0012);
|
|
|
|
|
accents["LaTeX"] = getutf8(0xf0012);
|
|
|
|
|
accents["latexe"] = getutf8(0xf0013);
|
|
|
|
|
accents["LaTeXe"] = getutf8(0xf0013);
|
2021-01-04 06:16:59 +00:00
|
|
|
|
accents["lyxarrow"] = getutf8(0xf0020);
|
2021-01-18 11:17:57 +00:00
|
|
|
|
accents["braceleft"] = getutf8(0xf0030);
|
|
|
|
|
accents["braceright"] = getutf8(0xf0031);
|
2020-12-28 15:42:54 +00:00
|
|
|
|
accents["backslash lyx"] = getutf8(0xf0010); // Used logos inserted with starting \backslash
|
|
|
|
|
accents["backslash LyX"] = getutf8(0xf0010);
|
|
|
|
|
accents["backslash tex"] = getutf8(0xf0011);
|
|
|
|
|
accents["backslash TeX"] = getutf8(0xf0011);
|
|
|
|
|
accents["backslash latex"] = getutf8(0xf0012);
|
|
|
|
|
accents["backslash LaTeX"] = getutf8(0xf0012);
|
|
|
|
|
accents["backslash latexe"] = getutf8(0xf0013);
|
|
|
|
|
accents["backslash LaTeXe"] = getutf8(0xf0013);
|
2021-01-04 06:16:59 +00:00
|
|
|
|
accents["backslash lyxarrow"] = getutf8(0xf0020);
|
2019-03-03 13:08:27 +00:00
|
|
|
|
accents["ddot{\\imath}"] = "ï";
|
2019-07-30 13:21:56 +00:00
|
|
|
|
buildaccent("ddot", "aAeEhHiIioOtuUwWxXyY",
|
|
|
|
|
"äÄëËḧḦïÏïöÖẗüÜẅẄẍẌÿŸ"); // umlaut
|
|
|
|
|
buildaccent("dot|.", "aAbBcCdDeEfFGghHIimMnNoOpPrRsStTwWxXyYzZ",
|
|
|
|
|
"ȧȦḃḂċĊḋḊėĖḟḞĠġḣḢİİṁṀṅṄȯȮṗṖṙṘṡṠṫṪẇẆẋẊẏẎżŻ"); // dot{i} can only happen if ignoring case, but there is no lowercase of 'İ'
|
2019-03-03 13:08:27 +00:00
|
|
|
|
accents["acute{\\imath}"] = "í";
|
2019-07-30 13:21:56 +00:00
|
|
|
|
buildaccent("acute", "aAcCeEgGkKlLmMoOnNpPrRsSuUwWyYzZiI",
|
|
|
|
|
"áÁćĆéÉǵǴḱḰĺĹḿḾóÓńŃṕṔŕŔśŚúÚẃẂýÝźŹíÍ");
|
2019-03-03 13:08:27 +00:00
|
|
|
|
buildaccent("dacute|H|h", "oOuU", "őŐűŰ"); // double acute
|
2019-03-08 21:44:00 +00:00
|
|
|
|
buildaccent("mathring|r", "aAuUwy",
|
2019-03-18 17:19:44 +00:00
|
|
|
|
"åÅůŮẘẙ"); // ring
|
2019-03-03 13:08:27 +00:00
|
|
|
|
accents["check{\\imath}"] = "ǐ";
|
|
|
|
|
accents["check{\\jmath}"] = "ǰ";
|
2019-03-09 23:29:56 +00:00
|
|
|
|
buildaccent("check|v", "cCdDaAeEiIoOuUgGkKhHlLnNrRsSTtzZ",
|
|
|
|
|
"čČďĎǎǍěĚǐǏǒǑǔǓǧǦǩǨȟȞľĽňŇřŘšŠŤťžŽ"); // caron
|
2019-03-03 13:08:27 +00:00
|
|
|
|
accents["hat{\\imath}"] = "î";
|
|
|
|
|
accents["hat{\\jmath}"] = "ĵ";
|
2019-07-30 13:21:56 +00:00
|
|
|
|
buildaccent("hat|^", "aAcCeEgGhHiIjJoOsSuUwWyYzZ",
|
|
|
|
|
"âÂĉĈêÊĝĜĥĤîÎĵĴôÔŝŜûÛŵŴŷŶẑẐ"); // circ
|
2019-03-03 13:08:27 +00:00
|
|
|
|
accents["bar{\\imath}"] = "ī";
|
2019-03-04 13:05:44 +00:00
|
|
|
|
buildaccent("bar|=", "aAeEiIoOuUyY",
|
|
|
|
|
"āĀēĒīĪōŌūŪȳȲ"); // macron
|
2019-03-03 13:08:27 +00:00
|
|
|
|
accents["tilde{\\imath}"] = "ĩ";
|
2019-07-30 13:21:56 +00:00
|
|
|
|
buildaccent("tilde", "aAeEiInNoOuUvVyY",
|
|
|
|
|
"ãÃẽẼĩĨñÑõÕũŨṽṼỹỸ"); // tilde
|
2019-03-04 13:05:44 +00:00
|
|
|
|
accents["breve{\\imath}"] = "ĭ";
|
|
|
|
|
buildaccent("breve|u", "aAeEgGiIoOuU",
|
2019-03-18 17:19:44 +00:00
|
|
|
|
"ăĂĕĔğĞĭĬŏŎŭŬ"); // breve
|
2019-03-04 13:05:44 +00:00
|
|
|
|
accents["grave{\\imath}"] = "ì";
|
|
|
|
|
buildaccent("grave|`", "aAeEiIoOuUnNwWyY",
|
2019-03-18 17:19:44 +00:00
|
|
|
|
"àÀèÈìÌòÒùÙǹǸẁẀỳỲ"); // grave
|
2019-03-08 21:44:00 +00:00
|
|
|
|
buildaccent("subdot|d", "BbDdHhKkLlMmNnRrSsTtVvWwZzAaEeIiOoUuYy",
|
2019-03-18 17:19:44 +00:00
|
|
|
|
"ḄḅḌḍḤḥḲḳḶḷṂṃṆṇṚṛṢṣṬṭṾṿẈẉẒẓẠạẸẹỊịỌọỤụỴỵ"); // dot below
|
2019-03-17 12:06:56 +00:00
|
|
|
|
buildaccent("ogonek|k", "AaEeIiUuOo",
|
2019-03-18 17:19:44 +00:00
|
|
|
|
"ĄąĘęĮįŲųǪǫ"); // ogonek
|
|
|
|
|
buildaccent("cedilla|c", "CcGgKkLlNnRrSsTtEeDdHh",
|
2020-12-15 17:08:02 +00:00
|
|
|
|
"ÇçĢģĶķĻļŅņŖŗŞşŢţȨȩḐḑḨḩ"); // cedilla
|
2019-03-18 11:59:40 +00:00
|
|
|
|
buildaccent("subring|textsubring", "Aa",
|
2019-03-18 17:19:44 +00:00
|
|
|
|
"Ḁḁ"); // subring
|
2019-03-18 11:59:40 +00:00
|
|
|
|
buildaccent("subhat|textsubcircum", "DdEeLlNnTtUu",
|
2019-03-18 17:19:44 +00:00
|
|
|
|
"ḒḓḘḙḼḽṊṋṰṱṶṷ"); // subcircum
|
2019-03-18 11:59:40 +00:00
|
|
|
|
buildaccent("subtilde|textsubtilde", "EeIiUu",
|
2019-03-18 17:19:44 +00:00
|
|
|
|
"ḚḛḬḭṴṵ"); // subtilde
|
2019-03-20 22:20:13 +00:00
|
|
|
|
accents["dgrave{\\imath}"] = "ȉ";
|
|
|
|
|
accents["textdoublegrave{\\i}"] = "ȉ";
|
|
|
|
|
buildaccent("dgrave|textdoublegrave", "AaEeIiOoRrUu",
|
|
|
|
|
"ȀȁȄȅȈȉȌȍȐȑȔȕ"); // double grave
|
|
|
|
|
accents["rcap{\\imath}"] = "ȉ";
|
|
|
|
|
accents["textroundcap{\\i}"] = "ȉ";
|
|
|
|
|
buildaccent("rcap|textroundcap", "AaEeIiOoRrUu",
|
|
|
|
|
"ȂȃȆȇȊȋȎȏȒȓȖȗ"); // inverted breve
|
2019-07-30 13:21:56 +00:00
|
|
|
|
buildaccent("slashed", "oO",
|
|
|
|
|
"øØ"); // slashed
|
2021-02-27 13:14:28 +00:00
|
|
|
|
fillMissingUnicodesymbols(); // Add some still unhandled entries contained in 'unicodesymbols'
|
|
|
|
|
// LYXERR0("Number of accents " << accents.size());
|
2019-03-02 14:42:38 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/*
|
|
|
|
|
 * Accents created in math or regexp environments
|
|
|
|
|
* are macros, but we need the utf8 equivalent
|
|
|
|
|
*/
|
|
|
|
|
void Intervall::removeAccents()
|
|
|
|
|
{
|
|
|
|
|
if (accents.empty())
|
|
|
|
|
buildAccentsMap();
|
2020-12-27 11:01:23 +00:00
|
|
|
|
static regex const accre("\\\\(([\\S]|grave|breve|ddot|dot|acute|dacute|mathring|check|hat|bar|tilde|subdot|ogonek|"
|
|
|
|
|
"cedilla|subring|textsubring|subhat|textsubcircum|subtilde|textsubtilde|dgrave|textdoublegrave|rcap|textroundcap|slashed)\\{[^\\{\\}]+\\}"
|
2021-01-18 11:17:57 +00:00
|
|
|
|
"|((i|imath|jmath|cdot|[a-z]+space)|((backslash )?([lL]y[xX]|[tT]e[xX]|[lL]a[tT]e[xX]e?|lyxarrow))|(brace|guillemot)(left|right)|textasciicircum|mathcircumflex|sim)(?![a-zA-Z]))");
|
2019-03-02 14:42:38 +00:00
|
|
|
|
smatch sub;
|
|
|
|
|
for (sregex_iterator itacc(par.begin(), par.end(), accre), end; itacc != end; ++itacc) {
|
|
|
|
|
sub = *itacc;
|
|
|
|
|
string key = sub.str(1);
|
2021-01-31 08:53:06 +00:00
|
|
|
|
AccentsIterator it_ac = accents.find(key);
|
|
|
|
|
if (it_ac != accents.end()) {
|
|
|
|
|
string val = it_ac->second;
|
2019-03-18 08:38:34 +00:00
|
|
|
|
size_t pos = sub.position(size_t(0));
|
2019-03-02 14:42:38 +00:00
|
|
|
|
for (size_t i = 0; i < val.size(); i++) {
|
|
|
|
|
par[pos+i] = val[i];
|
|
|
|
|
}
|
2020-11-23 20:55:13 +00:00
|
|
|
|
// Also remove a possibly following space
|
|
|
|
|
if (par[pos+sub.str(0).size()] == ' ')
|
2021-01-02 17:37:14 +00:00
|
|
|
|
addIntervall(pos+val.size(), pos + sub.str(0).size()+1);
|
2020-11-23 20:55:13 +00:00
|
|
|
|
else
|
2021-01-02 17:37:14 +00:00
|
|
|
|
addIntervall(pos+val.size(), pos + sub.str(0).size());
|
2019-03-03 13:08:27 +00:00
|
|
|
|
for (size_t i = pos+val.size(); i < pos + sub.str(0).size(); i++) {
|
2019-03-08 21:44:00 +00:00
|
|
|
|
// remove traces of any remaining chars
|
2019-03-03 13:08:27 +00:00
|
|
|
|
par[i] = ' ';
|
|
|
|
|
}
|
2019-03-02 14:42:38 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2020-05-29 06:44:56 +00:00
|
|
|
|
LYXERR(Debug::INFO, "Not added accent for \"" << key << "\"");
|
2019-03-02 14:42:38 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
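// Illustrative example (not from the original sources): for par == "\ddot{a}b"
// the key "ddot{a}" maps to "ä"; its bytes overwrite the start of the match,
// the rest of the match is blanked and added to the ignore intervals, so
// output() later yields "äb".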
|
|
|
|
|
|
2018-10-18 15:37:15 +00:00
|
|
|
|
void Intervall::handleOpenP(int i)
|
2018-10-12 14:47:07 +00:00
|
|
|
|
{
|
2018-10-18 15:37:15 +00:00
|
|
|
|
actualdeptindex++;
|
|
|
|
|
depts[actualdeptindex] = i+1;
|
|
|
|
|
closes[actualdeptindex] = -1;
|
|
|
|
|
checkDepthIndex(actualdeptindex);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
void Intervall::handleCloseP(int i, bool closingAllowed)
|
|
|
|
|
{
|
|
|
|
|
if (actualdeptindex <= 0) {
|
2018-10-22 18:19:36 +00:00
|
|
|
|
if (! closingAllowed)
|
|
|
|
|
LYXERR(Debug::FIND, "Bad closing parenthesis in latex"); /* should not happen, but the latex input may be wrong */
|
|
|
|
|
// if we are at the very end
|
|
|
|
|
addIntervall(i, i+1);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
2018-10-12 14:47:07 +00:00
|
|
|
|
else {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
closes[actualdeptindex] = i+1;
|
|
|
|
|
actualdeptindex--;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2018-10-18 15:37:15 +00:00
|
|
|
|
void Intervall::resetOpenedP(int openPos)
|
|
|
|
|
{
|
2018-10-22 18:19:36 +00:00
|
|
|
|
// Used as initializer for foreignlanguage entry
|
2018-10-18 15:37:15 +00:00
|
|
|
|
actualdeptindex = 1;
|
|
|
|
|
depts[1] = openPos+1;
|
|
|
|
|
closes[1] = -1;
|
|
|
|
|
}
|
|
|
|
|
|
2020-11-25 23:11:07 +00:00
|
|
|
|
int Intervall::previousNotIgnored(int start) const
|
2018-10-12 14:47:07 +00:00
|
|
|
|
{
|
2018-10-14 18:39:13 +00:00
|
|
|
|
int idx = 0; /* index into the ignore intervals */
|
|
|
|
|
for (idx = ignoreidx; idx >= 0; --idx) {
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (start > borders[idx].upper)
|
2018-11-04 20:41:04 +00:00
|
|
|
|
return start;
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (start >= borders[idx].low)
|
|
|
|
|
start = borders[idx].low-1;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
2018-10-14 18:39:13 +00:00
|
|
|
|
return start;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
|
2020-11-25 23:11:07 +00:00
|
|
|
|
int Intervall::nextNotIgnored(int start) const
|
2018-10-13 19:02:53 +00:00
|
|
|
|
{
|
|
|
|
|
int idx = 0; /* index into the ignore intervals */
|
|
|
|
|
for (idx = 0; idx <= ignoreidx; idx++) {
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (start < borders[idx].low)
|
2018-11-04 20:41:04 +00:00
|
|
|
|
return start;
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (start < borders[idx].upper)
|
|
|
|
|
start = borders[idx].upper;
|
2018-10-13 19:02:53 +00:00
|
|
|
|
}
|
|
|
|
|
return start;
|
|
|
|
|
}
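// Illustrative example (not from the original sources): with a single ignore
// region (4,9), nextNotIgnored(6) returns 9 (the first position after the
// region) while previousNotIgnored(6) returns 3 (the last position before it).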
|
|
|
|
|
|
2021-01-31 08:53:06 +00:00
|
|
|
|
typedef unordered_map<string, KeyInfo> KeysMap;
|
|
|
|
|
typedef unordered_map<string, KeyInfo>::const_iterator KeysIterator;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
typedef vector< KeyInfo> Entries;
|
2021-01-31 08:53:06 +00:00
|
|
|
|
static KeysMap keys = unordered_map<string, KeyInfo>();
|
2018-10-18 15:37:15 +00:00
|
|
|
|
|
|
|
|
|
class LatexInfo {
|
|
|
|
|
private:
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int entidx_;
|
|
|
|
|
Entries entries_;
|
|
|
|
|
Intervall interval_;
|
2018-10-27 14:57:42 +00:00
|
|
|
|
void buildKeys(bool);
|
|
|
|
|
void buildEntries(bool);
|
|
|
|
|
void makeKey(const string &, KeyInfo, bool isPatternString);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
void processRegion(int start, int region_end); /* remove {} parts */
|
2020-10-09 15:50:24 +00:00
|
|
|
|
void removeHead(KeyInfo const &, int count=0);
|
2018-10-27 14:57:42 +00:00
|
|
|
|
|
2018-10-18 15:37:15 +00:00
|
|
|
|
public:
|
2019-09-13 14:23:49 +00:00
|
|
|
|
LatexInfo(string const & par, bool isPatternString)
|
2021-01-02 17:37:14 +00:00
|
|
|
|
: entidx_(-1), interval_(isPatternString, par)
|
2019-03-20 16:25:25 +00:00
|
|
|
|
{
|
2018-10-27 14:57:42 +00:00
|
|
|
|
buildKeys(isPatternString);
|
2019-03-21 11:53:41 +00:00
|
|
|
|
entries_ = vector<KeyInfo>();
|
2018-10-27 14:57:42 +00:00
|
|
|
|
buildEntries(isPatternString);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
};
|
|
|
|
|
int getFirstKey() {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
entidx_ = 0;
|
|
|
|
|
if (entries_.empty()) {
|
|
|
|
|
return -1;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (entries_[0].keytype == KeyInfo::isTitle) {
|
2020-12-14 19:43:39 +00:00
|
|
|
|
interval_.hasTitle = true;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (! entries_[0].disabled) {
|
|
|
|
|
interval_.titleValue = entries_[0].head;
|
2019-02-19 22:11:09 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.titleValue = "";
|
2019-02-19 22:11:09 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
removeHead(entries_[0]);
|
|
|
|
|
if (entries_.size() > 1)
|
|
|
|
|
return 1;
|
2019-02-19 22:11:09 +00:00
|
|
|
|
else
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return -1;
|
2019-02-19 22:11:09 +00:00
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
return 0;
|
|
|
|
|
};
|
|
|
|
|
int getNextKey() {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
entidx_++;
|
|
|
|
|
if (int(entries_.size()) > entidx_) {
|
|
|
|
|
return entidx_;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return -1;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
bool setNextKey(int idx) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if ((idx == entidx_) && (entidx_ >= 0)) {
|
|
|
|
|
entidx_--;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
else
|
2018-11-04 20:41:04 +00:00
|
|
|
|
return false;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
};
|
2020-11-25 23:11:07 +00:00
|
|
|
|
int find(int start, KeyInfo::KeyType keytype) const {
|
2019-02-07 12:35:47 +00:00
|
|
|
|
if (start < 0)
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return -1;
|
2019-02-07 12:35:47 +00:00
|
|
|
|
int tmpIdx = start;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
while (tmpIdx < int(entries_.size())) {
|
|
|
|
|
if (entries_[tmpIdx].keytype == keytype)
|
2019-02-07 12:35:47 +00:00
|
|
|
|
return tmpIdx;
|
|
|
|
|
tmpIdx++;
|
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return -1;
|
2019-02-07 12:35:47 +00:00
|
|
|
|
};
|
2020-10-09 15:50:24 +00:00
|
|
|
|
int process(ostringstream & os, KeyInfo const & actual);
|
|
|
|
|
int dispatch(ostringstream & os, int previousStart, KeyInfo & actual);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
// string show(int lastpos) { return interval.show(lastpos);};
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int nextNotIgnored(int start) { return interval_.nextNotIgnored(start);};
|
2018-10-18 15:37:15 +00:00
|
|
|
|
KeyInfo &getKeyInfo(int keyinfo) {
|
|
|
|
|
static KeyInfo invalidInfo = KeyInfo();
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if ((keyinfo < 0) || ( keyinfo >= int(entries_.size())))
|
2018-10-18 15:37:15 +00:00
|
|
|
|
return invalidInfo;
|
|
|
|
|
else
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return entries_[keyinfo];
|
2018-10-18 15:37:15 +00:00
|
|
|
|
};
|
2020-11-25 23:11:07 +00:00
|
|
|
|
void setForDefaultLang(KeyInfo const & defLang) {interval_.setForDefaultLang(defLang);};
|
2019-03-21 11:53:41 +00:00
|
|
|
|
void addIntervall(int low, int up) { interval_.addIntervall(low, up); };
|
2018-10-18 15:37:15 +00:00
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
2019-02-17 23:40:55 +00:00
|
|
|
|
int Intervall::findclosing(int start, int end, char up = '{', char down = '}', int repeat = 1)
|
2018-10-12 14:47:07 +00:00
|
|
|
|
{
|
2018-10-18 15:37:15 +00:00
|
|
|
|
int skip = 0;
|
|
|
|
|
int depth = 0;
|
|
|
|
|
for (int i = start; i < end; i += 1 + skip) {
|
|
|
|
|
char c;
|
|
|
|
|
c = par[i];
|
|
|
|
|
skip = 0;
|
|
|
|
|
if (c == '\\') skip = 1;
|
2018-11-03 10:15:12 +00:00
|
|
|
|
else if (c == up) {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
depth++;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
2018-11-03 10:15:12 +00:00
|
|
|
|
else if (c == down) {
|
2019-02-17 23:40:55 +00:00
|
|
|
|
if (depth == 0) {
|
2020-01-03 12:08:32 +00:00
|
|
|
|
repeat--;
|
2019-02-17 23:40:55 +00:00
|
|
|
|
if ((repeat <= 0) || (par[i+1] != up))
|
|
|
|
|
return i;
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
--depth;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2018-11-04 20:41:04 +00:00
|
|
|
|
return end;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
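// Illustrative example (not from the original sources): for par == "{a{b}c}d",
// findclosing(1, 8) returns 6, the index of the outermost closing brace;
// characters escaped with '\' are skipped over.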
|
|
|
|
|
|
2018-10-30 19:52:29 +00:00
|
|
|
|
class MathInfo {
|
|
|
|
|
class MathEntry {
|
|
|
|
|
public:
|
|
|
|
|
string wait;
|
|
|
|
|
size_t mathEnd;
|
2021-02-11 13:03:46 +00:00
|
|
|
|
size_t mathpostfixsize;
|
2018-10-30 19:52:29 +00:00
|
|
|
|
size_t mathStart;
|
2021-02-11 13:03:46 +00:00
|
|
|
|
size_t mathprefixsize;
|
2018-10-30 19:52:29 +00:00
|
|
|
|
size_t mathSize;
|
|
|
|
|
};
|
2019-03-21 11:53:41 +00:00
|
|
|
|
size_t actualIdx_;
|
|
|
|
|
vector<MathEntry> entries_;
|
2018-10-30 19:52:29 +00:00
|
|
|
|
public:
|
|
|
|
|
MathInfo() {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
actualIdx_ = 0;
|
2018-10-30 19:52:29 +00:00
|
|
|
|
}
|
2021-02-11 13:03:46 +00:00
|
|
|
|
void insert(string const & wait, size_t start, size_t prefixsize, size_t end, size_t postfixsize) {
|
2018-10-30 19:52:29 +00:00
|
|
|
|
MathEntry m = MathEntry();
|
|
|
|
|
m.wait = wait;
|
|
|
|
|
m.mathStart = start;
|
2021-02-11 13:03:46 +00:00
|
|
|
|
m.mathprefixsize = prefixsize;
|
2021-02-13 15:19:45 +00:00
|
|
|
|
m.mathEnd = end + postfixsize;
|
2021-02-11 13:03:46 +00:00
|
|
|
|
m.mathpostfixsize = postfixsize;
|
2021-02-13 15:19:45 +00:00
|
|
|
|
m.mathSize = m.mathEnd - m.mathStart;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
entries_.push_back(m);
|
2018-10-30 19:52:29 +00:00
|
|
|
|
}
|
2020-11-25 23:11:07 +00:00
|
|
|
|
bool empty() const { return entries_.empty(); };
|
|
|
|
|
size_t getEndPos() const {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (entries_.empty() || (actualIdx_ >= entries_.size())) {
|
2018-10-30 19:52:29 +00:00
|
|
|
|
return 0;
|
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return entries_[actualIdx_].mathEnd;
|
2018-10-30 19:52:29 +00:00
|
|
|
|
}
|
2020-11-25 23:11:07 +00:00
|
|
|
|
size_t getStartPos() const {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (entries_.empty() || (actualIdx_ >= entries_.size())) {
|
2018-10-30 19:52:29 +00:00
|
|
|
|
return 100000; /* definitely enough? */
|
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return entries_[actualIdx_].mathStart;
|
2018-10-30 19:52:29 +00:00
|
|
|
|
}
|
2021-02-11 13:03:46 +00:00
|
|
|
|
size_t getPrefixSize() const {
|
|
|
|
|
if (entries_.empty() || (actualIdx_ >= entries_.size())) {
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
return entries_[actualIdx_].mathprefixsize;
|
|
|
|
|
}
|
|
|
|
|
size_t getPostfixSize() const {
|
|
|
|
|
if (entries_.empty() || (actualIdx_ >= entries_.size())) {
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
return entries_[actualIdx_].mathpostfixsize;
|
|
|
|
|
}
|
2018-10-30 19:52:29 +00:00
|
|
|
|
size_t getFirstPos() {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
actualIdx_ = 0;
|
2018-10-30 19:52:29 +00:00
|
|
|
|
return getStartPos();
|
|
|
|
|
}
|
2020-11-25 23:11:07 +00:00
|
|
|
|
size_t getSize() const {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (entries_.empty() || (actualIdx_ >= entries_.size())) {
|
2018-10-30 19:52:29 +00:00
|
|
|
|
return size_t(0);
|
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return entries_[actualIdx_].mathSize;
|
2018-10-30 19:52:29 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
void incrEntry() { actualIdx_++; };
|
2018-10-30 19:52:29 +00:00
|
|
|
|
};
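// Illustrative example (not from the original sources): after
// insert("$", 10, 1, 20, 1) the entry spans par[10..20]; getStartPos()
// returns 10, getEndPos() returns 21 and getSize() returns 11.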
|
|
|
|
|
|
2018-10-27 14:57:42 +00:00
|
|
|
|
void LatexInfo::buildEntries(bool isPatternString)
|
2018-10-18 15:37:15 +00:00
|
|
|
|
{
|
2021-02-19 16:06:37 +00:00
|
|
|
|
static regex const rmath("(\\\\)*(\\$|\\\\\\[|\\\\\\]|\\\\(begin|end)\\{((eqnarray|equation|flalign|gather|multline|align|x?x?alignat)\\*?\\})(\\{[0-9]+\\})?)");
|
2021-01-06 13:22:26 +00:00
|
|
|
|
static regex const rkeys("(\\\\)*(\\$|\\\\\\[|\\\\\\]|\\\\((([a-zA-Z]+\\*?)(\\{([a-z]+\\*?)\\}|=[0-9]+[a-z]+)?)))");
|
2018-10-28 18:40:14 +00:00
|
|
|
|
static bool disableLanguageOverride = false;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
smatch sub, submath;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
bool evaluatingRegexp = false;
|
2018-10-30 19:52:29 +00:00
|
|
|
|
MathInfo mi;
|
2018-10-29 12:17:54 +00:00
|
|
|
|
bool evaluatingMath = false;
|
2018-12-11 16:27:50 +00:00
|
|
|
|
bool evaluatingCode = false;
|
|
|
|
|
size_t codeEnd = 0;
|
2019-02-07 12:35:47 +00:00
|
|
|
|
bool evaluatingOptional = false;
|
|
|
|
|
size_t optionalEnd = 0;
|
2018-12-11 16:27:50 +00:00
|
|
|
|
int codeStart = -1;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
KeyInfo found;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
bool math_end_waiting = false;
|
|
|
|
|
size_t math_pos = 10000;
|
2021-02-11 13:03:46 +00:00
|
|
|
|
size_t math_prefix_size = 1;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
string math_end;
|
2020-12-31 15:53:46 +00:00
|
|
|
|
static vector<string> usedText = vector<string>();
|
2021-02-11 13:03:46 +00:00
|
|
|
|
static bool removeMathHull = false;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.removeAccents();
|
2019-03-02 14:42:38 +00:00
|
|
|
|
|
2019-03-21 11:53:41 +00:00
|
|
|
|
for (sregex_iterator itmath(interval_.par.begin(), interval_.par.end(), rmath), end; itmath != end; ++itmath) {
|
2018-10-22 18:19:36 +00:00
|
|
|
|
submath = *itmath;
|
2021-01-06 13:22:26 +00:00
|
|
|
|
if ((submath.position(2) - submath.position(0)) %2 == 1) {
|
|
|
|
|
// prefixed by an odd number of '\\'
|
|
|
|
|
continue;
|
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
if (math_end_waiting) {
|
2021-01-06 13:22:26 +00:00
|
|
|
|
size_t pos = submath.position(size_t(2));
|
2018-11-09 12:36:47 +00:00
|
|
|
|
if ((math_end == "$") &&
|
2021-01-06 13:22:26 +00:00
|
|
|
|
(submath.str(2) == "$")) {
|
2021-02-13 15:19:45 +00:00
|
|
|
|
mi.insert("$", math_pos, 1, pos, 1);
|
2018-11-09 12:36:47 +00:00
|
|
|
|
math_end_waiting = false;
|
2018-10-28 18:40:14 +00:00
|
|
|
|
}
|
2018-11-09 12:36:47 +00:00
|
|
|
|
else if ((math_end == "\\]") &&
|
2021-01-06 13:22:26 +00:00
|
|
|
|
(submath.str(2) == "\\]")) {
|
2021-02-13 15:19:45 +00:00
|
|
|
|
mi.insert("\\]", math_pos, 2, pos, 2);
|
2018-11-09 12:36:47 +00:00
|
|
|
|
math_end_waiting = false;
|
2018-10-28 18:40:14 +00:00
|
|
|
|
}
|
2021-01-06 13:22:26 +00:00
|
|
|
|
else if ((submath.str(3).compare("end") == 0) &&
|
2021-02-19 16:06:37 +00:00
|
|
|
|
(submath.str(5).compare(math_end) == 0)) {
|
2021-02-13 15:19:45 +00:00
|
|
|
|
mi.insert(math_end, math_pos, math_prefix_size, pos, submath.str(2).length());
|
2018-10-22 18:19:36 +00:00
|
|
|
|
math_end_waiting = false;
|
|
|
|
|
}
|
2018-11-09 12:36:47 +00:00
|
|
|
|
else
|
|
|
|
|
continue;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2021-01-06 13:22:26 +00:00
|
|
|
|
if (submath.str(3).compare("begin") == 0) {
|
2018-10-22 18:19:36 +00:00
|
|
|
|
math_end_waiting = true;
|
2021-02-19 16:06:37 +00:00
|
|
|
|
math_end = submath.str(5);
|
2021-01-06 13:22:26 +00:00
|
|
|
|
math_pos = submath.position(size_t(2));
|
2021-02-11 13:03:46 +00:00
|
|
|
|
math_prefix_size = submath.str(2).length();
|
2018-10-22 18:19:36 +00:00
|
|
|
|
}
|
2021-01-06 13:22:26 +00:00
|
|
|
|
else if (submath.str(2).compare("\\[") == 0) {
|
2018-10-28 18:40:14 +00:00
|
|
|
|
math_end_waiting = true;
|
|
|
|
|
math_end = "\\]";
|
2021-01-06 13:22:26 +00:00
|
|
|
|
math_pos = submath.position(size_t(2));
|
2018-10-28 18:40:14 +00:00
|
|
|
|
}
|
2021-01-06 13:22:26 +00:00
|
|
|
|
else if (submath.str(2) == "$") {
|
|
|
|
|
size_t pos = submath.position(size_t(2));
|
|
|
|
|
math_end_waiting = true;
|
|
|
|
|
math_end = "$";
|
|
|
|
|
math_pos = pos;
|
2018-10-28 18:40:14 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2018-11-02 09:32:28 +00:00
|
|
|
|
// Ignore language if there is math somewhere in pattern-string
|
2018-10-28 18:40:14 +00:00
|
|
|
|
if (isPatternString) {
|
2020-12-31 15:53:46 +00:00
|
|
|
|
for (auto s: usedText) {
|
|
|
|
|
// Remove entries created in previous search runs
|
|
|
|
|
keys.erase(s);
|
|
|
|
|
}
|
|
|
|
|
usedText = vector<string>();
|
2018-10-30 19:52:29 +00:00
|
|
|
|
if (! mi.empty()) {
|
2018-10-28 18:40:14 +00:00
|
|
|
|
// Disable language
|
|
|
|
|
keys["foreignlanguage"].disabled = true;
|
|
|
|
|
disableLanguageOverride = true;
|
2021-02-11 13:03:46 +00:00
|
|
|
|
removeMathHull = false;
|
2018-10-28 18:40:14 +00:00
|
|
|
|
}
|
2021-02-11 13:03:46 +00:00
|
|
|
|
else {
|
|
|
|
|
removeMathHull = true; // used later if not isPatternString
|
2018-10-28 18:40:14 +00:00
|
|
|
|
disableLanguageOverride = false;
|
2021-02-11 13:03:46 +00:00
|
|
|
|
}
|
2018-10-28 18:40:14 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
if (disableLanguageOverride) {
|
|
|
|
|
keys["foreignlanguage"].disabled = true;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2018-10-30 19:52:29 +00:00
|
|
|
|
math_pos = mi.getFirstPos();
|
2019-03-21 11:53:41 +00:00
|
|
|
|
for (sregex_iterator it(interval_.par.begin(), interval_.par.end(), rkeys), end; it != end; ++it) {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
sub = *it;
|
2021-01-06 13:22:26 +00:00
|
|
|
|
if ((sub.position(2) - sub.position(0)) %2 == 1) {
|
|
|
|
|
// prefixed by an odd number of '\\'
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
string key = sub.str(5);
|
2018-10-28 18:40:14 +00:00
|
|
|
|
if (key == "") {
|
2021-01-06 13:22:26 +00:00
|
|
|
|
if (sub.str(2)[0] == '\\')
|
|
|
|
|
key = sub.str(2)[1];
|
2018-12-05 12:36:43 +00:00
|
|
|
|
else {
|
2021-01-06 13:22:26 +00:00
|
|
|
|
key = sub.str(2);
|
2018-12-05 12:36:43 +00:00
|
|
|
|
}
|
2021-01-06 01:12:58 +00:00
|
|
|
|
}
|
2021-01-31 08:53:06 +00:00
|
|
|
|
KeysIterator it_key = keys.find(key);
|
|
|
|
|
if (it_key != keys.end()) {
|
|
|
|
|
if (it_key->second.keytype == KeyInfo::headRemove) {
|
|
|
|
|
KeyInfo found1 = it_key->second;
|
2020-12-14 19:43:39 +00:00
|
|
|
|
found1.disabled = true;
|
|
|
|
|
found1.head = "\\" + key + "{";
|
2021-01-06 13:22:26 +00:00
|
|
|
|
found1._tokenstart = sub.position(size_t(2));
|
2020-12-14 19:43:39 +00:00
|
|
|
|
found1._tokensize = found1.head.length();
|
|
|
|
|
found1._dataStart = found1._tokenstart + found1.head.length();
|
|
|
|
|
int endpos = interval_.findclosing(found1._dataStart, interval_.par.length(), '{', '}', 1);
|
|
|
|
|
found1._dataEnd = endpos;
|
|
|
|
|
removeHead(found1);
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
if (evaluatingRegexp) {
|
2021-01-06 13:22:26 +00:00
|
|
|
|
if (sub.str(3).compare("endregexp") == 0) {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
evaluatingRegexp = false;
|
|
|
|
|
// found._tokenstart already set
|
2021-01-06 13:22:26 +00:00
|
|
|
|
found._dataEnd = sub.position(size_t(2)) + 13;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
found._dataStart = found._dataEnd;
|
|
|
|
|
found._tokensize = found._dataEnd - found._tokenstart;
|
|
|
|
|
found.parenthesiscount = 0;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
found.head = interval_.par.substr(found._tokenstart, found._tokensize);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
2019-03-02 14:42:38 +00:00
|
|
|
|
else {
|
2019-02-22 12:21:23 +00:00
|
|
|
|
continue;
|
2019-03-02 14:42:38 +00:00
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2018-10-29 12:17:54 +00:00
|
|
|
|
if (evaluatingMath) {
|
2021-01-06 13:22:26 +00:00
|
|
|
|
if (size_t(sub.position(size_t(2))) < mi.getEndPos())
|
2018-10-29 12:17:54 +00:00
|
|
|
|
continue;
|
|
|
|
|
evaluatingMath = false;
|
2018-10-30 19:52:29 +00:00
|
|
|
|
mi.incrEntry();
|
|
|
|
|
math_pos = mi.getStartPos();
|
2018-10-29 12:17:54 +00:00
|
|
|
|
}
|
2021-01-31 08:53:06 +00:00
|
|
|
|
if (it_key == keys.end()) {
|
2019-02-12 13:21:14 +00:00
|
|
|
|
found = KeyInfo(KeyInfo::isStandard, 0, true);
|
2020-12-31 15:53:46 +00:00
|
|
|
|
LYXERR(Debug::INFO, "Undefined key " << key << " ==> will be used as text");
|
|
|
|
|
found = KeyInfo(KeyInfo::isText, 0, false);
|
2019-02-12 13:21:14 +00:00
|
|
|
|
if (isPatternString) {
|
|
|
|
|
found.keytype = KeyInfo::isChar;
|
|
|
|
|
found.disabled = false;
|
|
|
|
|
found.used = true;
|
|
|
|
|
}
|
|
|
|
|
keys[key] = found;
|
2020-12-31 15:53:46 +00:00
|
|
|
|
usedText.push_back(key);
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
2019-02-12 13:21:14 +00:00
|
|
|
|
else
|
|
|
|
|
found = keys[key];
|
2018-10-28 18:40:14 +00:00
|
|
|
|
if (key.compare("regexp") == 0) {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
evaluatingRegexp = true;
|
2021-01-06 13:22:26 +00:00
|
|
|
|
found._tokenstart = sub.position(size_t(2));
|
2018-10-18 15:37:15 +00:00
|
|
|
|
found._tokensize = 0;
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
// Handle the other params of key
|
|
|
|
|
if (found.keytype == KeyInfo::isIgnored)
|
|
|
|
|
continue;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
else if (found.keytype == KeyInfo::isMath) {
|
2021-01-06 13:22:26 +00:00
|
|
|
|
if (size_t(sub.position(size_t(2))) == math_pos) {
|
2018-10-28 18:40:14 +00:00
|
|
|
|
found = keys[key];
|
2021-01-06 13:22:26 +00:00
|
|
|
|
found._tokenstart = sub.position(size_t(2));
|
2018-10-30 19:52:29 +00:00
|
|
|
|
found._tokensize = mi.getSize();
|
2018-10-22 18:19:36 +00:00
|
|
|
|
found._dataEnd = found._tokenstart + found._tokensize;
|
|
|
|
|
found._dataStart = found._dataEnd;
|
|
|
|
|
found.parenthesiscount = 0;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
found.head = interval_.par.substr(found._tokenstart, found._tokensize);
|
2021-02-11 13:03:46 +00:00
|
|
|
|
if (removeMathHull) {
|
|
|
|
|
interval_.addIntervall(found._tokenstart, found._tokenstart + mi.getPrefixSize());
|
|
|
|
|
interval_.addIntervall(found._dataEnd - mi.getPostfixSize(), found._dataEnd);
|
|
|
|
|
}
|
2021-02-19 16:06:37 +00:00
|
|
|
|
else {
|
|
|
|
|
// Treat all math constructs as simple math
|
|
|
|
|
interval_.par[found._tokenstart] = '$';
|
|
|
|
|
interval_.par[found._dataEnd - mi.getPostfixSize()] = '$';
|
|
|
|
|
interval_.addIntervall(found._tokenstart + 1, found._tokenstart + mi.getPrefixSize());
|
|
|
|
|
interval_.addIntervall(found._dataEnd - mi.getPostfixSize() + 1, found._dataEnd);
|
|
|
|
|
}
|
2018-10-29 12:17:54 +00:00
|
|
|
|
evaluatingMath = true;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
}
|
2018-11-07 08:35:16 +00:00
|
|
|
|
else {
|
|
|
|
|
// begin|end of unknown env, discard
|
2018-11-12 11:17:16 +00:00
|
|
|
|
// First handle tables
|
|
|
|
|
// longtable|tabular
|
|
|
|
|
bool discardComment;
|
2019-02-10 17:00:55 +00:00
|
|
|
|
found = keys[key];
|
|
|
|
|
found.keytype = KeyInfo::doRemove;
|
2021-01-06 13:22:26 +00:00
|
|
|
|
if ((sub.str(7).compare("longtable") == 0) ||
|
|
|
|
|
(sub.str(7).compare("tabular") == 0)) {
|
2018-11-12 11:17:16 +00:00
|
|
|
|
discardComment = true; /* '%' */
|
|
|
|
|
}
|
2019-02-10 17:00:55 +00:00
|
|
|
|
else {
|
2018-11-12 11:17:16 +00:00
|
|
|
|
discardComment = false;
|
2019-02-16 17:39:10 +00:00
|
|
|
|
static regex const removeArgs("^(multicols|multipar|sectionbox|subsectionbox|tcolorbox)$");
|
|
|
|
|
smatch sub2;
|
2021-01-06 13:22:26 +00:00
|
|
|
|
string token = sub.str(7);
|
2019-02-16 17:39:10 +00:00
|
|
|
|
if (regex_match(token, sub2, removeArgs)) {
|
2019-02-10 17:00:55 +00:00
|
|
|
|
found.keytype = KeyInfo::removeWithArg;
|
2019-02-16 17:39:10 +00:00
|
|
|
|
}
|
2019-02-10 17:00:55 +00:00
|
|
|
|
}
|
2021-01-06 13:22:26 +00:00
|
|
|
|
// discard spaces before pos(2)
|
|
|
|
|
int pos = sub.position(size_t(2));
|
2018-11-07 08:35:16 +00:00
|
|
|
|
int count;
|
|
|
|
|
for (count = 0; pos - count > 0; count++) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
char c = interval_.par[pos-count-1];
|
2018-11-12 11:17:16 +00:00
|
|
|
|
if (discardComment) {
|
|
|
|
|
if ((c != ' ') && (c != '%'))
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
else if (c != ' ')
|
2018-11-07 08:35:16 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
found._tokenstart = pos - count;
|
2021-01-06 13:22:26 +00:00
|
|
|
|
if (sub.str(3).compare(0, 5, "begin") == 0) {
|
|
|
|
|
size_t pos1 = pos + sub.str(2).length();
|
|
|
|
|
if (sub.str(7).compare("cjk") == 0) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
pos1 = interval_.findclosing(pos1+1, interval_.par.length()) + 1;
|
|
|
|
|
if ((interval_.par[pos1] == '{') && (interval_.par[pos1+1] == '}'))
|
2018-11-18 09:37:12 +00:00
|
|
|
|
pos1 += 2;
|
|
|
|
|
found.keytype = KeyInfo::isMain;
|
|
|
|
|
found._dataStart = pos1;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
found._dataEnd = interval_.par.length();
|
2018-11-18 09:37:12 +00:00
|
|
|
|
found.disabled = keys["foreignlanguage"].disabled;
|
|
|
|
|
found.used = keys["foreignlanguage"].used;
|
|
|
|
|
found._tokensize = pos1 - found._tokenstart;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
found.head = interval_.par.substr(found._tokenstart, found._tokensize);
|
2018-11-07 12:14:50 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2019-02-22 12:21:23 +00:00
|
|
|
|
// Swallow possible optional params
|
2019-03-21 11:53:41 +00:00
|
|
|
|
while (interval_.par[pos1] == '[') {
|
|
|
|
|
pos1 = interval_.findclosing(pos1+1, interval_.par.length(), '[', ']')+1;
|
2018-11-18 09:37:12 +00:00
|
|
|
|
}
|
2019-02-22 12:21:23 +00:00
|
|
|
|
// Swallow also the eventual parameter
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (interval_.par[pos1] == '{') {
|
|
|
|
|
found._dataEnd = interval_.findclosing(pos1+1, interval_.par.length()) + 1;
|
2018-11-18 09:37:12 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
found._dataEnd = pos1;
|
|
|
|
|
}
|
|
|
|
|
found._dataStart = found._dataEnd;
|
|
|
|
|
found._tokensize = count + found._dataEnd - pos;
|
|
|
|
|
found.parenthesiscount = 0;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
found.head = interval_.par.substr(found._tokenstart, found._tokensize);
|
2018-11-18 09:37:12 +00:00
|
|
|
|
found.disabled = true;
|
2018-11-07 12:14:50 +00:00
|
|
|
|
}
|
2018-11-07 08:35:16 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2018-11-18 09:37:12 +00:00
|
|
|
|
// Handle "\end{...}"
|
2021-01-06 13:22:26 +00:00
|
|
|
|
found._dataStart = pos + sub.str(2).length();
|
2018-11-07 08:35:16 +00:00
|
|
|
|
found._dataEnd = found._dataStart;
|
2018-11-18 09:37:12 +00:00
|
|
|
|
found._tokensize = count + found._dataEnd - pos;
|
|
|
|
|
found.parenthesiscount = 0;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
found.head = interval_.par.substr(found._tokenstart, found._tokensize);
|
2018-11-18 09:37:12 +00:00
|
|
|
|
found.disabled = true;
|
2018-11-07 08:35:16 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
else if (found.keytype != KeyInfo::isRegex) {
|
2021-01-06 13:22:26 +00:00
|
|
|
|
found._tokenstart = sub.position(size_t(2));
|
2018-10-18 15:37:15 +00:00
|
|
|
|
if (found.parenthesiscount == 0) {
|
|
|
|
|
// Probably to be discarded
|
2021-01-06 13:22:26 +00:00
|
|
|
|
size_t following_pos = sub.position(size_t(2)) + sub.str(5).length() + 1;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
char following = interval_.par[following_pos];
|
2018-10-24 09:07:11 +00:00
|
|
|
|
if (following == ' ')
|
2021-01-06 13:22:26 +00:00
|
|
|
|
found.head = "\\" + sub.str(5) + " ";
|
2018-10-24 09:07:11 +00:00
|
|
|
|
else if (following == '=') {
|
|
|
|
|
// like \uldepth=1000pt
|
2021-01-06 13:22:26 +00:00
|
|
|
|
found.head = sub.str(2);
|
2018-10-24 09:07:11 +00:00
|
|
|
|
}
|
2018-10-19 17:11:20 +00:00
|
|
|
|
else
|
2018-10-28 18:40:14 +00:00
|
|
|
|
found.head = "\\" + key;
|
2018-10-19 17:11:20 +00:00
|
|
|
|
found._tokensize = found.head.length();
|
2018-10-22 18:19:36 +00:00
|
|
|
|
found._dataEnd = found._tokenstart + found._tokensize;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
found._dataStart = found._dataEnd;
|
|
|
|
|
}
|
|
|
|
|
else {
|
2019-02-07 12:35:47 +00:00
|
|
|
|
int params = found._tokenstart + key.length() + 1;
|
|
|
|
|
if (evaluatingOptional) {
|
|
|
|
|
if (size_t(found._tokenstart) > optionalEnd) {
|
|
|
|
|
evaluatingOptional = false;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
found.disabled = true;
|
|
|
|
|
}
|
|
|
|
|
}
|
2019-02-17 23:40:55 +00:00
|
|
|
|
int optend = params;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
while (interval_.par[optend] == '[') {
|
2019-02-07 12:35:47 +00:00
|
|
|
|
// discard optional parameters
|
2019-03-21 11:53:41 +00:00
|
|
|
|
optend = interval_.findclosing(optend+1, interval_.par.length(), '[', ']') + 1;
|
2019-02-17 23:40:55 +00:00
|
|
|
|
}
|
|
|
|
|
if (optend > params) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
key += interval_.par.substr(params, optend-params);
|
2019-02-07 12:35:47 +00:00
|
|
|
|
evaluatingOptional = true;
|
|
|
|
|
optionalEnd = optend;
|
2021-01-14 22:25:58 +00:00
|
|
|
|
if (found.keytype == KeyInfo::isSectioning) {
|
|
|
|
|
// Remove optional values (but still keep in header)
|
|
|
|
|
interval_.addIntervall(params, optend);
|
|
|
|
|
}
|
2019-02-07 12:35:47 +00:00
|
|
|
|
}
|
2021-01-06 13:22:26 +00:00
|
|
|
|
string token = sub.str(7);
|
2021-01-02 17:37:14 +00:00
|
|
|
|
int closings;
|
|
|
|
|
if (interval_.par[optend] != '{') {
|
|
|
|
|
closings = 0;
|
|
|
|
|
found.parenthesiscount = 0;
|
2021-01-04 06:16:59 +00:00
|
|
|
|
found.head = "\\" + key;
|
2021-01-02 17:37:14 +00:00
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
closings = found.parenthesiscount;
|
2018-10-20 10:47:37 +00:00
|
|
|
|
if (found.parenthesiscount == 1) {
|
2018-10-28 18:40:14 +00:00
|
|
|
|
found.head = "\\" + key + "{";
|
2018-10-20 10:47:37 +00:00
|
|
|
|
}
|
2019-02-17 23:40:55 +00:00
|
|
|
|
else if (found.parenthesiscount > 1) {
|
|
|
|
|
if (token != "") {
|
2021-01-06 13:22:26 +00:00
|
|
|
|
found.head = sub.str(2) + "{";
|
2019-02-17 23:40:55 +00:00
|
|
|
|
closings = found.parenthesiscount - 1;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
found.head = "\\" + key + "{";
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
2019-02-21 13:45:41 +00:00
|
|
|
|
found._tokensize = found.head.length();
|
2018-10-18 15:37:15 +00:00
|
|
|
|
found._dataStart = found._tokenstart + found.head.length();
|
2019-12-29 16:40:13 +00:00
|
|
|
|
if (found.keytype == KeyInfo::doRemove) {
|
2021-01-04 06:16:59 +00:00
|
|
|
|
if (closings > 0) {
|
|
|
|
|
size_t endpar = 2 + interval_.findclosing(found._dataStart, interval_.par.length(), '{', '}', closings);
|
2021-01-06 01:12:58 +00:00
|
|
|
|
if (endpar >= interval_.par.length())
|
|
|
|
|
found._dataStart = interval_.par.length();
|
|
|
|
|
else
|
|
|
|
|
found._dataStart = endpar;
|
2021-01-04 06:16:59 +00:00
|
|
|
|
found._tokensize = found._dataStart - found._tokenstart;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
found._dataStart = found._tokenstart + found._tokensize;
|
2021-01-12 18:33:29 +00:00
|
|
|
|
}
|
2020-01-01 13:03:21 +00:00
|
|
|
|
closings = 0;
|
2019-12-29 16:40:13 +00:00
|
|
|
|
}
|
2021-01-14 13:44:21 +00:00
|
|
|
|
if (interval_.par.substr(found._dataStart, 15).compare("\\endarguments{}") == 0) {
|
2019-02-17 23:40:55 +00:00
|
|
|
|
found._dataStart += 15;
|
|
|
|
|
}
|
2020-01-01 13:03:21 +00:00
|
|
|
|
size_t endpos;
|
|
|
|
|
if (closings < 1)
|
|
|
|
|
endpos = found._dataStart - 1;
|
|
|
|
|
else
|
|
|
|
|
endpos = interval_.findclosing(found._dataStart, interval_.par.length(), '{', '}', closings);
|
2019-02-17 23:40:55 +00:00
|
|
|
|
if (found.keytype == KeyInfo::isList) {
|
|
|
|
|
// Check if it really is list env
|
|
|
|
|
static regex const listre("^([a-z]+)$");
|
|
|
|
|
smatch sub2;
|
|
|
|
|
if (!regex_match(token, sub2, listre)) {
|
|
|
|
|
// Change the key of this entry. It is not in a list/item environment
|
|
|
|
|
found.keytype = KeyInfo::endArguments;
|
|
|
|
|
}
|
|
|
|
|
}
|
2018-12-11 16:27:50 +00:00
|
|
|
|
if (found.keytype == KeyInfo::noMain) {
|
|
|
|
|
evaluatingCode = true;
|
|
|
|
|
codeEnd = endpos;
|
|
|
|
|
codeStart = found._dataStart;
|
|
|
|
|
}
|
|
|
|
|
else if (evaluatingCode) {
|
|
|
|
|
if (size_t(found._dataStart) > codeEnd)
|
|
|
|
|
evaluatingCode = false;
|
|
|
|
|
else if (found.keytype == KeyInfo::isMain) {
|
|
|
|
|
// Disable this key, treat it as standard
|
|
|
|
|
found.keytype = KeyInfo::isStandard;
|
|
|
|
|
found.disabled = true;
|
2020-12-31 15:53:46 +00:00
|
|
|
|
if ((codeEnd +1 >= interval_.par.length()) &&
|
2018-12-11 16:27:50 +00:00
|
|
|
|
(found._tokenstart == codeStart)) {
|
|
|
|
|
// trickery, because the code inset starts
|
|
|
|
|
// with \selectlanguage ...
|
|
|
|
|
codeEnd = endpos;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (entries_.size() > 1) {
|
|
|
|
|
entries_[entries_.size()-1]._dataEnd = codeEnd;
|
2018-12-11 16:27:50 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if ((endpos == interval_.par.length()) &&
|
2018-11-08 08:59:51 +00:00
|
|
|
|
(found.keytype == KeyInfo::doRemove)) {
|
|
|
|
|
// Missing closing => error in latex-input?
|
|
|
|
|
// therefore do not delete remaining data
|
|
|
|
|
found._dataStart -= 1;
|
|
|
|
|
found._dataEnd = found._dataStart;
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
found._dataEnd = endpos;
|
2018-12-16 13:50:38 +00:00
|
|
|
|
}
|
|
|
|
|
if (isPatternString) {
|
|
|
|
|
keys[key].used = true;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
entries_.push_back(found);
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2018-10-27 14:57:42 +00:00
|
|
|
|
void LatexInfo::makeKey(const string &keysstring, KeyInfo keyI, bool isPatternString)
|
2018-10-12 14:47:07 +00:00
|
|
|
|
{
|
2018-10-19 17:11:20 +00:00
|
|
|
|
stringstream s(keysstring);
|
|
|
|
|
string key;
|
|
|
|
|
const char delim = '|';
|
|
|
|
|
while (getline(s, key, delim)) {
|
2018-10-27 14:57:42 +00:00
|
|
|
|
KeyInfo keyII(keyI);
|
|
|
|
|
if (isPatternString) {
|
|
|
|
|
keyII.used = false;
|
|
|
|
|
}
|
|
|
|
|
else if ( !keys[key].used)
|
|
|
|
|
keyII.disabled = true;
|
2018-10-19 17:11:20 +00:00
|
|
|
|
keys[key] = keyII;
|
|
|
|
|
}
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
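// Illustrative sketch (not from the original sources): a call such as
//   makeKey("textbf|textit", KeyInfo(KeyInfo::isStandard, 1, false), isPatternString);
// registers the two keys "textbf" and "textit" separately, each getting its
// own copy of the passed KeyInfo, because the input string is split on '|'.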
|
|
|
|
|
|
2018-10-27 14:57:42 +00:00
|
|
|
|
void LatexInfo::buildKeys(bool isPatternString)
|
2018-10-12 14:47:07 +00:00
|
|
|
|
{
|
2018-10-27 14:57:42 +00:00
|
|
|
|
|
|
|
|
|
static bool keysBuilt = false;
|
|
|
|
|
if (keysBuilt && !isPatternString) return;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
|
2020-12-14 19:43:39 +00:00
|
|
|
|
// Keys to ignore in any case
|
2021-02-03 11:18:33 +00:00
|
|
|
|
makeKey("text|textcyrillic|lyxmathsym|ensuremath", KeyInfo(KeyInfo::headRemove, 1, true), true);
|
2018-12-11 16:27:50 +00:00
|
|
|
|
// Known standard keys with 1 parameter.
|
2018-10-19 17:11:20 +00:00
|
|
|
|
// Split is done, if not at start of region
|
2018-11-09 05:07:17 +00:00
|
|
|
|
makeKey("textsf|textss|texttt", KeyInfo(KeyInfo::isStandard, 1, ignoreFormats.getFamily()), isPatternString);
|
|
|
|
|
makeKey("textbf", KeyInfo(KeyInfo::isStandard, 1, ignoreFormats.getSeries()), isPatternString);
|
|
|
|
|
makeKey("textit|textsc|textsl", KeyInfo(KeyInfo::isStandard, 1, ignoreFormats.getShape()), isPatternString);
|
|
|
|
|
makeKey("uuline|uline|uwave", KeyInfo(KeyInfo::isStandard, 1, ignoreFormats.getUnderline()), isPatternString);
|
|
|
|
|
makeKey("emph|noun", KeyInfo(KeyInfo::isStandard, 1, ignoreFormats.getMarkUp()), isPatternString);
|
|
|
|
|
makeKey("sout|xout", KeyInfo(KeyInfo::isStandard, 1, ignoreFormats.getStrikeOut()), isPatternString);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
|
2018-10-23 19:12:22 +00:00
|
|
|
|
makeKey("section|subsection|subsubsection|paragraph|subparagraph|minisec",
|
2018-11-09 05:07:17 +00:00
|
|
|
|
KeyInfo(KeyInfo::isSectioning, 1, ignoreFormats.getSectioning()), isPatternString);
|
2018-10-23 19:12:22 +00:00
|
|
|
|
makeKey("section*|subsection*|subsubsection*|paragraph*",
|
2018-11-09 05:07:17 +00:00
|
|
|
|
KeyInfo(KeyInfo::isSectioning, 1, ignoreFormats.getSectioning()), isPatternString);
|
|
|
|
|
makeKey("part|part*|chapter|chapter*", KeyInfo(KeyInfo::isSectioning, 1, ignoreFormats.getSectioning()), isPatternString);
|
2019-02-19 22:11:09 +00:00
|
|
|
|
makeKey("title|subtitle|author|subject|publishers|dedication|uppertitleback|lowertitleback|extratitle|lyxaddress|lyxrightaddress", KeyInfo(KeyInfo::isTitle, 1, ignoreFormats.getFrontMatter()), isPatternString);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
// Regex
|
2018-10-27 14:57:42 +00:00
|
|
|
|
makeKey("regexp", KeyInfo(KeyInfo::isRegex, 1, false), isPatternString);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
|
|
|
|
|
// Split is done, if not at start of region
|
2018-11-09 05:07:17 +00:00
|
|
|
|
makeKey("textcolor", KeyInfo(KeyInfo::isStandard, 2, ignoreFormats.getColor()), isPatternString);
|
2019-02-20 13:14:50 +00:00
|
|
|
|
makeKey("latexenvironment", KeyInfo(KeyInfo::isStandard, 2, false), isPatternString);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
|
|
|
|
|
// Split is done always.
|
2018-11-09 05:07:17 +00:00
|
|
|
|
makeKey("foreignlanguage", KeyInfo(KeyInfo::isMain, 2, ignoreFormats.getLanguage()), isPatternString);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
|
2019-02-19 22:11:09 +00:00
|
|
|
|
// Known characters
|
2018-10-19 17:11:20 +00:00
|
|
|
|
// No split
|
2018-12-13 16:12:57 +00:00
|
|
|
|
makeKey("backslash|textbackslash|slash", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
|
|
|
|
makeKey("textasciicircum|textasciitilde", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
2019-02-10 17:00:55 +00:00
|
|
|
|
makeKey("textasciiacute|texemdash", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
2018-12-13 16:12:57 +00:00
|
|
|
|
makeKey("dots|ldots", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
|
|
|
|
// Spaces
|
|
|
|
|
makeKey("quad|qquad|hfill|dotfill", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
|
|
|
|
makeKey("textvisiblespace|nobreakspace", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
|
|
|
|
makeKey("negthickspace|negmedspace|negthinspace", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
2020-12-10 10:32:59 +00:00
|
|
|
|
makeKey("thickspace|medspace|thinspace", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
2018-12-13 16:12:57 +00:00
|
|
|
|
// Skip
|
2019-02-12 13:21:14 +00:00
|
|
|
|
// makeKey("enskip|smallskip|medskip|bigskip|vfill", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
2018-12-13 16:12:57 +00:00
|
|
|
|
// Custom space/skip, remove the content (== length value)
|
2019-02-25 10:59:54 +00:00
|
|
|
|
makeKey("vspace|vspace*|hspace|hspace*|mspace", KeyInfo(KeyInfo::noContent, 1, false), isPatternString);
|
2018-11-04 20:41:04 +00:00
|
|
|
|
// Found in fr/UserGuide.lyx
|
2018-12-13 16:12:57 +00:00
|
|
|
|
makeKey("og|fg", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
|
|
|
|
// quotes
|
|
|
|
|
makeKey("textquotedbl|quotesinglbase|lyxarrow", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
|
|
|
|
makeKey("textquotedblleft|textquotedblright", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
// Known macros to remove (including their parameter)
|
|
|
|
|
// No split
|
2019-02-21 19:32:08 +00:00
|
|
|
|
makeKey("input|inputencoding|label|ref|index|bibitem", KeyInfo(KeyInfo::doRemove, 1, false), isPatternString);
|
2019-02-16 17:39:10 +00:00
|
|
|
|
makeKey("addtocounter|setlength", KeyInfo(KeyInfo::noContent, 2, true), isPatternString);
|
2018-12-11 16:27:50 +00:00
|
|
|
|
// handle like standard keys with 1 parameter.
|
2018-12-13 16:12:57 +00:00
|
|
|
|
makeKey("url|href|vref|thanks", KeyInfo(KeyInfo::isStandard, 1, false), isPatternString);
|
2018-12-11 16:27:50 +00:00
|
|
|
|
|
2019-12-29 16:40:13 +00:00
|
|
|
|
// Ignore deleted text
|
2020-01-01 13:03:21 +00:00
|
|
|
|
makeKey("lyxdeleted", KeyInfo(KeyInfo::doRemove, 3, false), isPatternString);
|
2019-12-29 16:40:13 +00:00
|
|
|
|
// but preserve added text
|
2020-01-01 13:03:21 +00:00
|
|
|
|
makeKey("lyxadded", KeyInfo(KeyInfo::doRemove, 2, false), isPatternString);
|
2019-12-29 16:40:13 +00:00
|
|
|
|
|
2018-10-19 17:11:20 +00:00
|
|
|
|
// Macros to remove, but let the parameter survive
|
|
|
|
|
// No split
|
2018-12-13 16:12:57 +00:00
|
|
|
|
makeKey("menuitem|textmd|textrm", KeyInfo(KeyInfo::isStandard, 1, true), isPatternString);
|
2018-12-11 16:27:50 +00:00
|
|
|
|
|
|
|
|
|
// Remove language spec from content of these insets
|
2019-02-10 17:00:55 +00:00
|
|
|
|
makeKey("code", KeyInfo(KeyInfo::noMain, 1, false), isPatternString);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
|
|
|
|
|
// Same effect as previous, parameter will survive (because there is none anyway)
|
|
|
|
|
// No split
|
2019-02-19 22:11:09 +00:00
|
|
|
|
makeKey("noindent|textcompwordmark|maketitle", KeyInfo(KeyInfo::isStandard, 0, true), isPatternString);
|
2018-11-08 08:59:51 +00:00
|
|
|
|
// Remove table decorations
|
2018-11-07 08:35:16 +00:00
|
|
|
|
makeKey("hline|tabularnewline|toprule|bottomrule|midrule", KeyInfo(KeyInfo::doRemove, 0, true), isPatternString);
|
2019-02-19 22:11:09 +00:00
|
|
|
|
// Discard shape-header.
|
2019-02-21 13:45:41 +00:00
|
|
|
|
// For footnote or shortcut too, because of lang settings
|
2019-02-19 22:11:09 +00:00
|
|
|
|
// and wrong handling if 'KeyInfo::noMain' were used
|
2018-11-08 08:59:51 +00:00
|
|
|
|
makeKey("circlepar|diamondpar|heartpar|nutpar", KeyInfo(KeyInfo::isStandard, 1, true), isPatternString);
|
|
|
|
|
makeKey("trianglerightpar|hexagonpar|starpar", KeyInfo(KeyInfo::isStandard, 1, true), isPatternString);
|
|
|
|
|
makeKey("triangleuppar|triangledownpar|droppar", KeyInfo(KeyInfo::isStandard, 1, true), isPatternString);
|
|
|
|
|
makeKey("triangleleftpar|shapepar|dropuppar", KeyInfo(KeyInfo::isStandard, 1, true), isPatternString);
|
2019-02-25 10:59:54 +00:00
|
|
|
|
makeKey("hphantom|vphantom|footnote|shortcut|include|includegraphics", KeyInfo(KeyInfo::isStandard, 1, true), isPatternString);
|
2019-02-21 19:32:08 +00:00
|
|
|
|
makeKey("parbox", KeyInfo(KeyInfo::doRemove, 1, true), isPatternString);
|
2018-11-12 11:17:16 +00:00
|
|
|
|
// like ('\tiny{}' or '\tiny ' ... )
|
2018-12-16 13:50:38 +00:00
|
|
|
|
makeKey("footnotesize|tiny|scriptsize|small|large|Large|LARGE|huge|Huge", KeyInfo(KeyInfo::isSize, 0, false), isPatternString);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
|
|
|
|
|
// Survives, like known character
|
2020-12-27 11:01:23 +00:00
|
|
|
|
// makeKey("lyx|LyX|latex|LaTeX|latexe|LaTeXe|tex|TeX", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
2020-12-31 15:53:46 +00:00
|
|
|
|
makeKey("tableofcontents", KeyInfo(KeyInfo::isChar, 0, false), isPatternString);
|
2019-02-17 23:40:55 +00:00
|
|
|
|
makeKey("item|listitem", KeyInfo(KeyInfo::isList, 1, false), isPatternString);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
|
2018-10-28 18:40:14 +00:00
|
|
|
|
makeKey("begin|end", KeyInfo(KeyInfo::isMath, 1, false), isPatternString);
|
|
|
|
|
makeKey("[|]", KeyInfo(KeyInfo::isMath, 1, false), isPatternString);
|
|
|
|
|
makeKey("$", KeyInfo(KeyInfo::isMath, 1, false), isPatternString);
|
2018-10-23 17:59:08 +00:00
|
|
|
|
|
2019-02-17 23:40:55 +00:00
|
|
|
|
makeKey("par|uldepth|ULdepth|protect|nobreakdash|medskip|relax", KeyInfo(KeyInfo::isStandard, 0, true), isPatternString);
|
2018-11-20 13:36:11 +00:00
|
|
|
|
// Remove RTL/LTR marker
|
2018-12-13 16:12:57 +00:00
|
|
|
|
makeKey("l|r|textlr|textfr|textar|beginl|endl", KeyInfo(KeyInfo::isStandard, 0, true), isPatternString);
|
2019-02-05 07:04:47 +00:00
|
|
|
|
makeKey("lettrine", KeyInfo(KeyInfo::cleanToStart, 0, true), isPatternString);
|
2019-02-07 12:35:47 +00:00
|
|
|
|
makeKey("lyxslide", KeyInfo(KeyInfo::isSectioning, 1, true), isPatternString);
|
2019-02-05 07:04:47 +00:00
|
|
|
|
makeKey("endarguments", KeyInfo(KeyInfo::endArguments, 0, true), isPatternString);
|
2019-02-07 12:35:47 +00:00
|
|
|
|
makeKey("twocolumn", KeyInfo(KeyInfo::removeWithArg, 2, true), isPatternString);
|
2019-02-16 17:39:10 +00:00
|
|
|
|
makeKey("tnotetext|ead|fntext|cortext|address", KeyInfo(KeyInfo::removeWithArg, 0, true), isPatternString);
|
2019-02-07 12:35:47 +00:00
|
|
|
|
makeKey("lyxend", KeyInfo(KeyInfo::isStandard, 0, true), isPatternString);
|
2018-10-27 14:57:42 +00:00
|
|
|
|
if (isPatternString) {
|
|
|
|
|
// Allow the first searched string to rebuild the keys too
|
|
|
|
|
keysBuilt = false;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
// no need to rebuild again
|
|
|
|
|
keysBuilt = true;
|
|
|
|
|
}
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
|
2018-10-14 18:39:13 +00:00
|
|
|
|
/*
|
2018-10-18 15:37:15 +00:00
|
|
|
|
* Keep the list of currently opened parentheses up to date
|
|
|
|
|
* (e.g. depth == 4 means there are 4 '{' not processed yet)
|
2018-10-14 18:39:13 +00:00
|
|
|
|
*/
|
2018-10-18 15:37:15 +00:00
|
|
|
|
void Intervall::handleParentheses(int lastpos, bool closingAllowed)
|
2018-10-13 19:02:53 +00:00
|
|
|
|
{
|
2018-10-18 15:37:15 +00:00
|
|
|
|
int skip = 0;
|
|
|
|
|
for (int i = depts[actualdeptindex]; i < lastpos; i+= 1 + skip) {
|
|
|
|
|
char c;
|
|
|
|
|
c = par[i];
|
|
|
|
|
skip = 0;
|
|
|
|
|
if (c == '\\') skip = 1;
|
|
|
|
|
else if (c == '{') {
|
|
|
|
|
handleOpenP(i);
|
2018-10-14 18:39:13 +00:00
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
else if (c == '}') {
|
|
|
|
|
handleCloseP(i, closingAllowed);
|
2018-10-14 18:39:13 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
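// Illustrative sketch (assumption, not from the original sources): scanning a
// span like "a{b\{c}" would call handleOpenP() for the first '{', skip the
// escaped "\{" thanks to the backslash skip, and call handleCloseP() for the
// final '}', so the bookkeeping of open parentheses ends where it started.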
|
|
|
|
|
|
2018-10-18 15:37:15 +00:00
|
|
|
|
#if (0)
|
|
|
|
|
string Intervall::show(int lastpos)
|
2018-10-14 18:39:13 +00:00
|
|
|
|
{
|
2018-10-18 15:37:15 +00:00
|
|
|
|
int idx = 0; /* int intervalls */
|
|
|
|
|
string s;
|
|
|
|
|
int i = 0;
|
|
|
|
|
for (idx = 0; idx <= ignoreidx; idx++) {
|
|
|
|
|
while (i < lastpos) {
|
|
|
|
|
int printsize;
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (i <= borders[idx].low) {
|
|
|
|
|
if (borders[idx].low > lastpos)
|
2018-10-18 15:37:15 +00:00
|
|
|
|
printsize = lastpos - i;
|
2018-10-14 18:39:13 +00:00
|
|
|
|
else
|
2018-10-19 17:11:20 +00:00
|
|
|
|
printsize = borders[idx].low - i;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
s += par.substr(i, printsize);
|
|
|
|
|
i += printsize;
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (i >= borders[idx].low)
|
|
|
|
|
i = borders[idx].upper;
|
2018-10-14 18:39:13 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2018-10-19 17:11:20 +00:00
|
|
|
|
i = borders[idx].upper;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
break;
|
2018-10-14 18:39:13 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
if (lastpos > i) {
|
|
|
|
|
s += par.substr(i, lastpos-i);
|
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return s;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
#endif
|
|
|
|
|
|
|
|
|
|
void Intervall::output(ostringstream &os, int lastpos)
|
|
|
|
|
{
|
|
|
|
|
// get number of chars to output
|
|
|
|
|
int idx = 0; /* int intervalls */
|
|
|
|
|
int i = 0;
|
2019-02-19 22:11:09 +00:00
|
|
|
|
int printed = 0;
|
|
|
|
|
string startTitle = titleValue;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
for (idx = 0; idx <= ignoreidx; idx++) {
|
|
|
|
|
if (i < lastpos) {
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (i <= borders[idx].low) {
|
2018-12-18 05:53:58 +00:00
|
|
|
|
int printsize;
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (borders[idx].low > lastpos)
|
2018-10-18 15:37:15 +00:00
|
|
|
|
printsize = lastpos - i;
|
2018-10-14 18:39:13 +00:00
|
|
|
|
else
|
2018-10-19 17:11:20 +00:00
|
|
|
|
printsize = borders[idx].low - i;
|
2019-02-19 22:11:09 +00:00
|
|
|
|
if (printsize > 0) {
|
|
|
|
|
os << startTitle << par.substr(i, printsize);
|
|
|
|
|
i += printsize;
|
|
|
|
|
printed += printsize;
|
|
|
|
|
startTitle = "";
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
handleParentheses(i, false);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
if (i >= borders[idx].low)
|
|
|
|
|
i = borders[idx].upper;
|
2018-10-14 18:39:13 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2018-10-19 17:11:20 +00:00
|
|
|
|
i = borders[idx].upper;
|
2018-10-14 18:39:13 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
else
|
|
|
|
|
break;
|
2018-10-14 18:39:13 +00:00
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
if (lastpos > i) {
|
2019-02-19 22:11:09 +00:00
|
|
|
|
os << startTitle << par.substr(i, lastpos-i);
|
|
|
|
|
printed += lastpos-i;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
handleParentheses(lastpos, false);
|
2019-12-29 16:40:13 +00:00
|
|
|
|
int startindex;
|
|
|
|
|
if (keys["foreignlanguage"].disabled)
|
|
|
|
|
startindex = actualdeptindex-langcount;
|
|
|
|
|
else
|
|
|
|
|
startindex = actualdeptindex;
|
|
|
|
|
for (int i = startindex; i > 0; --i) {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
os << "}";
|
|
|
|
|
}
|
2019-02-19 22:11:09 +00:00
|
|
|
|
if (hasTitle && (printed > 0))
|
|
|
|
|
os << "}";
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (! isPatternString_)
|
2018-11-02 09:32:28 +00:00
|
|
|
|
os << "\n";
|
2018-10-18 15:37:15 +00:00
|
|
|
|
handleParentheses(lastpos, true); /* extra closings '}' allowed here */
|
2018-10-14 18:39:13 +00:00
|
|
|
|
}
|
2018-10-13 19:02:53 +00:00
|
|
|
|
|
2018-10-22 18:19:36 +00:00
|
|
|
|
void LatexInfo::processRegion(int start, int region_end)
|
2018-10-14 18:39:13 +00:00
|
|
|
|
{
|
2018-10-29 06:37:32 +00:00
|
|
|
|
while (start < region_end) { /* Let {[} and {]} survive */
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int cnt = interval_.isOpeningPar(start);
|
2019-02-23 12:11:34 +00:00
|
|
|
|
if (cnt == 1) {
|
2018-10-23 17:59:08 +00:00
|
|
|
|
// Closing is allowed past the region
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int closing = interval_.findclosing(start+1, interval_.par.length());
|
|
|
|
|
interval_.addIntervall(start, start+1);
|
|
|
|
|
interval_.addIntervall(closing, closing+1);
|
2018-10-13 19:02:53 +00:00
|
|
|
|
}
|
2019-02-23 12:11:34 +00:00
|
|
|
|
else if (cnt == 3)
|
|
|
|
|
start += 2;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
start = interval_.nextNotIgnored(start+1);
|
2018-10-13 19:02:53 +00:00
|
|
|
|
}
|
|
|
|
|
}
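// Illustrative sketch (assumption, not from the original sources): in a region
// like "{abc} {[}" the braces around "abc" (isOpeningPar() == 1) would be added
// to the ignore intervals, while the three-character group "{[}" would be
// stepped over unchanged, which is what the comment above means by letting it survive.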
|
|
|
|
|
|
2020-10-09 15:50:24 +00:00
|
|
|
|
void LatexInfo::removeHead(KeyInfo const & actual, int count)
|
2018-10-12 14:47:07 +00:00
|
|
|
|
{
|
2018-10-22 18:19:36 +00:00
|
|
|
|
if (actual.parenthesiscount == 0) {
|
|
|
|
|
// "{\tiny{} ...}" ==> "{{} ...}"
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._tokenstart-count, actual._tokenstart + actual._tokensize);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
else {
|
|
|
|
|
// Remove header hull, that is "\url{abcd}" ==> "abcd"
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._tokenstart - count, actual._dataStart);
|
|
|
|
|
interval_.addIntervall(actual._dataEnd, actual._dataEnd+1);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
int LatexInfo::dispatch(ostringstream &os, int previousStart, KeyInfo &actual)
|
|
|
|
|
{
|
2018-11-04 13:54:06 +00:00
|
|
|
|
int nextKeyIdx = 0;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
switch (actual.keytype)
|
2018-11-04 13:54:06 +00:00
|
|
|
|
{
|
2019-02-19 22:11:09 +00:00
|
|
|
|
case KeyInfo::isTitle: {
|
|
|
|
|
removeHead(actual);
|
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
break;
|
|
|
|
|
}
|
2019-02-05 07:04:47 +00:00
|
|
|
|
case KeyInfo::cleanToStart: {
|
|
|
|
|
actual._dataEnd = actual._dataStart;
|
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
// Search for end of arguments
|
2019-02-07 12:35:47 +00:00
|
|
|
|
int tmpIdx = find(nextKeyIdx, KeyInfo::endArguments);
|
|
|
|
|
if (tmpIdx > 0) {
|
|
|
|
|
for (int i = nextKeyIdx; i <= tmpIdx; i++) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
entries_[i].disabled = true;
|
2019-02-05 07:04:47 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
actual._dataEnd = entries_[tmpIdx]._dataEnd;
|
2019-02-05 07:04:47 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
while (interval_.par[actual._dataEnd] == ' ')
|
2019-02-05 07:04:47 +00:00
|
|
|
|
actual._dataEnd++;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(0, actual._dataEnd+1);
|
|
|
|
|
interval_.actualdeptindex = 0;
|
|
|
|
|
interval_.depts[0] = actual._dataEnd+1;
|
|
|
|
|
interval_.closes[0] = -1;
|
2019-02-05 07:04:47 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
2020-12-31 15:53:46 +00:00
|
|
|
|
case KeyInfo::isText:
|
2021-01-02 17:37:14 +00:00
|
|
|
|
interval_.par[actual._tokenstart] = '#';
|
|
|
|
|
//interval_.addIntervall(actual._tokenstart, actual._tokenstart+1);
|
2021-01-01 20:50:36 +00:00
|
|
|
|
nextKeyIdx = getNextKey();
|
2020-12-31 15:53:46 +00:00
|
|
|
|
break;
|
2018-12-14 15:02:33 +00:00
|
|
|
|
case KeyInfo::noContent: { /* char like "\hspace{2cm}" */
|
2019-02-12 13:21:14 +00:00
|
|
|
|
if (actual.disabled)
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._tokenstart, actual._dataEnd);
|
2019-02-12 13:21:14 +00:00
|
|
|
|
else
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._dataStart, actual._dataEnd);
|
2018-12-14 15:02:33 +00:00
|
|
|
|
}
|
|
|
|
|
// fall through
|
2018-10-22 18:19:36 +00:00
|
|
|
|
case KeyInfo::isChar: {
|
|
|
|
|
nextKeyIdx = getNextKey();
|
2018-10-18 15:37:15 +00:00
|
|
|
|
break;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
case KeyInfo::isSize: {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (actual.disabled || (interval_.par[actual._dataStart] != '{') || (interval_.par[actual._dataStart-1] == ' ')) {
|
2021-01-04 06:16:59 +00:00
|
|
|
|
if (actual.parenthesiscount == 0)
|
|
|
|
|
interval_.addIntervall(actual._tokenstart, actual._dataEnd);
|
|
|
|
|
else {
|
|
|
|
|
interval_.addIntervall(actual._tokenstart, actual._dataEnd+1);
|
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
} else {
|
2018-12-18 05:53:58 +00:00
|
|
|
|
// Here _dataStart points to '{', so correct it
|
|
|
|
|
actual._dataStart += 1;
|
|
|
|
|
actual._tokensize += 1;
|
|
|
|
|
actual.parenthesiscount = 1;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (interval_.par[actual._dataStart] == '}') {
|
2018-12-18 05:53:58 +00:00
|
|
|
|
// Determine the end if used like '{\tiny{}...}'
|
2019-03-21 11:53:41 +00:00
|
|
|
|
actual._dataEnd = interval_.findclosing(actual._dataStart+1, interval_.par.length()) + 1;
|
|
|
|
|
interval_.addIntervall(actual._dataStart, actual._dataStart+1);
|
2018-12-18 05:53:58 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
// Determine the end if used like '\tiny{...}'
|
2019-03-21 11:53:41 +00:00
|
|
|
|
actual._dataEnd = interval_.findclosing(actual._dataStart, interval_.par.length()) + 1;
|
2018-12-16 13:50:38 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
// Split on this key if not at start
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int start = interval_.nextNotIgnored(previousStart);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
if (start < actual._tokenstart) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.output(os, actual._tokenstart);
|
|
|
|
|
interval_.addIntervall(start, actual._tokenstart);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
}
|
|
|
|
|
// discard entry if at end of actual
|
|
|
|
|
nextKeyIdx = process(os, actual);
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
2020-12-14 19:43:39 +00:00
|
|
|
|
case KeyInfo::endArguments: {
|
2019-02-17 23:40:55 +00:00
|
|
|
|
// Remove trailing '{}' too
|
2019-02-28 12:00:12 +00:00
|
|
|
|
actual._dataStart += 1;
|
|
|
|
|
actual._dataEnd += 1;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._tokenstart, actual._dataEnd+1);
|
2019-02-05 07:04:47 +00:00
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
break;
|
2020-12-14 19:43:39 +00:00
|
|
|
|
}
|
2018-12-11 16:27:50 +00:00
|
|
|
|
case KeyInfo::noMain:
|
|
|
|
|
// fall through
|
2018-10-22 18:19:36 +00:00
|
|
|
|
case KeyInfo::isStandard: {
|
|
|
|
|
if (actual.disabled) {
|
|
|
|
|
removeHead(actual);
|
|
|
|
|
processRegion(actual._dataStart, actual._dataStart+1);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
nextKeyIdx = getNextKey();
|
2018-10-22 18:19:36 +00:00
|
|
|
|
} else {
|
2019-02-10 17:00:55 +00:00
|
|
|
|
// Split on this key if not at datastart of calling entry
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int start = interval_.nextNotIgnored(previousStart);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
if (start < actual._tokenstart) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.output(os, actual._tokenstart);
|
|
|
|
|
interval_.addIntervall(start, actual._tokenstart);
|
2018-10-20 10:47:37 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
// discard entry if at end of actual
|
|
|
|
|
nextKeyIdx = process(os, actual);
|
2018-10-20 10:47:37 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
2019-02-07 12:35:47 +00:00
|
|
|
|
case KeyInfo::removeWithArg: {
|
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
// Search for end of arguments
|
|
|
|
|
int tmpIdx = find(nextKeyIdx, KeyInfo::endArguments);
|
|
|
|
|
if (tmpIdx > 0) {
|
|
|
|
|
for (int i = nextKeyIdx; i <= tmpIdx; i++) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
entries_[i].disabled = true;
|
2019-02-07 12:35:47 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
actual._dataEnd = entries_[tmpIdx]._dataEnd;
|
2019-02-07 12:35:47 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._tokenstart, actual._dataEnd+1);
|
2019-02-07 12:35:47 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
case KeyInfo::doRemove: {
|
2018-11-07 08:35:16 +00:00
|
|
|
|
// Remove the key with all parameters and following spaces
|
|
|
|
|
size_t pos;
|
2021-01-17 12:30:43 +00:00
|
|
|
|
size_t start;
|
|
|
|
|
if (interval_.par[actual._dataEnd-1] == ' ')
|
|
|
|
|
start = actual._dataEnd;
|
|
|
|
|
else
|
|
|
|
|
start = actual._dataEnd+1;
|
|
|
|
|
for (pos = start; pos < interval_.par.length(); pos++) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if ((interval_.par[pos] != ' ') && (interval_.par[pos] != '%'))
|
2018-11-07 08:35:16 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
2019-12-29 16:40:13 +00:00
|
|
|
|
// Remove also enclosing parentheses [] and {}
|
|
|
|
|
int numpars = 0;
|
|
|
|
|
int spaces = 0;
|
|
|
|
|
while (actual._tokenstart > numpars) {
|
2020-05-26 16:49:50 +00:00
|
|
|
|
if (pos+numpars >= interval_.par.size())
|
|
|
|
|
break;
|
|
|
|
|
else if (interval_.par[pos+numpars] == ']' && interval_.par[actual._tokenstart-numpars-1] == '[')
|
2019-12-29 16:40:13 +00:00
|
|
|
|
numpars++;
|
|
|
|
|
else if (interval_.par[pos+numpars] == '}' && interval_.par[actual._tokenstart-numpars-1] == '{')
|
|
|
|
|
numpars++;
|
|
|
|
|
else
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
if (numpars > 0) {
|
|
|
|
|
if (interval_.par[pos+numpars] == ' ')
|
|
|
|
|
spaces++;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
interval_.addIntervall(actual._tokenstart-numpars, pos+numpars+spaces);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
break;
|
|
|
|
|
}
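// Illustrative note (assumption, not from the original sources): for a key
// registered as doRemove, e.g. "\label{sec:intro}" with "sec:intro" being a
// made-up label, the branch above would put the macro, any enclosing "[]"/"{}"
// pairs and the trailing spaces or '%' characters into the ignore intervals,
// so nothing of it reaches the output.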
|
2018-11-03 10:15:12 +00:00
|
|
|
|
case KeyInfo::isList: {
|
|
|
|
|
// Discard space before _tokenstart
|
|
|
|
|
int count;
|
|
|
|
|
for (count = 0; count < actual._tokenstart; count++) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (interval_.par[actual._tokenstart-count-1] != ' ')
|
2018-11-03 10:15:12 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
2019-02-16 17:39:10 +00:00
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
int tmpIdx = find(nextKeyIdx, KeyInfo::endArguments);
|
|
|
|
|
if (tmpIdx > 0) {
|
2019-02-17 23:40:55 +00:00
|
|
|
|
// Special case: \item is not a list, but a command (like in Style Author_Biography in maa-monthly.layout)
|
|
|
|
|
// with arguments
|
|
|
|
|
// How else can we catch this one?
|
2019-02-16 17:39:10 +00:00
|
|
|
|
for (int i = nextKeyIdx; i <= tmpIdx; i++) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
entries_[i].disabled = true;
|
2019-02-16 17:39:10 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
actual._dataEnd = entries_[tmpIdx]._dataEnd;
|
2019-02-16 17:39:10 +00:00
|
|
|
|
}
|
2019-02-17 23:40:55 +00:00
|
|
|
|
else if (nextKeyIdx > 0) {
|
|
|
|
|
// Ignore any lang entries inside data region
|
2019-03-21 11:53:41 +00:00
|
|
|
|
for (int i = nextKeyIdx; i < int(entries_.size()) && entries_[i]._tokenstart < actual._dataEnd; i++) {
|
|
|
|
|
if (entries_[i].keytype == KeyInfo::isMain)
|
|
|
|
|
entries_[i].disabled = true;
|
2019-02-17 23:40:55 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2018-11-03 10:15:12 +00:00
|
|
|
|
if (actual.disabled) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._tokenstart-count, actual._dataEnd+1);
|
2018-11-03 10:15:12 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._tokenstart-count, actual._tokenstart);
|
2018-11-03 10:15:12 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (interval_.par[actual._dataEnd+1] == '[') {
|
|
|
|
|
int posdown = interval_.findclosing(actual._dataEnd+2, interval_.par.length(), '[', ']');
|
|
|
|
|
if ((interval_.par[actual._dataEnd+2] == '{') &&
|
|
|
|
|
(interval_.par[posdown-1] == '}')) {
|
|
|
|
|
interval_.addIntervall(actual._dataEnd+1,actual._dataEnd+3);
|
|
|
|
|
interval_.addIntervall(posdown-1, posdown+1);
|
2018-11-05 11:58:45 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._dataEnd+1, actual._dataEnd+2);
|
|
|
|
|
interval_.addIntervall(posdown, posdown+1);
|
2018-11-05 11:58:45 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int blk = interval_.nextNotIgnored(actual._dataEnd+1);
|
2018-11-04 20:41:04 +00:00
|
|
|
|
if (blk > posdown) {
|
2018-11-06 14:28:43 +00:00
|
|
|
|
// Discard at most 1 space after empty item
|
2018-11-04 20:41:04 +00:00
|
|
|
|
int count;
|
2018-11-06 14:28:43 +00:00
|
|
|
|
for (count = 0; count < 1; count++) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (interval_.par[blk+count] != ' ')
|
2018-11-04 20:41:04 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
if (count > 0)
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(blk, blk+count);
|
2018-11-04 20:41:04 +00:00
|
|
|
|
}
|
2018-11-03 10:15:12 +00:00
|
|
|
|
}
|
|
|
|
|
break;
|
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
case KeyInfo::isSectioning: {
|
2018-11-08 08:59:51 +00:00
|
|
|
|
// Discard spaces before _tokenstart
|
2018-10-22 18:19:36 +00:00
|
|
|
|
int count;
|
2019-02-07 12:35:47 +00:00
|
|
|
|
int val = actual._tokenstart;
|
|
|
|
|
for (count = 0; count < actual._tokenstart;) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
val = interval_.previousNotIgnored(val-1);
|
2020-05-26 13:58:23 +00:00
|
|
|
|
if (val < 0 || interval_.par[val] != ' ')
|
2018-10-22 18:19:36 +00:00
|
|
|
|
break;
|
2019-02-07 12:35:47 +00:00
|
|
|
|
else {
|
|
|
|
|
count = actual._tokenstart - val;
|
|
|
|
|
}
|
2018-10-19 17:11:20 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
if (actual.disabled) {
|
|
|
|
|
removeHead(actual, count);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
nextKeyIdx = getNextKey();
|
2018-10-22 18:19:36 +00:00
|
|
|
|
} else {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._tokenstart-count, actual._tokenstart);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
nextKeyIdx = process(os, actual);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
case KeyInfo::isMath: {
|
|
|
|
|
// Same as regex, use the content unchanged
|
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
case KeyInfo::isRegex: {
|
|
|
|
|
// DO NOT SPLIT ON REGEX
|
|
|
|
|
// Do not disable
|
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
case KeyInfo::isIgnored: {
|
|
|
|
|
// Treat like a character for now
|
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
case KeyInfo::isMain: {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if (interval_.par.substr(actual._dataStart, 2) == "% ")
|
|
|
|
|
interval_.addIntervall(actual._dataStart, actual._dataStart+2);
|
2019-02-21 19:32:08 +00:00
|
|
|
|
if (actual._tokenstart > 0) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int prev = interval_.previousNotIgnored(actual._tokenstart - 1);
|
|
|
|
|
if ((prev >= 0) && interval_.par[prev] == '%')
|
|
|
|
|
interval_.addIntervall(prev, prev+1);
|
2019-02-21 19:32:08 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
if (actual.disabled) {
|
|
|
|
|
removeHead(actual);
|
2019-12-29 16:40:13 +00:00
|
|
|
|
interval_.langcount++;
|
2019-03-21 11:53:41 +00:00
|
|
|
|
if ((interval_.par.substr(actual._dataStart, 3) == " \\[") ||
|
|
|
|
|
(interval_.par.substr(actual._dataStart, 8) == " \\begin{")) {
|
2018-10-28 18:40:14 +00:00
|
|
|
|
// Also discard the space before the math equation
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._dataStart, actual._dataStart+1);
|
2018-10-28 18:40:14 +00:00
|
|
|
|
}
|
2019-02-07 12:35:47 +00:00
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
// interval.resetOpenedP(actual._dataStart-1);
|
2018-10-20 10:47:37 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
else {
|
2019-02-21 13:45:41 +00:00
|
|
|
|
if (actual._tokenstart < 26) {
|
2018-10-22 18:19:36 +00:00
|
|
|
|
// for the first (and maybe dummy) language
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.setForDefaultLang(actual);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.resetOpenedP(actual._dataStart-1);
|
2018-10-19 17:11:20 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
case KeyInfo::invalid:
|
2020-12-14 19:43:39 +00:00
|
|
|
|
case KeyInfo::headRemove:
|
|
|
|
|
// These two cases cannot happen, already handled
|
2018-10-22 18:19:36 +00:00
|
|
|
|
// fall through
|
|
|
|
|
default: {
|
2020-05-29 06:44:56 +00:00
|
|
|
|
// LYXERR(Debug::INFO, "Unhandled keytype");
|
2018-10-22 18:19:36 +00:00
|
|
|
|
nextKeyIdx = getNextKey();
|
|
|
|
|
break;
|
|
|
|
|
}
|
2018-11-04 13:54:06 +00:00
|
|
|
|
}
|
2018-11-04 20:41:04 +00:00
|
|
|
|
return nextKeyIdx;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
}
|
|
|
|
|
|
2020-10-09 15:50:24 +00:00
|
|
|
|
int LatexInfo::process(ostringstream & os, KeyInfo const & actual )
|
2018-10-22 18:19:36 +00:00
|
|
|
|
{
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int end = interval_.nextNotIgnored(actual._dataEnd);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
int oldStart = actual._dataStart;
|
|
|
|
|
int nextKeyIdx = getNextKey();
|
|
|
|
|
while (true) {
|
|
|
|
|
if ((nextKeyIdx < 0) ||
|
2019-03-21 11:53:41 +00:00
|
|
|
|
(entries_[nextKeyIdx]._tokenstart >= actual._dataEnd) ||
|
|
|
|
|
(entries_[nextKeyIdx].keytype == KeyInfo::invalid)) {
|
2018-10-22 18:19:36 +00:00
|
|
|
|
if (oldStart <= end) {
|
|
|
|
|
processRegion(oldStart, end);
|
|
|
|
|
oldStart = end+1;
|
2018-10-19 17:11:20 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
break;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
KeyInfo &nextKey = getKeyInfo(nextKeyIdx);
|
|
|
|
|
|
2019-02-07 12:35:47 +00:00
|
|
|
|
if ((nextKey.keytype == KeyInfo::isMain) && !nextKey.disabled) {
|
2018-10-22 18:19:36 +00:00
|
|
|
|
(void) dispatch(os, actual._dataStart, nextKey);
|
|
|
|
|
end = nextKey._tokenstart;
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
processRegion(oldStart, nextKey._tokenstart);
|
|
|
|
|
nextKeyIdx = dispatch(os, actual._dataStart, nextKey);
|
|
|
|
|
|
|
|
|
|
oldStart = nextKey._dataEnd+1;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
// now nextKey is either invalid or is outside of actual._dataEnd
|
2018-10-22 18:19:36 +00:00
|
|
|
|
// output the remaining and discard myself
|
|
|
|
|
if (oldStart <= end) {
|
|
|
|
|
processRegion(oldStart, end);
|
|
|
|
|
}
|
2020-05-26 16:49:50 +00:00
|
|
|
|
if (interval_.par.size() > (size_t) end && interval_.par[end] == '}') {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
end += 1;
|
|
|
|
|
// This is the normal case.
|
|
|
|
|
// But if using the firstlanguage, the closing may be missing
|
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
// get minimum of 'end' and 'actual._dataEnd' in case that the nextKey.keytype was 'KeyInfo::isMain'
|
|
|
|
|
int output_end;
|
|
|
|
|
if (actual._dataEnd < end)
|
2019-03-21 11:53:41 +00:00
|
|
|
|
output_end = interval_.nextNotIgnored(actual._dataEnd);
|
2020-12-31 15:53:46 +00:00
|
|
|
|
else if (interval_.par.size() > (size_t) end)
|
2019-03-21 11:53:41 +00:00
|
|
|
|
output_end = interval_.nextNotIgnored(end);
|
2020-12-31 15:53:46 +00:00
|
|
|
|
else
|
|
|
|
|
output_end = interval_.par.size();
|
FindAdv: Added lyx-function search-ignore
Enable/disable ignoring the specified type
language: e.g. british, slovak, latin, ...
color: blue, red, ...
sectioning: part, chapter, ..
font:
series: bold, ...
shape: upright, italic, slanted
family: serif, monospace ...
markup: emphasize, noun
underline:
strike:
Examples:
search-ignore language true
search-ignore shape true
2018-11-15 13:20:50 +00:00
|
|
|
|
if ((actual.keytype == KeyInfo::isMain) && actual.disabled) {
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.addIntervall(actual._tokenstart, actual._tokenstart+actual._tokensize);
|
2018-11-15 13:20:50 +00:00
|
|
|
|
}
|
2018-12-16 13:50:38 +00:00
|
|
|
|
// Remove possible empty data
|
2019-03-21 11:53:41 +00:00
|
|
|
|
int dstart = interval_.nextNotIgnored(actual._dataStart);
|
|
|
|
|
while (interval_.isOpeningPar(dstart) == 1) {
|
|
|
|
|
interval_.addIntervall(dstart, dstart+1);
|
|
|
|
|
int dend = interval_.findclosing(dstart+1, output_end);
|
|
|
|
|
interval_.addIntervall(dend, dend+1);
|
|
|
|
|
dstart = interval_.nextNotIgnored(dstart+1);
|
2018-12-16 13:50:38 +00:00
|
|
|
|
}
|
|
|
|
|
if (dstart < output_end)
|
2019-03-21 11:53:41 +00:00
|
|
|
|
interval_.output(os, output_end);
|
2021-02-11 13:03:46 +00:00
|
|
|
|
if (nextKeyIdx < 0)
|
|
|
|
|
interval_.addIntervall(0, end);
|
|
|
|
|
else
|
|
|
|
|
interval_.addIntervall(actual._tokenstart, end);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
return nextKeyIdx;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-02-10 17:00:55 +00:00
|
|
|
|
string splitOnKnownMacros(string par, bool isPatternString)
|
|
|
|
|
{
|
2018-10-12 14:47:07 +00:00
|
|
|
|
ostringstream os;
|
2018-10-27 14:57:42 +00:00
|
|
|
|
LatexInfo li(par, isPatternString);
|
2020-05-29 06:44:56 +00:00
|
|
|
|
// LYXERR(Debug::INFO, "Before split: " << par);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
KeyInfo DummyKey = KeyInfo(KeyInfo::KeyType::isMain, 2, true);
|
2018-10-23 17:59:08 +00:00
|
|
|
|
DummyKey.head = "";
|
2018-10-22 18:19:36 +00:00
|
|
|
|
DummyKey._tokensize = 0;
|
|
|
|
|
DummyKey._dataStart = 0;
|
|
|
|
|
DummyKey._dataEnd = par.length();
|
|
|
|
|
DummyKey.disabled = true;
|
2018-10-18 15:37:15 +00:00
|
|
|
|
int firstkeyIdx = li.getFirstKey();
|
|
|
|
|
string s;
|
|
|
|
|
if (firstkeyIdx >= 0) {
|
2018-10-22 18:19:36 +00:00
|
|
|
|
KeyInfo firstKey = li.getKeyInfo(firstkeyIdx);
|
2019-02-19 22:11:09 +00:00
|
|
|
|
DummyKey._tokenstart = firstKey._tokenstart;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
int nextkeyIdx;
|
|
|
|
|
if ((firstKey.keytype != KeyInfo::isMain) || firstKey.disabled) {
|
2018-10-30 19:52:29 +00:00
|
|
|
|
// Use dummy firstKey
|
2018-10-22 18:19:36 +00:00
|
|
|
|
firstKey = DummyKey;
|
|
|
|
|
(void) li.setNextKey(firstkeyIdx);
|
|
|
|
|
}
|
2018-11-16 11:12:06 +00:00
|
|
|
|
else {
|
|
|
|
|
if (par.substr(firstKey._dataStart, 2) == "% ")
|
|
|
|
|
li.addIntervall(firstKey._dataStart, firstKey._dataStart+2);
|
|
|
|
|
}
|
2018-10-22 18:19:36 +00:00
|
|
|
|
nextkeyIdx = li.process(os, firstKey);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
while (nextkeyIdx >= 0) {
|
|
|
|
|
// Check for a possible gap between the last
|
|
|
|
|
// entry and this one
|
|
|
|
|
int datastart = li.nextNotIgnored(firstKey._dataStart);
|
|
|
|
|
KeyInfo &nextKey = li.getKeyInfo(nextkeyIdx);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
if ((nextKey._tokenstart > datastart)) {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
// Handle the gap
|
|
|
|
|
firstKey._dataStart = datastart;
|
2018-10-22 18:19:36 +00:00
|
|
|
|
firstKey._dataEnd = par.length();
|
2018-10-18 15:37:15 +00:00
|
|
|
|
(void) li.setNextKey(nextkeyIdx);
|
2019-02-21 13:45:41 +00:00
|
|
|
|
// Fake the last opened parenthesis
|
|
|
|
|
li.setForDefaultLang(firstKey);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
nextkeyIdx = li.process(os, firstKey);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
if (nextKey.keytype != KeyInfo::isMain) {
|
|
|
|
|
firstKey._dataStart = datastart;
|
|
|
|
|
firstKey._dataEnd = nextKey._dataEnd+1;
|
|
|
|
|
(void) li.setNextKey(nextkeyIdx);
|
2019-02-21 13:45:41 +00:00
|
|
|
|
li.setForDefaultLang(firstKey);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
nextkeyIdx = li.process(os, firstKey);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2018-10-22 18:19:36 +00:00
|
|
|
|
nextkeyIdx = li.process(os, nextKey);
|
2018-10-18 15:37:15 +00:00
|
|
|
|
}
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
// Handle the remaining
|
|
|
|
|
firstKey._dataStart = li.nextNotIgnored(firstKey._dataStart);
|
|
|
|
|
firstKey._dataEnd = par.length();
|
2018-11-05 11:58:45 +00:00
|
|
|
|
// Check if ! empty
|
|
|
|
|
if ((firstKey._dataStart < firstKey._dataEnd) &&
|
|
|
|
|
(par[firstKey._dataStart] != '}')) {
|
2019-02-21 13:45:41 +00:00
|
|
|
|
li.setForDefaultLang(firstKey);
|
2018-10-22 18:19:36 +00:00
|
|
|
|
(void) li.process(os, firstKey);
|
2018-11-04 20:41:04 +00:00
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
s = os.str();
|
2018-11-08 08:59:51 +00:00
|
|
|
|
if (s.empty()) {
|
|
|
|
|
// return a string that is definitely impossible to match
|
|
|
|
|
s = "\\foreignlanguage{ignore}{ }";
|
|
|
|
|
}
|
2018-10-12 14:47:07 +00:00
|
|
|
|
}
|
2018-10-18 15:37:15 +00:00
|
|
|
|
else
|
2018-10-28 18:40:14 +00:00
|
|
|
|
s = par; /* no known macros found */
|
2020-05-29 06:44:56 +00:00
|
|
|
|
// LYXERR(Debug::INFO, "After split: " << s);
|
2018-10-12 14:47:07 +00:00
|
|
|
|
return s;
|
|
|
|
|
}
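// Note grounded in the code above: if none of the known macros occur in 'par'
// the input is returned unchanged, and if splitting produced only an empty
// string the deliberately unmatchable "\foreignlanguage{ignore}{ }" is
// returned instead, so the caller never matches against "".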
|
|
|
|
|
|
2018-10-06 21:50:50 +00:00
|
|
|
|
/*
|
|
|
|
|
* Try to unify the language specs in the latexified text.
|
|
|
|
|
* Resulting modified string is set to "", if
|
|
|
|
|
* the searched tex does not contain all the features in the search pattern
|
|
|
|
|
*/
|
2021-01-25 09:52:14 +00:00
|
|
|
|
static string correctlanguagesetting(string par, bool isPatternString, bool withformat, lyx::Buffer *pbuf = nullptr)
|
2018-10-05 18:26:44 +00:00
|
|
|
|
{
|
|
|
|
|
static Features regex_f;
|
|
|
|
|
static int missed = 0;
|
|
|
|
|
static bool regex_with_format = false;
|
|
|
|
|
|
|
|
|
|
int parlen = par.length();
|
|
|
|
|
|
|
|
|
|
while ((parlen > 0) && (par[parlen-1] == '\n')) {
|
|
|
|
|
parlen--;
|
|
|
|
|
}
|
2019-02-17 23:40:55 +00:00
|
|
|
|
if (isPatternString && (parlen > 0) && (par[parlen-1] == '~')) {
|
|
|
|
|
// Happens to be there in case of description or labeling environment
|
|
|
|
|
parlen--;
|
|
|
|
|
}
|
2018-10-15 06:09:19 +00:00
|
|
|
|
string result;
|
|
|
|
|
if (withformat) {
|
|
|
|
|
// Split the latex input into pieces which
|
|
|
|
|
// can be digested by our search engine
|
2018-10-19 17:11:20 +00:00
|
|
|
|
LYXERR(Debug::FIND, "input: \"" << par << "\"");
|
2021-01-25 09:52:14 +00:00
|
|
|
|
if (isPatternString && (pbuf != nullptr)) { // Check if we should disable/enable test for language
|
|
|
|
|
// We check for polyglossia, because in runparams.flavor we use Flavor::XeTeX
|
|
|
|
|
string doclang = pbuf->params().language->polyglossia();
|
|
|
|
|
static regex langre("\\\\(foreignlanguage)\\{([^\\}]+)\\}");
|
|
|
|
|
smatch sub;
|
|
|
|
|
bool toIgnoreLang = true;
|
|
|
|
|
for (sregex_iterator it(par.begin(), par.end(), langre), end; it != end; ++it) {
|
|
|
|
|
sub = *it;
|
|
|
|
|
if (sub.str(2) != doclang) {
|
|
|
|
|
toIgnoreLang = false;
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
2021-02-13 15:19:45 +00:00
|
|
|
|
setIgnoreFormat("language", toIgnoreLang, false);
|
2021-01-25 09:52:14 +00:00
|
|
|
|
|
|
|
|
|
}
|
2019-02-17 23:40:55 +00:00
|
|
|
|
result = splitOnKnownMacros(par.substr(0,parlen), isPatternString);
|
2021-01-19 16:52:36 +00:00
|
|
|
|
LYXERR(Debug::FIND, "After splitOnKnownMacros:\n\"" << result << "\"");
|
2018-10-15 06:09:19 +00:00
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
result = par.substr(0, parlen);
|
2018-10-27 14:57:42 +00:00
|
|
|
|
if (isPatternString) {
|
2018-10-05 18:26:44 +00:00
|
|
|
|
missed = 0;
|
|
|
|
|
if (withformat) {
|
2018-10-12 14:47:07 +00:00
|
|
|
|
regex_f = identifyFeatures(result);
|
2019-03-04 13:05:44 +00:00
|
|
|
|
string features = "";
|
2018-10-05 18:26:44 +00:00
|
|
|
|
for (auto it = regex_f.cbegin(); it != regex_f.cend(); ++it) {
|
|
|
|
|
string a = it->first;
|
|
|
|
|
regex_with_format = true;
|
2019-03-04 13:05:44 +00:00
|
|
|
|
features += " " + a;
|
2020-05-29 06:44:56 +00:00
|
|
|
|
// LYXERR(Debug::INFO, "Identified regex format:" << a);
|
2018-10-05 18:26:44 +00:00
|
|
|
|
}
|
2019-03-04 13:05:44 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Identified Features" << features);
|
2018-10-05 18:26:44 +00:00
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
} else if (regex_with_format) {
|
2018-10-12 14:47:07 +00:00
|
|
|
|
Features info = identifyFeatures(result);
|
2018-10-05 18:26:44 +00:00
|
|
|
|
for (auto it = regex_f.cbegin(); it != regex_f.cend(); ++it) {
|
|
|
|
|
string a = it->first;
|
|
|
|
|
bool b = it->second;
|
|
|
|
|
if (b && ! info[a]) {
|
|
|
|
|
missed++;
|
2018-10-12 14:47:07 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Missed(" << missed << " " << a <<", srclen = " << parlen );
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return "";
|
2018-10-05 18:26:44 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2021-01-18 00:10:07 +00:00
|
|
|
|
|
2018-10-05 18:26:44 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
2020-05-29 06:44:56 +00:00
|
|
|
|
// LYXERR(Debug::INFO, "No regex formats");
|
2018-10-05 18:26:44 +00:00
|
|
|
|
}
|
2019-03-21 11:53:41 +00:00
|
|
|
|
return result;
|
2018-10-05 18:26:44 +00:00
|
|
|
|
}
|
|
|
|
|
|
2011-02-07 20:36:40 +00:00
|
|
|
|
|
2011-06-19 16:41:23 +00:00
|
|
|
|
// Remove trailing closure of math, macros and environments, so as to catch parts of them.
|
2012-10-27 13:45:27 +00:00
|
|
|
|
static int identifyClosing(string & t)
|
|
|
|
|
{
|
2011-06-19 16:41:23 +00:00
|
|
|
|
int open_braces = 0;
|
|
|
|
|
do {
|
|
|
|
|
LYXERR(Debug::FIND, "identifyClosing(): t now is '" << t << "'");
|
2020-11-28 23:05:24 +00:00
|
|
|
|
if (regex_replace(t, t, "(.*[^\\\\])\\$$", "$1"))
|
2011-06-19 16:41:23 +00:00
|
|
|
|
continue;
|
2020-11-28 23:05:24 +00:00
|
|
|
|
if (regex_replace(t, t, "(.*[^\\\\])\\\\\\]$", "$1"))
|
2011-06-19 16:41:23 +00:00
|
|
|
|
continue;
|
2020-11-28 23:05:24 +00:00
|
|
|
|
if (regex_replace(t, t, "(.*[^\\\\])\\\\end\\{[a-zA-Z_]*\\*?\\}$", "$1"))
|
2011-06-19 16:41:23 +00:00
|
|
|
|
continue;
|
2020-11-28 23:05:24 +00:00
|
|
|
|
if (regex_replace(t, t, "(.*[^\\\\])\\}$", "$1")) {
|
2011-06-19 16:41:23 +00:00
|
|
|
|
++open_braces;
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
break;
|
|
|
|
|
} while (true);
|
|
|
|
|
return open_braces;
|
|
|
|
|
}
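// Illustrative example (not from the original sources): applied to "abc}" the
// loop above strips the unescaped closing brace, leaving t == "abc" and
// returning 1, while a string without a trailing '$', "\]", "\end{...}" or
// '}' is returned unchanged with a count of 0.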
|
|
|
|
|
|
2019-03-13 13:06:18 +00:00
|
|
|
|
static int num_replaced = 0;
|
2019-03-16 07:17:09 +00:00
|
|
|
|
static bool previous_single_replace = true;
|
2011-06-19 16:41:23 +00:00
|
|
|
|
|
2021-01-10 16:17:37 +00:00
|
|
|
|
void MatchStringAdv::CreateRegexp(FindAndReplaceOptions const & opt, string regexp_str, string regexp2_str, string par_as_string)
|
|
|
|
|
{
|
|
|
|
|
#if QTSEARCH
|
|
|
|
|
// Handle \w properly
|
|
|
|
|
QRegularExpression::PatternOptions popts = QRegularExpression::UseUnicodePropertiesOption | QRegularExpression::MultilineOption;
|
|
|
|
|
if (! opt.casesensitive) {
|
|
|
|
|
popts |= QRegularExpression::CaseInsensitiveOption;
|
|
|
|
|
}
|
|
|
|
|
regexp = QRegularExpression(QString::fromStdString(regexp_str), popts);
|
|
|
|
|
regexp2 = QRegularExpression(QString::fromStdString(regexp2_str), popts);
|
|
|
|
|
regexError = "";
|
|
|
|
|
if (regexp.isValid() && regexp2.isValid()) {
|
|
|
|
|
regexIsValid = true;
|
|
|
|
|
// Check '{', '}' pairs inside the regex
|
|
|
|
|
int balanced = 0;
|
|
|
|
|
int skip = 1;
|
|
|
|
|
for (unsigned i = 0; i < par_as_string.size(); i+= skip) {
|
|
|
|
|
char c = par_as_string[i];
|
|
|
|
|
if (c == '\\') {
|
|
|
|
|
skip = 2;
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
if (c == '{')
|
|
|
|
|
balanced++;
|
|
|
|
|
else if (c == '}') {
|
|
|
|
|
balanced--;
|
|
|
|
|
if (balanced < 0)
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
skip = 1;
|
|
|
|
|
}
|
|
|
|
|
if (balanced != 0) {
|
|
|
|
|
regexIsValid = false;
|
|
|
|
|
regexError = "Unbalanced curly brackets in regexp \"" + regexp_str + "\"";
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
regexIsValid = false;
|
|
|
|
|
if (!regexp.isValid())
|
|
|
|
|
regexError += "Invalid regexp \"" + regexp_str + "\", error = " + regexp.errorString().toStdString();
|
|
|
|
|
else
|
|
|
|
|
regexError += "Invalid regexp2 \"" + regexp2_str + "\", error = " + regexp2.errorString().toStdString();
|
|
|
|
|
}
|
|
|
|
|
#else
|
2021-03-05 10:54:11 +00:00
|
|
|
|
(void)par_as_string;
|
2021-01-10 16:17:37 +00:00
|
|
|
|
if (opt.casesensitive) {
|
|
|
|
|
regexp = regex(regexp_str);
|
|
|
|
|
regexp2 = regex(regexp2_str);
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
regexp = regex(regexp_str, std::regex_constants::icase);
|
|
|
|
|
regexp2 = regex(regexp2_str, std::regex_constants::icase);
|
|
|
|
|
}
|
|
|
|
|
#endif
|
|
|
|
|
}
|
|
|
|
|
|
2021-01-10 21:58:58 +00:00
|
|
|
|
static void modifyRegexForMatchWord(string &t)
|
|
|
|
|
{
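// Sketch of the intended transformation (my reading of the loop below):
//   "gre."   ->  "\bgre\S\b"   (an unescaped '.' is widened to '\S')
//   "gre\."  ->  "\bgre\.\b"   (an escaped '.' is left untouched)
// and if the string already contains a '\b' word boundary, it is intended to be
// returned unchanged.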
|
|
|
|
|
string s("");
|
|
|
|
|
regex wordre("(\\\\)*((\\.|\\\\b))");
|
|
|
|
|
size_t lastpos = 0;
|
|
|
|
|
smatch sub;
|
|
|
|
|
for (sregex_iterator it(t.begin(), t.end(), wordre), end; it != end; ++it) {
|
|
|
|
|
sub = *it;
|
|
|
|
|
if ((sub.position(2) - sub.position(0)) % 2 == 1) {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
else if (sub.str(2) == "\\\\b")
|
|
|
|
|
return;
|
|
|
|
|
if (lastpos < (size_t) sub.position(2))
|
|
|
|
|
s += t.substr(lastpos, sub.position(2) - lastpos);
|
|
|
|
|
s += "\\S";
|
|
|
|
|
lastpos = sub.position(2) + sub.length(2);
|
|
|
|
|
}
|
2021-01-17 12:30:43 +00:00
|
|
|
|
if (lastpos == 0) {
|
|
|
|
|
s = "\\b" + t + "\\b";
|
|
|
|
|
t = s;
|
2021-01-10 21:58:58 +00:00
|
|
|
|
return;
|
2021-01-17 12:30:43 +00:00
|
|
|
|
}
|
2021-01-10 21:58:58 +00:00
|
|
|
|
else if (lastpos < t.length())
|
|
|
|
|
s += t.substr(lastpos, t.length() - lastpos);
|
|
|
|
|
t = "\\b" + s + "\\b";
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
MatchStringAdv::MatchStringAdv(lyx::Buffer & buf, FindAndReplaceOptions & opt)
|
2009-12-30 18:40:18 +00:00
|
|
|
|
: p_buf(&buf), p_first_buf(&buf), opt(opt)
|
2008-11-15 23:30:27 +00:00
|
|
|
|
{
|
2011-02-07 20:36:40 +00:00
|
|
|
|
Buffer & find_buf = *theBufferList().getBuffer(FileName(to_utf8(opt.find_buf_name)), true);
|
2011-08-25 19:16:14 +00:00
|
|
|
|
docstring const & ds = stringifySearchBuffer(find_buf, opt);
|
|
|
|
|
use_regexp = lyx::to_utf8(ds).find("\\regexp{") != std::string::npos;
|
2019-03-16 07:17:09 +00:00
|
|
|
|
if (opt.replace_all && previous_single_replace) {
|
|
|
|
|
previous_single_replace = false;
|
|
|
|
|
num_replaced = 0;
|
|
|
|
|
}
|
|
|
|
|
else if (!opt.replace_all) {
|
2019-03-13 13:06:18 +00:00
|
|
|
|
num_replaced = 0; // count number of replaced strings
|
2019-03-16 07:17:09 +00:00
|
|
|
|
previous_single_replace = true;
|
|
|
|
|
}
|
2011-08-25 19:16:14 +00:00
|
|
|
|
// When using regexp, braces are hacked already by escape_for_regex()
|
2021-01-10 16:17:37 +00:00
|
|
|
|
par_as_string = normalize(ds);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
open_braces = 0;
|
|
|
|
|
close_wildcards = 0;
|
|
|
|
|
|
2011-06-19 16:41:23 +00:00
|
|
|
|
size_t lead_size = 0;
|
2018-10-05 18:26:44 +00:00
|
|
|
|
// correct the language settings
|
2021-01-25 09:52:14 +00:00
|
|
|
|
par_as_string = correctlanguagesetting(par_as_string, true, !opt.ignoreformat, &buf);
|
|
|
|
|
opt.matchAtStart = false;
|
2021-01-10 16:17:37 +00:00
|
|
|
|
if (!use_regexp) {
|
|
|
|
|
identifyClosing(par_as_string); // Removes math closings ($, ], ...) at end of string
|
|
|
|
|
if (opt.ignoreformat) {
|
|
|
|
|
lead_size = 0;
|
2015-05-17 15:27:12 +00:00
|
|
|
|
}
|
2021-01-10 16:17:37 +00:00
|
|
|
|
else {
|
|
|
|
|
lead_size = identifyLeading(par_as_string);
|
|
|
|
|
}
|
|
|
|
|
lead_as_string = par_as_string.substr(0, lead_size);
|
2021-01-12 18:33:29 +00:00
|
|
|
|
string lead_as_regex_string = string2regex(lead_as_string);
|
2021-01-10 16:17:37 +00:00
|
|
|
|
par_as_string_nolead = par_as_string.substr(lead_size, par_as_string.size() - lead_size);
|
2021-01-12 18:33:29 +00:00
|
|
|
|
string par_as_regex_string_nolead = string2regex(par_as_string_nolead);
|
2021-01-10 21:58:58 +00:00
|
|
|
|
/* Handle whole words too in this case
|
|
|
|
|
*/
|
|
|
|
|
if (opt.matchword) {
|
|
|
|
|
par_as_regex_string_nolead = "\\b" + par_as_regex_string_nolead + "\\b";
|
|
|
|
|
opt.matchword = false;
|
|
|
|
|
}
|
2021-01-10 16:17:37 +00:00
|
|
|
|
string regexp_str = "(" + lead_as_regex_string + ")()" + par_as_regex_string_nolead;
|
|
|
|
|
string regexp2_str = "(" + lead_as_regex_string + ")(.*?)" + par_as_regex_string_nolead;
|
|
|
|
|
CreateRegexp(opt, regexp_str, regexp2_str);
|
|
|
|
|
use_regexp = true;
|
2021-01-12 18:33:29 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Setting regexp to : '" << regexp_str << "'");
|
|
|
|
|
LYXERR(Debug::FIND, "Setting regexp2 to: '" << regexp2_str << "'");
|
2021-01-10 16:17:37 +00:00
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (!opt.ignoreformat) {
|
2011-06-19 16:41:23 +00:00
|
|
|
|
lead_size = identifyLeading(par_as_string);
|
2018-10-05 18:26:44 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Lead_size: " << lead_size);
|
2011-06-19 16:41:23 +00:00
|
|
|
|
lead_as_string = par_as_string.substr(0, lead_size);
|
|
|
|
|
par_as_string_nolead = par_as_string.substr(lead_size, par_as_string.size() - lead_size);
|
|
|
|
|
}
|
2011-02-07 20:36:40 +00:00
|
|
|
|
|
2021-01-10 16:17:37 +00:00
|
|
|
|
// Here we are using regexp
|
|
|
|
|
LASSERT(use_regexp, /**/);
|
|
|
|
|
{
|
2011-02-07 20:36:40 +00:00
|
|
|
|
string lead_as_regexp;
|
|
|
|
|
if (lead_size > 0) {
|
2021-01-12 18:33:29 +00:00
|
|
|
|
lead_as_regexp = string2regex(par_as_string.substr(0, lead_size));
|
2021-03-05 12:12:58 +00:00
|
|
|
|
(void)regex_replace(par_as_string_nolead, par_as_string_nolead, "}$", "");
|
2011-06-19 16:41:23 +00:00
|
|
|
|
par_as_string = par_as_string_nolead;
|
2011-02-07 20:36:40 +00:00
|
|
|
|
LYXERR(Debug::FIND, "lead_as_regexp is '" << lead_as_regexp << "'");
|
|
|
|
|
LYXERR(Debug::FIND, "par_as_string now is '" << par_as_string << "'");
|
|
|
|
|
}
|
2021-01-19 16:52:36 +00:00
|
|
|
|
// LYXERR(Debug::FIND, "par_as_string before escape_for_regex() is '" << par_as_string << "'");
|
2021-01-18 11:17:57 +00:00
|
|
|
|
par_as_string = escape_for_regex(par_as_string, !opt.ignoreformat);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
// Insert (.*?) before trailing closure of math, macros and environments, so as to catch parts of them.
|
2021-01-19 16:52:36 +00:00
|
|
|
|
// LYXERR(Debug::FIND, "par_as_string now is '" << par_as_string << "'");
|
2021-01-12 18:33:29 +00:00
|
|
|
|
++close_wildcards;
|
2021-01-14 13:44:21 +00:00
|
|
|
|
size_t lng = par_as_string.size();
|
2018-09-29 07:23:30 +00:00
|
|
|
|
if (!opt.ignoreformat) {
|
2019-02-23 12:11:34 +00:00
|
|
|
|
// Remove extra '\}' at end if not part of \{\.\}
|
|
|
|
|
while(lng > 2) {
|
|
|
|
|
if (par_as_string.substr(lng-2, 2).compare("\\}") == 0) {
|
|
|
|
|
if (lng >= 6) {
|
|
|
|
|
if (par_as_string.substr(lng-6,3).compare("\\{\\") == 0)
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
lng -= 2;
|
|
|
|
|
open_braces++;
|
|
|
|
|
}
|
2020-12-30 12:00:03 +00:00
|
|
|
|
else
|
2019-02-23 12:11:34 +00:00
|
|
|
|
break;
|
2020-12-30 12:00:03 +00:00
|
|
|
|
}
|
2019-02-23 12:11:34 +00:00
|
|
|
|
if (lng < par_as_string.size())
|
|
|
|
|
par_as_string = par_as_string.substr(0,lng);
|
2021-01-14 13:44:21 +00:00
|
|
|
|
}
|
2021-01-19 16:52:36 +00:00
|
|
|
|
LYXERR(Debug::FIND, "par_as_string after correctRegex is '" << par_as_string << "'");
|
2021-01-14 13:44:21 +00:00
|
|
|
|
if ((lng > 0) && (par_as_string[0] == '^')) {
|
|
|
|
|
par_as_string = par_as_string.substr(1);
|
|
|
|
|
--lng;
|
2021-01-25 09:52:14 +00:00
|
|
|
|
opt.matchAtStart = true;
|
2018-09-29 07:23:30 +00:00
|
|
|
|
}
|
2021-01-19 16:52:36 +00:00
|
|
|
|
// LYXERR(Debug::FIND, "par_as_string now is '" << par_as_string << "'");
|
|
|
|
|
// LYXERR(Debug::FIND, "Open braces: " << open_braces);
|
|
|
|
|
// LYXERR(Debug::FIND, "Replaced text (to be used as regex): " << par_as_string);
|
2017-04-16 21:10:17 +00:00
|
|
|
|
|
2009-08-17 07:06:01 +00:00
|
|
|
|
// If entered regexp must match at begin of searched string buffer
|
2018-09-30 14:08:47 +00:00
|
|
|
|
// Kornel: Added parentheses to use $1 for size of the leading string
|
2018-09-30 16:37:55 +00:00
|
|
|
|
string regexp_str;
|
|
|
|
|
string regexp2_str;
|
|
|
|
|
{
|
|
|
|
|
// TODO: Adapt '\[12345678]' in par_as_string to account for the first '()'
|
|
|
|
|
// Unfortunately '\1', '\2', etc. do not work for strings with extra format,
|
|
|
|
|
// so the conversion has no effect in that case
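// (Sketch of the intent: the regexps assembled below prepend two capture groups,
//  "(" + lead_as_regexp + ")" and "()" / "(.*?)", so a user back-reference \1 has
//  to be shifted to \3, \2 to \4, and so on -- hence the i+2 in the loop below.)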
|
2021-01-01 20:50:36 +00:00
|
|
|
|
for (int i = 7; i > 0; --i) {
|
2018-09-30 16:37:55 +00:00
|
|
|
|
string orig = "\\\\" + std::to_string(i);
|
2021-01-01 20:50:36 +00:00
|
|
|
|
string dest = "\\" + std::to_string(i+2);
|
2018-09-30 16:37:55 +00:00
|
|
|
|
while (regex_replace(par_as_string, par_as_string, orig, dest));
|
|
|
|
|
}
|
2021-01-10 21:58:58 +00:00
|
|
|
|
if (opt.matchword) {
|
|
|
|
|
modifyRegexForMatchWord(par_as_string);
|
|
|
|
|
opt.matchword = false;
|
|
|
|
|
}
|
2021-01-01 20:50:36 +00:00
|
|
|
|
regexp_str = "(" + lead_as_regexp + ")()" + par_as_string;
|
|
|
|
|
regexp2_str = "(" + lead_as_regexp + ")(.*?)" + par_as_string;
|
2018-09-30 16:37:55 +00:00
|
|
|
|
}
|
2011-06-19 16:41:23 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Setting regexp to : '" << regexp_str << "'");
|
|
|
|
|
LYXERR(Debug::FIND, "Setting regexp2 to: '" << regexp2_str << "'");
|
2021-01-10 16:17:37 +00:00
|
|
|
|
CreateRegexp(opt, regexp_str, regexp2_str, par_as_string);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-02-26 22:00:31 +00:00
|
|
|
|
MatchResult MatchStringAdv::findAux(DocIterator const & cur, int len, bool at_begin) const
|
2008-11-15 23:30:27 +00:00
|
|
|
|
{
|
2019-02-26 22:00:31 +00:00
|
|
|
|
MatchResult mres;
|
|
|
|
|
|
2021-01-17 12:30:43 +00:00
|
|
|
|
mres.searched_size = len;
|
2013-08-23 19:36:50 +00:00
|
|
|
|
if (at_begin &&
|
|
|
|
|
(opt.restr == FindAndReplaceOptions::R_ONLY_MATHS && !cur.inMathed()) )
|
2019-02-26 22:00:31 +00:00
|
|
|
|
return mres;
|
2016-12-18 03:12:27 +00:00
|
|
|
|
|
2008-12-20 16:00:47 +00:00
|
|
|
|
docstring docstr = stringifyFromForSearch(opt, cur, len);
|
2020-12-27 11:01:23 +00:00
|
|
|
|
string str;
|
2021-01-18 16:06:05 +00:00
|
|
|
|
str = normalize(docstr);
|
2018-10-14 18:39:13 +00:00
|
|
|
|
if (!opt.ignoreformat) {
|
2018-10-18 15:37:15 +00:00
|
|
|
|
str = correctlanguagesetting(str, false, !opt.ignoreformat);
|
2021-01-18 11:17:57 +00:00
|
|
|
|
// remove closing '}' and '\n' to allow for use of '$' in regex
|
|
|
|
|
size_t lng = str.size();
|
|
|
|
|
while ((lng > 1) && ((str[lng -1] == '}') || (str[lng -1] == '\n')))
|
|
|
|
|
lng--;
|
|
|
|
|
if (lng != str.size()) {
|
|
|
|
|
str = str.substr(0, lng);
|
|
|
|
|
}
|
2018-10-14 18:39:13 +00:00
|
|
|
|
}
|
2019-02-26 22:00:31 +00:00
|
|
|
|
if (str.empty()) {
|
|
|
|
|
mres.match_len = -1;
|
|
|
|
|
return mres;
|
|
|
|
|
}
|
2021-01-19 16:52:36 +00:00
|
|
|
|
LYXERR(Debug::FIND, "After normalization: Matching against:\n'" << str << "'");
|
2016-12-18 03:12:27 +00:00
|
|
|
|
|
2021-01-18 16:06:05 +00:00
|
|
|
|
LASSERT(use_regexp, /**/);
|
|
|
|
|
{
|
|
|
|
|
// use_regexp always true
|
2011-02-07 20:36:40 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Searching in regexp mode: at_begin=" << at_begin);
|
2021-01-01 20:50:36 +00:00
|
|
|
|
#if QTSEARCH
|
2020-12-30 12:00:03 +00:00
|
|
|
|
QString qstr = QString::fromStdString(str);
|
|
|
|
|
QRegularExpression const *p_regexp;
|
|
|
|
|
QRegularExpression::MatchType flags = QRegularExpression::NormalMatch;
|
|
|
|
|
if (at_begin) {
|
|
|
|
|
p_regexp = ®exp;
|
|
|
|
|
} else {
|
|
|
|
|
p_regexp = ®exp2;
|
|
|
|
|
}
|
|
|
|
|
QRegularExpressionMatch match = p_regexp->match(qstr, 0, flags);
|
|
|
|
|
if (!match.hasMatch())
|
|
|
|
|
return mres;
|
|
|
|
|
#else
|
2017-04-16 21:10:17 +00:00
|
|
|
|
regex const *p_regexp;
|
|
|
|
|
regex_constants::match_flag_type flags;
|
|
|
|
|
if (at_begin) {
|
|
|
|
|
flags = regex_constants::match_continuous;
|
|
|
|
|
p_regexp = ®exp;
|
|
|
|
|
} else {
|
|
|
|
|
flags = regex_constants::match_default;
|
|
|
|
|
p_regexp = ®exp2;
|
|
|
|
|
}
|
|
|
|
|
sregex_iterator re_it(str.begin(), str.end(), *p_regexp, flags);
|
2017-04-16 17:43:54 +00:00
|
|
|
|
if (re_it == sregex_iterator())
|
2019-02-26 22:00:31 +00:00
|
|
|
|
return mres;
|
2016-12-18 03:09:03 +00:00
|
|
|
|
match_results<string::const_iterator> const & m = *re_it;
|
2020-12-30 12:00:03 +00:00
|
|
|
|
#endif
|
2021-01-07 16:04:27 +00:00
|
|
|
|
// Whole found string, including the leading string
|
|
|
|
|
// std: m[0].second - m[0].first
|
|
|
|
|
// Qt: match.capturedEnd(0) - match.capturedStart(0)
|
2018-09-30 14:08:47 +00:00
|
|
|
|
//
|
2021-01-07 16:04:27 +00:00
|
|
|
|
// Size of the leading string
|
|
|
|
|
// std: m[1].second - m[1].first
|
|
|
|
|
// Qt: match.capturedEnd(1) - match.capturedStart(1)
|
2018-09-30 14:08:47 +00:00
|
|
|
|
int leadingsize = 0;
|
2021-01-01 20:50:36 +00:00
|
|
|
|
#if QTSEARCH
|
|
|
|
|
if (match.lastCapturedIndex() > 0) {
|
2020-12-30 12:00:03 +00:00
|
|
|
|
leadingsize = match.capturedEnd(1) - match.capturedStart(1);
|
2021-01-01 20:50:36 +00:00
|
|
|
|
}
|
2020-12-30 12:00:03 +00:00
|
|
|
|
|
|
|
|
|
#else
|
2021-01-01 20:50:36 +00:00
|
|
|
|
if (m.size() > 2) {
|
2018-09-30 14:08:47 +00:00
|
|
|
|
leadingsize = m[1].second - m[1].first;
|
2021-01-01 20:50:36 +00:00
|
|
|
|
}
|
2020-12-30 12:00:03 +00:00
|
|
|
|
#endif
|
2021-01-01 20:50:36 +00:00
|
|
|
|
#if QTSEARCH
|
|
|
|
|
mres.match_prefix = match.capturedEnd(2) - match.capturedStart(2);
|
2021-01-02 17:37:14 +00:00
|
|
|
|
mres.match_len = match.capturedEnd(0) - match.capturedEnd(2);
|
2021-01-04 06:16:59 +00:00
|
|
|
|
// Because the number of closings at the end of the string can differ,
|
|
|
|
|
// we have to 'unify' the length of the post-match.
|
|
|
|
|
// Done by ignoring closing braces and linefeeds at the string end
|
|
|
|
|
int matchend = match.capturedEnd(0);
|
|
|
|
|
size_t strsize = qstr.size();
|
2021-01-21 12:39:25 +00:00
|
|
|
|
if (!opt.ignoreformat) {
|
|
|
|
|
while (mres.match_len > 0) {
|
|
|
|
|
QChar c = qstr.at(matchend - 1);
|
|
|
|
|
if ((c == '\n') || (c == '}') || (c == '{')) {
|
|
|
|
|
mres.match_len--;
|
|
|
|
|
matchend--;
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
while (strsize > (size_t) match.capturedEnd(0)) {
|
|
|
|
|
QChar c = qstr.at(strsize-1);
|
|
|
|
|
if ((c == '\n') || (c == '}')) {
|
|
|
|
|
--strsize;
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
break;
|
2021-01-04 06:16:59 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
// LYXERR0(qstr.toStdString());
|
|
|
|
|
mres.match2end = strsize - matchend;
|
2021-01-02 17:37:14 +00:00
|
|
|
|
mres.pos = match.capturedStart(2);
|
2020-12-30 12:00:03 +00:00
|
|
|
|
#else
|
2021-01-02 17:37:14 +00:00
|
|
|
|
mres.match_prefix = m[2].second - m[2].first;
|
|
|
|
|
mres.match_len = m[0].second - m[2].second;
|
2021-01-04 06:16:59 +00:00
|
|
|
|
// ignore closing braces and linefeeds at string end
|
|
|
|
|
size_t strend = m[0].second - m[0].first;
|
|
|
|
|
int matchend = strend;
|
|
|
|
|
size_t strsize = str.size();
|
2021-01-21 12:39:25 +00:00
|
|
|
|
if (!opt.ignoreformat) {
|
|
|
|
|
while (mres.match_len > 0) {
|
|
|
|
|
char c = str.at(matchend - 1);
|
|
|
|
|
if ((c == '\n') || (c == '}') || (c == '{')) {
|
|
|
|
|
mres.match_len--;
|
|
|
|
|
matchend--;
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
while (strsize > strend) {
|
|
|
|
|
if ((str.at(strsize-1) == '}') || (str.at(strsize-1) == '\n')) {
|
|
|
|
|
--strsize;
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
break;
|
2021-01-04 06:16:59 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
// LYXERR0(str);
|
|
|
|
|
mres.match2end = strsize - matchend;
|
|
|
|
|
mres.pos = m[2].first - m[0].first;
|
2020-12-30 12:00:03 +00:00
|
|
|
|
#endif
|
2021-01-04 06:16:59 +00:00
|
|
|
|
if (mres.match2end < 0)
|
|
|
|
|
mres.match_len = 0;
|
2021-01-01 20:50:36 +00:00
|
|
|
|
mres.leadsize = leadingsize;
|
2021-01-08 11:05:51 +00:00
|
|
|
|
#if QTSEARCH
|
2021-01-07 16:04:27 +00:00
|
|
|
|
if (mres.match_len > 0) {
|
|
|
|
|
string a0 = match.captured(0).mid(mres.pos + mres.match_prefix, mres.match_len).toStdString();
|
|
|
|
|
mres.result.push_back(a0);
|
|
|
|
|
for (int i = 3; i <= match.lastCapturedIndex(); i++) {
|
|
|
|
|
mres.result.push_back(match.captured(i).toStdString());
|
|
|
|
|
}
|
|
|
|
|
}
|
2021-01-08 11:05:51 +00:00
|
|
|
|
#else
|
2021-01-07 16:04:27 +00:00
|
|
|
|
if (mres.match_len > 0) {
|
|
|
|
|
string a0 = m[0].str().substr(mres.pos + mres.match_prefix, mres.match_len);
|
|
|
|
|
mres.result.push_back(a0);
|
|
|
|
|
for (size_t i = 3; i < m.size(); i++) {
|
|
|
|
|
mres.result.push_back(m[i]);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
#endif
|
2019-02-26 22:00:31 +00:00
|
|
|
|
return mres;
|
2016-12-18 03:16:49 +00:00
|
|
|
|
}
|
2008-11-15 23:30:27 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2019-02-26 22:00:31 +00:00
|
|
|
|
MatchResult MatchStringAdv::operator()(DocIterator const & cur, int len, bool at_begin) const
|
2010-01-09 12:39:29 +00:00
|
|
|
|
{
|
2019-02-26 22:00:31 +00:00
|
|
|
|
MatchResult mres = findAux(cur, len, at_begin);
|
|
|
|
|
int res = mres.match_len;
|
2011-02-07 20:36:40 +00:00
|
|
|
|
LYXERR(Debug::FIND,
|
2012-10-23 20:58:10 +00:00
|
|
|
|
"res=" << res << ", at_begin=" << at_begin
|
2021-01-25 09:52:14 +00:00
|
|
|
|
<< ", matchAtStart=" << opt.matchAtStart
|
2012-10-23 20:58:10 +00:00
|
|
|
|
<< ", inTexted=" << cur.inTexted());
|
2021-01-25 09:52:14 +00:00
|
|
|
|
if (opt.matchAtStart) {
|
2021-01-11 13:50:03 +00:00
|
|
|
|
if (cur.pos() != 0)
|
|
|
|
|
mres.match_len = 0;
|
|
|
|
|
else if (mres.match_prefix > 0)
|
|
|
|
|
mres.match_len = 0;
|
|
|
|
|
return mres;
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
return mres;
|
2010-01-09 12:39:29 +00:00
|
|
|
|
}
|
|
|
|
|
|
2021-01-10 16:17:37 +00:00
|
|
|
|
#if 0
|
2021-01-06 13:22:26 +00:00
|
|
|
|
static bool simple_replace(string &t, string from, string to)
|
|
|
|
|
{
|
|
|
|
|
regex repl("(\\\\)*(" + from + ")");
|
|
|
|
|
string s("");
|
|
|
|
|
size_t lastpos = 0;
|
|
|
|
|
smatch sub;
|
|
|
|
|
for (sregex_iterator it(t.begin(), t.end(), repl), end; it != end; ++it) {
|
|
|
|
|
sub = *it;
|
|
|
|
|
if ((sub.position(2) - sub.position(0)) % 2 == 1)
|
|
|
|
|
continue;
|
|
|
|
|
if (lastpos < (size_t) sub.position(2))
|
|
|
|
|
s += t.substr(lastpos, sub.position(2) - lastpos);
|
|
|
|
|
s += to;
|
|
|
|
|
lastpos = sub.position(2) + sub.length(2);
|
|
|
|
|
}
|
|
|
|
|
if (lastpos == 0)
|
|
|
|
|
return false;
|
|
|
|
|
else if (lastpos < t.length())
|
|
|
|
|
s += t.substr(lastpos, t.length() - lastpos);
|
|
|
|
|
t = s;
|
|
|
|
|
return true;
|
|
|
|
|
}
|
2021-01-10 16:17:37 +00:00
|
|
|
|
#endif
|
2010-01-09 12:39:29 +00:00
|
|
|
|
|
2021-01-10 16:17:37 +00:00
|
|
|
|
string MatchStringAdv::normalize(docstring const & s) const
|
2008-11-15 23:30:27 +00:00
|
|
|
|
{
|
|
|
|
|
string t;
|
2020-12-27 11:01:23 +00:00
|
|
|
|
t = lyx::to_utf8(s);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
// Remove \n at begin
|
2012-10-21 19:14:16 +00:00
|
|
|
|
while (!t.empty() && t[0] == '\n')
|
2008-11-15 23:30:27 +00:00
|
|
|
|
t = t.substr(1);
|
|
|
|
|
// Remove \n at end
|
2012-10-21 19:14:16 +00:00
|
|
|
|
while (!t.empty() && t[t.size() - 1] == '\n')
|
2008-11-15 23:30:27 +00:00
|
|
|
|
t = t.substr(0, t.size() - 1);
|
|
|
|
|
size_t pos;
|
2019-02-10 17:00:55 +00:00
|
|
|
|
// Handle all other '\n'
|
2019-02-07 12:35:47 +00:00
|
|
|
|
while ((pos = t.find("\n")) != string::npos) {
|
2019-02-10 17:00:55 +00:00
|
|
|
|
if (pos > 1 && t[pos-1] == '\\' && t[pos-2] == '\\' ) {
|
|
|
|
|
// Handle '\\\n'
|
2019-02-28 12:00:12 +00:00
|
|
|
|
if (isAlnumASCII(t[pos+1])) {
|
2019-02-10 17:00:55 +00:00
|
|
|
|
t.replace(pos-2, 3, " ");
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
t.replace(pos-2, 3, "");
|
|
|
|
|
}
|
|
|
|
|
}
|
2019-02-28 12:00:12 +00:00
|
|
|
|
else if (!isAlnumASCII(t[pos+1]) || !isAlnumASCII(t[pos-1])) {
|
2019-02-10 17:00:55 +00:00
|
|
|
|
// '\n' adjacent to non-alpha-numerics, discard
|
2019-02-07 12:35:47 +00:00
|
|
|
|
t.replace(pos, 1, "");
|
2019-02-10 17:00:55 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
// Replace all other \n with spaces
|
2019-02-07 12:35:47 +00:00
|
|
|
|
t.replace(pos, 1, " ");
|
2019-02-10 17:00:55 +00:00
|
|
|
|
}
|
2019-02-07 12:35:47 +00:00
|
|
|
|
}
|
2008-11-15 23:30:27 +00:00
|
|
|
|
// Remove stale empty \emph{}, \textbf{} and similar blocks from latexify
|
2018-10-02 09:53:01 +00:00
|
|
|
|
// Kornel: Added textsl, textsf, textit, texttt and noun
|
|
|
|
|
// + allow searching for colored text too
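// e.g. a leftover "\emph{}" or "\textbf{{}}" produced by latexify would otherwise
// remain in the string and spoil the comparison with the search string.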
|
2021-01-19 16:52:36 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Removing stale empty macros from: " << t);
|
2018-10-13 19:02:53 +00:00
|
|
|
|
while (regex_replace(t, t, "\\\\(emph|noun|text(bf|sl|sf|it|tt)|(u|uu)line|(s|x)out|uwave)(\\{(\\{\\})?\\})+", ""))
|
|
|
|
|
LYXERR(Debug::FIND, " further removing stale empty \\emph{}, \\textbf{} macros from: " << t);
|
|
|
|
|
while (regex_replace(t, t, "\\\\((sub)?(((sub)?section)|paragraph)|part)\\*?(\\{(\\{\\})?\\})+", ""))
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, " further removing stale empty \\emph{}, \\textbf{} macros from: " << t);
|
2018-11-03 10:15:12 +00:00
|
|
|
|
while (regex_replace(t, t, "\\\\(foreignlanguage|textcolor|item)\\{[a-z]+\\}(\\{(\\{\\})?\\})+", ""));
|
2011-08-25 19:16:14 +00:00
|
|
|
|
|
2008-11-15 23:30:27 +00:00
|
|
|
|
return t;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
docstring stringifyFromCursor(DocIterator const & cur, int len)
|
|
|
|
|
{
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Stringifying with len=" << len << " from cursor at pos: " << cur);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
if (cur.inTexted()) {
|
2012-10-23 20:58:10 +00:00
|
|
|
|
Paragraph const & par = cur.paragraph();
|
|
|
|
|
// TODO what about searching beyond/across paragraph breaks ?
|
|
|
|
|
// TODO Try adding a AS_STR_INSERTS as last arg
|
|
|
|
|
pos_type end = ( len == -1 || cur.pos() + len > int(par.size()) ) ?
|
|
|
|
|
int(par.size()) : cur.pos() + len;
|
2019-03-03 13:08:27 +00:00
|
|
|
|
// OutputParams runparams(&cur.buffer()->params().encoding());
|
|
|
|
|
OutputParams runparams(encodings.fromLyXName("utf8"));
|
2012-10-23 20:58:10 +00:00
|
|
|
|
runparams.nice = true;
|
2020-11-30 22:00:40 +00:00
|
|
|
|
runparams.flavor = Flavor::XeTeX;
|
2019-02-26 22:00:31 +00:00
|
|
|
|
runparams.linelen = 10000; //lyxrc.plaintext_linelen;
|
2012-10-23 20:58:10 +00:00
|
|
|
|
// No side effect of file copying and image conversion
|
|
|
|
|
runparams.dryrun = true;
|
2021-01-22 20:11:28 +00:00
|
|
|
|
int option = AS_STR_INSETS | AS_STR_PLAINTEXT;
|
|
|
|
|
if (ignoreFormats.getDeleted()) {
|
|
|
|
|
option |= AS_STR_SKIPDELETE;
|
|
|
|
|
runparams.for_searchAdv = OutputParams::SearchWithoutDeleted;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
runparams.for_searchAdv = OutputParams::SearchWithDeleted;
|
|
|
|
|
}
|
2012-10-23 20:58:10 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Stringifying with cur: "
|
|
|
|
|
<< cur << ", from pos: " << cur.pos() << ", end: " << end);
|
2019-02-27 09:17:56 +00:00
|
|
|
|
return par.asString(cur.pos(), end,
|
2021-01-22 20:11:28 +00:00
|
|
|
|
option,
|
2014-03-27 23:12:56 +00:00
|
|
|
|
&runparams);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
} else if (cur.inMathed()) {
|
2012-10-23 20:58:10 +00:00
|
|
|
|
CursorSlice cs = cur.top();
|
|
|
|
|
MathData md = cs.cell();
|
|
|
|
|
MathData::const_iterator it_end =
|
|
|
|
|
(( len == -1 || cs.pos() + len > int(md.size()))
|
|
|
|
|
? md.end()
|
|
|
|
|
: md.begin() + cs.pos() + len );
|
2018-11-02 09:32:28 +00:00
|
|
|
|
MathData md2;
|
2012-10-23 20:58:10 +00:00
|
|
|
|
for (MathData::const_iterator it = md.begin() + cs.pos();
|
|
|
|
|
it != it_end; ++it)
|
2018-11-02 09:32:28 +00:00
|
|
|
|
md2.push_back(*it);
|
|
|
|
|
docstring s = asString(md2);
|
2012-10-23 20:58:10 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Stringified math: '" << s << "'");
|
|
|
|
|
return s;
|
2008-11-15 23:30:27 +00:00
|
|
|
|
}
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Don't know how to stringify from here: " << cur);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
return docstring();
|
|
|
|
|
}
|
|
|
|
|
|
2009-08-19 15:14:28 +00:00
|
|
|
|
|
2008-11-17 11:46:07 +00:00
|
|
|
|
/** Computes the LaTeX export of buf starting from cur and ending len positions
|
|
|
|
|
* after cur, if len is positive, or at the paragraph or innermost inset end
|
|
|
|
|
* if len is -1.
|
|
|
|
|
*/
|
2008-12-20 16:00:47 +00:00
|
|
|
|
docstring latexifyFromCursor(DocIterator const & cur, int len)
|
2008-11-15 23:30:27 +00:00
|
|
|
|
{
|
2021-01-19 16:52:36 +00:00
|
|
|
|
/*
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Latexifying with len=" << len << " from cursor at pos: " << cur);
|
|
|
|
|
LYXERR(Debug::FIND, " with cur.lastpost=" << cur.lastpos() << ", cur.lastrow="
|
2012-10-23 20:58:10 +00:00
|
|
|
|
<< cur.lastrow() << ", cur.lastcol=" << cur.lastcol());
|
2021-01-19 16:52:36 +00:00
|
|
|
|
*/
|
2008-12-20 16:00:47 +00:00
|
|
|
|
Buffer const & buf = *cur.buffer();
|
2008-11-15 23:30:27 +00:00
|
|
|
|
|
|
|
|
|
odocstringstream ods;
|
2016-09-04 02:02:47 +00:00
|
|
|
|
otexstream os(ods);
|
2019-03-03 13:08:27 +00:00
|
|
|
|
//OutputParams runparams(&buf.params().encoding());
|
|
|
|
|
OutputParams runparams(encodings.fromLyXName("utf8"));
|
2008-11-15 23:30:27 +00:00
|
|
|
|
runparams.nice = false;
|
2020-11-30 22:00:40 +00:00
|
|
|
|
runparams.flavor = Flavor::XeTeX;
|
2008-11-15 23:30:27 +00:00
|
|
|
|
runparams.linelen = 8000; //lyxrc.plaintext_linelen;
|
|
|
|
|
// No side effect of file copying and image conversion
|
|
|
|
|
runparams.dryrun = true;
|
2021-01-22 20:11:28 +00:00
|
|
|
|
if (ignoreFormats.getDeleted()) {
|
|
|
|
|
runparams.for_searchAdv = OutputParams::SearchWithoutDeleted;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
runparams.for_searchAdv = OutputParams::SearchWithDeleted;
|
|
|
|
|
}
|
2008-11-15 23:30:27 +00:00
|
|
|
|
|
|
|
|
|
if (cur.inTexted()) {
|
2010-12-18 15:57:27 +00:00
|
|
|
|
// @TODO what about searching beyond/across paragraph breaks ?
|
|
|
|
|
pos_type endpos = cur.paragraph().size();
|
|
|
|
|
if (len != -1 && endpos > cur.pos() + len)
|
|
|
|
|
endpos = cur.pos() + len;
|
2011-02-10 20:02:48 +00:00
|
|
|
|
TeXOnePar(buf, *cur.innerText(), cur.pit(), os, runparams,
|
2012-10-23 20:58:10 +00:00
|
|
|
|
string(), cur.pos(), endpos);
|
2018-10-14 18:39:13 +00:00
|
|
|
|
string s = lyx::to_utf8(ods.str());
|
2018-10-12 14:47:07 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Latexified +modified text: '" << s << "'");
|
2018-10-05 18:26:44 +00:00
|
|
|
|
return(lyx::from_utf8(s));
|
2008-11-15 23:30:27 +00:00
|
|
|
|
} else if (cur.inMathed()) {
|
|
|
|
|
// Retrieve the math environment type, and add '$' or '\[' or others (\begin{equation}) accordingly
|
|
|
|
|
for (int s = cur.depth() - 1; s >= 0; --s) {
|
2012-10-23 20:58:10 +00:00
|
|
|
|
CursorSlice const & cs = cur[s];
|
2015-10-10 19:23:52 +00:00
|
|
|
|
if (cs.asInsetMath() && cs.asInsetMath()->asHullInset()) {
|
2020-12-26 19:04:36 +00:00
|
|
|
|
TeXMathStream ws(os);
|
2012-10-23 20:58:10 +00:00
|
|
|
|
cs.asInsetMath()->asHullInset()->header_write(ws);
|
|
|
|
|
break;
|
|
|
|
|
}
|
2008-11-15 23:30:27 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
CursorSlice const & cs = cur.top();
|
|
|
|
|
MathData md = cs.cell();
|
2012-10-23 20:58:10 +00:00
|
|
|
|
MathData::const_iterator it_end =
|
|
|
|
|
((len == -1 || cs.pos() + len > int(md.size()))
|
|
|
|
|
? md.end()
|
|
|
|
|
: md.begin() + cs.pos() + len);
|
2018-11-02 09:32:28 +00:00
|
|
|
|
MathData md2;
|
2012-10-23 20:58:10 +00:00
|
|
|
|
for (MathData::const_iterator it = md.begin() + cs.pos();
|
|
|
|
|
it != it_end; ++it)
|
2018-11-02 09:32:28 +00:00
|
|
|
|
md2.push_back(*it);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
|
2018-11-02 09:32:28 +00:00
|
|
|
|
ods << asString(md2);
|
2008-11-17 11:46:07 +00:00
|
|
|
|
// Retrieve the math environment type, and add '$' or '\]'
|
|
|
|
|
// or others (\end{equation}) accordingly
|
2008-11-15 23:30:27 +00:00
|
|
|
|
for (int s = cur.depth() - 1; s >= 0; --s) {
|
2018-02-24 05:32:14 +00:00
|
|
|
|
CursorSlice const & cs2 = cur[s];
|
|
|
|
|
InsetMath * inset = cs2.asInsetMath();
|
2008-11-17 11:46:07 +00:00
|
|
|
|
if (inset && inset->asHullInset()) {
|
2020-12-26 19:04:36 +00:00
|
|
|
|
TeXMathStream ws(os);
|
2008-11-17 11:46:07 +00:00
|
|
|
|
inset->asHullInset()->footer_write(ws);
|
|
|
|
|
break;
|
2008-11-15 23:30:27 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Latexified math: '" << lyx::to_utf8(ods.str()) << "'");
|
2008-11-15 23:30:27 +00:00
|
|
|
|
} else {
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Don't know how to stringify from here: " << cur);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
}
|
|
|
|
|
return ods.str();
|
|
|
|
|
}
|
|
|
|
|
|
2021-01-07 16:04:27 +00:00
|
|
|
|
#if defined(ResultsDebug)
|
2021-01-04 06:16:59 +00:00
|
|
|
|
// Debugging output
|
2021-01-17 12:30:43 +00:00
|
|
|
|
static void displayMResult(MatchResult &mres, string from, DocIterator & cur)
|
2008-11-15 23:30:27 +00:00
|
|
|
|
{
|
2021-01-17 12:30:43 +00:00
|
|
|
|
LYXERR0( "from:\t\t\t" << from);
|
|
|
|
|
string status;
|
|
|
|
|
if (mres.pos_len > 0) {
|
|
|
|
|
// Set in finalize
|
|
|
|
|
status = "FINALSEARCH";
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
if (mres.match_len > 0) {
|
|
|
|
|
if ((mres.match_prefix == 0) && (mres.pos == mres.leadsize))
|
|
|
|
|
status = "Good Match";
|
|
|
|
|
else
|
|
|
|
|
status = "Matched in";
|
2021-01-04 15:57:56 +00:00
|
|
|
|
}
|
2021-01-17 12:30:43 +00:00
|
|
|
|
else
|
|
|
|
|
status = "MissedSearch";
|
2021-01-04 15:57:56 +00:00
|
|
|
|
}
|
2021-01-17 12:30:43 +00:00
|
|
|
|
|
|
|
|
|
LYXERR0( status << "(" << cur.pos() << " ... " << mres.searched_size + cur.pos() << ") cur.lastpos(" << cur.lastpos() << ")");
|
|
|
|
|
if ((mres.leadsize > 0) || (mres.match_len > 0) || (mres.match2end > 0))
|
|
|
|
|
LYXERR0( "leadsize(" << mres.leadsize << ") match_len(" << mres.match_len << ") match2end(" << mres.match2end << ")");
|
|
|
|
|
if ((mres.pos > 0) || (mres.match_prefix > 0))
|
|
|
|
|
LYXERR0( "pos(" << mres.pos << ") match_prefix(" << mres.match_prefix << ")");
|
|
|
|
|
for (size_t i = 0; i < mres.result.size(); i++)
|
|
|
|
|
LYXERR0( "Match " << i << " = \"" << mres.result[i] << "\"");
|
2021-01-04 15:57:56 +00:00
|
|
|
|
}
|
2021-01-17 12:30:43 +00:00
|
|
|
|
#define displayMres(s, txt, cur) displayMResult(s, txt, cur);
|
|
|
|
|
#else
|
|
|
|
|
#define displayMres(s, txt, cur)
|
|
|
|
|
#endif
|
2021-01-04 15:57:56 +00:00
|
|
|
|
|
|
|
|
|
/** Finalize an advanced find operation, advancing the cursor to the innermost
|
|
|
|
|
** position that matches, plus computing the length of the matching text to
|
|
|
|
|
** be selected
|
2021-01-10 16:17:37 +00:00
|
|
|
|
** Return the cur.pos() difference between start and end of found match
|
2021-01-04 15:57:56 +00:00
|
|
|
|
**/
|
2021-01-21 12:39:25 +00:00
|
|
|
|
MatchResult findAdvFinalize(DocIterator & cur, MatchStringAdv const & match, MatchResult const & expected = MatchResult(-1))
|
2021-01-04 15:57:56 +00:00
|
|
|
|
{
|
|
|
|
|
// Search the foremost position that matches (avoids find of entire math
|
|
|
|
|
// inset when match at start of it)
|
|
|
|
|
DocIterator old_cur(cur.buffer());
|
|
|
|
|
MatchResult mres;
|
2021-01-08 11:05:51 +00:00
|
|
|
|
static MatchResult fail = MatchResult();
|
2021-01-21 12:39:25 +00:00
|
|
|
|
MatchResult max_match;
|
2021-01-06 01:12:58 +00:00
|
|
|
|
// A positive prefix_len means that forwarding 1 position will remove the complete entry
|
|
|
|
|
// Happens with e.g. hyperlinks
|
|
|
|
|
// either one sees "http://www.bla.bla" or nothing
|
|
|
|
|
// so the search for "www" gives prefix_len = 7 (== strlen("http://"))
|
|
|
|
|
// and although we search for only 3 chars, we find the whole hyperlink inset
|
2021-01-08 11:05:51 +00:00
|
|
|
|
bool at_begin = (expected.match_prefix == 0);
|
2021-01-21 12:39:25 +00:00
|
|
|
|
if (!match.opt.forward && match.opt.ignoreformat) {
|
|
|
|
|
if (expected.pos > 0)
|
|
|
|
|
return fail;
|
|
|
|
|
}
|
2021-01-17 12:30:43 +00:00
|
|
|
|
LASSERT(at_begin, /**/);
|
|
|
|
|
if (expected.match_len > 0 && at_begin) {
|
|
|
|
|
// Search for deepest match
|
2021-01-15 16:09:18 +00:00
|
|
|
|
old_cur = cur;
|
|
|
|
|
max_match = expected;
|
|
|
|
|
do {
|
|
|
|
|
size_t d = cur.depth();
|
|
|
|
|
cur.forwardPos();
|
|
|
|
|
if (!cur)
|
|
|
|
|
break;
|
|
|
|
|
if (cur.depth() < d)
|
|
|
|
|
break;
|
2021-01-17 12:30:43 +00:00
|
|
|
|
if (cur.depth() == d)
|
2021-01-15 16:09:18 +00:00
|
|
|
|
break;
|
2021-01-17 12:30:43 +00:00
|
|
|
|
size_t lastd = d;
|
|
|
|
|
while (cur && cur.depth() > lastd) {
|
|
|
|
|
lastd = cur.depth();
|
|
|
|
|
mres = match(cur, -1, at_begin);
|
|
|
|
|
displayMres(mres, "Checking innermost", cur);
|
|
|
|
|
if (mres.match_len > 0)
|
|
|
|
|
break;
|
|
|
|
|
// maybe deeper?
|
|
|
|
|
cur.forwardPos();
|
|
|
|
|
}
|
2021-01-08 11:05:51 +00:00
|
|
|
|
if (mres.match_len < expected.match_len)
|
2021-01-15 16:09:18 +00:00
|
|
|
|
break;
|
|
|
|
|
max_match = mres;
|
|
|
|
|
old_cur = cur;
|
|
|
|
|
} while(1);
|
|
|
|
|
cur = old_cur;
|
2021-01-04 15:57:56 +00:00
|
|
|
|
}
|
2021-01-15 16:09:18 +00:00
|
|
|
|
else {
|
|
|
|
|
// (expected.match_len <= 0)
|
2021-01-04 15:57:56 +00:00
|
|
|
|
mres = match(cur); /* match valid only if not searching whole words */
|
2021-01-17 12:30:43 +00:00
|
|
|
|
displayMres(mres, "Start with negative match", cur);
|
2021-01-08 11:05:51 +00:00
|
|
|
|
max_match = mres;
|
2021-01-04 15:57:56 +00:00
|
|
|
|
}
|
2021-01-08 11:05:51 +00:00
|
|
|
|
if (max_match.match_len <= 0) return fail;
|
2009-08-17 14:39:00 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Ok");
|
2008-11-15 23:30:27 +00:00
|
|
|
|
|
|
|
|
|
// Compute the match length
|
2021-01-10 16:17:37 +00:00
|
|
|
|
int len = 1;
|
2018-11-26 11:37:18 +00:00
|
|
|
|
if (cur.pos() + len > cur.lastpos())
|
2021-01-08 11:05:51 +00:00
|
|
|
|
return fail;
|
2021-01-18 16:06:05 +00:00
|
|
|
|
|
|
|
|
|
LASSERT(match.use_regexp, /**/);
|
|
|
|
|
{
|
2021-01-04 06:16:59 +00:00
|
|
|
|
int minl = 1;
|
|
|
|
|
int maxl = cur.lastpos() - cur.pos();
|
|
|
|
|
// Greedy behaviour while matching regexps
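// The loop below is essentially a bisection over 'len' (my summary): if the
// shorter candidate still yields the maximal match length, shrink 'maxl';
// otherwise raise 'minl'; on exit, minl is the smallest length that still
// produces the full match.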
|
|
|
|
|
while (maxl > minl) {
|
2021-01-10 16:17:37 +00:00
|
|
|
|
MatchResult mres2;
|
|
|
|
|
mres2 = match(cur, len, at_begin);
|
2021-01-17 12:30:43 +00:00
|
|
|
|
displayMres(mres2, "Finalize loop", cur);
|
|
|
|
|
int actual_match_len = mres2.match_len;
|
|
|
|
|
if (actual_match_len >= max_match.match_len) {
|
|
|
|
|
// actual_match_len > max_match.match_len _can_ happen,
|
2021-01-04 06:16:59 +00:00
|
|
|
|
// if the search area splits
|
|
|
|
|
// some following word so that the regex
|
|
|
|
|
// (e.g. 'r.*r\b' matches 'r' from the middle of the
|
|
|
|
|
// split word)
|
|
|
|
|
// This means, the len value is too big
|
2021-01-17 12:30:43 +00:00
|
|
|
|
actual_match_len = max_match.match_len;
|
|
|
|
|
max_match = mres2;
|
|
|
|
|
max_match.match_len = actual_match_len;
|
2021-01-04 06:16:59 +00:00
|
|
|
|
maxl = len;
|
|
|
|
|
if (maxl - minl < 4)
|
|
|
|
|
len = (int)((maxl + minl)/2);
|
|
|
|
|
else
|
|
|
|
|
len = (int)(minl + (maxl - minl + 3)/4);
|
|
|
|
|
}
|
|
|
|
|
else {
|
2021-01-17 12:30:43 +00:00
|
|
|
|
// (actual_match_len < max_match.match_len)
|
2021-01-04 06:16:59 +00:00
|
|
|
|
minl = len + 1;
|
|
|
|
|
len = (int)((maxl + minl)/2);
|
|
|
|
|
}
|
|
|
|
|
}
|
2021-01-17 12:30:43 +00:00
|
|
|
|
len = minl;
|
2019-02-11 12:06:02 +00:00
|
|
|
|
old_cur = cur;
|
|
|
|
|
// Search for real start of matched characters
|
|
|
|
|
while (len > 1) {
|
2021-01-08 11:05:51 +00:00
|
|
|
|
MatchResult actual_match;
|
2019-02-11 12:06:02 +00:00
|
|
|
|
do {
|
|
|
|
|
cur.forwardPos();
|
|
|
|
|
} while (cur.depth() > old_cur.depth()); /* Skip inner insets */
|
|
|
|
|
if (cur.depth() < old_cur.depth()) {
|
|
|
|
|
// Outer inset?
|
2020-05-29 06:44:56 +00:00
|
|
|
|
LYXERR(Debug::INFO, "cur.depth() < old_cur.depth(), this should never happen");
|
2019-02-11 12:06:02 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
if (cur.pos() != old_cur.pos()) {
|
|
|
|
|
// OK, forwarded 1 pos in actual inset
|
2021-01-08 11:05:51 +00:00
|
|
|
|
actual_match = match(cur, len-1, at_begin);
|
|
|
|
|
if (actual_match.match_len == max_match.match_len) {
|
2019-02-11 12:06:02 +00:00
|
|
|
|
// Ha, got it! The shorter selection has the same match length
|
|
|
|
|
len--;
|
|
|
|
|
old_cur = cur;
|
2021-01-17 12:30:43 +00:00
|
|
|
|
max_match = actual_match;
|
2019-02-11 12:06:02 +00:00
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
// OK, the shorter selection matches less chars, revert to previous value
|
|
|
|
|
cur = old_cur;
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
else {
|
2020-05-29 06:44:56 +00:00
|
|
|
|
LYXERR(Debug::INFO, "cur.pos() == old_cur.pos(), this should never happen");
|
2021-01-08 11:05:51 +00:00
|
|
|
|
actual_match = match(cur, len, at_begin);
|
|
|
|
|
if (actual_match.match_len == max_match.match_len) {
|
2019-02-11 12:06:02 +00:00
|
|
|
|
old_cur = cur;
|
2021-01-10 16:17:37 +00:00
|
|
|
|
max_match = actual_match;
|
|
|
|
|
}
|
2019-02-11 12:06:02 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2021-01-10 16:17:37 +00:00
|
|
|
|
if (len == 0)
|
|
|
|
|
return fail;
|
|
|
|
|
else {
|
|
|
|
|
max_match.pos_len = len;
|
2021-01-17 12:30:43 +00:00
|
|
|
|
displayMres(max_match, "SEARCH RESULT", cur)
|
2021-01-10 16:17:37 +00:00
|
|
|
|
return max_match;
|
|
|
|
|
}
|
|
|
|
|
}
|
2008-11-15 23:30:27 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Finds forward
|
2021-01-08 11:05:51 +00:00
|
|
|
|
int findForwardAdv(DocIterator & cur, MatchStringAdv & match)
|
2008-11-15 23:30:27 +00:00
|
|
|
|
{
|
2008-11-17 11:46:07 +00:00
|
|
|
|
if (!cur)
|
2008-11-15 23:30:27 +00:00
|
|
|
|
return 0;
|
2021-01-17 12:30:43 +00:00
|
|
|
|
bool repeat = false;
|
2021-01-20 11:24:33 +00:00
|
|
|
|
DocIterator orig_cur; // to be used if repeat not successful
|
|
|
|
|
MatchResult orig_mres;
|
2012-03-06 23:21:12 +00:00
|
|
|
|
while (!theApp()->longOperationCancelled() && cur) {
|
2021-01-12 18:33:29 +00:00
|
|
|
|
//(void) findAdvForwardInnermost(cur);
|
2011-02-07 20:36:40 +00:00
|
|
|
|
LYXERR(Debug::FIND, "findForwardAdv() cur: " << cur);
|
2019-02-26 22:00:31 +00:00
|
|
|
|
MatchResult mres = match(cur, -1, false);
|
2021-01-17 12:30:43 +00:00
|
|
|
|
string msg = "Starting";
|
|
|
|
|
if (repeat)
|
|
|
|
|
msg = "Repeated";
|
|
|
|
|
displayMres(mres, msg + " findForwardAdv", cur)
|
2019-02-26 22:00:31 +00:00
|
|
|
|
int match_len = mres.match_len;
|
2019-02-27 09:17:56 +00:00
|
|
|
|
if ((mres.pos > 100000) || (mres.match2end > 100000) || (match_len > 100000)) {
|
2020-05-29 06:44:56 +00:00
|
|
|
|
LYXERR(Debug::INFO, "BIG LENGTHS: " << mres.pos << ", " << match_len << ", " << mres.match2end);
|
2019-02-26 22:00:31 +00:00
|
|
|
|
match_len = 0;
|
|
|
|
|
}
|
2021-01-17 12:30:43 +00:00
|
|
|
|
if (match_len <= 0) {
|
|
|
|
|
// This should exit nested insets, if any, or otherwise invalidate the cursor.
|
|
|
|
|
cur.pos() = cur.lastpos();
|
|
|
|
|
LYXERR(Debug::FIND, "Advancing pos: cur=" << cur);
|
|
|
|
|
cur.forwardPos();
|
|
|
|
|
}
|
|
|
|
|
else { // match_len > 0
|
2019-02-26 22:00:31 +00:00
|
|
|
|
// Try to find the begin of searched string
|
2021-01-01 20:50:36 +00:00
|
|
|
|
int increment;
|
2021-01-22 16:03:54 +00:00
|
|
|
|
int firstInvalid = cur.lastpos() - cur.pos();
|
2021-01-17 12:30:43 +00:00
|
|
|
|
{
|
|
|
|
|
int incrmatch = (mres.match_prefix + mres.pos - mres.leadsize + 1)*3/4;
|
2021-01-22 16:03:54 +00:00
|
|
|
|
int incrcur = (firstInvalid + 1 )*3/4;
|
2021-01-17 12:30:43 +00:00
|
|
|
|
if (incrcur < incrmatch)
|
|
|
|
|
increment = incrcur;
|
|
|
|
|
else
|
|
|
|
|
increment = incrmatch;
|
|
|
|
|
if (increment < 1)
|
|
|
|
|
increment = 1;
|
|
|
|
|
}
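// (The 3/4 factors above appear to be a heuristic: jump most of the estimated
//  distance towards the match in one step and let the loop below halve or
//  re-grow the increment as needed.)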
|
2021-01-01 20:50:36 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Set increment to " << increment);
|
2021-01-02 17:37:14 +00:00
|
|
|
|
while (increment > 0) {
|
2019-02-26 22:00:31 +00:00
|
|
|
|
DocIterator old_cur = cur;
|
2021-01-22 16:03:54 +00:00
|
|
|
|
if (cur.pos() + increment >= cur.lastpos()) {
|
2019-02-26 22:00:31 +00:00
|
|
|
|
increment /= 2;
|
2021-01-22 16:03:54 +00:00
|
|
|
|
continue;
|
2019-02-26 22:00:31 +00:00
|
|
|
|
}
|
2021-01-22 16:03:54 +00:00
|
|
|
|
cur.pos() = cur.pos() + increment;
|
|
|
|
|
MatchResult mres2 = match(cur, -1, false);
|
|
|
|
|
displayMres(mres2, "findForwardAdv loop", cur)
|
|
|
|
|
switch (interpretMatch(mres, mres2)) {
|
2021-01-04 06:16:59 +00:00
|
|
|
|
case MatchResult::newIsTooFar:
|
2021-01-22 16:03:54 +00:00
|
|
|
|
// behind the expected match
|
|
|
|
|
firstInvalid = increment;
|
|
|
|
|
cur = old_cur;
|
|
|
|
|
increment /= 2;
|
|
|
|
|
break;
|
2021-01-04 06:16:59 +00:00
|
|
|
|
case MatchResult::newIsBetter:
|
2021-01-22 16:03:54 +00:00
|
|
|
|
// not reached yet, but cur.pos()+increment is better
|
|
|
|
|
mres = mres2;
|
|
|
|
|
firstInvalid -= increment;
|
|
|
|
|
if (increment > firstInvalid*3/4)
|
|
|
|
|
increment = firstInvalid*3/4;
|
|
|
|
|
if ((mres2.pos == mres2.leadsize) && (increment >= mres2.match_prefix)) {
|
|
|
|
|
if (increment >= mres2.match_prefix)
|
|
|
|
|
increment = (mres2.match_prefix+1)*3/4;
|
|
|
|
|
}
|
|
|
|
|
break;
|
2021-01-04 06:16:59 +00:00
|
|
|
|
default:
|
2021-01-22 16:03:54 +00:00
|
|
|
|
// TODO:
|
|
|
|
|
// Handle not like MatchResult::newIsTooFar
|
|
|
|
|
LYXERR0( "Probably too far: Increment = " << increment << " match_prefix = " << mres.match_prefix);
|
|
|
|
|
firstInvalid--;
|
|
|
|
|
increment = increment*3/4;
|
|
|
|
|
cur = old_cur;
|
|
|
|
|
break;
|
2019-02-26 22:00:31 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2021-01-20 11:24:33 +00:00
|
|
|
|
if (mres.match_len > 0) {
|
|
|
|
|
if (mres.match_prefix + mres.pos - mres.leadsize > 0) {
|
2021-01-22 16:03:54 +00:00
|
|
|
|
// The match seems to indicate some deeper level
|
2021-01-20 11:24:33 +00:00
|
|
|
|
repeat = true;
|
|
|
|
|
orig_cur = cur;
|
|
|
|
|
orig_mres = mres;
|
|
|
|
|
cur.forwardPos();
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
else if (repeat) {
|
2021-01-22 16:03:54 +00:00
|
|
|
|
// should never be reached.
|
2021-01-20 11:24:33 +00:00
|
|
|
|
cur = orig_cur;
|
|
|
|
|
mres = orig_mres;
|
2021-01-17 12:30:43 +00:00
|
|
|
|
}
|
2021-01-04 06:16:59 +00:00
|
|
|
|
// LYXERR0("Leaving first loop");
|
2021-01-17 12:30:43 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Finalizing 1");
|
|
|
|
|
MatchResult found_match = findAdvFinalize(cur, match, mres);
|
|
|
|
|
if (found_match.match_len > 0) {
|
2021-01-22 16:03:54 +00:00
|
|
|
|
LASSERT(found_match.pos_len > 0, /**/);
|
|
|
|
|
match.FillResults(found_match);
|
|
|
|
|
return found_match.pos_len;
|
2021-01-04 06:16:59 +00:00
|
|
|
|
}
|
2021-01-17 12:30:43 +00:00
|
|
|
|
else {
|
2021-01-22 16:03:54 +00:00
|
|
|
|
// try next possible match
|
|
|
|
|
cur.forwardPos();
|
|
|
|
|
repeat = false;
|
|
|
|
|
continue;
|
2011-02-07 20:36:40 +00:00
|
|
|
|
}
|
2008-12-20 16:00:47 +00:00
|
|
|
|
}
|
2010-01-04 12:29:38 +00:00
|
|
|
|
}
|
2008-11-15 23:30:27 +00:00
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
2009-12-30 21:50:55 +00:00
|
|
|
|
|
2009-08-17 08:08:21 +00:00
|
|
|
|
/// Find the most backward consecutive match within the same paragraph while searching backwards.
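/// (That is, step the cursor back one position at a time inside the same inset
///  as long as the finalized match keeps changing, and stop at the first position
///  where it no longer does -- my reading of the loop below.)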
|
2021-01-21 12:39:25 +00:00
|
|
|
|
MatchResult findMostBackwards(DocIterator & cur, MatchStringAdv const & match, MatchResult &expected)
|
2010-01-04 12:29:38 +00:00
|
|
|
|
{
|
2021-01-21 12:39:25 +00:00
|
|
|
|
DocIterator cur_begin = cur;
|
|
|
|
|
cur_begin.pos() = 0;
|
2010-03-20 13:59:46 +00:00
|
|
|
|
DocIterator tmp_cur = cur;
|
2021-01-21 12:39:25 +00:00
|
|
|
|
MatchResult mr = findAdvFinalize(tmp_cur, match, expected);
|
2010-03-20 13:59:46 +00:00
|
|
|
|
Inset & inset = cur.inset();
|
|
|
|
|
for (; cur != cur_begin; cur.backwardPos()) {
|
|
|
|
|
LYXERR(Debug::FIND, "findMostBackwards(): cur=" << cur);
|
|
|
|
|
DocIterator new_cur = cur;
|
|
|
|
|
new_cur.backwardPos();
|
2019-02-26 22:00:31 +00:00
|
|
|
|
if (new_cur == cur || &new_cur.inset() != &inset || !match(new_cur).match_len)
|
2010-03-20 13:59:46 +00:00
|
|
|
|
break;
|
2021-01-21 12:39:25 +00:00
|
|
|
|
MatchResult new_mr = findAdvFinalize(new_cur, match, expected);
|
2021-01-08 11:05:51 +00:00
|
|
|
|
if (new_mr.match_len == mr.match_len)
|
2010-03-20 13:59:46 +00:00
|
|
|
|
break;
|
2021-01-08 11:05:51 +00:00
|
|
|
|
mr = new_mr;
|
2009-08-17 08:08:21 +00:00
|
|
|
|
}
|
2010-03-20 13:59:46 +00:00
|
|
|
|
LYXERR(Debug::FIND, "findMostBackwards(): exiting with cur=" << cur);
|
2021-01-08 11:05:51 +00:00
|
|
|
|
return mr;
|
2009-08-17 08:08:21 +00:00
|
|
|
|
}
|
2008-12-20 16:00:47 +00:00
|
|
|
|
|
2010-01-04 12:29:38 +00:00
|
|
|
|
|
2008-11-15 23:30:27 +00:00
|
|
|
|
/// Finds backwards
|
2012-10-27 13:45:27 +00:00
|
|
|
|
int findBackwardsAdv(DocIterator & cur, MatchStringAdv & match)
|
|
|
|
|
{
|
2009-08-17 08:08:21 +00:00
|
|
|
|
if (! cur)
|
|
|
|
|
return 0;
|
2010-01-04 12:29:38 +00:00
|
|
|
|
// Backup of original position
|
2009-08-17 08:08:21 +00:00
|
|
|
|
DocIterator cur_begin = doc_iterator_begin(cur.buffer());
|
2010-01-04 12:29:38 +00:00
|
|
|
|
if (cur == cur_begin)
|
|
|
|
|
return 0;
|
2010-03-20 13:59:46 +00:00
|
|
|
|
cur.backwardPos();
|
|
|
|
|
DocIterator cur_orig(cur);
|
2010-01-04 12:29:38 +00:00
|
|
|
|
bool pit_changed = false;
|
2008-12-20 16:00:47 +00:00
|
|
|
|
do {
|
2009-08-17 08:08:21 +00:00
|
|
|
|
cur.pos() = 0;
|
2021-01-21 12:39:25 +00:00
|
|
|
|
MatchResult found_match = match(cur, -1, false);
|
2010-01-04 12:29:38 +00:00
|
|
|
|
|
2021-01-21 12:39:25 +00:00
|
|
|
|
if (found_match.match_len > 0) {
|
2010-01-04 12:29:38 +00:00
|
|
|
|
if (pit_changed)
|
|
|
|
|
cur.pos() = cur.lastpos();
|
|
|
|
|
else
|
|
|
|
|
cur.pos() = cur_orig.pos();
|
|
|
|
|
LYXERR(Debug::FIND, "findBackAdv2: cur: " << cur);
|
|
|
|
|
DocIterator cur_prev_iter;
|
2010-03-20 13:59:46 +00:00
|
|
|
|
do {
|
2021-01-21 12:39:25 +00:00
|
|
|
|
found_match = match(cur);
|
2015-05-17 15:27:12 +00:00
|
|
|
|
LYXERR(Debug::FIND, "findBackAdv3: found_match="
|
2021-01-21 12:39:25 +00:00
|
|
|
|
<< (found_match.match_len > 0) << ", cur: " << cur);
|
|
|
|
|
if (found_match.match_len > 0) {
|
|
|
|
|
MatchResult found_mr = findMostBackwards(cur, match, found_match);
|
|
|
|
|
if (found_mr.pos_len > 0) {
|
|
|
|
|
match.FillResults(found_mr);
|
|
|
|
|
return found_mr.pos_len;
|
|
|
|
|
}
|
2021-01-08 11:05:51 +00:00
|
|
|
|
}
|
2010-05-02 22:33:36 +00:00
|
|
|
|
|
2010-03-20 13:59:46 +00:00
|
|
|
|
// Stop if begin of document reached
|
|
|
|
|
if (cur == cur_begin)
|
2009-08-17 08:08:21 +00:00
|
|
|
|
break;
|
2009-12-26 22:10:14 +00:00
|
|
|
|
cur_prev_iter = cur;
|
2009-08-17 08:08:21 +00:00
|
|
|
|
cur.backwardPos();
|
2010-03-20 13:59:46 +00:00
|
|
|
|
} while (true);
|
2008-12-20 16:00:47 +00:00
|
|
|
|
}
|
2010-01-04 12:29:38 +00:00
|
|
|
|
if (cur == cur_begin)
|
2009-12-30 22:21:23 +00:00
|
|
|
|
break;
|
2010-01-04 12:29:38 +00:00
|
|
|
|
if (cur.pit() > 0)
|
|
|
|
|
--cur.pit();
|
|
|
|
|
else
|
|
|
|
|
cur.backwardPos();
|
|
|
|
|
pit_changed = true;
|
2012-03-06 23:21:12 +00:00
|
|
|
|
} while (!theApp()->longOperationCancelled());
|
2008-11-15 23:30:27 +00:00
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
2009-12-30 18:40:18 +00:00
|
|
|
|
|
2017-07-23 11:11:54 +00:00
|
|
|
|
} // namespace
|
2008-11-15 23:30:27 +00:00
|
|
|
|
|
|
|
|
|
|
2009-01-14 15:34:56 +00:00
|
|
|
|
docstring stringifyFromForSearch(FindAndReplaceOptions const & opt,
|
2012-10-23 20:58:10 +00:00
|
|
|
|
DocIterator const & cur, int len)
|
2008-11-15 23:30:27 +00:00
|
|
|
|
{
|
2018-10-05 18:26:44 +00:00
|
|
|
|
if (cur.pos() < 0 || cur.pos() > cur.lastpos())
|
|
|
|
|
return docstring();
|
2008-11-17 11:46:07 +00:00
|
|
|
|
if (!opt.ignoreformat)
|
2008-12-20 16:00:47 +00:00
|
|
|
|
return latexifyFromCursor(cur, len);
|
2019-02-27 09:33:25 +00:00
|
|
|
|
else
|
|
|
|
|
return stringifyFromCursor(cur, len);
|
2008-11-15 23:30:27 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2012-10-23 20:58:10 +00:00
|
|
|
|
FindAndReplaceOptions::FindAndReplaceOptions(
|
2019-10-26 22:06:54 +00:00
|
|
|
|
docstring const & _find_buf_name, bool _casesensitive,
|
|
|
|
|
bool _matchword, bool _forward, bool _expandmacros, bool _ignoreformat,
|
|
|
|
|
docstring const & _repl_buf_name, bool _keep_case,
|
|
|
|
|
SearchScope _scope, SearchRestriction _restr, bool _replace_all)
|
|
|
|
|
: find_buf_name(_find_buf_name), casesensitive(_casesensitive), matchword(_matchword),
|
|
|
|
|
forward(_forward), expandmacros(_expandmacros), ignoreformat(_ignoreformat),
|
|
|
|
|
repl_buf_name(_repl_buf_name), keep_case(_keep_case), scope(_scope), restr(_restr), replace_all(_replace_all)
|
2008-11-15 23:30:27 +00:00
|
|
|
|
{
|
|
|
|
|
}
|
|
|
|
|
|
2009-08-19 22:55:38 +00:00
|
|
|
|
|
2010-10-13 18:30:37 +00:00
|
|
|
|
namespace {
|
2009-08-19 22:55:38 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/** Check if 'len' letters following cursor are all non-lowercase */
|
2013-07-22 09:39:11 +00:00
|
|
|
|
static bool allNonLowercase(Cursor const & cur, int len)
|
2012-10-27 13:45:27 +00:00
|
|
|
|
{
|
2013-07-22 09:39:11 +00:00
|
|
|
|
pos_type beg_pos = cur.selectionBegin().pos();
|
|
|
|
|
pos_type end_pos = cur.selectionBegin().pos() + len;
|
|
|
|
|
if (len > cur.lastpos() + 1 - beg_pos) {
|
|
|
|
|
LYXERR(Debug::FIND, "This should not happen, more debug needed");
|
|
|
|
|
len = cur.lastpos() + 1 - beg_pos;
|
2015-10-09 06:14:18 +00:00
|
|
|
|
end_pos = beg_pos + len;
|
2013-07-22 09:39:11 +00:00
|
|
|
|
}
|
|
|
|
|
for (pos_type pos = beg_pos; pos != end_pos; ++pos)
|
2009-08-19 22:55:38 +00:00
|
|
|
|
if (isLowerCase(cur.paragraph().getChar(pos)))
|
|
|
|
|
return false;
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/** Check if first letter is upper case and second one is lower case */
|
2013-07-22 09:39:11 +00:00
|
|
|
|
static bool firstUppercase(Cursor const & cur)
|
2012-10-27 13:45:27 +00:00
|
|
|
|
{
|
2009-08-19 22:55:38 +00:00
|
|
|
|
char_type ch1, ch2;
|
2013-07-22 09:39:11 +00:00
|
|
|
|
pos_type pos = cur.selectionBegin().pos();
|
|
|
|
|
if (pos >= cur.lastpos() - 1) {
|
2009-08-19 22:55:38 +00:00
|
|
|
|
LYXERR(Debug::FIND, "No upper-case at cur: " << cur);
|
|
|
|
|
return false;
|
|
|
|
|
}
|
2013-07-22 09:39:11 +00:00
|
|
|
|
ch1 = cur.paragraph().getChar(pos);
|
|
|
|
|
ch2 = cur.paragraph().getChar(pos + 1);
|
2009-08-19 22:55:38 +00:00
|
|
|
|
bool result = isUpperCase(ch1) && isLowerCase(ch2);
|
|
|
|
|
LYXERR(Debug::FIND, "firstUppercase(): "
|
2015-05-17 15:27:12 +00:00
|
|
|
|
<< "ch1=" << ch1 << "(" << char(ch1) << "), ch2="
|
2009-09-04 13:06:43 +00:00
|
|
|
|
<< ch2 << "(" << char(ch2) << ")"
|
2009-08-19 22:55:38 +00:00
|
|
|
|
<< ", result=" << result << ", cur=" << cur);
|
|
|
|
|
return result;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/** Make first letter of supplied buffer upper-case, and the rest lower-case.
|
|
|
|
|
**
|
|
|
|
|
** \fixme What to do with possible further paragraphs in replace buffer ?
|
|
|
|
|
**/
|
2012-10-27 13:45:27 +00:00
|
|
|
|
static void changeFirstCase(Buffer & buffer, TextCase first_case, TextCase others_case)
|
|
|
|
|
{
|
2009-08-19 22:55:38 +00:00
|
|
|
|
ParagraphList::iterator pit = buffer.paragraphs().begin();
|
2020-10-05 10:38:09 +00:00
|
|
|
|
LASSERT(!pit->empty(), /**/);
|
2009-08-19 22:55:38 +00:00
|
|
|
|
pos_type right = pos_type(1);
|
|
|
|
|
pit->changeCase(buffer.params(), pos_type(0), right, first_case);
|
2013-07-22 09:39:11 +00:00
|
|
|
|
right = pit->size();
|
|
|
|
|
pit->changeCase(buffer.params(), pos_type(1), right, others_case);
|
2009-08-19 22:55:38 +00:00
|
|
|
|
}
|
2017-07-23 11:11:54 +00:00
|
|
|
|
} // namespace
|
2009-08-19 22:55:38 +00:00
|
|
|
|
|
2021-01-09 16:01:50 +00:00
|
|
|
|
static bool replaceMatches(string &t, int maxmatchnum, vector <string> const & replacements)
|
|
|
|
|
{
|
|
|
|
|
// Should replace the string "$$" + std::to_string(matchnum) with the replacement
|
|
|
|
|
// if the "$$" is not prefixed with an odd number of '\\' characters
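// Illustration (assumed example, maxmatchnum = 2, replacements = {"foo", "bar"}):
//   "x $$1 y"  ->  "x bar y"    (capture group 1 substituted)
//   "x \$$1 y" ->  unchanged    (the "$$" is escaped by a preceding backslash)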
|
|
|
|
|
static regex const rematch("(\\\\)*(\\$\\$([0-9]))");
|
|
|
|
|
string s;
|
|
|
|
|
size_t lastpos = 0;
|
|
|
|
|
smatch sub;
|
|
|
|
|
for (sregex_iterator it(t.begin(), t.end(), rematch), end; it != end; ++it) {
|
|
|
|
|
sub = *it;
|
|
|
|
|
if ((sub.position(2) - sub.position(0)) % 2 == 1)
|
|
|
|
|
continue;
|
|
|
|
|
int num = stoi(sub.str(3), nullptr, 10);
|
|
|
|
|
if (num >= maxmatchnum)
|
|
|
|
|
continue;
|
|
|
|
|
if (lastpos < (size_t) sub.position(2))
|
|
|
|
|
s += t.substr(lastpos, sub.position(2) - lastpos);
|
|
|
|
|
s += replacements[num];
|
|
|
|
|
lastpos = sub.position(2) + sub.length(2);
|
|
|
|
|
}
|
|
|
|
|
if (lastpos == 0)
|
|
|
|
|
return false;
|
|
|
|
|
else if (lastpos < t.length())
|
|
|
|
|
s += t.substr(lastpos, t.length() - lastpos);
|
|
|
|
|
t = s;
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
2010-01-30 10:11:24 +00:00
|
|
|
|
///
|
2019-03-13 13:06:18 +00:00
|
|
|
|
static int findAdvReplace(BufferView * bv, FindAndReplaceOptions const & opt, MatchStringAdv & matchAdv)
|
2010-01-30 10:11:24 +00:00
|
|
|
|
{
|
|
|
|
|
Cursor & cur = bv->cursor();
|
2020-10-05 10:38:09 +00:00
|
|
|
|
if (opt.repl_buf_name.empty()
|
2017-05-03 15:32:31 +00:00
|
|
|
|
|| theBufferList().getBuffer(FileName(to_utf8(opt.repl_buf_name)), true) == 0
|
|
|
|
|
|| theBufferList().getBuffer(FileName(to_utf8(opt.find_buf_name)), true) == 0)
|
2019-03-13 13:06:18 +00:00
|
|
|
|
return 0;
|
2011-02-07 20:36:40 +00:00
|
|
|
|
|
2010-01-30 10:11:24 +00:00
|
|
|
|
DocIterator sel_beg = cur.selectionBegin();
|
|
|
|
|
DocIterator sel_end = cur.selectionEnd();
|
2010-02-22 21:44:59 +00:00
|
|
|
|
if (&sel_beg.inset() != &sel_end.inset()
|
2014-03-31 16:33:53 +00:00
|
|
|
|
|| sel_beg.pit() != sel_end.pit()
|
|
|
|
|
|| sel_beg.idx() != sel_end.idx())
|
2019-03-13 13:06:18 +00:00
|
|
|
|
return 0;
|
2010-01-30 10:11:24 +00:00
|
|
|
|
int sel_len = sel_end.pos() - sel_beg.pos();
|
2010-02-22 21:44:59 +00:00
|
|
|
|
LYXERR(Debug::FIND, "sel_beg: " << sel_beg << ", sel_end: " << sel_end
|
|
|
|
|
<< ", sel_len: " << sel_len << endl);
|
2010-01-30 10:11:24 +00:00
|
|
|
|
if (sel_len == 0)
|
2019-03-13 13:06:18 +00:00
|
|
|
|
return 0;
|
|
|
|
|
LASSERT(sel_len > 0, return 0);
|
2010-01-30 10:11:24 +00:00
|
|
|
|
|
2019-02-26 22:00:31 +00:00
|
|
|
|
if (!matchAdv(sel_beg, sel_len).match_len)
|
2019-03-13 13:06:18 +00:00
|
|
|
|
return 0;
|
2010-01-30 10:11:24 +00:00
|
|
|
|
|
2011-02-07 20:36:40 +00:00
|
|
|
|
// Build a copy of the replace buffer, adapted to the KeepCase option
|
2020-10-09 15:50:24 +00:00
|
|
|
|
Buffer const & repl_buffer_orig = *theBufferList().getBuffer(FileName(to_utf8(opt.repl_buf_name)), true);
|
2011-02-07 20:36:40 +00:00
|
|
|
|
ostringstream oss;
|
|
|
|
|
repl_buffer_orig.write(oss);
|
|
|
|
|
string lyx = oss.str();
|
2021-04-11 16:06:11 +00:00
|
|
|
|
if (matchAdv.valid_matches > 0)
|
|
|
|
|
replaceMatches(lyx, matchAdv.valid_matches, matchAdv.matches);
|
|
|
|
|
Buffer repl_buffer(string(), false);
|
|
|
|
|
repl_buffer.setInternal(true);
|
2010-01-30 10:11:24 +00:00
|
|
|
|
repl_buffer.setUnnamed(true);
|
2019-03-13 13:06:18 +00:00
|
|
|
|
LASSERT(repl_buffer.readString(lyx), return 0);
|
2010-01-30 10:11:24 +00:00
|
|
|
|
if (opt.keep_case && sel_len >= 2) {
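// Rough idea (my reading of firstUppercase()/allNonLowercase() below), assuming
// the replace buffer contains "test":
//   a found "Word" (capitalised)  ->  replacement becomes "Test"
//   a found "WORD" (all caps)     ->  replacement becomes "TEST"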
|
2013-07-22 09:39:11 +00:00
|
|
|
|
LYXERR(Debug::FIND, "keep_case true: cur.pos()=" << cur.pos() << ", sel_len=" << sel_len);
|
2010-01-30 10:11:24 +00:00
|
|
|
|
if (cur.inTexted()) {
|
|
|
|
|
if (firstUppercase(cur))
|
|
|
|
|
changeFirstCase(repl_buffer, text_uppercase, text_lowercase);
|
|
|
|
|
else if (allNonLowercase(cur, sel_len))
|
|
|
|
|
changeFirstCase(repl_buffer, text_uppercase, text_uppercase);
|
|
|
|
|
}
|
|
|
|
|
}
|
2018-07-22 20:18:50 +00:00
|
|
|
|
cap::cutSelection(cur, false);
|
2011-10-18 18:57:42 +00:00
|
|
|
|
if (cur.inTexted()) {
|
2010-12-30 17:59:59 +00:00
|
|
|
|
repl_buffer.changeLanguage(
|
|
|
|
|
repl_buffer.language(),
|
|
|
|
|
cur.getFont().language());
|
2010-01-30 10:11:24 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Replacing by pasteParagraphList()ing repl_buffer");
|
2010-12-29 19:59:41 +00:00
|
|
|
|
LYXERR(Debug::FIND, "Before pasteParagraphList() cur=" << cur << endl);
|
2010-01-30 10:11:24 +00:00
|
|
|
|
cap::pasteParagraphList(cur, repl_buffer.paragraphs(),
|
|
|
|
|
repl_buffer.params().documentClassPtr(),
|
2021-02-09 15:54:21 +00:00
|
|
|
|
repl_buffer.params().authors(),
|
2010-01-30 10:11:24 +00:00
|
|
|
|
bv->buffer().errorList("Paste"));
|
2010-12-29 19:59:41 +00:00
|
|
|
|
LYXERR(Debug::FIND, "After pasteParagraphList() cur=" << cur << endl);
|
2011-01-26 23:54:12 +00:00
|
|
|
|
sel_len = repl_buffer.paragraphs().begin()->size();
|
2011-10-18 18:57:42 +00:00
|
|
|
|
} else if (cur.inMathed()) {
|
2010-01-30 10:11:24 +00:00
|
|
|
|
odocstringstream ods;
|
2016-09-04 02:02:47 +00:00
|
|
|
|
otexstream os(ods);
|
2019-03-03 13:08:27 +00:00
|
|
|
|
// OutputParams runparams(&repl_buffer.params().encoding());
|
|
|
|
|
OutputParams runparams(encodings.fromLyXName("utf8"));
|
2010-01-30 10:11:24 +00:00
|
|
|
|
runparams.nice = false;
|
2020-11-30 22:00:40 +00:00
|
|
|
|
runparams.flavor = Flavor::XeTeX;
|
2010-01-30 10:11:24 +00:00
|
|
|
|
runparams.linelen = 8000; //lyxrc.plaintext_linelen;
|
|
|
|
|
runparams.dryrun = true;
|
2011-02-10 20:02:48 +00:00
|
|
|
|
TeXOnePar(repl_buffer, repl_buffer.text(), 0, os, runparams);
|
2010-01-30 10:11:24 +00:00
|
|
|
|
//repl_buffer.getSourceCode(ods, 0, repl_buffer.paragraphs().size(), false);
|
|
|
|
|
docstring repl_latex = ods.str();
|
|
|
|
|
LYXERR(Debug::FIND, "Latexified replace_buffer: '" << repl_latex << "'");
|
|
|
|
|
string s;
|
2017-03-31 09:38:30 +00:00
|
|
|
|
		(void)regex_replace(to_utf8(repl_latex), s, "\\$(.*)\\$", "$1");
		(void)regex_replace(s, s, "\\\\\\[(.*)\\\\\\]", "$1");
		repl_latex = from_utf8(s);
		LYXERR(Debug::FIND, "Replacing by insert()ing latex: '" << repl_latex << "' cur=" << cur << " with depth=" << cur.depth());
		MathData ar(cur.buffer());
		asArray(repl_latex, ar, Parse::NORMAL);
		cur.insert(ar);
		sel_len = ar.size();
		LYXERR(Debug::FIND, "After insert() cur=" << cur << " with depth: " << cur.depth() << " and len: " << sel_len);
	}
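	// Move the cursor back to the beginning of the inserted material so that
	// the selection set below spans the whole replacement.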
	if (cur.pos() >= sel_len)
		cur.pos() -= sel_len;
	else
		cur.pos() = 0;
	LYXERR(Debug::FIND, "After pos adj cur=" << cur << " with depth: " << cur.depth() << " and len: " << sel_len);
	bv->putSelectionAt(DocIterator(cur), sel_len, !opt.forward);
	bv->processUpdateFlags(Update::Force);
	return 1;
}


/// Perform a FindAdv operation.
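/// Reports the result (match found / not found / number of replacements) via
/// bv->message() and returns true iff a match was found and selected.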
bool findAdv(BufferView * bv, FindAndReplaceOptions & opt)
{
	DocIterator cur;
	int pos_len = 0;

	// The find buffer might not exist, e.g. when invoking word-findadv from
	// the mini-buffer either with wrong options syntax or before ever opening
	// the advanced F&R pane.
	if (theBufferList().getBuffer(FileName(to_utf8(opt.find_buf_name)), true) == 0)
		return false;

	try {
		MatchStringAdv matchAdv(bv->buffer(), opt);
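		// With the Qt regex engine, an invalid search expression is reported
		// to the user and the search is aborted.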
#if QTSEARCH
		if (!matchAdv.regexIsValid) {
			bv->message(lyx::from_utf8(matchAdv.regexError));
			return false;
		}
#endif
		int length = bv->cursor().selectionEnd().pos() - bv->cursor().selectionBegin().pos();
		if (length > 0)
			bv->putSelectionAt(bv->cursor().selectionBegin(), length, !opt.forward);
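		// Replace the current selection first if it matches; num_replaced
		// accumulates until no further match is found, at which point the
		// total is reported and reset.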
		num_replaced += findAdvReplace(bv, opt, matchAdv);
		cur = bv->cursor();
		if (opt.forward)
			pos_len = findForwardAdv(cur, matchAdv);
		else
			pos_len = findBackwardsAdv(cur, matchAdv);
	} catch (exception & ex) {
		bv->message(from_utf8(ex.what()));
		return false;
	}

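	// No further match: if any replacements were made, report how many and
	// reset the counter; otherwise report that nothing was found.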
	if (pos_len == 0) {
		if (num_replaced > 0) {
			switch (num_replaced) {
			case 1:
				bv->message(_("One match has been replaced."));
				break;
			case 2:
				bv->message(_("Two matches have been replaced."));
				break;
			default:
				bv->message(bformat(_("%1$d matches have been replaced."), num_replaced));
				break;
			}
			num_replaced = 0;
		} else {
			bv->message(_("Match not found."));
		}
		return false;
	}

	if (num_replaced > 0)
		bv->message(_("Match has been replaced."));
	else
		bv->message(_("Match found."));

	if (cur.pos() + pos_len > cur.lastpos()) {
		// Prevent crash in bv->putSelectionAt()
		// Should never happen, maybe LASSERT() here?
		pos_len = cur.lastpos() - cur.pos();
	}
	LYXERR(Debug::FIND, "Putting selection at cur=" << cur << " with len: " << pos_len);
	bv->putSelectionAt(cur, pos_len, !opt.forward);

	return true;
}


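/// Serialize the options as a single string (the inverse of operator>> below).
/// The two buffer names may span several lines, so each is terminated by an
/// "EOSS" sentinel line; all other fields are written as space-separated
/// integers. A purely illustrative example (field values and buffer names are
/// made up):
///   findbuffer.internal
///   EOSS
///   1 0 1 0 1 0 replbuffer.internal
///   EOSS
///   0 0 0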
ostringstream & operator<<(ostringstream & os, FindAndReplaceOptions const & opt)
{
	os << to_utf8(opt.find_buf_name) << "\nEOSS\n"
	   << opt.casesensitive << ' '
	   << opt.matchword << ' '
	   << opt.forward << ' '
	   << opt.expandmacros << ' '
	   << opt.ignoreformat << ' '
	   << opt.replace_all << ' '
	   << to_utf8(opt.repl_buf_name) << "\nEOSS\n"
	   << opt.keep_case << ' '
	   << int(opt.scope) << ' '
	   << int(opt.restr);

	LYXERR(Debug::FIND, "built: " << os.str());

	return os;
}


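/// Parse options serialized by operator<< above: each buffer name is read
/// line by line up to its "EOSS" sentinel, the remaining fields as integers.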
istringstream & operator>>(istringstream & is, FindAndReplaceOptions & opt)
{
	// LYXERR(Debug::FIND, "parsing");
	string s;
	string line;
	getline(is, line);
	while (line != "EOSS") {
		if (!s.empty())
			s = s + "\n";
		s = s + line;
		if (is.eof())	// Tolerate malformed request
			break;
		getline(is, line);
	}
	// LYXERR(Debug::FIND, "find_buf_name: '" << s << "'");
	opt.find_buf_name = from_utf8(s);
	is >> opt.casesensitive >> opt.matchword >> opt.forward >> opt.expandmacros >> opt.ignoreformat >> opt.replace_all;
	is.get();	// Eat the space separating the flags from the replace buffer name
	s = "";
	getline(is, line);
	while (line != "EOSS") {
		if (!s.empty())
			s = s + "\n";
		s = s + line;
		if (is.eof())	// Tolerate malformed request
			break;
		getline(is, line);
	}
	// LYXERR(Debug::FIND, "repl_buf_name: '" << s << "'");
	opt.repl_buf_name = from_utf8(s);
	is >> opt.keep_case;
	int i;
	is >> i;
	opt.scope = FindAndReplaceOptions::SearchScope(i);
	is >> i;
	opt.restr = FindAndReplaceOptions::SearchRestriction(i);

	/*
	LYXERR(Debug::FIND, "parsed: " << opt.casesensitive << ' ' << opt.matchword << ' ' << opt.forward << ' '
		<< opt.expandmacros << ' ' << opt.ignoreformat << ' ' << opt.keep_case << ' '
		<< opt.scope << ' ' << opt.restr);
	*/
	return is;
}


} // namespace lyx