2003-03-29 09:48:03 +00:00
|
|
|
|
/**
|
2007-04-26 04:41:58 +00:00
|
|
|
|
* \file Buffer.cpp
|
2003-03-29 09:48:03 +00:00
|
|
|
|
* This file is part of LyX, the document processor.
|
|
|
|
|
* Licence details can be found in the file COPYING.
|
2002-03-21 16:55:34 +00:00
|
|
|
|
*
|
2008-11-14 15:58:50 +00:00
|
|
|
|
* \author Lars Gullik Bjønnes
|
2007-11-01 11:13:07 +00:00
|
|
|
|
* \author Stefan Schimanski
|
1999-09-27 18:44:28 +00:00
|
|
|
|
*
|
2003-08-23 00:17:00 +00:00
|
|
|
|
* Full author contact details are available in file CREDITS.
|
1999-09-27 18:44:28 +00:00
|
|
|
|
*/
|
|
|
|
|
|
|
|
|
|
#include <config.h>
|
|
|
|
|
|
2007-04-26 04:41:58 +00:00
|
|
|
|
#include "Buffer.h"
|
2003-09-06 18:38:02 +00:00
|
|
|
|
|
2007-04-26 04:41:58 +00:00
|
|
|
|
#include "Author.h"
|
2007-08-20 17:04:36 +00:00
|
|
|
|
#include "BiblioInfo.h"
|
2005-03-27 13:31:04 +00:00
|
|
|
|
#include "BranchList.h"
|
2003-06-24 20:42:15 +00:00
|
|
|
|
#include "buffer_funcs.h"
|
2007-04-26 04:41:58 +00:00
|
|
|
|
#include "BufferList.h"
|
|
|
|
|
#include "BufferParams.h"
|
2003-09-09 17:00:19 +00:00
|
|
|
|
#include "Bullet.h"
|
2001-12-10 20:06:59 +00:00
|
|
|
|
#include "Chktex.h"
|
2021-01-18 08:56:53 +00:00
|
|
|
|
#include "ColorSet.h"
|
2007-10-20 10:51:13 +00:00
|
|
|
|
#include "Converter.h"
|
|
|
|
|
#include "Counters.h"
|
2012-07-15 16:16:09 +00:00
|
|
|
|
#include "Cursor.h"
|
2013-03-11 14:01:32 +00:00
|
|
|
|
#include "CutAndPaste.h"
|
2009-04-06 12:12:06 +00:00
|
|
|
|
#include "DispatchResult.h"
|
2007-08-21 07:33:46 +00:00
|
|
|
|
#include "DocIterator.h"
|
2013-09-26 20:22:02 +00:00
|
|
|
|
#include "BufferEncodings.h"
|
2007-04-26 04:41:58 +00:00
|
|
|
|
#include "ErrorList.h"
|
|
|
|
|
#include "Exporter.h"
|
|
|
|
|
#include "Format.h"
|
|
|
|
|
#include "FuncRequest.h"
|
2009-04-06 12:12:06 +00:00
|
|
|
|
#include "FuncStatus.h"
|
2009-04-16 07:29:01 +00:00
|
|
|
|
#include "IndicesList.h"
|
2007-04-26 04:41:58 +00:00
|
|
|
|
#include "InsetIterator.h"
|
2007-10-18 15:29:51 +00:00
|
|
|
|
#include "InsetList.h"
|
2007-04-26 04:41:58 +00:00
|
|
|
|
#include "Language.h"
|
2003-09-06 18:38:02 +00:00
|
|
|
|
#include "LaTeXFeatures.h"
|
2007-10-21 10:50:56 +00:00
|
|
|
|
#include "LaTeX.h"
|
2007-09-29 20:02:32 +00:00
|
|
|
|
#include "Layout.h"
|
2007-04-26 11:30:54 +00:00
|
|
|
|
#include "Lexer.h"
|
2007-10-21 10:50:56 +00:00
|
|
|
|
#include "LyXAction.h"
|
2007-05-16 10:39:41 +00:00
|
|
|
|
#include "LyX.h"
|
2007-04-26 04:41:58 +00:00
|
|
|
|
#include "LyXRC.h"
|
|
|
|
|
#include "LyXVC.h"
|
2007-10-21 10:50:56 +00:00
|
|
|
|
#include "output.h"
|
2003-11-05 12:06:20 +00:00
|
|
|
|
#include "output_latex.h"
|
2020-06-08 21:27:49 +00:00
|
|
|
|
#include "output_docbook.h"
|
2007-10-20 10:51:13 +00:00
|
|
|
|
#include "output_plaintext.h"
|
2020-06-08 21:27:49 +00:00
|
|
|
|
#include "output_xhtml.h"
|
2007-10-21 10:50:56 +00:00
|
|
|
|
#include "Paragraph.h"
|
2003-09-06 18:38:02 +00:00
|
|
|
|
#include "ParagraphParameters.h"
|
2007-04-26 04:41:58 +00:00
|
|
|
|
#include "ParIterator.h"
|
2007-10-21 10:50:56 +00:00
|
|
|
|
#include "PDFOptions.h"
|
2017-08-03 11:07:41 +00:00
|
|
|
|
#include "Session.h"
|
2009-04-04 16:40:47 +00:00
|
|
|
|
#include "SpellChecker.h"
|
2019-05-09 23:35:40 +00:00
|
|
|
|
#include "xml.h"
|
2016-06-19 02:39:38 +00:00
|
|
|
|
#include "texstream.h"
|
2007-04-26 04:41:58 +00:00
|
|
|
|
#include "TexRow.h"
|
2007-10-21 10:50:56 +00:00
|
|
|
|
#include "Text.h"
|
2008-02-28 03:51:10 +00:00
|
|
|
|
#include "TextClass.h"
|
2006-11-11 00:35:14 +00:00
|
|
|
|
#include "TocBackend.h"
|
2007-04-26 04:41:58 +00:00
|
|
|
|
#include "Undo.h"
|
2007-10-21 10:50:56 +00:00
|
|
|
|
#include "VCBackend.h"
|
2003-09-06 18:38:02 +00:00
|
|
|
|
#include "version.h"
|
2009-04-04 16:40:47 +00:00
|
|
|
|
#include "WordLangTuple.h"
|
2001-12-10 20:06:59 +00:00
|
|
|
|
|
2009-07-09 09:48:34 +00:00
|
|
|
|
#include "insets/InsetBranch.h"
|
2007-04-25 01:24:38 +00:00
|
|
|
|
#include "insets/InsetInclude.h"
|
|
|
|
|
#include "insets/InsetText.h"
|
2001-12-10 20:06:59 +00:00
|
|
|
|
|
2010-07-21 13:19:52 +00:00
|
|
|
|
#include "mathed/InsetMathHull.h"
|
2007-04-28 20:44:46 +00:00
|
|
|
|
#include "mathed/MacroTable.h"
|
2017-07-05 12:31:28 +00:00
|
|
|
|
#include "mathed/InsetMathMacroTemplate.h"
|
2006-09-17 09:14:18 +00:00
|
|
|
|
#include "mathed/MathSupport.h"
|
2004-04-13 06:27:29 +00:00
|
|
|
|
|
2011-07-12 12:43:11 +00:00
|
|
|
|
#include "graphics/PreviewLoader.h"
|
|
|
|
|
|
2019-07-16 23:01:49 +00:00
|
|
|
|
#include "frontends/Application.h"
|
2007-04-28 20:44:46 +00:00
|
|
|
|
#include "frontends/alert.h"
|
2007-10-02 18:27:20 +00:00
|
|
|
|
#include "frontends/Delegates.h"
|
2007-10-02 09:00:08 +00:00
|
|
|
|
#include "frontends/WorkAreaManager.h"
|
2001-12-10 20:06:59 +00:00
|
|
|
|
|
2008-04-30 08:26:40 +00:00
|
|
|
|
#include "support/lassert.h"
|
2007-11-29 07:04:28 +00:00
|
|
|
|
#include "support/convert.h"
|
|
|
|
|
#include "support/debug.h"
|
2009-04-04 16:40:47 +00:00
|
|
|
|
#include "support/docstring_list.h"
|
2007-12-17 18:37:13 +00:00
|
|
|
|
#include "support/ExceptionMessage.h"
|
2017-03-05 19:12:07 +00:00
|
|
|
|
#include "support/FileMonitor.h"
|
2007-12-17 18:37:13 +00:00
|
|
|
|
#include "support/FileName.h"
|
2007-12-05 10:32:49 +00:00
|
|
|
|
#include "support/FileNameList.h"
|
1999-10-02 16:21:10 +00:00
|
|
|
|
#include "support/filetools.h"
|
2007-11-29 07:04:28 +00:00
|
|
|
|
#include "support/gettext.h"
|
2007-08-01 15:16:44 +00:00
|
|
|
|
#include "support/gzstream.h"
|
2007-11-13 23:50:28 +00:00
|
|
|
|
#include "support/lstrings.h"
|
2014-07-05 10:31:12 +00:00
|
|
|
|
#include "support/mutex.h"
|
2001-05-17 15:11:01 +00:00
|
|
|
|
#include "support/os.h"
|
2007-12-17 18:37:13 +00:00
|
|
|
|
#include "support/Package.h"
|
2013-02-06 10:36:58 +00:00
|
|
|
|
#include "support/PathChanger.h"
|
2009-04-06 12:12:06 +00:00
|
|
|
|
#include "support/Systemcall.h"
|
2013-04-14 17:45:36 +00:00
|
|
|
|
#include "support/TempFile.h"
|
2003-09-06 18:38:02 +00:00
|
|
|
|
#include "support/textutils.h"
|
2007-11-29 07:04:28 +00:00
|
|
|
|
#include "support/types.h"
|
1999-09-27 18:44:28 +00:00
|
|
|
|
|
2007-08-12 08:57:17 +00:00
|
|
|
|
#include <algorithm>
|
2008-03-02 15:27:35 +00:00
|
|
|
|
#include <fstream>
|
2001-12-08 14:20:11 +00:00
|
|
|
|
#include <iomanip>
|
2008-03-02 15:27:35 +00:00
|
|
|
|
#include <map>
|
2016-06-02 17:13:55 +00:00
|
|
|
|
#include <memory>
|
2008-11-16 21:28:06 +00:00
|
|
|
|
#include <set>
|
2004-07-24 10:55:30 +00:00
|
|
|
|
#include <sstream>
|
2008-03-02 15:27:35 +00:00
|
|
|
|
#include <vector>
|
2001-12-08 14:20:11 +00:00
|
|
|
|
|
2007-12-12 10:16:00 +00:00
|
|
|
|
using namespace std;
|
2007-12-12 18:57:56 +00:00
|
|
|
|
using namespace lyx::support;
|
2011-07-12 12:43:11 +00:00
|
|
|
|
using namespace lyx::graphics;
|
2007-07-17 17:40:44 +00:00
|
|
|
|
|
2006-10-21 00:16:43 +00:00
|
|
|
|
namespace lyx {
|
|
|
|
|
|
|
|
|
|
namespace Alert = frontend::Alert;
|
|
|
|
|
namespace os = support::os;
|
2003-09-16 11:03:20 +00:00
|
|
|
|
|
2001-03-20 01:22:46 +00:00
|
|
|
|
namespace {

// The .lyx file format number this version of LyX reads and writes
// (value comes from version.h via LYX_FORMAT_LYX).
int const LYX_FORMAT = LYX_FORMAT_LYX;

// Per-format flag recording whether the generated file (e.g. the .tex
// export) is up to date with respect to the buffer contents.
typedef map<string, bool> DepClean;

// Information about labels and their associated refs
struct LabelInfo {
	/// label string
	docstring label;
	/// label inset
	InsetLabel const * inset;
	/// associated references cache
	Buffer::References references;
	/// whether this label is active (i.e., not deleted)
	bool active;
};

typedef vector<LabelInfo> LabelCache;

// Maps a label string to the cached list of references pointing at it.
typedef map<docstring, Buffer::References> RefCache;

// A storehouse for the cloned buffers.
typedef list<CloneList_ptr> CloneStore;
CloneStore cloned_buffers;

} // namespace
|
2011-11-17 17:58:22 +00:00
|
|
|
|
|
2003-09-09 11:24:33 +00:00
|
|
|
|
|
2010-09-29 11:55:10 +00:00
|
|
|
|
|
2005-01-19 15:03:31 +00:00
|
|
|
|
/// Private implementation (pimpl) of Buffer: holds all per-document
/// state — parameters, caches, child/parent bookkeeping, macro tables,
/// and the root InsetText — behind the Buffer interface.
class Buffer::Impl
{
public:
	Impl(Buffer * owner, FileName const & file, bool readonly, Buffer const * cloned_buffer);

	~Impl()
	{
		delete preview_loader_;
		// wa_ is only created when running with a GUI (see the ctor);
		// close all work areas showing this buffer before deleting it.
		if (wa_) {
			wa_->closeAll();
			delete wa_;
		}
		delete inset;
	}

	/// search for macro in local (buffer) table or in children
	MacroData const * getBufferMacro(docstring const & name,
		DocIterator const & pos) const;

	/// Update macro table starting with position of it \param it in some
	/// text inset.
	void updateMacros(DocIterator & it, DocIterator & scope);
	///
	void setLabel(ParIterator & it, UpdateType utype) const;

	/** If we have branches that use the file suffix
	    feature, return the file name with suffix appended.
	*/
	FileName exportFileName() const;

	/// back-pointer to the Buffer that owns this Impl
	Buffer * owner_;

	BufferParams params;
	LyXVC lyxvc;
	/// per-buffer temporary directory (shared with the original when cloned)
	FileName temppath;
	mutable TexRow texrow;

	/// need to regenerate .tex?
	DepClean dep_clean;

	/// name of the file the buffer is associated with.
	FileName filename;

	///
	mutable TocBackend toc_backend;

	/// macro tables
	struct ScopeMacro {
		ScopeMacro() {}
		ScopeMacro(DocIterator const & s, MacroData const & m)
			: scope(s), macro(m) {}
		DocIterator scope;
		MacroData macro;
	};
	typedef map<DocIterator, ScopeMacro> PositionScopeMacroMap;
	typedef map<docstring, PositionScopeMacroMap> NamePositionScopeMacroMap;
	/// map from the macro name to the position map,
	/// which maps the macro definition position to the scope and the MacroData.
	NamePositionScopeMacroMap macros;

	/// positions of child buffers in the buffer
	typedef map<Buffer const * const, DocIterator> BufferPositionMap;
	struct ScopeBuffer {
		ScopeBuffer() : buffer(nullptr) {}
		ScopeBuffer(DocIterator const & s, Buffer const * b)
			: scope(s), buffer(b) {}
		DocIterator scope;
		Buffer const * buffer;
	};
	typedef map<DocIterator, ScopeBuffer> PositionScopeBufferMap;
	/// position of children buffers in this buffer
	BufferPositionMap children_positions;
	/// map from children inclusion positions to their scope and their buffer
	PositionScopeBufferMap position_to_children;

	/// Contains the old buffer filePath() while saving-as, or the
	/// directory where the document was last saved while loading.
	string old_position;

	/** Keeps track of the path of local layout files.
	 * If possible, it is always relative to the buffer path.
	 * Empty for layouts in system or user directory.
	 */
	string layout_position;

	/// Container for all sort of Buffer dependent errors.
	map<string, ErrorList> errorLists;

	/// checksum used to test if the file has been externally modified. Used to
	/// double check whether the file had been externally modified when saving.
	unsigned long checksum_;

	/// work area manager; null when running without a GUI
	frontend::WorkAreaManager * wa_;
	///
	frontend::GuiBufferDelegate * gui_;

	///
	Undo undo_;

	/// A cache for the bibfiles (including bibfiles of loaded child
	/// documents), needed for appropriate update of natbib labels.
	mutable docstring_list bibfiles_cache_;

	// FIXME The caching mechanism could be improved. At present, we have a
	// cache for each Buffer, that caches all the bibliography info for that
	// Buffer. A more efficient solution would be to have a global cache per
	// file, and then to construct the Buffer's bibinfo from that.
	/// A cache for bibliography info
	mutable BiblioInfo bibinfo_;
	/// Cache of timestamps of .bib files
	map<FileName, time_t> bibfile_status_;

	/// These two hold the file name and format, written to by
	/// Buffer::preview and read from by LFUN_BUFFER_VIEW_CACHE.
	FileName preview_file_;
	string preview_format_;

	/// Cache the references associated to a label and their positions
	/// in the buffer.
	mutable RefCache ref_cache_;
	/// Cache the label insets and their activity status.
	mutable LabelCache label_cache_;

	/// our Text that should be wrapped in an InsetText
	InsetText * inset;

	///
	PreviewLoader * preview_loader_;

	/// If non zero, this buffer is a clone of existing buffer \p cloned_buffer_
	/// This one is useful for preview detached in a thread.
	Buffer const * cloned_buffer_;
	/// shared list of all clones belonging to the same original buffer
	CloneList_ptr clone_list_;

	///
	std::list<Buffer const *> include_list_;
private:
	/// So we can force access via the accessors.
	mutable Buffer const * parent_buffer;

	/// watches the on-disk file for external modification
	FileMonitorPtr file_monitor_;

	/// ints and bools are all listed last so as to avoid alignment issues
public:
	/// original format of loaded file
	int file_format;

	/// are we in the process of exporting this buffer?
	mutable bool doing_export;

	/// If there was an error when previewing, on the next preview we do
	/// a fresh compile (e.g. in case the user installed a package that
	/// was missing).
	bool require_fresh_start_;

	/// Indicates whether the bibinfo has changed since the last time
	/// we ran updateBuffer(), i.e., whether citation labels may need
	/// to be updated.
	mutable bool cite_labels_valid_;
	/// Do we have a bibliography environment?
	mutable bool have_bibitems_;

	/// is save needed?
	mutable bool lyx_clean;

	/// is autosave needed?
	mutable bool bak_clean;

	/// is this an unnamed file (New...)?
	bool unnamed;

	/// is this an internal buffer?
	bool internal_buffer;

	/// buffer is r/o
	bool read_only;

	/** Set to true only when the file is fully loaded.
	 * Used to prevent the premature generation of previews
	 * and by the citation inset.
	 */
	bool file_fully_loaded;

	/// if the file was originally loaded from an older format, do
	/// we need to back it up still?
	bool need_format_backup;

	/// Ignore the parent (e.g. when exporting a child standalone)?
	bool ignore_parent;

	/// This seems to change the way Buffer::getMacro() works
	mutable bool macro_lock;

	/// has been externally modified? Can be reset by the user.
	mutable bool externally_modified_;

	/// whether the bibinfo cache is valid
	mutable bool bibinfo_cache_valid_;

	///
	mutable bool need_update;

private:
	/// statistics maintained by updateStatistics()
	int word_count_;
	int char_count_;
	int blank_count_;

public:
	/// This is here to force the test to be done whenever parent_buffer
	/// is accessed.
	Buffer const * parent() const
	{
		// ignore_parent temporarily "orphans" a buffer
		// (e.g. if a child is compiled standalone)
		if (ignore_parent)
			return nullptr;
		// if parent_buffer is not loaded, then it has been unloaded,
		// which means that parent_buffer is an invalid pointer. So we
		// set it to null in that case.
		// however, the BufferList doesn't know about cloned buffers, so
		// they will always be regarded as unloaded. in that case, we hope
		// for the best.
		if (!cloned_buffer_ && !theBufferList().isLoaded(parent_buffer))
			parent_buffer = nullptr;
		return parent_buffer;
	}

	///
	void setParent(Buffer const * pb)
	{
		if (parent_buffer == pb)
			// nothing to do
			return;
		if (!cloned_buffer_ && parent_buffer && pb)
			LYXERR0("Warning: a buffer should not have two parents!");
		parent_buffer = pb;
		// a new parent invalidates its bibliography cache, since our
		// bibfiles now contribute to it (clones skip this bookkeeping)
		if (!cloned_buffer_ && parent_buffer)
			parent_buffer->invalidateBibinfoCache();
	}

	/// compute statistics
	/// \p from initial position
	/// \p to points to the end position
	void updateStatistics(DocIterator & from, DocIterator & to,
			      bool skipNoOutput = true);
	/// statistics accessor functions
	int wordCount() const
	{
		return word_count_;
	}
	int charCount(bool with_blanks) const
	{
		return char_count_
		+ (with_blanks ? blank_count_ : 0);
	}

	// Make sure the file monitor monitors the good file.
	void refreshFileMonitor();

	/// Notify or clear of external modification
	void fileExternallyModified(bool exists);

	/// Binding LaTeX lines with buffer positions.
	// Common routine for LaTeX and Reference errors listing.
	void traverseErrors(TeXErrors::Errors::const_iterator err,
		TeXErrors::Errors::const_iterator end,
		ErrorList & errorList) const;
};
|
|
|
|
|
|
2008-04-10 21:49:34 +00:00
|
|
|
|
|
2007-12-17 18:37:13 +00:00
|
|
|
|
/// Creates the per buffer temporary directory
|
|
|
|
|
static FileName createBufferTmpDir()
|
|
|
|
|
{
|
2014-07-05 10:31:12 +00:00
|
|
|
|
// FIXME This would be the ideal application for a TempDir class (like
|
2015-03-15 17:20:01 +00:00
|
|
|
|
// TempFile but for directories)
|
2014-07-05 10:31:12 +00:00
|
|
|
|
string counter;
|
|
|
|
|
{
|
|
|
|
|
static int count;
|
|
|
|
|
static Mutex mutex;
|
|
|
|
|
Mutex::Locker locker(&mutex);
|
|
|
|
|
counter = convert<string>(count++);
|
|
|
|
|
}
|
2007-12-17 18:37:13 +00:00
|
|
|
|
// We are in our own directory. Why bother to mangle name?
|
|
|
|
|
// In fact I wrote this code to circumvent a problematic behaviour
|
|
|
|
|
// (bug?) of EMX mkstemp().
|
2010-04-21 01:19:09 +00:00
|
|
|
|
FileName tmpfl(package().temp_dir().absFileName() + "/lyx_tmpbuf" +
|
2014-07-05 10:31:12 +00:00
|
|
|
|
counter);
|
2007-12-17 18:37:13 +00:00
|
|
|
|
|
|
|
|
|
if (!tmpfl.createDirectory(0777)) {
|
|
|
|
|
throw ExceptionMessage(WarningException, _("Disk Error: "), bformat(
|
|
|
|
|
_("LyX could not create the temporary directory '%1$s' (Disk is full maybe?)"),
|
2010-04-21 01:19:09 +00:00
|
|
|
|
from_utf8(tmpfl.absFileName())));
|
2007-12-17 18:37:13 +00:00
|
|
|
|
}
|
|
|
|
|
return tmpfl;
|
|
|
|
|
}
|
|
|
|
|
|
2003-09-09 11:24:33 +00:00
|
|
|
|
|
2010-01-25 18:39:08 +00:00
|
|
|
|
/// Construct the private state of a Buffer.
/// For a normal buffer, a fresh temp directory is created and GUI/VC
/// hooks are set up. For a clone (\p cloned_buffer non-null), the temp
/// directory and all relevant caches are copied from the original so
/// that the clone can be processed (e.g. previewed) in a thread without
/// touching the original.
Buffer::Impl::Impl(Buffer * owner, FileName const & file, bool readonly_,
	Buffer const * cloned_buffer)
	: owner_(owner), filename(file), toc_backend(owner), checksum_(0),
	  wa_(nullptr), gui_(nullptr), undo_(*owner), inset(nullptr),
	  preview_loader_(nullptr), cloned_buffer_(cloned_buffer),
	  clone_list_(nullptr), parent_buffer(nullptr), file_format(LYX_FORMAT),
	  doing_export(false), require_fresh_start_(false), cite_labels_valid_(false),
	  have_bibitems_(false), lyx_clean(true), bak_clean(true), unnamed(false),
	  internal_buffer(false), read_only(readonly_), file_fully_loaded(false),
	  need_format_backup(false), ignore_parent(false), macro_lock(false),
	  externally_modified_(false), bibinfo_cache_valid_(false),
	  need_update(false), word_count_(0), char_count_(0), blank_count_(0)
{
	refreshFileMonitor();
	if (!cloned_buffer_) {
		// Fresh buffer: own temp dir, version control, input language,
		// and (when running a GUI) a work area manager.
		temppath = createBufferTmpDir();
		lyxvc.setBuffer(owner_);
		// Default the document language to the current input language,
		// if the application provides one.
		Language const * inplang = theApp() ?
			languages.getFromCode(theApp()->inputLanguageCode())
			: nullptr;
		if (inplang)
			params.language = inplang;
		if (use_gui)
			wa_ = new frontend::WorkAreaManager;
		return;
	}
	// Clone: share the original's temp path and copy its parameters and
	// caches, so the clone behaves like an already-loaded document.
	temppath = cloned_buffer_->d->temppath;
	file_fully_loaded = true;
	params = cloned_buffer_->d->params;
	bibfiles_cache_ = cloned_buffer_->d->bibfiles_cache_;
	bibinfo_ = cloned_buffer_->d->bibinfo_;
	bibinfo_cache_valid_ = cloned_buffer_->d->bibinfo_cache_valid_;
	bibfile_status_ = cloned_buffer_->d->bibfile_status_;
	cite_labels_valid_ = cloned_buffer_->d->cite_labels_valid_;
	have_bibitems_ = cloned_buffer_->d->have_bibitems_;
	unnamed = cloned_buffer_->d->unnamed;
	internal_buffer = cloned_buffer_->d->internal_buffer;
	layout_position = cloned_buffer_->d->layout_position;
	preview_file_ = cloned_buffer_->d->preview_file_;
	preview_format_ = cloned_buffer_->d->preview_format_;
	require_fresh_start_ = cloned_buffer_->d->require_fresh_start_;
}
|
|
|
|
|
|
|
|
|
|
|
2009-12-18 22:51:06 +00:00
|
|
|
|
/// Construct a Buffer for \p file.
/// When \p cloned_buffer is non-null, this buffer becomes a deep copy of
/// it: the root InsetText is copied and every paragraph's id is synced
/// with the corresponding paragraph of the original, so that positions
/// can be mapped back (e.g. for preview/export done on the clone).
Buffer::Buffer(string const & file, bool readonly, Buffer const * cloned_buffer)
	: d(new Impl(this, FileName(file), readonly, cloned_buffer))
{
	LYXERR(Debug::INFO, "Buffer::Buffer()");
	if (cloned_buffer) {
		d->inset = new InsetText(*cloned_buffer->d->inset);
		d->inset->setBuffer(*this);
		// FIXME: optimize this loop somewhat, maybe by creating a new
		// general recursive Inset::setId().
		DocIterator it = doc_iterator_begin(this);
		DocIterator cloned_it = doc_iterator_begin(cloned_buffer);
		for (; !it.atEnd(); it.forwardPar(), cloned_it.forwardPar())
			it.paragraph().setId(cloned_it.paragraph().id());
	} else
		d->inset = new InsetText(this);
	d->inset->getText(0)->setMacrocontextPosition(par_iterator_begin());
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
Buffer::~Buffer()
|
|
|
|
|
{
|
2007-11-15 20:04:51 +00:00
|
|
|
|
LYXERR(Debug::INFO, "Buffer::~Buffer()");
|
1999-09-27 18:44:28 +00:00
|
|
|
|
// here the buffer should take care that it is
|
|
|
|
|
// saved properly, before it goes into the void.
|
|
|
|
|
|
2007-12-01 12:17:00 +00:00
|
|
|
|
// GuiView already destroyed
|
2020-02-19 00:27:12 +00:00
|
|
|
|
d->gui_ = nullptr;
|
2007-12-01 12:17:00 +00:00
|
|
|
|
|
2009-09-08 01:29:07 +00:00
|
|
|
|
if (isInternal()) {
|
2008-10-13 20:40:58 +00:00
|
|
|
|
// No need to do additional cleanups for internal buffer.
|
|
|
|
|
delete d;
|
|
|
|
|
return;
|
|
|
|
|
}
|
2008-07-20 17:52:55 +00:00
|
|
|
|
|
2011-11-17 17:58:22 +00:00
|
|
|
|
if (isClone()) {
|
|
|
|
|
// this is in case of recursive includes: we won't try to delete
|
|
|
|
|
// ourselves as a child.
|
2011-11-20 18:49:05 +00:00
|
|
|
|
d->clone_list_->erase(this);
|
2011-11-17 17:58:22 +00:00
|
|
|
|
// loop over children
|
2017-11-05 02:15:01 +00:00
|
|
|
|
for (auto const & p : d->children_positions) {
|
|
|
|
|
Buffer * child = const_cast<Buffer *>(p.first);
|
2020-02-23 21:39:00 +00:00
|
|
|
|
if (d->clone_list_->erase(child))
|
|
|
|
|
delete child;
|
2011-11-17 17:58:22 +00:00
|
|
|
|
}
|
2011-11-20 18:49:12 +00:00
|
|
|
|
// if we're the master buffer, then we should get rid of the list
|
|
|
|
|
// of clones
|
|
|
|
|
if (!parent()) {
|
2013-04-25 21:27:10 +00:00
|
|
|
|
// If this is not empty, we have leaked something. Worse, one of the
|
|
|
|
|
// children still has a reference to this list. But we will try to
|
|
|
|
|
// continue, rather than shut down.
|
|
|
|
|
LATTEST(d->clone_list_->empty());
|
2020-02-23 21:39:00 +00:00
|
|
|
|
// The clone list itself is empty, but it's still referenced in our list
|
|
|
|
|
// of clones. So let's find it and remove it.
|
|
|
|
|
CloneStore::iterator it =
|
2011-11-20 18:49:12 +00:00
|
|
|
|
find(cloned_buffers.begin(), cloned_buffers.end(), d->clone_list_);
|
2013-04-25 21:27:10 +00:00
|
|
|
|
if (it == cloned_buffers.end()) {
|
|
|
|
|
// We will leak in this case, but it is safe to continue.
|
|
|
|
|
LATTEST(false);
|
2020-02-23 21:29:58 +00:00
|
|
|
|
} else
|
2013-04-25 21:27:10 +00:00
|
|
|
|
cloned_buffers.erase(it);
|
2011-11-20 18:49:12 +00:00
|
|
|
|
}
|
2011-11-17 17:58:22 +00:00
|
|
|
|
// FIXME Do we really need to do this right before we delete d?
|
|
|
|
|
// clear references to children in macro tables
|
|
|
|
|
d->children_positions.clear();
|
|
|
|
|
d->position_to_children.clear();
|
|
|
|
|
} else {
|
|
|
|
|
// loop over children
|
2017-11-05 02:15:01 +00:00
|
|
|
|
for (auto const & p : d->children_positions) {
|
|
|
|
|
Buffer * child = const_cast<Buffer *>(p.first);
|
2017-07-03 17:53:14 +00:00
|
|
|
|
if (theBufferList().isLoaded(child)) {
|
2017-09-27 16:05:58 +00:00
|
|
|
|
if (theBufferList().isOthersChild(this, child))
|
2020-02-19 00:27:12 +00:00
|
|
|
|
child->setParent(nullptr);
|
2017-09-27 16:05:58 +00:00
|
|
|
|
else
|
|
|
|
|
theBufferList().release(child);
|
2016-01-05 14:52:18 +00:00
|
|
|
|
}
|
2011-11-17 17:58:22 +00:00
|
|
|
|
}
|
2011-11-17 17:54:20 +00:00
|
|
|
|
|
2011-11-17 17:58:22 +00:00
|
|
|
|
if (!isClean()) {
|
|
|
|
|
docstring msg = _("LyX attempted to close a document that had unsaved changes!\n");
|
2017-03-31 11:38:35 +00:00
|
|
|
|
try {
|
|
|
|
|
msg += emergencyWrite();
|
|
|
|
|
} catch (...) {
|
2017-04-06 02:08:10 +00:00
|
|
|
|
msg += " " + _("Save failed! Document is lost.");
|
2017-03-31 11:38:35 +00:00
|
|
|
|
}
|
2011-11-17 17:58:22 +00:00
|
|
|
|
Alert::warning(_("Attempting to close changed document!"), msg);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// FIXME Do we really need to do this right before we delete d?
|
|
|
|
|
// clear references to children in macro tables
|
|
|
|
|
d->children_positions.clear();
|
|
|
|
|
d->position_to_children.clear();
|
2007-11-30 17:41:27 +00:00
|
|
|
|
|
2011-11-17 17:29:38 +00:00
|
|
|
|
if (!d->temppath.destroyDirectory()) {
|
2015-11-10 07:59:24 +00:00
|
|
|
|
LYXERR0(bformat(_("Could not remove the temporary directory %1$s"),
|
2011-11-17 17:29:38 +00:00
|
|
|
|
from_utf8(d->temppath.absFileName())));
|
|
|
|
|
}
|
2010-10-29 20:00:51 +00:00
|
|
|
|
removePreviews();
|
2011-11-17 17:29:38 +00:00
|
|
|
|
}
|
2007-12-01 09:51:45 +00:00
|
|
|
|
|
|
|
|
|
delete d;
|
1999-09-27 18:44:28 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2018-05-16 06:35:21 +00:00
|
|
|
|
Buffer * Buffer::cloneWithChildren() const
{
	// Register a fresh clone list that this buffer and all of its
	// cloned children will share.
	cloned_buffers.emplace_back(new CloneList);
	CloneList_ptr clone_list = cloned_buffers.back();

	// Recursively clone ourselves together with all included children.
	BufferMap map;
	cloneWithChildren(map, clone_list);

	// The recursive call must have registered a clone of this buffer.
	auto const it = map.find(this);
	LASSERT(it != map.end(), return nullptr);
	return it->second;
}
|
|
|
|
|
|
|
|
|
|
|
2020-02-23 21:39:00 +00:00
|
|
|
|
void Buffer::cloneWithChildren(BufferMap & bufmap, CloneList_ptr clones) const
{
	// Nothing to do if a clone of this buffer was already registered.
	if (bufmap.find(this) != bufmap.end())
		return;

	Buffer * clone = new Buffer(fileName().absFileName(), false, this);

	// The clone needs its own DocumentClass, since running updateBuffer() will
	// modify it, and we would otherwise be sharing it with the original Buffer.
	clone->params().makeDocumentClass(true);
	ErrorList el;
	cap::switchBetweenClasses(
			params().documentClassPtr(), clone->params().documentClassPtr(),
			static_cast<InsetText &>(clone->inset()), el);

	// Register the clone and hand it the shared clone list.
	bufmap[this] = clone;
	clones->insert(clone);
	clone->d->clone_list_ = clones;
	clone->d->macro_lock = true;
	clone->d->children_positions.clear();

	// FIXME (Abdel 09/01/2010): this is too complicated. The whole children_positions and
	// math macro caches need to be rethought and simplified.
	// I am not sure wether we should handle Buffer cloning here or in BufferList.
	// Right now BufferList knows nothing about buffer clones.
	for (auto const & entry : d->position_to_children) {
		// Re-anchor the include position inside the cloned buffer.
		DocIterator pos = entry.first.clone(clone);
		pos.setBuffer(clone);
		Buffer * child = const_cast<Buffer *>(entry.second.buffer);

		// Clone the child (no-op if already done) and look it up.
		child->cloneWithChildren(bufmap, clones);
		auto const found = bufmap.find(child);
		LASSERT(found != bufmap.end(), continue);
		Buffer * child_clone = found->second;

		// Re-wire the include inset of the clone to the cloned child.
		Inset * inset = pos.nextInset();
		LASSERT(inset && inset->lyxCode() == INCLUDE_CODE, continue);
		InsetInclude * include = static_cast<InsetInclude *>(inset);
		include->setChildBuffer(child_clone);
		child_clone->d->setParent(clone);
		// FIXME Do we need to do this now, or can we wait until we run updateMacros()?
		clone->setChild(pos, child_clone);
	}
	clone->d->macro_lock = false;
}
|
|
|
|
|
|
|
|
|
|
|
2011-12-03 23:20:18 +00:00
|
|
|
|
Buffer * Buffer::cloneBufferOnly() const {
	// Register a clone list containing only this single clone.
	cloned_buffers.emplace_back(new CloneList);
	CloneList_ptr clone_list = cloned_buffers.back();
	Buffer * clone = new Buffer(fileName().absFileName(), false, this);

	// The clone needs its own DocumentClass, since running updateBuffer() will
	// modify it, and we would otherwise be sharing it with the original Buffer.
	clone->params().makeDocumentClass(true);
	ErrorList el;
	cap::switchBetweenClasses(
			params().documentClassPtr(), clone->params().documentClassPtr(),
			static_cast<InsetText &>(clone->inset()), el);

	clone_list->insert(clone);
	clone->d->clone_list_ = clone_list;

	// we won't be cloning the children
	clone->d->children_positions.clear();
	return clone;
}
|
|
|
|
|
|
|
|
|
|
|
2009-12-18 22:59:59 +00:00
|
|
|
|
bool Buffer::isClone() const
|
|
|
|
|
{
|
|
|
|
|
return d->cloned_buffer_;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2010-01-08 02:03:54 +00:00
|
|
|
|
void Buffer::changed(bool update_metrics) const
|
2007-10-02 09:00:08 +00:00
|
|
|
|
{
|
2007-11-30 17:46:49 +00:00
|
|
|
|
if (d->wa_)
|
2010-01-08 02:03:54 +00:00
|
|
|
|
d->wa_->redrawAll(update_metrics);
|
2007-10-02 09:00:08 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2007-10-02 14:39:48 +00:00
|
|
|
|
frontend::WorkAreaManager & Buffer::workAreaManager() const
{
	// The manager must exist; callers rely on a valid reference.
	LBUFERR(d->wa_);
	return *d->wa_;
}
|
|
|
|
|
|
2007-10-02 14:39:48 +00:00
|
|
|
|
|
2007-04-29 23:33:02 +00:00
|
|
|
|
Text & Buffer::text() const
{
	// Convenience accessor for the text of the top-level inset.
	return d->inset->text();
}
|
|
|
|
|
|
|
|
|
|
|
2007-04-29 13:39:47 +00:00
|
|
|
|
Inset & Buffer::inset() const
{
	// The top-level inset holding the whole document.
	return *d->inset;
}
|
|
|
|
|
|
|
|
|
|
|
2003-09-09 09:47:59 +00:00
|
|
|
|
BufferParams & Buffer::params()
{
	// Mutable access to the document parameters.
	return d->params;
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
BufferParams const & Buffer::params() const
{
	// Read-only access to the document parameters.
	return d->params;
}
|
|
|
|
|
|
|
|
|
|
|
2014-03-13 05:43:38 +00:00
|
|
|
|
BufferParams const & Buffer::masterParams() const
{
	// If we are our own master, our params are the master params.
	if (masterBuffer() == this)
		return params();

	BufferParams & mparams = const_cast<Buffer *>(masterBuffer())->params();
	// Copy child authors to the params. We need those pointers.
	for (Author const & author : params().authors())
		mparams.authors().record(author);
	return mparams;
}
|
|
|
|
|
|
|
|
|
|
|
2014-10-18 13:30:58 +00:00
|
|
|
|
double Buffer::fontScalingFactor() const
|
|
|
|
|
{
|
|
|
|
|
return isExporting() ? 75.0 * params().html_math_img_scale
|
2016-10-29 08:28:34 +00:00
|
|
|
|
: 0.01 * lyxrc.dpi * lyxrc.currentZoom * lyxrc.preview_scale_factor * params().display_pixel_ratio;
|
2014-10-18 13:30:58 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2003-09-09 09:47:59 +00:00
|
|
|
|
ParagraphList & Buffer::paragraphs()
{
	// Mutable paragraph list of the main text.
	return text().paragraphs();
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
ParagraphList const & Buffer::paragraphs() const
{
	// Read-only paragraph list of the main text.
	return text().paragraphs();
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
LyXVC & Buffer::lyxvc()
{
	// Mutable access to the version control interface.
	return d->lyxvc;
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
LyXVC const & Buffer::lyxvc() const
{
	// Read-only access to the version control interface.
	return d->lyxvc;
}
|
|
|
|
|
|
|
|
|
|
|
2007-12-17 18:37:13 +00:00
|
|
|
|
string const Buffer::temppath() const
{
	// Absolute path of this buffer's temporary directory.
	return d->temppath.absFileName();
}
|
|
|
|
|
|
|
|
|
|
|
2008-11-16 00:12:21 +00:00
|
|
|
|
TexRow & Buffer::texrow()
{
	// Mutable access to the LaTeX row <-> document position map.
	return d->texrow;
}
|
|
|
|
|
|
|
|
|
|
|
2003-09-09 09:47:59 +00:00
|
|
|
|
TexRow const & Buffer::texrow() const
{
	// Read-only access to the LaTeX row <-> document position map.
	return d->texrow;
}
|
|
|
|
|
|
|
|
|
|
|
2007-11-30 17:41:27 +00:00
|
|
|
|
TocBackend & Buffer::tocBackend() const
{
	// Access to the table-of-contents backend for this buffer.
	return d->toc_backend;
}
|
|
|
|
|
|
|
|
|
|
|
2007-10-18 11:51:17 +00:00
|
|
|
|
Undo & Buffer::undo()
{
	// Access to the undo stack of this buffer.
	return d->undo_;
}
|
|
|
|
|
|
|
|
|
|
|
2010-01-09 14:47:47 +00:00
|
|
|
|
void Buffer::setChild(DocIterator const & dit, Buffer * child)
{
	// Remember (or update) where the given child is included.
	d->children_positions[child] = dit;
}
|
|
|
|
|
|
|
|
|
|
|
2007-10-20 10:03:45 +00:00
|
|
|
|
string Buffer::latexName(bool const no_path) const
{
	// Derive the LaTeX-safe name from the (possibly branch-suffixed)
	// export file name.
	FileName const name = makeLatexName(d->exportFileName());
	if (no_path)
		return name.onlyFileName();
	return name.absFileName();
}
|
|
|
|
|
|
2001-03-14 14:54:30 +00:00
|
|
|
|
|
2010-01-25 18:39:08 +00:00
|
|
|
|
FileName Buffer::Impl::exportFileName() const
{
	// Branches can request a file name suffix; without one, the plain
	// buffer file name is used for export.
	docstring const suffix = params.branchlist().getFileNameSuffix();
	if (suffix.empty())
		return filename;

	// Insert the suffix between the base name and the extension.
	string const suffixed = addExtension(filename.onlyFileNameWithoutExt()
			+ to_utf8(suffix), filename.extension());
	return FileName(filename.onlyPath().absFileName() + "/" + suffixed);
}
|
|
|
|
|
|
|
|
|
|
|
2007-11-01 22:17:22 +00:00
|
|
|
|
string Buffer::logName(LogType * type) const
{
	string const filename = latexName(false);

	// Without a LaTeX name there can be no log file.
	if (filename.empty()) {
		if (type)
			*type = latexlog;
		return string();
	}

	string const path = temppath();

	// The LaTeX log lives in the temp directory next to the export file.
	FileName const fname(addName(temppath(),
				     onlyFileName(changeExtension(filename,
								  ".log"))));

	// FIXME: how do we know this is the name of the build log?
	FileName const bname(
		addName(path, onlyFileName(
			changeExtension(filename,
					theFormats().extension(params().bufferFormat()) + ".out"))));

	// Also consider the master buffer log file
	FileName masterfname = fname;
	LogType mtype = latexlog;
	if (masterBuffer() != this) {
		string const mlogfile = masterBuffer()->logName(&mtype);
		masterfname = FileName(mlogfile);
	}

	// If no Latex log or Build log is newer, show Build log
	if (bname.exists() &&
	    ((!fname.exists() && !masterfname.exists())
	     || (fname.lastModified() < bname.lastModified()
		 && masterfname.lastModified() < bname.lastModified()))) {
		LYXERR(Debug::FILES, "Log name calculated as: " << bname);
		if (type)
			*type = buildlog;
		return bname.absFileName();
	// If we have a newer master file log or only a master log, show this
	// NOTE(review): with this grouping the "newer master log" half is only
	// reachable when fname does not exist — looks inconsistent with the
	// comment above; verify the intended precedence before changing.
	} else if (fname != masterfname
		   && (!fname.exists() && (masterfname.exists()
		   || fname.lastModified() < masterfname.lastModified()))) {
		LYXERR(Debug::FILES, "Log name calculated as: " << masterfname);
		if (type)
			*type = mtype;
		return masterfname.absFileName();
	}
	LYXERR(Debug::FILES, "Log name calculated as: " << fname);
	if (type)
		*type = latexlog;
	return fname.absFileName();
}
|
2000-04-08 17:02:02 +00:00
|
|
|
|
|
2001-03-14 14:54:30 +00:00
|
|
|
|
|
2009-10-25 14:00:29 +00:00
|
|
|
|
void Buffer::setReadonly(bool const flag)
|
2000-04-08 17:02:02 +00:00
|
|
|
|
{
|
2007-11-30 17:46:49 +00:00
|
|
|
|
if (d->read_only != flag) {
|
|
|
|
|
d->read_only = flag;
|
2010-01-25 13:31:07 +00:00
|
|
|
|
changed(false);
|
2000-04-08 17:02:02 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2010-11-18 17:50:33 +00:00
|
|
|
|
void Buffer::setFileName(FileName const & fname)
{
	// Detect a real rename before overwriting the stored name.
	bool const renamed = fname != d->filename;
	d->filename = fname;
	d->refreshFileMonitor();
	// Let version control know about the new location.
	if (renamed)
		lyxvc().file_found_hook(fname);
	setReadonly(d->filename.isReadOnly());
	saveCheckSum();
	updateTitles();
}
|
|
|
|
|
|
1999-12-10 00:07:59 +00:00
|
|
|
|
|
2007-04-26 11:30:54 +00:00
|
|
|
|
int Buffer::readHeader(Lexer & lex)
{
	// Number of header tokens we failed to recognise (returned to caller).
	int num_unknown = 0;
	int header_line = -1;
	int begin_header_at = -1;

	// Initialize parameters that may be/go lacking in header:
	params().branchlist().clear();
	params().preamble.erase();
	params().options.erase();
	params().master.erase();
	params().float_placement.erase();
	params().float_alignment.erase();
	params().paperwidth.erase();
	params().paperheight.erase();
	params().leftmargin.erase();
	params().rightmargin.erase();
	params().topmargin.erase();
	params().bottommargin.erase();
	params().headheight.erase();
	params().headsep.erase();
	params().footskip.erase();
	params().columnsep.erase();
	params().font_roman_opts.erase();
	params().font_sans_opts.erase();
	params().font_typewriter_opts.erase();
	params().fonts_cjk.erase();
	params().listings_params.clear();
	params().clearLayoutModules();
	params().clearRemovedModules();
	params().clearIncludedChildren();
	params().pdfoptions().clear();
	params().indiceslist().clear();
	params().backgroundcolor = lyx::rgbFromHexName("#ffffff");
	params().isbackgroundcolor = false;
	params().fontcolor = RGBColor(0, 0, 0);
	params().isfontcolor = false;
	params().notefontcolor = RGBColor(0xCC, 0xCC, 0xCC);
	params().isnotefontcolor = false;
	params().boxbgcolor = RGBColor(0xFF, 0, 0);
	params().isboxbgcolor = false;
	params().html_latex_start.clear();
	params().html_latex_end.clear();
	params().html_math_img_scale = 1.0;
	params().output_sync_macro.erase();
	params().setLocalLayout(docstring(), false);
	params().setLocalLayout(docstring(), true);
	params().biblio_opts.erase();
	params().biblatex_bibstyle.erase();
	params().biblatex_citestyle.erase();
	params().multibib.erase();
	params().lineno_opts.clear();

	// Reset bullets to the itemize defaults.
	for (int i = 0; i < 4; ++i) {
		params().user_defined_bullet(i) = ITEMIZE_DEFAULTS[i];
		params().temp_bullet(i) = ITEMIZE_DEFAULTS[i];
	}

	// Parse errors are collected under the "Parse" error list.
	ErrorList & errors = d->errorLists["Parse"];

	while (lex.isOK()) {
		string token;
		lex >> token;

		if (token.empty())
			continue;

		if (token == "\\end_header")
			break;

		++header_line;
		if (token == "\\begin_header") {
			begin_header_at = header_line;
			continue;
		}

		LYXERR(Debug::PARSER, "Handling document header token: `"
				      << token << '\'');

		// BufferParams consumes the token; a non-empty result either
		// carries the layout position (for \textclass) or signals an
		// unknown token.
		string const result =
			params().readToken(lex, token, d->filename);
		if (!result.empty()) {
			if (token == "\\textclass") {
				d->layout_position = result;
			} else {
				++num_unknown;
				docstring const s = bformat(_("Unknown token: "
									"%1$s %2$s\n"),
							 from_utf8(token),
							 lex.getDocString());
				errors.push_back(ErrorItem(_("Document header error"), s));
			}
		}
	}
	// Non-zero here means \begin_header was missing or not the first token.
	if (begin_header_at) {
		docstring const s = _("\\begin_header is missing");
		errors.push_back(ErrorItem(_("Document header error"), s));
	}

	params().shell_escape = theSession().shellescapeFiles().find(absFileName());

	params().makeDocumentClass(isClone(), isInternal());

	return num_unknown;
}
|
|
|
|
|
|
2001-08-30 07:13:15 +00:00
|
|
|
|
|
1999-09-27 18:44:28 +00:00
|
|
|
|
// Uwe C. Schroeder
|
|
|
|
|
// changed to be public and have one parameter
|
2009-02-04 09:52:10 +00:00
|
|
|
|
// Returns true if "\end_document" is not read (Asger)
|
2007-04-26 11:30:54 +00:00
|
|
|
|
bool Buffer::readDocument(Lexer & lex)
|
1999-09-27 18:44:28 +00:00
|
|
|
|
{
|
2007-11-30 17:46:49 +00:00
|
|
|
|
ErrorList & errorList = d->errorLists["Parse"];
|
2006-08-13 16:16:43 +00:00
|
|
|
|
errorList.clear();
|
2006-07-15 22:43:37 +00:00
|
|
|
|
|
2010-10-25 13:04:13 +00:00
|
|
|
|
// remove dummy empty par
|
|
|
|
|
paragraphs().clear();
|
|
|
|
|
|
2008-04-05 12:23:27 +00:00
|
|
|
|
if (!lex.checkFor("\\begin_document")) {
|
2006-09-11 08:54:10 +00:00
|
|
|
|
docstring const s = _("\\begin_document is missing");
|
2016-10-09 19:34:12 +00:00
|
|
|
|
errorList.push_back(ErrorItem(_("Document header error"), s));
|
2004-08-14 18:41:27 +00:00
|
|
|
|
}
|
|
|
|
|
|
2005-11-29 15:08:35 +00:00
|
|
|
|
readHeader(lex);
|
1999-09-27 18:44:28 +00:00
|
|
|
|
|
2014-03-29 22:52:36 +00:00
|
|
|
|
if (params().output_changes) {
|
2009-01-12 08:01:28 +00:00
|
|
|
|
bool xcolorulem = LaTeXFeatures::isAvailable("ulem") &&
|
2007-05-28 22:27:45 +00:00
|
|
|
|
LaTeXFeatures::isAvailable("xcolor");
|
|
|
|
|
|
2020-01-13 07:59:26 +00:00
|
|
|
|
if (!xcolorulem) {
|
2007-05-08 17:46:03 +00:00
|
|
|
|
Alert::warning(_("Changes not shown in LaTeX output"),
|
2007-05-28 22:27:45 +00:00
|
|
|
|
_("Changes will not be highlighted in LaTeX output, "
|
2020-01-13 07:59:26 +00:00
|
|
|
|
"because xcolor and ulem are not installed.\n"
|
2007-05-28 22:27:45 +00:00
|
|
|
|
"Please install both packages or redefine "
|
|
|
|
|
"\\lyxadded and \\lyxdeleted in the LaTeX preamble."));
|
2007-05-08 17:46:03 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2013-11-23 15:51:16 +00:00
|
|
|
|
if (!parent() && !params().master.empty()) {
|
2008-04-28 16:38:56 +00:00
|
|
|
|
FileName const master_file = makeAbsPath(params().master,
|
|
|
|
|
onlyPath(absFileName()));
|
2010-04-21 01:19:31 +00:00
|
|
|
|
if (isLyXFileName(master_file.absFileName())) {
|
2011-12-03 22:15:11 +00:00
|
|
|
|
Buffer * master =
|
2009-01-07 07:53:28 +00:00
|
|
|
|
checkAndLoadLyXFile(master_file, true);
|
2009-02-14 14:21:59 +00:00
|
|
|
|
if (master) {
|
2009-04-03 11:07:53 +00:00
|
|
|
|
// necessary e.g. after a reload
|
|
|
|
|
// to re-register the child (bug 5873)
|
|
|
|
|
// FIXME: clean up updateMacros (here, only
|
|
|
|
|
// child registering is needed).
|
|
|
|
|
master->updateMacros();
|
2009-02-14 14:21:59 +00:00
|
|
|
|
// set master as master buffer, but only
|
|
|
|
|
// if we are a real child
|
|
|
|
|
if (master->isChild(this))
|
|
|
|
|
setParent(master);
|
|
|
|
|
// if the master is not fully loaded
|
|
|
|
|
// it is probably just loading this
|
|
|
|
|
// child. No warning needed then.
|
|
|
|
|
else if (master->isFullyLoaded())
|
|
|
|
|
LYXERR0("The master '"
|
|
|
|
|
<< params().master
|
2009-04-03 11:07:53 +00:00
|
|
|
|
<< "' assigned to this document ("
|
2009-02-14 14:21:59 +00:00
|
|
|
|
<< absFileName()
|
2009-04-03 11:07:53 +00:00
|
|
|
|
<< ") does not include "
|
2009-02-14 14:21:59 +00:00
|
|
|
|
"this document. Ignoring the master assignment.");
|
2018-06-01 08:17:18 +00:00
|
|
|
|
// If the master has just been created, un-hide it (#11162)
|
|
|
|
|
if (!master->fileName().exists())
|
|
|
|
|
lyx::dispatch(FuncRequest(LFUN_BUFFER_SWITCH,
|
|
|
|
|
master->absFileName()));
|
2009-02-14 14:21:59 +00:00
|
|
|
|
}
|
2008-04-28 16:38:56 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2011-12-03 22:15:11 +00:00
|
|
|
|
|
2009-08-17 08:28:57 +00:00
|
|
|
|
// assure we have a default index
|
|
|
|
|
params().indiceslist().addDefault(B_("Index"));
|
2008-04-28 16:38:56 +00:00
|
|
|
|
|
2007-09-11 14:23:12 +00:00
|
|
|
|
// read main text
|
2016-01-10 18:46:01 +00:00
|
|
|
|
if (FileName::isAbsolute(params().origin))
|
|
|
|
|
d->old_position = params().origin;
|
|
|
|
|
else
|
|
|
|
|
d->old_position = filePath();
|
2020-04-26 02:17:51 +00:00
|
|
|
|
|
|
|
|
|
if (!parent())
|
|
|
|
|
clearIncludeList();
|
|
|
|
|
|
2009-08-09 15:29:34 +00:00
|
|
|
|
bool const res = text().read(lex, errorList, d->inset);
|
Correctly load documents moved elsewhere after save.
It is now possible to open documents that were manually moved to a
different location after they were saved and still produce an output.
Indeed, (hopefully) all needed included files are now still found.
When the moved document is saved again, all paths are accordingly updated.
Of course, for this to work, a document has to be saved in Format 490,
at least.
As an example, after converting the user guide to the last format, it can
be copied anywhere and opened without the need of adapting the paths of
included files or moving them to a proper place.
There is one glitch I am aware of. When moving a child document (but not
the master) the path to the master is correctly updated but it won't be
recognized as such. This is because LyX checks that the parent actually
includes this document but, of course, being the parent document not
touched, it appears not including this child. Anyway, it will also occur
when saving the child to a different location and the user is warned
on the terminal about this fact when the moved child is loaded.
However, there is no problem when it is the master that has been moved.
2015-05-16 17:51:53 +00:00
|
|
|
|
d->old_position.clear();
|
2006-07-15 22:43:37 +00:00
|
|
|
|
|
2010-12-20 18:15:39 +00:00
|
|
|
|
// inform parent buffer about local macros
|
|
|
|
|
if (parent()) {
|
2010-12-21 14:52:18 +00:00
|
|
|
|
Buffer const * pbuf = parent();
|
2017-11-05 02:15:01 +00:00
|
|
|
|
for (auto const & m : usermacros)
|
|
|
|
|
pbuf->usermacros.insert(m);
|
2010-12-20 18:15:39 +00:00
|
|
|
|
}
|
|
|
|
|
usermacros.clear();
|
2007-12-21 20:42:46 +00:00
|
|
|
|
updateMacros();
|
2011-05-07 11:57:08 +00:00
|
|
|
|
updateMacroInstances(InternalUpdate);
|
2005-11-29 15:08:35 +00:00
|
|
|
|
return res;
|
2000-03-01 14:13:21 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2013-04-14 17:45:36 +00:00
|
|
|
|
bool Buffer::importString(string const & format, docstring const & contents, ErrorList & errorList)
{
	// Look up the source format; nothing can be imported if it is unknown.
	Format const * fmt = theFormats().getFormat(format);
	if (!fmt)
		return false;

	// It is important to use the correct extension here, since some
	// converters create a wrong output file otherwise (e.g. html2latex)
	FileName const tmpname = tempFileName("Buffer_importStringXXXXXX." + fmt->extension());
	ofdocstream ofs(tmpname.toFilesystemEncoding().c_str());
	// Do not convert the stream implicitly to bool (forbidden in C++11);
	// query the fail state explicitly instead.
	bool const dumped = !(ofs << contents).fail();
	ofs.close();

	bool imported = false;
	if (dumped) {
		params().compressed = false;

		// remove dummy empty par
		paragraphs().clear();

		imported = importFile(format, tmpname, errorList);
	}

	removeTempFile(tmpname);
	return imported;
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
bool Buffer::importFile(string const & format, FileName const & name, ErrorList & errorList)
{
	// Give up immediately if no converter chain can reach the native format.
	if (!theConverters().isReachable(format, "lyx"))
		return false;

	FileName const tmplyx = tempFileName("Buffer_importFileXXXXXX.lyx");
	Converters::RetVal const status =
		theConverters().convert(nullptr, name, tmplyx, name, format, "lyx", errorList);
	if (status != Converters::SUCCESS)
		return false;

	// Conversion succeeded: load the resulting native file and clean up.
	bool const loaded = readFile(tmplyx) == ReadSuccess;
	removeTempFile(tmplyx);
	return loaded;
}
|
|
|
|
|
|
|
|
|
|
|
2007-12-12 19:28:07 +00:00
|
|
|
|
bool Buffer::readString(string const & s)
{
	params().compressed = false;

	// Parse the string through a Lexer, as if it came from a file.
	Lexer lex;
	istringstream is(s);
	lex.setStream(is);
	TempFile tempfile("Buffer_readStringXXXXXX.lyx");
	FileName const fn = tempfile.name();

	int file_format;
	if (parseLyXFormat(lex, fn, file_format) != ReadSuccess)
		return false;

	if (file_format != LYX_FORMAT) {
		// We need to call lyx2lyx, so write the input to a file
		// and let readFile() do the conversion (and lyxvc handling).
		ofstream os(fn.toFilesystemEncoding().c_str());
		os << s;
		os.close();
		return readFile(fn) == ReadSuccess;
	}

	// Current format: parse the document straight from the stream.
	return !readDocument(lex);
}
|
|
|
|
|
|
|
|
|
|
|
2010-10-25 13:04:13 +00:00
|
|
|
|
// Read the LyX document in fn into this buffer.
// The file format is detected first; a document in a non-current format is
// converted with lyx2lyx (convertLyXFormat) into a temporary file on which
// this function then recurses. The original on-disk format is remembered
// (d->file_format / d->need_format_backup) so that Buffer::save() can make
// a one-time backup of the pre-conversion file.
Buffer::ReadStatus Buffer::readFile(FileName const & fn)
{
	Lexer lex;
	if (!lex.setFile(fn)) {
		Alert::error(_("File Not Found"),
			bformat(_("Unable to open file `%1$s'."),
			        from_utf8(fn.absFileName())));
		return ReadFileNotFound;
	}

	// Read the \lyxformat header to learn the file format.
	int file_format;
	ReadStatus const ret_plf = parseLyXFormat(lex, fn, file_format);
	if (ret_plf != ReadSuccess)
		return ret_plf;

	if (file_format != LYX_FORMAT) {
		// Not the current format: convert to a temporary file and
		// read that one instead.
		FileName tmpFile;
		ReadStatus ret_clf = convertLyXFormat(fn, tmpFile, file_format);
		if (ret_clf != ReadSuccess)
			return ret_clf;
		ret_clf = readFile(tmpFile);
		if (ret_clf == ReadSuccess) {
			// Remember the original format so save() knows that a
			// format-change backup is due.
			d->file_format = file_format;
			d->need_format_backup = true;
		}
		return ret_clf;
	}

	// FIXME: InsetInfo needs to know whether the file is under VCS
	// during the parse process, so this has to be done before.
	lyxvc().file_found_hook(d->filename);

	if (readDocument(lex)) {
		Alert::error(_("Document format failure"),
			bformat(_("%1$s ended unexpectedly, which means"
				" that it is probably corrupted."),
				from_utf8(fn.absFileName())));
		return ReadDocumentFailure;
	}

	// The document parsed cleanly: record the derived buffer state.
	d->file_fully_loaded = true;
	d->read_only = !d->filename.isWritable();
	params().compressed = theFormats().isZippedFile(d->filename);
	saveCheckSum();
	return ReadSuccess;
}
|
|
|
|
|
|
|
|
|
|
|
2007-10-20 10:03:45 +00:00
|
|
|
|
// True once the document has been completely read in
// (the flag is set at the end of a successful readFile()).
bool Buffer::isFullyLoaded() const
{
	return d->file_fully_loaded;
}
|
|
|
|
|
|
|
|
|
|
|
2007-10-20 10:03:45 +00:00
|
|
|
|
// Explicitly set the fully-loaded flag (see isFullyLoaded()).
void Buffer::setFullyLoaded(bool value)
{
	d->file_fully_loaded = value;
}
|
|
|
|
|
|
|
|
|
|
|
2019-12-24 17:12:22 +00:00
|
|
|
|
// Query the fresh-start flag set via requireFreshStart().
bool Buffer::freshStartRequired() const
{
	return d->require_fresh_start_;
}
|
|
|
|
|
|
|
|
|
|
// Set the fresh-start flag. The method is const: the flag lives in the
// pimpl (d) and is treated as conceptually mutable state.
void Buffer::requireFreshStart(bool const b) const
{
	d->require_fresh_start_ = b;
}
|
|
|
|
|
|
|
|
|
|
|
2011-07-12 12:43:11 +00:00
|
|
|
|
// Return the buffer's PreviewLoader, creating it lazily, or nullptr when
// previews are disabled.
PreviewLoader * Buffer::loader() const
{
	// Previews are wanted while exporting, or whenever they are not
	// switched off in the preferences.
	bool const wanted = isExporting() || lyxrc.preview != LyXRC::PREVIEW_OFF;
	if (!wanted)
		return nullptr;
	// Create the loader on first use.
	if (!d->preview_loader_)
		d->preview_loader_ = new PreviewLoader(*this);
	return d->preview_loader_;
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Destroy the cached PreviewLoader, if any. A subsequent call to loader()
// will recreate it lazily.
void Buffer::removePreviews() const
{
	delete d->preview_loader_;
	d->preview_loader_ = nullptr;
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
void Buffer::updatePreviews() const
|
|
|
|
|
{
|
|
|
|
|
PreviewLoader * ploader = loader();
|
|
|
|
|
if (!ploader)
|
|
|
|
|
return;
|
|
|
|
|
|
2020-11-27 18:10:52 +00:00
|
|
|
|
InsetIterator it = begin(*d->inset);
|
|
|
|
|
InsetIterator const itend = end(*d->inset);
|
|
|
|
|
for (; it != itend; ++it)
|
2011-07-12 12:43:11 +00:00
|
|
|
|
it->addPreview(it, *ploader);
|
|
|
|
|
|
|
|
|
|
ploader->startLoading();
|
2010-10-29 20:00:51 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2010-10-25 13:29:50 +00:00
|
|
|
|
// Read the \lyxformat header from lex and report the format in file_format.
// fn is only used for the error message shown when the header is missing.
Buffer::ReadStatus Buffer::parseLyXFormat(Lexer & lex,
	FileName const & fn, int & file_format) const
{
	// A native document must start with the \lyxformat token.
	if (!lex.checkFor("\\lyxformat")) {
		Alert::error(_("Document format failure"),
			bformat(_("%1$s is not a readable LyX document."),
				from_utf8(fn.absFileName())));
		return ReadNoLyXFormat;
	}

	string token;
	lex >> token;

	// LyX formats 217 and earlier were written as 2.17. This corresponds
	// to files from LyX versions < 1.1.6.3. We just remove the dot in
	// these cases. See also: www.lyx.org/trac/changeset/1313.
	size_t const sep = token.find_first_of(".,");
	if (sep != string::npos)
		token.erase(sep, 1);

	file_format = convert<int>(token);
	return ReadSuccess;
}
|
|
|
|
|
|
|
|
|
|
|
2011-12-03 22:15:11 +00:00
|
|
|
|
// Convert fn (written in file format from_format) to the current LYX_FORMAT
// by running the lyx2lyx python script. On success the converted document
// is left in tmpfile, whose auto-removal is disabled -- the caller is
// responsible for deleting it. Returns ReadSuccess, or one of the LyX2LyX*
// codes describing which step failed (temp file creation, locating the
// script, or the conversion run itself).
Buffer::ReadStatus Buffer::convertLyXFormat(FileName const & fn,
	FileName & tmpfile, int from_format)
{
	TempFile tempfile("Buffer_convertLyXFormatXXXXXX.lyx");
	// The caller reads (and later removes) the converted file, so it
	// must survive this scope.
	tempfile.setAutoRemove(false);
	tmpfile = tempfile.name();
	if(tmpfile.empty()) {
		Alert::error(_("Conversion failed"),
			bformat(_("%1$s is from a different"
				" version of LyX, but a temporary"
				" file for converting it could"
				" not be created."),
				from_utf8(fn.absFileName())));
		return LyX2LyXNoTempFile;
	}

	FileName const lyx2lyx = libFileSearch("lyx2lyx", "lyx2lyx");
	if (lyx2lyx.empty()) {
		Alert::error(_("Conversion script not found"),
			bformat(_("%1$s is from a different"
				" version of LyX, but the"
				" conversion script lyx2lyx"
				" could not be found."),
				from_utf8(fn.absFileName())));
		return LyX2LyXNotFound;
	}

	// Run lyx2lyx:
	// $python$ "$lyx2lyx$" -t $LYX_FORMAT$ -o "$tempfile$" "$filetoread$"
	ostringstream command;
	command << os::python()
		<< ' ' << quoteName(lyx2lyx.toFilesystemEncoding())
		<< " -t " << convert<string>(LYX_FORMAT)
		<< " -o " << quoteName(tmpfile.toSafeFilesystemEncoding())
		<< ' ' << quoteName(fn.toSafeFilesystemEncoding());
	string const command_str = command.str();

	LYXERR(Debug::INFO, "Running '" << command_str << '\'');

	cmd_ret const ret = runCommand(command_str);
	if (!ret.valid) {
		// The script failed; tell the user whether the document was
		// older or newer than what this LyX can read.
		if (from_format < LYX_FORMAT) {
			Alert::error(_("Conversion script failed"),
				bformat(_("%1$s is from an older version"
					" of LyX and the lyx2lyx script"
					" failed to convert it."),
					from_utf8(fn.absFileName())));
			return LyX2LyXOlderFormat;
		} else {
			Alert::error(_("Conversion script failed"),
				bformat(_("%1$s is from a newer version"
					" of LyX and the lyx2lyx script"
					" failed to convert it."),
					from_utf8(fn.absFileName())));
			return LyX2LyXNewerFormat;
		}
	}
	return ReadSuccess;
}
|
|
|
|
|
|
|
|
|
|
|
2015-11-13 22:12:52 +00:00
|
|
|
|
// Compute the name for the one-time backup that save() makes when a
// document was read from an older file format (d->need_format_backup).
// The name encodes the old format -- "-lyx<version>" when the format
// closed a stable release series, "-lyxformat-<N>" otherwise -- and keeps
// the original extension with a '~' appended. If such a file already
// exists with different content, "-1", "-2", ... is appended until a free
// name is found (bounded at 100 tries). Returns an empty FileName when no
// name could be found, or when an existing backup already has the same
// checksum as the document (no new backup needed then).
FileName Buffer::getBackupName() const
{
	// File formats that closed a stable release series, mapped to the
	// version string used in the backup suffix.
	map<int, string> const file_formats = {
		{544, "23"},
		{508, "22"},
		{474, "21"},
		{413, "20"},
		{345, "16"},
		{276, "15"},
		{245, "14"},
		{221, "13"},
		{220, "12"},
		{218, "1163"},
		{217, "116"},
		{216, "115"},
		{215, "11"},
		{210, "010"},
		{200, "006"}
	};
	FileName const & fn = fileName();
	string const fname = fn.onlyFileNameWithoutExt();
	string const fext = fn.extension() + "~";
	// Backups go to the configured backup directory, or next to the file.
	string const fpath = lyxrc.backupdir_path.empty() ?
		fn.onlyPath().absFileName() :
		lyxrc.backupdir_path;
	string backup_suffix;
	// If file format is from a stable series use version instead of file format
	auto const it = file_formats.find(d->file_format);
	if (it != file_formats.end())
		backup_suffix = "-lyx" + it->second;
	else
		backup_suffix = "-lyxformat-" + convert<string>(d->file_format);
	string const backname = fname + backup_suffix;
	FileName backup(addName(fpath, addExtension(backname, fext)));

	// limit recursion, just in case
	int v = 1;
	unsigned long orig_checksum = 0;
	while (backup.exists() && v < 100) {
		// Compute the document checksum lazily, only when a name
		// collision actually occurs.
		if (orig_checksum == 0)
			orig_checksum = fn.checksum();
		unsigned long const new_checksum = backup.checksum();
		if (orig_checksum == new_checksum) {
			// Fixed message spacing so the paths do not run together.
			LYXERR(Debug::FILES, "Not backing up " << fn <<
			       " since " << backup << " has the same checksum.");
			// a bit of a hack, but we have to check this anyway
			// below, and setting this is simpler than introducing
			// a special boolean for this purpose.
			v = 1000;
			break;
		}
		string const newbackname = backname + "-" + convert<string>(v);
		backup.set(addName(fpath, addExtension(newbackname, fext)));
		v++;
	}
	return v < 100 ? backup : FileName();
}
|
|
|
|
|
|
|
|
|
|
|
2010-02-09 17:06:40 +00:00
|
|
|
|
// Should probably be moved to somewhere else: BufferView? GuiView?
|
2000-03-20 14:49:54 +00:00
|
|
|
|
// Save the buffer to its own file name, guarding against data loss:
//  - refuse if the on-disk file is read-only;
//  - prompt before overwriting a file modified behind our back;
//  - write to a temporary file in the same directory first, back the
//    original up if requested, then move the temporary into place.
// Returns true on success. On failure the original file is preserved
// (possibly under the backup name) and the user is told where the saved
// data ended up.
bool Buffer::save() const
{
	docstring const file = makeDisplayPath(absFileName(), 20);
	d->filename.refresh();

	// check the read-only status before moving the file as a backup
	if (d->filename.exists()) {
		bool const read_only = !d->filename.isWritable();
		if (read_only) {
			Alert::warning(_("File is read-only"),
				bformat(_("The file %1$s cannot be written because it "
					"is marked as read-only."), file));
			return false;
		}
	}

	// ask if the disk file has been externally modified (use checksum method)
	if (fileName().exists() && isChecksumModified()) {
		docstring text =
			bformat(_("Document %1$s has been externally modified. "
				"Are you sure you want to overwrite this file?"), file);
		int const ret = Alert::prompt(_("Overwrite modified file?"),
			text, 1, 1, _("&Overwrite"), _("&Cancel"));
		if (ret == 1)
			return false;
	}

	// We don't need autosaves in the immediate future. (Asger)
	resetAutosaveTimers();

	// if the file does not yet exist, none of the backup activity
	// that follows is necessary
	if (!fileName().exists()) {
		if (!writeFile(fileName()))
			return false;
		markClean();
		return true;
	}

	// we first write the file to a new name, then move it to its
	// proper location once that has been done successfully. that
	// way we preserve the original file if something goes wrong.
	string const justname = fileName().onlyFileNameWithoutExt();
	auto tempfile = lyx::make_unique<TempFile>(fileName().onlyPath(),
					  justname + "-XXXXXX.lyx");
	bool const symlink = fileName().isSymLink();
	// For a symlink we keep auto-removal on: the temp file will be
	// copied, not renamed, so it must be cleaned up afterwards.
	if (!symlink)
		tempfile->setAutoRemove(false);

	FileName savefile(tempfile->name());
	LYXERR(Debug::FILES, "Saving to " << savefile.absFileName());
	if (!savefile.clonePermissions(fileName()))
		LYXERR0("Failed to clone the permission from " << fileName().absFileName() << " to " << savefile.absFileName());

	if (!writeFile(savefile))
		return false;

	// we will set this to false if we fail
	bool made_backup = true;

	FileName backupName;
	bool const needBackup = lyxrc.make_backup || d->need_format_backup;
	if (needBackup) {
		// A format change gets a dedicated backup name that encodes
		// the old format.
		if (d->need_format_backup)
			backupName = getBackupName();

		// If we for some reason failed to find a backup name in case of
		// a format change, this will still set one. It's the best we can
		// do in this case.
		if (backupName.empty()) {
			backupName.set(fileName().absFileName() + "~");
			if (!lyxrc.backupdir_path.empty()) {
				string const mangledName =
					subst(subst(backupName.absFileName(), '/', '!'), ':', '!');
				backupName.set(addName(lyxrc.backupdir_path, mangledName));
			}
		}

		LYXERR(Debug::FILES, "Backing up original file to " <<
				backupName.absFileName());
		// Except file is symlink do not copy because of #6587.
		// Hard links have bad luck.
		made_backup = symlink ?
			fileName().copyTo(backupName):
			fileName().moveTo(backupName);

		if (!made_backup) {
			Alert::error(_("Backup failure"),
				     bformat(_("Cannot create backup file %1$s.\n"
					       "Please check whether the directory exists and is writable."),
					     from_utf8(backupName.absFileName())));
			//LYXERR(Debug::DEBUG, "Fs error: " << fe.what());
		} else if (d->need_format_backup) {
			// the original file has been backed up successfully, so we
			// will not need to do that again
			d->need_format_backup = false;
		}
	}

	// Destroy tempfile since it keeps the file locked on windows (bug 9234)
	// Only do this if tempfile is not in autoremove mode
	if (!symlink)
		tempfile.reset();
	// If we have no symlink, we can simply rename the temp file.
	// Otherwise, we need to copy it so the symlink stays intact.
	// NOTE: by operator precedence this condition parses as
	// (made_backup && symlink) ? copyTo(...) : moveTo(...).
	if (made_backup && symlink ? savefile.copyTo(fileName(), true) :
	                             savefile.moveTo(fileName()))
	{
		// saveCheckSum() was already called by writeFile(), but the
		// time stamp is invalidated by copying/moving
		saveCheckSum();
		markClean();
		// the file associated with this buffer is now in the current format
		d->file_format = LYX_FORMAT;
		return true;
	}
	// else we saved the file, but failed to move it to the right location.

	if (needBackup && made_backup && !symlink) {
		// the original file was moved to some new location, so it will look
		// to the user as if it was deleted. (see bug #9234.) we could try
		// to restore it, but that would basically mean trying to do again
		// what we just failed to do. better to leave things as they are.
		Alert::error(_("Write failure"),
			     bformat(_("The file has successfully been saved as:\n %1$s.\n"
				       "But LyX could not move it to:\n %2$s.\n"
				       "Your original file has been backed up to:\n %3$s"),
				     from_utf8(savefile.absFileName()),
				     from_utf8(fileName().absFileName()),
				     from_utf8(backupName.absFileName())));
	} else {
		// either we did not try to make a backup, or else we tried and failed,
		// or else the original file was a symlink, in which case it was copied,
		// not moved. so the original file is intact.
		Alert::error(_("Write failure"),
			     bformat(_("Cannot move saved file to:\n %1$s.\n"
				       "But the file has successfully been saved as:\n %2$s."),
				     from_utf8(fileName().absFileName()),
				     from_utf8(savefile.absFileName())));
	}
	return false;
}
|
|
|
|
|
|
|
|
|
|
|
2006-11-30 16:59:50 +00:00
|
|
|
|
// Serialize the buffer to fname, compressed or not according to
// params().compressed. Progress is reported via message(). Returns true
// on success; refuses to overwrite the buffer's own file when it was
// opened read-only.
bool Buffer::writeFile(FileName const & fname) const
{
	if (d->read_only && fname == d->filename)
		return false;

	docstring const status = bformat(_("Saving document %1$s..."),
		makeDisplayPath(fname.absFileName()));
	message(status);

	string const encoded_fname = fname.toSafeFilesystemEncoding(os::CREATE);

	// The two branches differ only in the stream type used.
	bool ok = false;
	if (params().compressed) {
		gz::ogzstream ofs(encoded_fname.c_str(), ios::out|ios::trunc);
		ok = ofs && write(ofs);
	} else {
		ofstream ofs(encoded_fname.c_str(), ios::out|ios::trunc);
		ok = ofs && write(ofs);
	}

	if (!ok) {
		message(status + _(" could not write file!"));
		return false;
	}

	// see bug 6587
	// removeAutosaveFile();

	saveCheckSum();
	message(status + _(" done."));

	return true;
}
|
|
|
|
|
|
|
|
|
|
|
2020-10-21 08:35:40 +00:00
|
|
|
|
// Last-ditch save of a dirty buffer (used when LyX is going down).
// Tries, in order: next to the document itself (skipped for unnamed
// buffers), in the user's home directory, and in the temp directory, each
// time under the document name with ".emergency" appended. Returns a
// textual report of the attempts for display to the user. The buffer is
// marked clean after a successful write -- and also after the final failed
// attempt, so that no further attempts are made.
docstring Buffer::emergencyWrite() const
{
	// No need to save if the buffer has not changed.
	if (isClean())
		return docstring();

	string const doc = isUnnamed() ? onlyFileName(absFileName()) : absFileName();

	docstring user_message = bformat(
		_("LyX: Attempting to save document %1$s\n"), from_utf8(doc));

	// We try to save three places:
	// 1) Same place as document. Unless it is an unnamed doc.
	if (!isUnnamed()) {
		string s = absFileName();
		s += ".emergency";
		LYXERR0(" " << s);
		if (writeFile(FileName(s))) {
			markClean();
			user_message += " " + bformat(_("Saved to %1$s. Phew.\n"), from_utf8(s));
			return user_message;
		} else {
			user_message += " " + _("Save failed! Trying again...\n");
		}
	}

	// 2) In HOME directory.
	string s = addName(Package::get_home_dir().absFileName(), absFileName());
	s += ".emergency";
	lyxerr << ' ' << s << endl;
	if (writeFile(FileName(s))) {
		markClean();
		user_message += " " + bformat(_("Saved to %1$s. Phew.\n"), from_utf8(s));
		return user_message;
	}

	user_message += " " + _("Save failed! Trying yet again...\n");

	// 3) In "/tmp" directory.
	// MakeAbsPath to prepend the current
	// drive letter on OS/2
	s = addName(package().temp_dir().absFileName(), absFileName());
	s += ".emergency";
	lyxerr << ' ' << s << endl;
	if (writeFile(FileName(s))) {
		markClean();
		user_message += " " + bformat(_("Saved to %1$s. Phew.\n"), from_utf8(s));
		return user_message;
	}

	user_message += " " + _("Save failed! Document is lost.");
	// Don't try again.
	markClean();
	return user_message;
}
|
|
|
|
|
|
|
|
|
|
|
2007-01-13 18:29:50 +00:00
|
|
|
|
bool Buffer::write(ostream & ofs) const
{
	// Write the document in native LyX format onto an already-open
	// stream. Returns false if the stream is in a failed state
	// afterwards (e.g. disk full); the caller owns opening/closing.
#ifdef HAVE_LOCALE
	// Use the standard "C" locale for file output, so numbers and
	// formatting are not affected by the user's locale.
	ofs.imbue(locale::classic());
#endif

	// The top of the file should not be written by params().

	// write out a comment in the top of the file
	// Important: Keep the version formatting in sync with lyx2lyx and
	// tex2lyx (bug 7951)
	ofs << "#LyX " << lyx_version_major << "." << lyx_version_minor
	    << " created this file. For more info see https://www.lyx.org/\n"
	    << "\\lyxformat " << LYX_FORMAT << "\n"
	    << "\\begin_document\n";

	// For each author, set 'used' to true if there is a change
	// by this author in the document; otherwise set it to 'false'.
	for (Author const & a : params().authors())
		a.setUsed(false);

	// Scan every paragraph so that only authors who actually made
	// changes are marked as used (and thus written to the header).
	ParIterator const end = const_cast<Buffer *>(this)->par_iterator_end();
	ParIterator it = const_cast<Buffer *>(this)->par_iterator_begin();
	for ( ; it != end; ++it)
		it->checkAuthors(params().authors());

	// now write out the buffer parameters.
	ofs << "\\begin_header\n";
	params().writeFile(ofs, this);
	ofs << "\\end_header\n";

	// write the text
	ofs << "\n\\begin_body\n";
	text().write(ofs);
	ofs << "\n\\end_body\n";

	// Write marker that shows file is complete
	ofs << "\\end_document" << endl;

	// Shouldn't really be needed....
	//ofs.close();

	// how to check if close went ok?
	// Following is an attempt... (BE 20001011)

	// good() returns false if any error occurred, including some
	// formatting error.
	// bad() returns true if something bad happened in the buffer,
	// which should include file system full errors.

	bool status = true;
	if (!ofs) {
		// Any stream error (fail/bad bit) means the document may be
		// truncated on disk; report failure to the caller.
		status = false;
		lyxerr << "File was not closed properly." << endl;
	}

	return status;
}
|
|
|
|
|
|
|
|
|
|
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
Buffer::ExportStatus Buffer::makeLaTeXFile(FileName const & fname,
|
2011-10-22 15:10:43 +00:00
|
|
|
|
string const & original_path,
|
2009-04-06 06:58:30 +00:00
|
|
|
|
OutputParams const & runparams_in,
|
2011-10-29 20:14:48 +00:00
|
|
|
|
OutputWhat output) const
|
2000-03-06 02:42:40 +00:00
|
|
|
|
{
|
2009-04-06 06:58:30 +00:00
|
|
|
|
OutputParams runparams = runparams_in;
|
|
|
|
|
|
2007-03-25 16:31:16 +00:00
|
|
|
|
string const encoding = runparams.encoding->iconvName();
|
2011-10-06 23:23:45 +00:00
|
|
|
|
LYXERR(Debug::LATEX, "makeLaTeXFile encoding: " << encoding << ", fname=" << fname.realPath());
|
2002-03-21 16:55:34 +00:00
|
|
|
|
|
2008-11-16 12:21:29 +00:00
|
|
|
|
ofdocstream ofs;
|
2007-12-05 13:56:53 +00:00
|
|
|
|
try { ofs.reset(encoding); }
|
2012-09-17 08:01:26 +00:00
|
|
|
|
catch (iconv_codecvt_facet_exception const & e) {
|
2007-12-05 13:56:53 +00:00
|
|
|
|
lyxerr << "Caught iconv exception: " << e.what() << endl;
|
2019-05-09 07:28:37 +00:00
|
|
|
|
Alert::error(_("Iconv software exception Detected"),
|
2019-05-11 10:17:09 +00:00
|
|
|
|
bformat(_("Please verify that the `iconv' support software is"
|
2019-05-09 07:28:37 +00:00
|
|
|
|
" properly installed and supports the selected encoding"
|
|
|
|
|
" (%1$s), or change the encoding in"
|
|
|
|
|
" Document>Settings>Language."), from_ascii(encoding)));
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
return ExportError;
|
2007-12-05 13:56:53 +00:00
|
|
|
|
}
|
2003-11-05 12:06:20 +00:00
|
|
|
|
if (!openFileWrite(ofs, fname))
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
return ExportError;
|
2002-03-21 16:55:34 +00:00
|
|
|
|
|
2007-12-18 17:51:20 +00:00
|
|
|
|
ErrorList & errorList = d->errorLists["Export"];
|
|
|
|
|
errorList.clear();
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
ExportStatus status = ExportSuccess;
|
2016-06-19 02:39:38 +00:00
|
|
|
|
otexstream os(ofs);
|
2011-05-05 20:18:16 +00:00
|
|
|
|
|
|
|
|
|
// make sure we are ready to export
|
|
|
|
|
// this needs to be done before we validate
|
|
|
|
|
// FIXME Do we need to do this all the time? I.e., in children
|
|
|
|
|
// of a master we are exporting?
|
|
|
|
|
updateBuffer();
|
2011-05-07 11:57:08 +00:00
|
|
|
|
updateMacroInstances(OutputUpdate);
|
2011-05-05 20:18:16 +00:00
|
|
|
|
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
ExportStatus retval;
|
2006-10-26 15:01:45 +00:00
|
|
|
|
try {
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
retval = writeLaTeXSource(os, original_path, runparams, output);
|
|
|
|
|
if (retval == ExportKilled)
|
|
|
|
|
return ExportKilled;
|
2006-10-26 15:01:45 +00:00
|
|
|
|
}
|
2012-09-17 08:01:26 +00:00
|
|
|
|
catch (EncodingException const & e) {
|
2014-11-30 11:37:43 +00:00
|
|
|
|
docstring const failed(1, e.failed_char);
|
2007-12-24 13:55:01 +00:00
|
|
|
|
ostringstream oss;
|
2020-12-29 14:54:37 +00:00
|
|
|
|
oss << "0x" << hex << static_cast<uint32_t>(e.failed_char) << dec;
|
2017-12-25 11:22:45 +00:00
|
|
|
|
if (getParFromID(e.par_id).paragraph().layout().pass_thru) {
|
|
|
|
|
docstring msg = bformat(_("Uncodable character '%1$s'"
|
|
|
|
|
" (code point %2$s)"),
|
|
|
|
|
failed, from_utf8(oss.str()));
|
|
|
|
|
errorList.push_back(ErrorItem(msg, _("Some characters of your document are not "
|
|
|
|
|
"representable in specific verbatim contexts.\n"
|
|
|
|
|
"Changing the document encoding to utf8 could help."),
|
|
|
|
|
{e.par_id, e.pos}, {e.par_id, e.pos + 1}));
|
|
|
|
|
} else {
|
|
|
|
|
docstring msg = bformat(_("Could not find LaTeX command for character '%1$s'"
|
|
|
|
|
" (code point %2$s)"),
|
|
|
|
|
failed, from_utf8(oss.str()));
|
|
|
|
|
errorList.push_back(ErrorItem(msg, _("Some characters of your document are probably not "
|
|
|
|
|
"representable in the chosen encoding.\n"
|
|
|
|
|
"Changing the document encoding to utf8 could help."),
|
|
|
|
|
{e.par_id, e.pos}, {e.par_id, e.pos + 1}));
|
|
|
|
|
}
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
status = ExportError;
|
2007-12-18 17:51:20 +00:00
|
|
|
|
}
|
2012-09-17 08:01:26 +00:00
|
|
|
|
catch (iconv_codecvt_facet_exception const & e) {
|
2007-12-18 17:51:20 +00:00
|
|
|
|
errorList.push_back(ErrorItem(_("iconv conversion failed"),
|
2016-10-09 19:34:12 +00:00
|
|
|
|
_(e.what())));
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
status = ExportError;
|
2007-05-16 10:39:41 +00:00
|
|
|
|
}
|
2007-12-12 19:28:07 +00:00
|
|
|
|
catch (exception const & e) {
|
2007-12-18 17:51:20 +00:00
|
|
|
|
errorList.push_back(ErrorItem(_("conversion failed"),
|
2016-10-09 19:34:12 +00:00
|
|
|
|
_(e.what())));
|
2017-03-10 22:11:58 +00:00
|
|
|
|
lyxerr << e.what() << endl;
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
status = ExportError;
|
2007-05-16 10:39:41 +00:00
|
|
|
|
}
|
|
|
|
|
catch (...) {
|
|
|
|
|
lyxerr << "Caught some really weird exception..." << endl;
|
2008-07-14 08:35:00 +00:00
|
|
|
|
lyx_exit(1);
|
2006-10-26 15:01:45 +00:00
|
|
|
|
}
|
2002-07-05 19:21:29 +00:00
|
|
|
|
|
2016-06-19 02:39:38 +00:00
|
|
|
|
d->texrow = move(os.texrow());
|
|
|
|
|
|
2002-07-05 19:21:29 +00:00
|
|
|
|
ofs.close();
|
2006-10-26 15:01:45 +00:00
|
|
|
|
if (ofs.fail()) {
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
status = ExportError;
|
2004-02-11 14:45:44 +00:00
|
|
|
|
lyxerr << "File '" << fname << "' was not closed properly." << endl;
|
2007-05-16 10:39:41 +00:00
|
|
|
|
}
|
|
|
|
|
|
2013-11-12 19:52:35 +00:00
|
|
|
|
if (runparams_in.silent)
|
|
|
|
|
errorList.clear();
|
|
|
|
|
else
|
|
|
|
|
errors("Export");
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
return status;
|
2002-07-05 19:21:29 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
Buffer::ExportStatus Buffer::writeLaTeXSource(otexstream & os,
|
2011-10-22 15:10:43 +00:00
|
|
|
|
string const & original_path,
|
2003-11-05 12:06:20 +00:00
|
|
|
|
OutputParams const & runparams_in,
|
2012-10-03 08:41:07 +00:00
|
|
|
|
OutputWhat output) const
|
2002-07-05 19:21:29 +00:00
|
|
|
|
{
|
2008-03-06 18:13:04 +00:00
|
|
|
|
// The child documents, if any, shall be already loaded at this point.
|
|
|
|
|
|
2003-11-05 12:06:20 +00:00
|
|
|
|
OutputParams runparams = runparams_in;
|
2002-03-07 16:03:36 +00:00
|
|
|
|
|
2018-04-20 18:00:01 +00:00
|
|
|
|
// Some macros rely on font encoding
|
|
|
|
|
runparams.main_fontenc = params().main_font_encoding();
|
|
|
|
|
|
2012-10-03 08:41:07 +00:00
|
|
|
|
// If we are compiling a file standalone, even if this is the
|
|
|
|
|
// child of some other buffer, let's cut the link here, so the
|
|
|
|
|
// file is really independent and no concurring settings from
|
|
|
|
|
// the master (e.g. branch state) interfere (see #8100).
|
|
|
|
|
if (!runparams.is_child)
|
|
|
|
|
d->ignore_parent = true;
|
|
|
|
|
|
2008-09-26 15:53:15 +00:00
|
|
|
|
// Classify the unicode characters appearing in math insets
|
2013-09-26 20:22:02 +00:00
|
|
|
|
BufferEncodings::initUnicodeMath(*this);
|
2008-09-26 15:53:15 +00:00
|
|
|
|
|
2000-03-06 02:42:40 +00:00
|
|
|
|
// validate the buffer.
|
2007-11-15 20:04:51 +00:00
|
|
|
|
LYXERR(Debug::LATEX, " Validating buffer...");
|
2006-03-28 18:49:46 +00:00
|
|
|
|
LaTeXFeatures features(*this, params(), runparams);
|
2000-03-06 02:42:40 +00:00
|
|
|
|
validate(features);
|
2013-07-18 07:26:56 +00:00
|
|
|
|
// This is only set once per document (in master)
|
2019-03-13 10:31:25 +00:00
|
|
|
|
if (!runparams.is_child) {
|
2013-07-18 07:26:56 +00:00
|
|
|
|
runparams.use_polyglossia = features.usePolyglossia();
|
2020-04-05 13:23:22 +00:00
|
|
|
|
runparams.use_hyperref = features.isRequired("hyperref");
|
2019-03-13 10:31:25 +00:00
|
|
|
|
runparams.use_CJK = features.mustProvide("CJK");
|
|
|
|
|
}
|
2007-11-15 20:04:51 +00:00
|
|
|
|
LYXERR(Debug::LATEX, " Buffer validation done.");
|
2002-03-21 16:55:34 +00:00
|
|
|
|
|
2011-10-29 20:14:48 +00:00
|
|
|
|
bool const output_preamble =
|
|
|
|
|
output == FullSource || output == OnlyPreamble;
|
|
|
|
|
bool const output_body =
|
2012-06-08 18:31:20 +00:00
|
|
|
|
output == FullSource || output == OnlyBody;
|
2011-10-29 20:14:48 +00:00
|
|
|
|
|
2002-03-21 16:55:34 +00:00
|
|
|
|
// The starting paragraph of the coming rows is the
|
2000-03-06 02:42:40 +00:00
|
|
|
|
// first paragraph of the document. (Asger)
|
2003-07-26 21:37:10 +00:00
|
|
|
|
if (output_preamble && runparams.nice) {
|
2003-07-28 14:09:05 +00:00
|
|
|
|
os << "%% LyX " << lyx_version << " created this file. "
|
2017-12-17 01:24:26 +00:00
|
|
|
|
"For more info, see https://www.lyx.org/.\n"
|
2000-03-06 02:42:40 +00:00
|
|
|
|
"%% Do not edit unless you really know what "
|
|
|
|
|
"you are doing.\n";
|
|
|
|
|
}
|
2007-11-15 20:04:51 +00:00
|
|
|
|
LYXERR(Debug::INFO, "lyx document header finished");
|
2008-03-26 12:55:36 +00:00
|
|
|
|
|
2011-10-22 15:10:43 +00:00
|
|
|
|
// There are a few differences between nice LaTeX and usual files:
|
|
|
|
|
// usual files have \batchmode and special input@path to allow
|
|
|
|
|
// inclusion of figures specified by an explicitly relative path
|
|
|
|
|
// (i.e., a path starting with './' or '../') with either \input or
|
|
|
|
|
// \includegraphics, as the TEXINPUTS method doesn't work in this case.
|
|
|
|
|
// input@path is set when the actual parameter original_path is set.
|
|
|
|
|
// This is done for usual tex-file, but not for nice-latex-file.
|
|
|
|
|
// (Matthias 250696)
|
|
|
|
|
// Note that input@path is only needed for something the user does
|
|
|
|
|
// in the preamble, included .tex files or ERT, files included by
|
|
|
|
|
// LyX work without it.
|
2003-07-26 21:37:10 +00:00
|
|
|
|
if (output_preamble) {
|
2003-05-22 21:10:22 +00:00
|
|
|
|
if (!runparams.nice) {
|
2000-03-06 02:42:40 +00:00
|
|
|
|
// code for usual, NOT nice-latex-file
|
Introduce a wrapper class for odocstream to help ensuring that no
blank lines may be inadvertently output. This is achieved by using two
special iomanip-like variables (breakln and safebreakln) in the lyx::
namespace. When they are inserted in the stream, a newline is output
only if not already at the beginning of a line. The difference between
breakln and safebreakln is that, if needed, the former outputs '\n'
and the latter "%\n".
In future, the new class will also be used for counting the number of
newlines issued. Even if the infractrure for doing that is already in
place, the counting is essentially still done the old way.
There are still places in the code where the functionality of the
class could be used, most probably. ATM, it is used for InsetTabular,
InsetListings, InsetFloat, and InsetText.
The Comment and GreyedOut insets required a special treatment and a
new InsetLayout parameter (Display) has been introduced. The default
for Display is "true", meaning that the corresponding latex
environment is of "display" type, i.e., it stands on its own, whereas
"false" means that the contents appear inline with the text. The
latter is the case for both Comment and GreyedOut insets.
Mostly, the only visible effects on latex exports should be the
disappearing of some redundant % chars and the appearing/disappearing
of null {} latex groups after a comment or lyxgreyedout environments
(they are related to the presence or absence of a space immediately
after those environments), as well as the fact that math environments
are now started on their own lines.
As a last thing, only the latex code between \begin{document} and
\end{document} goes through the new class, the preamble being directly
output through odocstream, as usual.
git-svn-id: svn://svn.lyx.org/lyx/lyx-devel/trunk@37360 a592a061-630c-0410-9148-cb99ea01b6c8
2011-01-29 02:41:13 +00:00
|
|
|
|
os << "\\batchmode\n"; // changed from \nonstopmode
|
2000-03-06 02:42:40 +00:00
|
|
|
|
}
|
2011-10-22 15:10:43 +00:00
|
|
|
|
if (!original_path.empty()) {
|
|
|
|
|
// FIXME UNICODE
|
|
|
|
|
// We don't know the encoding of inputpath
|
|
|
|
|
docstring const inputpath = from_utf8(original_path);
|
|
|
|
|
docstring uncodable_glyphs;
|
|
|
|
|
Encoding const * const enc = runparams.encoding;
|
|
|
|
|
if (enc) {
|
2020-10-09 06:04:20 +00:00
|
|
|
|
for (char_type n : inputpath) {
|
|
|
|
|
if (!enc->encodable(n)) {
|
|
|
|
|
docstring const glyph(1, n);
|
2011-10-22 15:10:43 +00:00
|
|
|
|
LYXERR0("Uncodable character '"
|
|
|
|
|
<< glyph
|
|
|
|
|
<< "' in input path!");
|
|
|
|
|
uncodable_glyphs += glyph;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// warn user if we found uncodable glyphs.
|
|
|
|
|
if (!uncodable_glyphs.empty()) {
|
|
|
|
|
frontend::Alert::warning(
|
|
|
|
|
_("Uncodable character in file path"),
|
2017-10-16 20:22:53 +00:00
|
|
|
|
bformat(
|
2011-10-22 15:10:43 +00:00
|
|
|
|
_("The path of your document\n"
|
|
|
|
|
"(%1$s)\n"
|
|
|
|
|
"contains glyphs that are unknown "
|
|
|
|
|
"in the current document encoding "
|
|
|
|
|
"(namely %2$s). This may result in "
|
|
|
|
|
"incomplete output, unless "
|
|
|
|
|
"TEXINPUTS contains the document "
|
|
|
|
|
"directory and you don't use "
|
|
|
|
|
"explicitly relative paths (i.e., "
|
|
|
|
|
"paths starting with './' or "
|
|
|
|
|
"'../') in the preamble or in ERT."
|
|
|
|
|
"\n\nIn case of problems, choose "
|
|
|
|
|
"an appropriate document encoding\n"
|
|
|
|
|
"(such as utf8) or change the "
|
|
|
|
|
"file path name."),
|
|
|
|
|
inputpath, uncodable_glyphs));
|
|
|
|
|
} else {
|
2018-05-27 09:54:07 +00:00
|
|
|
|
string docdir = os::latex_path(original_path);
|
2011-10-22 15:10:43 +00:00
|
|
|
|
if (contains(docdir, '#')) {
|
|
|
|
|
docdir = subst(docdir, "#", "\\#");
|
|
|
|
|
os << "\\catcode`\\#=11"
|
|
|
|
|
"\\def\\#{#}\\catcode`\\#=6\n";
|
|
|
|
|
}
|
|
|
|
|
if (contains(docdir, '%')) {
|
|
|
|
|
docdir = subst(docdir, "%", "\\%");
|
|
|
|
|
os << "\\catcode`\\%=11"
|
|
|
|
|
"\\def\\%{%}\\catcode`\\%=14\n";
|
|
|
|
|
}
|
2019-11-20 10:57:32 +00:00
|
|
|
|
if (contains(docdir, '~'))
|
|
|
|
|
docdir = subst(docdir, "~", "\\string~");
|
2019-11-20 10:48:18 +00:00
|
|
|
|
bool const nonascii = !isAscii(from_utf8(docdir));
|
|
|
|
|
// LaTeX 2019/10/01 handles non-ascii path without detokenize
|
|
|
|
|
bool const utfpathlatex = features.isAvailable("LaTeX-2019/10/01");
|
2019-11-20 10:57:32 +00:00
|
|
|
|
bool const detokenize = !utfpathlatex && nonascii;
|
2018-05-27 09:54:07 +00:00
|
|
|
|
bool const quote = contains(docdir, ' ');
|
2019-11-20 10:48:18 +00:00
|
|
|
|
if (utfpathlatex && nonascii)
|
|
|
|
|
os << "\\UseRawInputEncoding\n";
|
2011-10-22 15:10:43 +00:00
|
|
|
|
os << "\\makeatletter\n"
|
2018-05-27 09:54:07 +00:00
|
|
|
|
<< "\\def\\input@path{{";
|
|
|
|
|
if (detokenize)
|
|
|
|
|
os << "\\detokenize{";
|
|
|
|
|
if (quote)
|
|
|
|
|
os << "\"";
|
|
|
|
|
os << docdir;
|
|
|
|
|
if (quote)
|
|
|
|
|
os << "\"";
|
|
|
|
|
if (detokenize)
|
|
|
|
|
os << "}";
|
|
|
|
|
os << "}}\n"
|
2011-10-22 15:10:43 +00:00
|
|
|
|
<< "\\makeatother\n";
|
|
|
|
|
}
|
|
|
|
|
}
|
2002-03-21 16:55:34 +00:00
|
|
|
|
|
2008-03-26 12:55:36 +00:00
|
|
|
|
// get parent macros (if this buffer has a parent) which will be
|
|
|
|
|
// written at the document begin further down.
|
|
|
|
|
MacroSet parentMacros;
|
|
|
|
|
listParentMacros(parentMacros, features);
|
|
|
|
|
|
2003-02-16 00:54:43 +00:00
|
|
|
|
// Write the preamble
|
2010-01-07 10:01:26 +00:00
|
|
|
|
runparams.use_babel = params().writeLaTeX(os, features,
|
|
|
|
|
d->filename.onlyPath());
|
2017-01-08 08:39:46 +00:00
|
|
|
|
|
2019-08-14 11:10:42 +00:00
|
|
|
|
// Active characters
|
|
|
|
|
runparams.active_chars = features.getActiveChars();
|
|
|
|
|
|
2017-01-08 08:39:46 +00:00
|
|
|
|
// Biblatex bibliographies are loaded here
|
|
|
|
|
if (params().useBiblatex()) {
|
2019-03-29 14:45:55 +00:00
|
|
|
|
vector<pair<docstring, string>> const bibfiles =
|
2017-11-05 01:23:25 +00:00
|
|
|
|
prepareBibFilePaths(runparams, getBibfiles(), true);
|
2019-03-29 14:45:55 +00:00
|
|
|
|
for (pair<docstring, string> const & file: bibfiles) {
|
|
|
|
|
os << "\\addbibresource";
|
|
|
|
|
if (!file.second.empty())
|
|
|
|
|
os << "[bibencoding=" << file.second << "]";
|
|
|
|
|
os << "{" << file.first << "}\n";
|
|
|
|
|
}
|
2017-01-08 08:39:46 +00:00
|
|
|
|
}
|
|
|
|
|
|
2016-10-27 07:59:01 +00:00
|
|
|
|
if (!runparams.dryrun && features.hasPolyglossiaExclusiveLanguages()
|
|
|
|
|
&& !features.hasOnlyPolyglossiaLanguages()) {
|
|
|
|
|
docstring blangs;
|
|
|
|
|
docstring plangs;
|
|
|
|
|
vector<string> bll = features.getBabelExclusiveLanguages();
|
|
|
|
|
vector<string> pll = features.getPolyglossiaExclusiveLanguages();
|
|
|
|
|
if (!bll.empty()) {
|
|
|
|
|
docstring langs;
|
2017-11-05 02:15:01 +00:00
|
|
|
|
for (string const & sit : bll) {
|
2016-10-27 07:59:01 +00:00
|
|
|
|
if (!langs.empty())
|
|
|
|
|
langs += ", ";
|
2017-11-05 02:15:01 +00:00
|
|
|
|
langs += _(sit);
|
2016-10-27 07:59:01 +00:00
|
|
|
|
}
|
|
|
|
|
blangs = bll.size() > 1 ?
|
2017-10-16 20:22:53 +00:00
|
|
|
|
bformat(_("The languages %1$s are only supported by Babel."), langs)
|
|
|
|
|
: bformat(_("The language %1$s is only supported by Babel."), langs);
|
2016-10-27 07:59:01 +00:00
|
|
|
|
}
|
|
|
|
|
if (!pll.empty()) {
|
|
|
|
|
docstring langs;
|
2017-11-05 02:15:01 +00:00
|
|
|
|
for (string const & pit : pll) {
|
2016-10-27 07:59:01 +00:00
|
|
|
|
if (!langs.empty())
|
|
|
|
|
langs += ", ";
|
2017-11-05 02:15:01 +00:00
|
|
|
|
langs += _(pit);
|
2016-10-27 07:59:01 +00:00
|
|
|
|
}
|
|
|
|
|
plangs = pll.size() > 1 ?
|
2017-10-16 20:22:53 +00:00
|
|
|
|
bformat(_("The languages %1$s are only supported by Polyglossia."), langs)
|
|
|
|
|
: bformat(_("The language %1$s is only supported by Polyglossia."), langs);
|
2016-10-27 07:59:01 +00:00
|
|
|
|
if (!blangs.empty())
|
2017-07-03 17:53:14 +00:00
|
|
|
|
plangs += "\n";
|
2016-10-27 07:59:01 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
frontend::Alert::warning(
|
|
|
|
|
_("Incompatible Languages!"),
|
2017-10-16 20:22:53 +00:00
|
|
|
|
bformat(
|
2016-10-27 07:59:01 +00:00
|
|
|
|
_("You cannot use the following languages "
|
|
|
|
|
"together in one LaTeX document because "
|
|
|
|
|
"they require conflicting language packages:\n"
|
|
|
|
|
"%1$s%2$s"),
|
|
|
|
|
plangs, blangs));
|
|
|
|
|
}
|
2002-07-05 19:21:29 +00:00
|
|
|
|
|
2013-07-18 07:26:56 +00:00
|
|
|
|
// Japanese might be required only in some children of a document,
|
|
|
|
|
// but once required, we must keep use_japanese true.
|
|
|
|
|
runparams.use_japanese |= features.isRequired("japanese");
|
2008-08-18 17:26:09 +00:00
|
|
|
|
|
2012-10-05 17:26:07 +00:00
|
|
|
|
if (!output_body) {
|
|
|
|
|
// Restore the parenthood if needed
|
|
|
|
|
if (!runparams.is_child)
|
|
|
|
|
d->ignore_parent = false;
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
return ExportSuccess;
|
2012-10-05 17:26:07 +00:00
|
|
|
|
}
|
2002-05-10 12:58:07 +00:00
|
|
|
|
|
2000-03-06 02:42:40 +00:00
|
|
|
|
// make the body.
|
2016-10-11 12:52:10 +00:00
|
|
|
|
// mark the beginning of the body to separate it from InPreamble insets
|
|
|
|
|
os.texrow().start(TexRow::beginDocument());
|
2002-07-05 19:21:29 +00:00
|
|
|
|
os << "\\begin{document}\n";
|
2008-08-01 17:57:01 +00:00
|
|
|
|
|
2016-10-23 16:04:13 +00:00
|
|
|
|
// mark the start of a new paragraph by simulating a newline,
|
|
|
|
|
// so that os.afterParbreak() returns true at document start
|
|
|
|
|
os.lastChar('\n');
|
|
|
|
|
|
2008-03-26 12:55:36 +00:00
|
|
|
|
// output the parent macros
|
2017-11-05 02:15:01 +00:00
|
|
|
|
for (auto const & mac : parentMacros) {
|
|
|
|
|
int num_lines = mac->write(os.os(), true);
|
2011-02-13 21:41:44 +00:00
|
|
|
|
os.texrow().newlines(num_lines);
|
2010-02-08 17:39:55 +00:00
|
|
|
|
}
|
2011-12-03 22:15:11 +00:00
|
|
|
|
|
2003-07-26 21:37:10 +00:00
|
|
|
|
} // output_preamble
|
2007-08-13 14:24:49 +00:00
|
|
|
|
|
2007-11-15 20:04:51 +00:00
|
|
|
|
LYXERR(Debug::INFO, "preamble finished, now the body.");
|
2001-03-01 15:57:10 +00:00
|
|
|
|
|
2004-03-25 10:12:44 +00:00
|
|
|
|
// the real stuff
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
try {
|
|
|
|
|
latexParagraphs(*this, text(), os, runparams);
|
|
|
|
|
}
|
|
|
|
|
catch (ConversionException const &) { return ExportKilled; }
|
2000-05-04 08:14:34 +00:00
|
|
|
|
|
2004-03-25 10:12:44 +00:00
|
|
|
|
// Restore the parenthood if needed
|
2012-10-03 08:41:07 +00:00
|
|
|
|
if (!runparams.is_child)
|
|
|
|
|
d->ignore_parent = false;
|
2004-03-25 10:12:44 +00:00
|
|
|
|
|
2000-05-04 08:14:34 +00:00
|
|
|
|
// add this just in case after all the paragraphs
|
2011-02-14 17:09:39 +00:00
|
|
|
|
os << endl;
|
2000-05-04 08:14:34 +00:00
|
|
|
|
|
2003-07-26 21:37:10 +00:00
|
|
|
|
if (output_preamble) {
|
2002-07-05 19:21:29 +00:00
|
|
|
|
os << "\\end{document}\n";
|
2007-11-15 20:04:51 +00:00
|
|
|
|
LYXERR(Debug::LATEX, "makeLaTeXFile...done");
|
2000-05-04 08:14:34 +00:00
|
|
|
|
} else {
|
2007-11-15 20:04:51 +00:00
|
|
|
|
LYXERR(Debug::LATEX, "LaTeXFile for inclusion made.");
|
2000-05-04 08:14:34 +00:00
|
|
|
|
}
|
2007-03-18 10:59:16 +00:00
|
|
|
|
runparams_in.encoding = runparams.encoding;
|
2000-05-04 08:14:34 +00:00
|
|
|
|
|
2007-11-15 20:04:51 +00:00
|
|
|
|
LYXERR(Debug::INFO, "Finished making LaTeX file.");
|
2011-02-13 21:41:44 +00:00
|
|
|
|
LYXERR(Debug::INFO, "Row count was " << os.texrow().rows() - 1 << '.');
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
return ExportSuccess;
|
2000-05-04 08:14:34 +00:00
|
|
|
|
}
|
|
|
|
|
|
2000-07-04 19:16:35 +00:00
|
|
|
|
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
Buffer::ExportStatus Buffer::makeDocBookFile(FileName const & fname,
			      OutputParams const & runparams,
			      OutputWhat output) const
{
	// Export this buffer as a DocBook file to \p fname.
	// Opens the output file, refreshes buffer state, delegates the actual
	// source generation to writeDocBookSource(), and reports the status.
	LYXERR(Debug::LATEX, "makeDocBookFile...");

	ofdocstream ofs;
	if (!openFileWrite(ofs, fname))
		return ExportError;

	// make sure we are ready to export
	// this needs to be done before we validate
	updateBuffer();
	updateMacroInstances(OutputUpdate);

	ExportStatus const retval =
		writeDocBookSource(ofs, runparams, output);
	if (retval == ExportKilled)
		return ExportKilled;

	ofs.close();
	if (ofs.fail())
		lyxerr << "File '" << fname << "' was not closed properly." << endl;

	// Propagate the writer's status (consistent with makeLyXHTMLFile)
	// instead of hard-coding ExportSuccess, so any future non-success
	// status from writeDocBookSource() is not silently discarded.
	return retval;
}
|
|
|
|
|
|
|
|
|
|
|
2020-06-08 21:27:49 +00:00
|
|
|
|
Buffer::ExportStatus Buffer::writeDocBookSource(odocstream & os,
			     OutputParams const & runparams,
			     OutputWhat output) const
{
	// Write the DocBook (XML) source of this buffer to \p os.
	// \p output selects which parts are emitted: the XML preamble plus
	// root element, the body (the paragraphs), or both.
	LaTeXFeatures features(*this, params(), runparams);
	validate(features);
	d->bibinfo_.makeCitationLabels(*this);

	d->texrow.reset();

	DocumentClass const & tclass = params().documentClass();

	bool const output_preamble =
		output == FullSource || output == OnlyPreamble;
	bool const output_body =
		output == FullSource || output == OnlyBody || output == IncludedFile;

	if (output_preamble) {
		// XML preamble, no doctype needed.
		// Not using XMLStream for this, as the root tag would be in the tag stack and make troubles with the error
		// detection mechanisms (these are called before the end tag is output, and thus interact with the canary
		// parsep in output_docbook.cpp).
		os << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
		   << "<!-- This DocBook file was created by LyX " << lyx_version
		   << "\n See https://www.lyx.org/ for more information -->\n";

		// Directly output the root tag, based on the current type of document.
		string languageCode = params().language->code();
		// NOTE: renamed from 'params' to avoid shadowing the member
		// function Buffer::params() within this scope.
		string attributes = "xml:lang=\"" + languageCode + '"'
				+ " xmlns=\"http://docbook.org/ns/docbook\""
				+ " xmlns:xlink=\"http://www.w3.org/1999/xlink\""
				+ " xmlns:m=\"http://www.w3.org/1998/Math/MathML\""
				+ " xmlns:xi=\"http://www.w3.org/2001/XInclude\""
				+ " version=\"5.2\"";

		os << "<" << from_ascii(tclass.docbookroot()) << " " << from_ascii(attributes) << ">\n";
	}

	if (output_body) {
		// Start to output the document.
		XMLStream xs(os);
		docbookParagraphs(text(), *this, xs, runparams);
	}

	if (output_preamble) {
		// Close the root element. No need for a line break, as free text is never allowed
		// in a root element, it must always be wrapped in some container.
		os << "</" << from_ascii(tclass.docbookroot()) << ">";
	}

	return ExportSuccess;
}
|
|
|
|
|
|
|
|
|
|
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
Buffer::ExportStatus Buffer::makeLyXHTMLFile(FileName const & fname,
			      OutputParams const & runparams) const
{
	// Export this buffer as an XHTML file to \p fname.
	// Opens the output file, refreshes buffer state (including the
	// master document's counters), and delegates the actual source
	// generation to writeLyXHTMLSource().
	LYXERR(Debug::LATEX, "makeLyXHTMLFile...");

	ofdocstream ofs;
	if (!openFileWrite(ofs, fname))
		return ExportError;

	// make sure we are ready to export
	// this has to be done before we validate
	updateBuffer(UpdateMaster, OutputUpdate);
	updateMacroInstances(OutputUpdate);

	ExportStatus const retval = writeLyXHTMLSource(ofs, runparams, FullSource);
	// Bail out immediately on cancellation; the partially written file
	// is left behind but the caller knows the export was killed.
	if (retval == ExportKilled)
		return retval;

	ofs.close();
	if (ofs.fail())
		lyxerr << "File '" << fname << "' was not closed properly." << endl;

	return retval;
}
|
|
|
|
|
|
|
|
|
|
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
Buffer::ExportStatus Buffer::writeLyXHTMLSource(odocstream & os,
			     OutputParams const & runparams,
			     OutputWhat output) const
{
	// Write the XHTML source of this buffer to \p os.
	// \p output selects which parts are emitted: the <head> preamble
	// (including collected CSS), the <body>, or both. Returns
	// ExportKilled if paragraph conversion was cancelled.
	LaTeXFeatures features(*this, params(), runparams);
	validate(features);
	d->bibinfo_.makeCitationLabels(*this);

	bool const output_preamble =
		output == FullSource || output == OnlyPreamble;
	bool const output_body =
		output == FullSource || output == OnlyBody || output == IncludedFile;

	if (output_preamble) {
		os << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
		   << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.1 plus MathML 2.0//EN\" \"http://www.w3.org/Math/DTD/mathml2/xhtml-math11-f.dtd\">\n"
		   // FIXME Language should be set properly.
		   << "<html xmlns=\"http://www.w3.org/1999/xhtml\">\n"
		   << "<head>\n"
		   << "<meta name=\"GENERATOR\" content=\"" << PACKAGE_STRING << "\" />\n"
		   // FIXME Presumably need to set this right
		   << "<meta http-equiv=\"Content-type\" content=\"text/html;charset=UTF-8\" />\n";

		// Use the document's title if one was found during validation,
		// otherwise fall back to a generic title.
		docstring const & doctitle = features.htmlTitle();
		os << "<title>"
		   << (doctitle.empty() ?
		         from_ascii("LyX Document") :
		         xml::escapeString(doctitle, XMLStream::ESCAPE_ALL))
		   << "</title>\n";

		docstring styles = features.getTClassHTMLPreamble();
		if (!styles.empty())
			os << "\n<!-- Text Class Preamble -->\n" << styles << '\n';

		// we will collect CSS information in a stream, and then output it
		// either here, as part of the header, or else in a separate file.
		odocstringstream css;
		styles = features.getCSSSnippets();
		if (!styles.empty())
			css << "/* LyX Provided Styles */\n" << styles << '\n';

		styles = features.getTClassHTMLStyles();
		if (!styles.empty())
			css << "/* Layout-provided Styles */\n" << styles << '\n';

		// Emit body colors only when they differ from the defaults
		// (black foreground, white background).
		bool const needfg = params().fontcolor != RGBColor(0, 0, 0);
		bool const needbg = params().backgroundcolor != RGBColor(0xFF, 0xFF, 0xFF);
		if (needfg || needbg) {
			css << "\nbody {\n";
			if (needfg)
				css << " color: "
				    << from_ascii(X11hexname(params().fontcolor))
				    << ";\n";
			if (needbg)
				css << " background-color: "
				    << from_ascii(X11hexname(params().backgroundcolor))
				    << ";\n";
			css << "}\n";
		}

		docstring const dstyles = css.str();
		if (!dstyles.empty()) {
			bool written = false;
			if (params().html_css_as_file) {
				// open a file for CSS info
				ofdocstream ocss;
				string const fcssname = addName(temppath(), "docstyle.css");
				FileName const fcssfile = FileName(fcssname);
				if (openFileWrite(ocss, fcssfile)) {
					ocss << dstyles;
					ocss.close();
					written = true;
					// write link to header
					os << "<link rel='stylesheet' href='docstyle.css' type='text/css' />\n";
					// register file
					runparams.exportdata->addExternalFile("xhtml", fcssfile);
				}
			}
			// we are here if the CSS is supposed to be written to the header
			// or if we failed to write it to an external file.
			if (!written) {
				os << "<style type='text/css'>\n"
					<< dstyles
					<< "\n</style>\n";
			}
		}
		os << "</head>\n";
	}

	if (output_body) {
		// An included file is spliced into its master's <body>, so it
		// must not emit its own body tags.
		bool const output_body_tag = (output != IncludedFile);
		if (output_body_tag)
			os << "<body dir=\"auto\">\n";
		XMLStream xs(os);
		if (output != IncludedFile)
			// if we're an included file, the counters are in the master.
			params().documentClass().counters().reset();
		// Conversion of graphics/externals may be cancelled in cloned
		// buffers; translate that into an ExportKilled status.
		try {
			xhtmlParagraphs(text(), *this, xs, runparams);
		}
		catch (ConversionException const &) { return ExportKilled; }
		if (output_body_tag)
			os << "</body>\n";
	}

	if (output_preamble)
		os << "</html>\n";

	return ExportSuccess;
}
|
|
|
|
|
|
|
|
|
|
|
1999-09-27 18:44:28 +00:00
|
|
|
|
// chktex should be run with these flags disabled: 3, 22, 25, 30, 38(?)
|
|
|
|
|
// Other flags: -wall -v0 -x
|
|
|
|
|
int Buffer::runChktex()
|
|
|
|
|
{
|
2007-10-21 10:50:56 +00:00
|
|
|
|
setBusy(true);
|
1999-09-27 18:44:28 +00:00
|
|
|
|
|
|
|
|
|
// get LaTeX-Filename
|
2007-04-06 18:03:29 +00:00
|
|
|
|
FileName const path(temppath());
|
2010-04-21 01:19:09 +00:00
|
|
|
|
string const name = addName(path.absFileName(), latexName());
|
2011-10-22 15:10:43 +00:00
|
|
|
|
string const org_path = filePath();
|
1999-09-27 18:44:28 +00:00
|
|
|
|
|
2007-12-12 19:57:42 +00:00
|
|
|
|
PathChanger p(path); // path to LaTeX file
|
2006-09-11 08:54:10 +00:00
|
|
|
|
message(_("Running chktex..."));
|
1999-09-27 18:44:28 +00:00
|
|
|
|
|
|
|
|
|
// Generate the LaTeX file if neccessary
|
2007-03-18 10:59:16 +00:00
|
|
|
|
OutputParams runparams(¶ms().encoding());
|
2020-11-30 22:00:40 +00:00
|
|
|
|
runparams.flavor = Flavor::LaTeX;
|
2003-05-22 21:10:22 +00:00
|
|
|
|
runparams.nice = false;
|
2010-03-04 08:13:42 +00:00
|
|
|
|
runparams.linelen = lyxrc.plaintext_linelen;
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
ExportStatus const retval =
|
|
|
|
|
makeLaTeXFile(FileName(name), org_path, runparams);
|
|
|
|
|
if (retval != ExportSuccess) {
|
|
|
|
|
// error code on failure
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
1999-09-27 18:44:28 +00:00
|
|
|
|
|
|
|
|
|
TeXErrors terr;
|
2010-04-21 01:19:28 +00:00
|
|
|
|
Chktex chktex(lyxrc.chktex_command, onlyFileName(name), filePath());
|
2005-01-05 20:21:27 +00:00
|
|
|
|
int const res = chktex.run(terr); // run chktex
|
1999-09-27 18:44:28 +00:00
|
|
|
|
|
|
|
|
|
if (res == -1) {
|
2006-09-11 08:54:10 +00:00
|
|
|
|
Alert::error(_("chktex failure"),
|
|
|
|
|
_("Could not run chktex successfully."));
|
2013-04-21 05:39:19 +00:00
|
|
|
|
} else {
|
2007-11-30 17:46:49 +00:00
|
|
|
|
ErrorList & errlist = d->errorLists["ChkTeX"];
|
2007-10-21 10:50:56 +00:00
|
|
|
|
errlist.clear();
|
|
|
|
|
bufferErrors(terr, errlist);
|
1999-09-27 18:44:28 +00:00
|
|
|
|
}
|
|
|
|
|
|
2007-10-21 10:50:56 +00:00
|
|
|
|
setBusy(false);
|
1999-09-27 18:44:28 +00:00
|
|
|
|
|
2013-11-12 19:52:35 +00:00
|
|
|
|
if (runparams.silent)
|
|
|
|
|
d->errorLists["ChkTeX"].clear();
|
|
|
|
|
else
|
|
|
|
|
errors("ChkTeX");
|
2006-08-13 16:16:43 +00:00
|
|
|
|
|
1999-09-27 18:44:28 +00:00
|
|
|
|
return res;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2000-02-04 09:38:32 +00:00
|
|
|
|
void Buffer::validate(LaTeXFeatures & features) const
{
	// Buffer params are only validated for the top-level document:
	// an included child shares its parent's params (see bug #5941).
	if (!features.runparams().is_child)
		params().validate(features);

	// The include tracking is reset only at the top of the hierarchy.
	if (!parent())
		clearIncludeList();

	// Let every paragraph register the features it requires.
	for (Paragraph const & par : paragraphs())
		par.validate(features);

	if (lyxerr.debugging(Debug::LATEX))
		features.showStruct();
}
|
|
|
|
|
|
|
|
|
|
|
2006-10-12 10:50:45 +00:00
|
|
|
|
void Buffer::getLabelList(vector<docstring> & list) const
{
	// A child document delegates to the master's label list.
	if (parent()) {
		masterBuffer()->getLabelList(list);
		return;
	}

	// Collect the top-level entries of the "label" TOC.
	list.clear();
	shared_ptr<Toc> labeltoc = d->toc_backend.toc("label");
	for (auto const & item : *labeltoc)
		if (item.depth() == 0)
			list.push_back(item.str());
}
|
|
|
|
|
|
|
|
|
|
|
2010-08-10 14:12:48 +00:00
|
|
|
|
void Buffer::invalidateBibinfoCache() const
|
2008-06-05 06:42:53 +00:00
|
|
|
|
{
|
2010-01-09 16:15:46 +00:00
|
|
|
|
d->bibinfo_cache_valid_ = false;
|
2011-05-07 23:02:53 +00:00
|
|
|
|
d->cite_labels_valid_ = false;
|
2017-11-05 01:23:25 +00:00
|
|
|
|
removeBiblioTempFiles();
|
2010-08-10 14:12:48 +00:00
|
|
|
|
// also invalidate the cache for the parent buffer
|
|
|
|
|
Buffer const * const pbuf = d->parent();
|
|
|
|
|
if (pbuf)
|
|
|
|
|
pbuf->invalidateBibinfoCache();
|
2006-04-15 11:46:17 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2018-09-02 04:10:01 +00:00
|
|
|
|
docstring_list const & Buffer::getBibfiles(UpdateScope scope) const
{
	// FIXME This is probably unnecessary, given where we call this.
	// A child document normally defers to the master's list.
	Buffer const * const master = masterBuffer();
	if (master != this && scope != UpdateChildOnly)
		return master->getBibfiles();

	// In 2.3.x, we have:
	//if (!d->bibfile_cache_valid_)
	//	this->updateBibfilesCache(scope);
	// I think that is a leftover, but there have been so many back-
	// and-forths with this, due to Windows issues, that I am not sure.

	return d->bibfiles_cache_;
}
|
|
|
|
|
|
|
|
|
|
|
2008-04-25 20:03:03 +00:00
|
|
|
|
BiblioInfo const & Buffer::masterBibInfo() const
{
	// Delegate to the master document when we are a child;
	// otherwise the info is our own.
	Buffer const * const master = masterBuffer();
	return (master == this) ? d->bibinfo_ : master->masterBibInfo();
}
|
|
|
|
|
|
|
|
|
|
|
2018-03-22 14:52:23 +00:00
|
|
|
|
BiblioInfo const & Buffer::bibInfo() const
{
	// This buffer's own bibliography information (no master lookup).
	return d->bibinfo_;
}
|
|
|
|
|
|
|
|
|
|
|
2018-09-02 04:10:01 +00:00
|
|
|
|
void Buffer::registerBibfiles(const docstring_list & bf) const
|
2018-07-07 13:25:35 +00:00
|
|
|
|
{
|
2018-03-22 22:37:19 +00:00
|
|
|
|
// We register the bib files in the master buffer,
|
|
|
|
|
// if there is one, but also in every single buffer,
|
|
|
|
|
// in case a child is compiled alone.
|
2017-11-05 01:23:25 +00:00
|
|
|
|
Buffer const * const tmp = masterBuffer();
|
|
|
|
|
if (tmp != this)
|
2018-03-22 14:52:23 +00:00
|
|
|
|
tmp->registerBibfiles(bf);
|
2017-11-05 01:23:25 +00:00
|
|
|
|
|
|
|
|
|
for (auto const & p : bf) {
|
2018-09-02 04:10:01 +00:00
|
|
|
|
docstring_list::const_iterator temp =
|
2017-11-05 01:23:25 +00:00
|
|
|
|
find(d->bibfiles_cache_.begin(), d->bibfiles_cache_.end(), p);
|
2017-12-16 04:20:57 +00:00
|
|
|
|
if (temp == d->bibfiles_cache_.end())
|
2017-11-05 01:23:25 +00:00
|
|
|
|
d->bibfiles_cache_.push_back(p);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2018-09-02 04:10:01 +00:00
|
|
|
|
static map<docstring, FileName> bibfileCache;
|
|
|
|
|
|
|
|
|
|
FileName Buffer::getBibfilePath(docstring const & bibid) const
|
|
|
|
|
{
|
|
|
|
|
map<docstring, FileName>::const_iterator it =
|
|
|
|
|
bibfileCache.find(bibid);
|
|
|
|
|
if (it != bibfileCache.end()) {
|
2019-05-31 00:32:35 +00:00
|
|
|
|
// i.e., return bibfileCache[bibid];
|
2018-09-02 04:10:01 +00:00
|
|
|
|
return it->second;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
LYXERR(Debug::FILES, "Reading file location for " << bibid);
|
2019-05-31 00:32:35 +00:00
|
|
|
|
string const texfile = changeExtension(to_utf8(bibid), "bib");
|
|
|
|
|
// we need to check first if this file exists where it's said to be.
|
|
|
|
|
// there's a weird bug that occurs otherwise: if the file is in the
|
|
|
|
|
// Buffer's directory but has the same name as some file that would be
|
|
|
|
|
// found by kpsewhich, then we find the latter, not the former.
|
|
|
|
|
FileName const local_file = makeAbsPath(texfile, filePath());
|
|
|
|
|
FileName file = local_file;
|
|
|
|
|
if (!file.exists()) {
|
|
|
|
|
// there's no need now to check whether the file can be found
|
|
|
|
|
// locally
|
|
|
|
|
file = findtexfile(texfile, "bib", true);
|
|
|
|
|
if (file.empty())
|
2019-05-31 00:39:43 +00:00
|
|
|
|
file = local_file;
|
2019-05-31 00:32:35 +00:00
|
|
|
|
}
|
2018-09-02 04:10:01 +00:00
|
|
|
|
LYXERR(Debug::FILES, "Found at: " << file);
|
|
|
|
|
|
|
|
|
|
bibfileCache[bibid] = file;
|
|
|
|
|
return bibfileCache[bibid];
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2010-12-03 19:00:55 +00:00
|
|
|
|
// Re-evaluate whether the cached bibliography info is still usable,
// clearing d->bibinfo_cache_valid_ / d->cite_labels_valid_ if not.
void Buffer::checkIfBibInfoCacheIsValid() const
{
	// use the master's cache
	Buffer const * const tmp = masterBuffer();
	if (tmp != this) {
		tmp->checkIfBibInfoCacheIsValid();
		return;
	}

	// If we already know the cache is invalid, stop here.
	// This is important in the case when the bibliography
	// environment (rather than Bib[la]TeX) is used.
	// In that case, the timestamp check below gives no
	// sensible result. Rather than that, the cache will
	// be invalidated explicitly via invalidateBibInfoCache()
	// by the Bibitem inset.
	// Same applies for bib encoding changes, which trigger
	// invalidateBibInfoCache() by InsetBibtex.
	if (!d->bibinfo_cache_valid_)
		return;

	if (d->have_bibitems_) {
		// We have a bibliography environment.
		// Invalidate the bibinfo cache unconditionally.
		// Cite labels will get invalidated by the inset if needed.
		d->bibinfo_cache_valid_ = false;
		return;
	}

	// OK. This is with Bib(la)tex. We'll assume the cache
	// is valid and change this if we find changes in the bibs.
	d->bibinfo_cache_valid_ = true;
	d->cite_labels_valid_ = true;

	// compare the cached timestamps with the actual ones.
	docstring_list const & bibfiles_cache = getBibfiles();
	for (auto const & bf : bibfiles_cache) {
		FileName const fn = getBibfilePath(bf);
		time_t lastw = fn.lastModified();
		time_t prevw = d->bibfile_status_[fn];
		if (lastw != prevw) {
			// the bib file changed on disk since the last check:
			// mark the caches stale and remember the new timestamp
			d->bibinfo_cache_valid_ = false;
			d->cite_labels_valid_ = false;
			d->bibfile_status_[fn] = lastw;
		}
	}
}
|
|
|
|
|
|
|
|
|
|
|
2018-10-13 09:06:24 +00:00
|
|
|
|
void Buffer::clearBibFileCache() const
|
|
|
|
|
{
|
|
|
|
|
bibfileCache.clear();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2018-07-07 13:25:35 +00:00
|
|
|
|
// Rebuild the bibliography info cache, unless it is still valid and
// \p force is false. The work is always done on the master buffer.
void Buffer::reloadBibInfoCache(bool const force) const
{
	// we should not need to do this for internal buffers
	if (isInternal())
		return;

	// use the master's cache
	Buffer const * const tmp = masterBuffer();
	if (tmp != this) {
		tmp->reloadBibInfoCache(force);
		return;
	}

	if (!force) {
		// refresh the validity flag, then bail out if still valid
		checkIfBibInfoCacheIsValid();
		if (d->bibinfo_cache_valid_)
			return;
	}

	LYXERR(Debug::FILES, "Bibinfo cache was invalid.");
	// re-read file locations when this info changes
	// FIXME Is this sufficient? Or should we also force that
	// in some other cases? If so, then it is easy enough to
	// add the following line in some other places.
	clearBibFileCache();
	d->bibinfo_.clear();
	FileNameList checkedFiles;
	d->have_bibitems_ = false;
	collectBibKeys(checkedFiles);
	d->bibinfo_cache_valid_ = true;
}
|
|
|
|
|
|
|
|
|
|
|
2017-10-18 07:20:31 +00:00
|
|
|
|
void Buffer::collectBibKeys(FileNameList & checkedFiles) const
{
	// The include list is reset only at the top of the hierarchy.
	if (!parent())
		clearIncludeList();

	// Walk every inset and let it contribute its bibliography keys.
	for (InsetIterator iit = begin(inset()); iit; ++iit) {
		iit->collectBibKeys(iit, checkedFiles);
		if (iit->lyxCode() != BIBITEM_CODE)
			continue;
		// A bibliography environment item was found: record that
		// on the parent if we have one, else on ourselves.
		if (parent() != nullptr)
			parent()->d->have_bibitems_ = true;
		else
			d->have_bibitems_ = true;
	}
}
|
|
|
|
|
|
|
|
|
|
|
2020-12-06 01:30:29 +00:00
|
|
|
|
void Buffer::addBiblioInfo(BiblioInfo const & bi_in) const
{
	// Merge into this buffer's own info first, so a child can still
	// be compiled alone, then bubble the data up to the parent.
	d->bibinfo_.mergeBiblioInfo(bi_in);

	if (parent())
		parent()->addBiblioInfo(bi_in);
}
|
|
|
|
|
|
|
|
|
|
|
2018-03-22 22:37:19 +00:00
|
|
|
|
void Buffer::addBibTeXInfo(docstring const & key, BibTeXInfo const & bin) const
{
	// Store the entry in this buffer (so a child compiles alone)
	// and mirror it into the master's map when we are a child.
	d->bibinfo_[key] = bin;

	if (parent() != nullptr)
		masterBuffer()->d->bibinfo_[key] = bin;
}
|
|
|
|
|
|
|
|
|
|
|
2013-02-02 18:23:21 +00:00
|
|
|
|
void Buffer::makeCitationLabels() const
|
|
|
|
|
{
|
|
|
|
|
Buffer const * const master = masterBuffer();
|
|
|
|
|
return d->bibinfo_.makeCitationLabels(*master);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2018-01-29 04:33:45 +00:00
|
|
|
|
void Buffer::invalidateCiteLabels() const
|
|
|
|
|
{
|
|
|
|
|
masterBuffer()->d->cite_labels_valid_ = false;
|
|
|
|
|
}
|
|
|
|
|
|
2011-05-07 23:02:53 +00:00
|
|
|
|
bool Buffer::citeLabelsValid() const
|
|
|
|
|
{
|
|
|
|
|
return masterBuffer()->d->cite_labels_valid_;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2011-12-03 22:58:38 +00:00
|
|
|
|
void Buffer::removeBiblioTempFiles() const
|
|
|
|
|
{
|
|
|
|
|
// We remove files that contain LaTeX commands specific to the
|
|
|
|
|
// particular bibliographic style being used, in order to avoid
|
|
|
|
|
// LaTeX errors when we switch style.
|
|
|
|
|
FileName const aux_file(addName(temppath(), changeExtension(latexName(),".aux")));
|
|
|
|
|
FileName const bbl_file(addName(temppath(), changeExtension(latexName(),".bbl")));
|
|
|
|
|
LYXERR(Debug::FILES, "Removing the .aux file " << aux_file);
|
|
|
|
|
aux_file.removeFile();
|
|
|
|
|
LYXERR(Debug::FILES, "Removing the .bbl file " << bbl_file);
|
|
|
|
|
bbl_file.removeFile();
|
|
|
|
|
// Also for the parent buffer
|
|
|
|
|
Buffer const * const pbuf = parent();
|
|
|
|
|
if (pbuf)
|
|
|
|
|
pbuf->removeBiblioTempFiles();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
1999-10-02 16:21:10 +00:00
|
|
|
|
bool Buffer::isDepClean(string const & name) const
{
	// A dependency we have never recorded counts as clean.
	DepClean::const_iterator const dit = d->dep_clean.find(name);
	return dit == d->dep_clean.end() ? true : dit->second;
}
|
|
|
|
|
|
|
|
|
|
|
1999-10-02 16:21:10 +00:00
|
|
|
|
void Buffer::markDepClean(string const & name)
{
	// Record that this dependency needs no regeneration.
	d->dep_clean[name] = true;
}
|
1999-12-10 00:07:59 +00:00
|
|
|
|
|
2000-01-08 21:02:58 +00:00
|
|
|
|
|
2009-04-06 12:12:06 +00:00
|
|
|
|
/// Determine whether \p cmd applies to this buffer, setting the
/// enabled and on/off state in \p flag accordingly.
/// Returns false when the request is not a buffer-level LFUN
/// (or the buffer is internal), so a higher level can handle it.
bool Buffer::getStatus(FuncRequest const & cmd, FuncStatus & flag)
{
	if (isInternal()) {
		// FIXME? if there is an Buffer LFUN that can be dispatched even
		// if internal, put a switch '(cmd.action)' here.
		return false;
	}

	bool enable = true;

	switch (cmd.action()) {

	case LFUN_BUFFER_TOGGLE_READ_ONLY:
		flag.setOnOff(hasReadonlyFlag());
		break;

	// FIXME: There is need for a command-line import.
	//case LFUN_BUFFER_IMPORT:

	case LFUN_BUFFER_AUTO_SAVE:
		break;

	case LFUN_BUFFER_EXPORT_CUSTOM:
		// FIXME: Nothing to check here?
		break;

	case LFUN_BUFFER_EXPORT: {
		docstring const & arg = cmd.argument();
		if (arg == "custom") {
			enable = true;
			break;
		}
		// fall back to the document's default output format when
		// no explicit format (or "default") was requested
		string format = (arg.empty() || arg == "default") ?
			params().getDefaultOutputFormat() : to_utf8(arg);
		// strip anything after the first space (extra flags)
		size_t pos = format.find(' ');
		if (pos != string::npos)
			format = format.substr(0, pos);
		enable = params().isExportable(format, false);
		if (!enable)
			flag.message(bformat(
				_("Don't know how to export to format: %1$s"), arg));
		break;
	}

	case LFUN_BUILD_PROGRAM:
		enable = params().isExportable("program", false);
		break;

	case LFUN_BRANCH_ACTIVATE:
	case LFUN_BRANCH_DEACTIVATE:
	case LFUN_BRANCH_MASTER_ACTIVATE:
	case LFUN_BRANCH_MASTER_DEACTIVATE: {
		// the MASTER variants operate on the master document's list
		bool const master = (cmd.action() == LFUN_BRANCH_MASTER_ACTIVATE
				     || cmd.action() == LFUN_BRANCH_MASTER_DEACTIVATE);
		BranchList const & branchList = master ? masterBuffer()->params().branchlist()
			: params().branchlist();
		docstring const & branchName = cmd.argument();
		flag.setEnabled(!branchName.empty() && branchList.find(branchName));
		break;
	}

	case LFUN_BRANCH_ADD:
	case LFUN_BRANCHES_RENAME:
		// if no Buffer is present, then of course we won't be called!
		break;

	case LFUN_BUFFER_LANGUAGE:
		enable = !isReadonly();
		break;

	case LFUN_BUFFER_VIEW_CACHE:
		// the cached preview is only viewable if it exists on disk
		// and is non-empty
		(d->preview_file_).refresh();
		enable = (d->preview_file_).exists() && !(d->preview_file_).isFileEmpty();
		break;

	case LFUN_CHANGES_TRACK:
		flag.setEnabled(true);
		flag.setOnOff(params().track_changes);
		break;

	case LFUN_CHANGES_OUTPUT:
		flag.setEnabled(true);
		flag.setOnOff(params().output_changes);
		break;

	case LFUN_BUFFER_TOGGLE_COMPRESSION:
		flag.setOnOff(params().compressed);
		break;

	case LFUN_BUFFER_TOGGLE_OUTPUT_SYNC:
		flag.setOnOff(params().output_sync);
		break;

	case LFUN_BUFFER_ANONYMIZE:
		break;

	default:
		return false;
	}
	flag.setEnabled(enable);
	return true;
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
void Buffer::dispatch(string const & command, DispatchResult & result)
{
	// Translate the textual command into a FuncRequest and forward.
	FuncRequest const func = lyxaction.lookupFunc(command);
	dispatch(func, result);
}
|
|
|
|
|
|
2000-01-08 21:02:58 +00:00
|
|
|
|
|
2009-04-06 12:12:06 +00:00
|
|
|
|
// NOTE We can end up here even if we have no GUI, because we are called
|
2011-12-03 22:15:11 +00:00
|
|
|
|
// by LyX::exec to handled command-line requests. So we may need to check
|
2009-04-06 12:12:06 +00:00
|
|
|
|
// whether we have a GUI or not. The boolean use_gui holds this information.
|
|
|
|
|
void Buffer::dispatch(FuncRequest const & func, DispatchResult & dr)
|
1999-12-10 00:07:59 +00:00
|
|
|
|
{
|
2009-09-19 14:05:52 +00:00
|
|
|
|
if (isInternal()) {
|
|
|
|
|
// FIXME? if there is an Buffer LFUN that can be dispatched even
|
2010-04-09 19:00:42 +00:00
|
|
|
|
// if internal, put a switch '(cmd.action())' here.
|
2009-09-19 14:05:52 +00:00
|
|
|
|
dr.dispatched(false);
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
string const argument = to_utf8(func.argument());
|
2009-04-06 12:12:06 +00:00
|
|
|
|
// We'll set this back to false if need be.
|
2000-04-28 11:18:04 +00:00
|
|
|
|
bool dispatched = true;
|
2017-12-13 10:10:49 +00:00
|
|
|
|
// This handles undo groups automagically
|
|
|
|
|
UndoGroupHelper ugh(this);
|
2002-06-24 20:28:12 +00:00
|
|
|
|
|
2010-04-09 19:00:42 +00:00
|
|
|
|
switch (func.action()) {
|
2009-09-19 11:59:39 +00:00
|
|
|
|
case LFUN_BUFFER_TOGGLE_READ_ONLY:
|
2013-02-03 18:21:54 +00:00
|
|
|
|
if (lyxvc().inUse()) {
|
|
|
|
|
string log = lyxvc().toggleReadOnly();
|
|
|
|
|
if (!log.empty())
|
|
|
|
|
dr.setMessage(log);
|
|
|
|
|
}
|
2009-09-19 11:59:39 +00:00
|
|
|
|
else
|
2017-03-01 21:03:44 +00:00
|
|
|
|
setReadonly(!hasReadonlyFlag());
|
2009-09-19 11:59:39 +00:00
|
|
|
|
break;
|
|
|
|
|
|
2009-04-24 12:37:56 +00:00
|
|
|
|
case LFUN_BUFFER_EXPORT: {
|
2017-03-09 22:35:27 +00:00
|
|
|
|
string const format = (argument.empty() || argument == "default") ?
|
2017-02-28 21:15:53 +00:00
|
|
|
|
params().getDefaultOutputFormat() : argument;
|
2017-02-27 19:43:11 +00:00
|
|
|
|
ExportStatus const status = doExport(format, false);
|
2011-10-27 20:00:06 +00:00
|
|
|
|
dr.setError(status != ExportSuccess);
|
|
|
|
|
if (status != ExportSuccess)
|
2011-12-03 22:15:11 +00:00
|
|
|
|
dr.setMessage(bformat(_("Error exporting to format: %1$s."),
|
2017-02-27 19:43:11 +00:00
|
|
|
|
from_utf8(format)));
|
2009-04-24 12:37:56 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
|
2013-11-14 20:04:37 +00:00
|
|
|
|
case LFUN_BUILD_PROGRAM: {
|
|
|
|
|
ExportStatus const status = doExport("program", true);
|
|
|
|
|
dr.setError(status != ExportSuccess);
|
|
|
|
|
if (status != ExportSuccess)
|
|
|
|
|
dr.setMessage(_("Error generating literate programming code."));
|
2009-09-19 14:05:52 +00:00
|
|
|
|
break;
|
2013-11-14 20:04:37 +00:00
|
|
|
|
}
|
2009-09-19 14:05:52 +00:00
|
|
|
|
|
|
|
|
|
case LFUN_BUFFER_EXPORT_CUSTOM: {
|
|
|
|
|
string format_name;
|
|
|
|
|
string command = split(argument, format_name, ' ');
|
2017-03-14 03:33:40 +00:00
|
|
|
|
Format const * format = theFormats().getFormat(format_name);
|
2009-09-19 14:05:52 +00:00
|
|
|
|
if (!format) {
|
|
|
|
|
lyxerr << "Format \"" << format_name
|
|
|
|
|
<< "\" not recognized!"
|
|
|
|
|
<< endl;
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// The name of the file created by the conversion process
|
|
|
|
|
string filename;
|
|
|
|
|
|
|
|
|
|
// Output to filename
|
|
|
|
|
if (format->name() == "lyx") {
|
|
|
|
|
string const latexname = latexName(false);
|
|
|
|
|
filename = changeExtension(latexname,
|
|
|
|
|
format->extension());
|
|
|
|
|
filename = addName(temppath(), filename);
|
|
|
|
|
|
|
|
|
|
if (!writeFile(FileName(filename)))
|
|
|
|
|
break;
|
|
|
|
|
|
|
|
|
|
} else {
|
2011-10-30 08:53:20 +00:00
|
|
|
|
doExport(format_name, true, filename);
|
2009-09-19 14:05:52 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Substitute $$FName for filename
|
|
|
|
|
if (!contains(command, "$$FName"))
|
|
|
|
|
command = "( " + command + " ) < $$FName";
|
|
|
|
|
command = subst(command, "$$FName", filename);
|
|
|
|
|
|
|
|
|
|
// Execute the command in the background
|
|
|
|
|
Systemcall call;
|
Fix bug #4812 (Layout in local directory lost on Save As, Copying)
The "save-as" part of the bug is fixed by extending the \textclass tag
such that, if a local layout file is used, its path relative to the
document directory is now stored together with the name. If a relative
path cannot be used, an absolute one is used but, in this case, the
document is not usable on a different platform.
The "copy" part is fixed by introducing a new \origin tag, which is
written when the file is saved. This tag stores the absolute path of
the document directory. If the document is manually copied to a
different location, the local layout file is retrivied by using
\origin (which is only updated on save).
This new tag may prove useful also for locating other files when the
document is manually moved to a different directory.
As in the original implementation the files needed for the layout
(for example, a latex class) had to be in the same directory as the
layout file, this directory has also to be added to TEXINPUTS.
2015-05-13 19:40:51 +00:00
|
|
|
|
call.startscript(Systemcall::DontWait, command,
|
|
|
|
|
filePath(), layoutPos());
|
2009-09-19 14:05:52 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// FIXME: There is need for a command-line import.
|
|
|
|
|
/*
|
|
|
|
|
case LFUN_BUFFER_IMPORT:
|
|
|
|
|
doImport(argument);
|
|
|
|
|
break;
|
|
|
|
|
*/
|
|
|
|
|
|
|
|
|
|
case LFUN_BUFFER_AUTO_SAVE:
|
|
|
|
|
autoSave();
|
2010-11-17 02:18:12 +00:00
|
|
|
|
resetAutosaveTimers();
|
2009-09-19 14:05:52 +00:00
|
|
|
|
break;
|
|
|
|
|
|
2013-04-16 14:22:10 +00:00
|
|
|
|
case LFUN_BRANCH_ACTIVATE:
|
|
|
|
|
case LFUN_BRANCH_DEACTIVATE:
|
|
|
|
|
case LFUN_BRANCH_MASTER_ACTIVATE:
|
|
|
|
|
case LFUN_BRANCH_MASTER_DEACTIVATE: {
|
|
|
|
|
bool const master = (func.action() == LFUN_BRANCH_MASTER_ACTIVATE
|
|
|
|
|
|| func.action() == LFUN_BRANCH_MASTER_DEACTIVATE);
|
|
|
|
|
Buffer * buf = master ? const_cast<Buffer *>(masterBuffer())
|
|
|
|
|
: this;
|
|
|
|
|
|
2020-10-31 13:09:46 +00:00
|
|
|
|
docstring const & branch_name = func.argument();
|
2013-04-16 14:22:10 +00:00
|
|
|
|
// the case without a branch name is handled elsewhere
|
|
|
|
|
if (branch_name.empty()) {
|
|
|
|
|
dispatched = false;
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
Branch * branch = buf->params().branchlist().find(branch_name);
|
|
|
|
|
if (!branch) {
|
|
|
|
|
LYXERR0("Branch " << branch_name << " does not exist.");
|
|
|
|
|
dr.setError(true);
|
|
|
|
|
docstring const msg =
|
|
|
|
|
bformat(_("Branch \"%1$s\" does not exist."), branch_name);
|
|
|
|
|
dr.setMessage(msg);
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
bool const activate = (func.action() == LFUN_BRANCH_ACTIVATE
|
|
|
|
|
|| func.action() == LFUN_BRANCH_MASTER_ACTIVATE);
|
|
|
|
|
if (branch->isSelected() != activate) {
|
2015-01-17 19:38:22 +00:00
|
|
|
|
buf->undo().recordUndoBufferParams(CursorData());
|
2013-04-16 14:22:10 +00:00
|
|
|
|
branch->setSelected(activate);
|
|
|
|
|
dr.setError(false);
|
|
|
|
|
dr.screenUpdate(Update::Force);
|
|
|
|
|
dr.forceBufferUpdate();
|
|
|
|
|
}
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
|
2009-07-09 09:48:34 +00:00
|
|
|
|
case LFUN_BRANCH_ADD: {
|
2020-10-31 13:09:46 +00:00
|
|
|
|
docstring const & branchnames = func.argument();
|
2017-12-16 04:20:57 +00:00
|
|
|
|
if (branchnames.empty()) {
|
2009-07-09 09:48:34 +00:00
|
|
|
|
dispatched = false;
|
|
|
|
|
break;
|
|
|
|
|
}
|
2009-10-29 22:06:27 +00:00
|
|
|
|
BranchList & branch_list = params().branchlist();
|
2010-06-14 13:39:08 +00:00
|
|
|
|
vector<docstring> const branches =
|
2017-12-16 04:20:57 +00:00
|
|
|
|
getVectorFromString(branchnames, branch_list.separator());
|
2010-06-14 13:39:08 +00:00
|
|
|
|
docstring msg;
|
2017-11-05 02:15:01 +00:00
|
|
|
|
for (docstring const & branch_name : branches) {
|
2010-06-14 13:39:08 +00:00
|
|
|
|
Branch * branch = branch_list.find(branch_name);
|
|
|
|
|
if (branch) {
|
|
|
|
|
LYXERR0("Branch " << branch_name << " already exists.");
|
|
|
|
|
dr.setError(true);
|
|
|
|
|
if (!msg.empty())
|
|
|
|
|
msg += ("\n");
|
|
|
|
|
msg += bformat(_("Branch \"%1$s\" already exists."), branch_name);
|
|
|
|
|
} else {
|
2015-01-17 19:38:22 +00:00
|
|
|
|
undo().recordUndoBufferParams(CursorData());
|
2010-06-14 13:39:08 +00:00
|
|
|
|
branch_list.add(branch_name);
|
2021-01-30 08:16:15 +00:00
|
|
|
|
branch = branch_list.find(branch_name);
|
2021-01-30 12:46:47 +00:00
|
|
|
|
if (branch)
|
|
|
|
|
// needed to update the color table for dark mode
|
|
|
|
|
branch->setColors("background", "background");
|
2010-06-14 13:39:08 +00:00
|
|
|
|
dr.setError(false);
|
2010-10-13 17:28:55 +00:00
|
|
|
|
dr.screenUpdate(Update::Force);
|
2010-06-14 13:39:08 +00:00
|
|
|
|
}
|
2009-07-09 09:48:34 +00:00
|
|
|
|
}
|
2010-06-14 13:39:08 +00:00
|
|
|
|
if (!msg.empty())
|
|
|
|
|
dr.setMessage(msg);
|
2009-07-09 09:48:34 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
|
2011-12-03 22:15:11 +00:00
|
|
|
|
case LFUN_BRANCHES_RENAME: {
|
2009-07-11 08:18:26 +00:00
|
|
|
|
if (func.argument().empty())
|
|
|
|
|
break;
|
|
|
|
|
|
2009-07-10 06:49:51 +00:00
|
|
|
|
docstring const oldname = from_utf8(func.getArg(0));
|
|
|
|
|
docstring const newname = from_utf8(func.getArg(1));
|
2020-11-27 18:10:52 +00:00
|
|
|
|
InsetIterator it = begin(inset());
|
|
|
|
|
InsetIterator const itend = end(inset());
|
2009-07-11 08:18:26 +00:00
|
|
|
|
bool success = false;
|
2020-11-27 18:10:52 +00:00
|
|
|
|
for (; it != itend; ++it) {
|
2009-07-11 08:18:26 +00:00
|
|
|
|
if (it->lyxCode() == BRANCH_CODE) {
|
Remove unneccessary uses of dynamic_cast from the code.
A dynamic_cast is necessary when:
- the object to be casted is from an external library because we can't add Qxxx::asXxxx() to Qt e.g.:
* QAbstractListModel to GuiIdListModel,
* QValidator to PathValidator,
* QWidget to TabWorkArea,
* QWidget to GuiWorkArea;
- the object is to be casted from an interface to the implementing class, because the Interface does not know by whom it is implemented:
* ProgressInterface to GuiProgress,
* Application to GuiApplication.
A dynamic_cast can be replaced by:
- already existing as***Inset() functions, e.g.:
* asHullInset(),
* asInsetMath()->asMacro(),
* asInsetText();
- a static_cast when we are sure this can't go wrong, e.g.:
* we are sure that CellData::inset->clone() is an InsetTableCell,
* in cases where we explicitly check it->lyxCode().
git-svn-id: svn://svn.lyx.org/lyx/lyx-devel/trunk@35855 a592a061-630c-0410-9148-cb99ea01b6c8
2010-10-26 15:03:51 +00:00
|
|
|
|
InsetBranch & ins = static_cast<InsetBranch &>(*it);
|
2009-07-11 08:18:26 +00:00
|
|
|
|
if (ins.branch() == oldname) {
|
2012-07-15 16:16:09 +00:00
|
|
|
|
undo().recordUndo(CursorData(it));
|
2009-07-11 08:18:26 +00:00
|
|
|
|
ins.rename(newname);
|
|
|
|
|
success = true;
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if (it->lyxCode() == INCLUDE_CODE) {
|
|
|
|
|
// get buffer of external file
|
|
|
|
|
InsetInclude const & ins =
|
|
|
|
|
static_cast<InsetInclude const &>(*it);
|
2020-04-26 02:27:09 +00:00
|
|
|
|
Buffer * child = ins.loadIfNeeded();
|
2009-07-11 08:18:26 +00:00
|
|
|
|
if (!child)
|
|
|
|
|
continue;
|
|
|
|
|
child->dispatch(func, dr);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2010-07-09 14:37:00 +00:00
|
|
|
|
if (success) {
|
2010-10-13 17:28:55 +00:00
|
|
|
|
dr.screenUpdate(Update::Force);
|
2010-07-09 14:37:00 +00:00
|
|
|
|
dr.forceBufferUpdate();
|
|
|
|
|
}
|
2009-07-10 06:49:51 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
|
2015-03-31 22:54:49 +00:00
|
|
|
|
case LFUN_BUFFER_VIEW_CACHE:
|
2017-03-14 03:33:40 +00:00
|
|
|
|
if (!theFormats().view(*this, d->preview_file_,
|
2015-03-31 22:54:49 +00:00
|
|
|
|
d->preview_format_))
|
|
|
|
|
dr.setMessage(_("Error viewing the output file."));
|
|
|
|
|
break;
|
|
|
|
|
|
2015-11-05 21:42:30 +00:00
|
|
|
|
case LFUN_CHANGES_TRACK:
|
2015-12-20 16:37:29 +00:00
|
|
|
|
if (params().save_transient_properties)
|
|
|
|
|
undo().recordUndoBufferParams(CursorData());
|
2015-11-05 21:42:30 +00:00
|
|
|
|
params().track_changes = !params().track_changes;
|
|
|
|
|
break;
|
|
|
|
|
|
|
|
|
|
case LFUN_CHANGES_OUTPUT:
|
2015-12-20 16:37:29 +00:00
|
|
|
|
if (params().save_transient_properties)
|
|
|
|
|
undo().recordUndoBufferParams(CursorData());
|
2015-11-05 21:42:30 +00:00
|
|
|
|
params().output_changes = !params().output_changes;
|
|
|
|
|
if (params().output_changes) {
|
|
|
|
|
bool xcolorulem = LaTeXFeatures::isAvailable("ulem") &&
|
|
|
|
|
LaTeXFeatures::isAvailable("xcolor");
|
|
|
|
|
|
2020-01-13 07:59:26 +00:00
|
|
|
|
if (!xcolorulem) {
|
2015-11-05 21:42:30 +00:00
|
|
|
|
Alert::warning(_("Changes not shown in LaTeX output"),
|
|
|
|
|
_("Changes will not be highlighted in LaTeX output, "
|
2020-01-13 07:59:26 +00:00
|
|
|
|
"because xcolor and ulem are not installed.\n"
|
2015-11-05 21:42:30 +00:00
|
|
|
|
"Please install both packages or redefine "
|
|
|
|
|
"\\lyxadded and \\lyxdeleted in the LaTeX preamble."));
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
break;
|
|
|
|
|
|
|
|
|
|
case LFUN_BUFFER_TOGGLE_COMPRESSION:
|
|
|
|
|
// turn compression on/off
|
|
|
|
|
undo().recordUndoBufferParams(CursorData());
|
|
|
|
|
params().compressed = !params().compressed;
|
|
|
|
|
break;
|
|
|
|
|
|
|
|
|
|
case LFUN_BUFFER_TOGGLE_OUTPUT_SYNC:
|
|
|
|
|
undo().recordUndoBufferParams(CursorData());
|
|
|
|
|
params().output_sync = !params().output_sync;
|
|
|
|
|
break;
|
|
|
|
|
|
2018-02-07 14:35:46 +00:00
|
|
|
|
case LFUN_BUFFER_ANONYMIZE: {
|
|
|
|
|
undo().recordUndoFullBuffer(CursorData());
|
|
|
|
|
CursorData cur(doc_iterator_begin(this));
|
|
|
|
|
for ( ; cur ; cur.forwardPar())
|
|
|
|
|
cur.paragraph().anonymize();
|
|
|
|
|
dr.forceBufferUpdate();
|
2018-12-02 01:24:04 +00:00
|
|
|
|
dr.screenUpdate(Update::Force);
|
2018-02-07 14:35:46 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
|
2009-04-24 12:37:56 +00:00
|
|
|
|
default:
|
|
|
|
|
dispatched = false;
|
|
|
|
|
break;
|
2000-04-28 11:18:04 +00:00
|
|
|
|
}
|
2009-04-06 12:12:06 +00:00
|
|
|
|
dr.dispatched(dispatched);
|
1999-12-10 00:07:59 +00:00
|
|
|
|
}
|
2000-04-10 21:40:13 +00:00
|
|
|
|
|
2000-04-26 13:57:28 +00:00
|
|
|
|
|
2001-06-28 10:25:20 +00:00
|
|
|
|
void Buffer::changeLanguage(Language const * from, Language const * to)
{
	// Replace language `from` by `to` in every paragraph of this buffer.
	LASSERT(from, return);
	LASSERT(to, return);

	ParIterator pend = par_iterator_end();
	for (ParIterator pit = par_iterator_begin(); pit != pend; ++pit)
		pit->changeLanguage(params(), from, to);
}
|
|
|
|
|
|
2003-04-28 21:58:53 +00:00
|
|
|
|
|
2004-02-25 12:00:53 +00:00
|
|
|
|
bool Buffer::isMultiLingual() const
|
2000-04-10 21:40:13 +00:00
|
|
|
|
{
|
2004-02-25 12:00:53 +00:00
|
|
|
|
ParConstIterator end = par_iterator_end();
|
|
|
|
|
for (ParConstIterator it = par_iterator_begin(); it != end; ++it)
|
2003-09-09 09:47:59 +00:00
|
|
|
|
if (it->isMultiLingual(params()))
|
2000-04-10 21:40:13 +00:00
|
|
|
|
return true;
|
2001-09-01 21:26:34 +00:00
|
|
|
|
|
2000-04-10 21:40:13 +00:00
|
|
|
|
return false;
|
|
|
|
|
}
|
2000-05-19 16:46:01 +00:00
|
|
|
|
|
|
|
|
|
|
2010-02-08 17:15:00 +00:00
|
|
|
|
std::set<Language const *> Buffer::getLanguages() const
{
	// Convenience wrapper: delegate to the accumulating overload
	// with a fresh, empty set.
	std::set<Language const *> result;
	getLanguages(result);
	return result;
}
|
|
|
|
|
|
|
|
|
|
|
2017-12-16 04:20:57 +00:00
|
|
|
|
void Buffer::getLanguages(std::set<Language const *> & langs) const
{
	// The buffer language belongs in the set even if it is not
	// actively used anywhere.
	langs.insert(language());
	// Collect the languages used in each paragraph of this buffer.
	ParConstIterator pend = par_iterator_end();
	for (ParConstIterator pit = par_iterator_begin(); pit != pend; ++pit)
		pit->getLanguages(langs);
	// ... and recurse into all descendant (child) buffers.
	ListOfBuffers const descendants = getDescendants();
	for (auto const & desc : descendants)
		desc->getLanguages(langs);
}
|
|
|
|
|
|
|
|
|
|
|
2008-02-09 15:23:05 +00:00
|
|
|
|
DocIterator Buffer::getParFromID(int const id) const
{
	// Locate the paragraph carrying the given id; an end iterator
	// signals "not found".
	Buffer * self = const_cast<Buffer *>(this);

	// Negative ids denote a non-existent paragraph.
	if (id < 0)
		return doc_iterator_end(self);

	DocIterator dit = doc_iterator_begin(self);
	while (!dit.atEnd()) {
		if (dit.paragraph().id() == id)
			return dit;
		dit.forwardPar();
	}

	return doc_iterator_end(self);
}
|
|
|
|
|
|
|
|
|
|
|
2005-01-05 20:21:27 +00:00
|
|
|
|
// Returns true if a paragraph with the given id exists in this buffer.
bool Buffer::hasParWithID(int const id) const
{
	// getParFromID() returns an end iterator when the id is not found.
	return !getParFromID(id).atEnd();
}
|
|
|
|
|
|
|
|
|
|
|
2001-09-01 21:26:34 +00:00
|
|
|
|
// Paragraph iterator pointing at the first paragraph of this buffer.
ParIterator Buffer::par_iterator_begin()
{
	return ParIterator(doc_iterator_begin(this));
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Paragraph iterator pointing past the last paragraph of this buffer.
ParIterator Buffer::par_iterator_end()
{
	return ParIterator(doc_iterator_end(this));
}
|
2002-08-20 17:18:21 +00:00
|
|
|
|
|
2003-08-26 14:50:16 +00:00
|
|
|
|
|
2002-11-08 01:08:27 +00:00
|
|
|
|
// Const paragraph iterator pointing at the first paragraph of this buffer.
ParConstIterator Buffer::par_iterator_begin() const
{
	return ParConstIterator(doc_iterator_begin(this));
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Const paragraph iterator pointing past the last paragraph of this buffer.
ParConstIterator Buffer::par_iterator_end() const
{
	return ParConstIterator(doc_iterator_end(this));
}
|
|
|
|
|
|
2020-12-03 17:02:40 +00:00
|
|
|
|
// A buffer is considered empty when it contains exactly one paragraph
// and that paragraph has no content.
bool Buffer::empty() const
{
	return paragraphs().size() == 1 && paragraphs().front().empty();
}
|
|
|
|
|
|
2002-11-08 01:08:27 +00:00
|
|
|
|
|
2007-10-20 10:03:45 +00:00
|
|
|
|
// The document language, as recorded in the buffer parameters.
Language const * Buffer::language() const
{
	return params().language;
}
|
|
|
|
|
|
|
|
|
|
|
2020-02-28 06:39:06 +00:00
|
|
|
|
// Translate a message into the document language (delegates to the
// buffer parameters, which know the active language).
docstring Buffer::B_(string const & l10n) const
{
	return params().B_(l10n);
}
|
|
|
|
|
|
|
|
|
|
|
2002-08-20 17:18:21 +00:00
|
|
|
|
// True if the buffer has no unsaved changes.
bool Buffer::isClean() const
{
	return d->lyx_clean;
}
|
|
|
|
|
|
|
|
|
|
|
2017-03-05 19:12:07 +00:00
|
|
|
|
// Compare the checksum stored at the last save/load against the current
// on-disk checksum, to detect external modification of the file.
bool Buffer::isChecksumModified() const
{
	LASSERT(d->filename.exists(), return false);
	return d->checksum_ != d->filename.checksum();
}
|
|
|
|
|
|
|
|
|
|
|
2010-10-28 22:12:29 +00:00
|
|
|
|
void Buffer::saveCheckSum() const
|
2007-09-06 15:54:17 +00:00
|
|
|
|
{
|
2010-10-28 22:12:29 +00:00
|
|
|
|
FileName const & file = d->filename;
|
2011-02-20 10:53:07 +00:00
|
|
|
|
file.refresh();
|
2017-03-01 19:32:33 +00:00
|
|
|
|
d->checksum_ = file.exists() ? file.checksum()
|
|
|
|
|
: 0; // in the case of save to a new file.
|
2007-09-06 15:54:17 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2002-08-20 17:18:21 +00:00
|
|
|
|
void Buffer::markClean() const
|
|
|
|
|
{
|
2007-11-30 17:46:49 +00:00
|
|
|
|
if (!d->lyx_clean) {
|
|
|
|
|
d->lyx_clean = true;
|
2002-08-20 17:18:21 +00:00
|
|
|
|
updateTitles();
|
|
|
|
|
}
|
|
|
|
|
// if the .lyx file has been saved, we don't need an
|
|
|
|
|
// autosave
|
2007-11-30 17:46:49 +00:00
|
|
|
|
d->bak_clean = true;
|
2010-08-06 23:54:04 +00:00
|
|
|
|
d->undo_.markDirty();
|
2017-03-05 19:12:07 +00:00
|
|
|
|
clearExternalModification();
|
2002-08-20 17:18:21 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Mark the buffer as (not) having a user-chosen file name yet.
void Buffer::setUnnamed(bool flag)
{
	d->unnamed = flag;
}
|
|
|
|
|
|
|
|
|
|
|
2004-02-25 12:00:53 +00:00
|
|
|
|
// True if the buffer has not yet been given a user-chosen file name.
bool Buffer::isUnnamed() const
{
	return d->unnamed;
}
|
|
|
|
|
|
|
|
|
|
|
2009-09-08 01:29:07 +00:00
|
|
|
|
/// \note
|
|
|
|
|
/// Don't check unnamed, here: isInternal() is used in
|
|
|
|
|
/// newBuffer(), where the unnamed flag has not been set by anyone
|
|
|
|
|
/// yet. Also, for an internal buffer, there should be no need for
|
|
|
|
|
/// retrieving fileName() nor for checking if it is unnamed or not.
|
|
|
|
|
// True for buffers used internally by LyX (see the note above:
// the unnamed flag is deliberately not consulted here).
bool Buffer::isInternal() const
{
	return d->internal_buffer;
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Mark the buffer as internal (used by LyX itself, not the user).
void Buffer::setInternal(bool flag)
{
	d->internal_buffer = flag;
}
|
|
|
|
|
|
|
|
|
|
|
2002-08-20 17:18:21 +00:00
|
|
|
|
void Buffer::markDirty()
|
|
|
|
|
{
|
2007-11-30 17:46:49 +00:00
|
|
|
|
if (d->lyx_clean) {
|
|
|
|
|
d->lyx_clean = false;
|
2002-08-20 17:18:21 +00:00
|
|
|
|
updateTitles();
|
|
|
|
|
}
|
2007-11-30 17:46:49 +00:00
|
|
|
|
d->bak_clean = false;
|
2003-02-09 00:27:52 +00:00
|
|
|
|
|
2017-11-05 02:15:01 +00:00
|
|
|
|
for (auto & depit : d->dep_clean)
|
|
|
|
|
depit.second = false;
|
2002-08-20 17:18:21 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2007-11-03 17:37:37 +00:00
|
|
|
|
// The file this buffer is associated with.
FileName Buffer::fileName() const
{
	return d->filename;
}
|
|
|
|
|
|
|
|
|
|
|
2007-10-20 10:03:45 +00:00
|
|
|
|
// Absolute path of the buffer's file, as a string.
string Buffer::absFileName() const
{
	return d->filename.absFileName();
}
|
|
|
|
|
|
|
|
|
|
|
2007-11-28 15:25:07 +00:00
|
|
|
|
string Buffer::filePath() const
{
	// Absolute directory of the buffer's file, guaranteed to end in
	// a path separator (unless the path is empty).
	string path = d->filename.onlyPath().absFileName();
	if (!path.empty() && path[path.length() - 1] != '/')
		path += '/';
	return path;
}
|
|
|
|
|
|
|
|
|
|
|
2016-01-10 18:46:01 +00:00
|
|
|
|
DocFileName Buffer::getReferencedFileName(string const & fn) const
{
	// Resolve a (possibly relative) referenced file name. Relative
	// names are first tried against the buffer directory; if the file
	// is not there, fall back to the recorded \origin directory (the
	// place where the document was last saved).
	DocFileName result;
	bool const use_buffer_path = FileName::isAbsolute(fn)
		|| !FileName::isAbsolute(params().origin);
	if (use_buffer_path) {
		result.set(fn, filePath());
		return result;
	}

	// filePath() ends with a path separator
	FileName const candidate(filePath() + fn);
	if (candidate.exists())
		result.set(fn, filePath());
	else
		result.set(fn, params().origin);

	return result;
}
|
|
|
|
|
|
|
|
|
|
|
2017-01-08 08:39:46 +00:00
|
|
|
|
string const Buffer::prepareFileNameForLaTeX(string const & name,
		string const & ext, bool nice) const
{
	// Turn a referenced file name into a form suitable for the LaTeX
	// output: absolute for tempdir exports, relative to the master for
	// "nice" (user-visible) exports, untouched otherwise.
	string const abs = makeAbsPath(name, filePath()).absFileName();
	bool const keep_as_is = FileName::isAbsolute(name)
		|| !FileName(abs + ext).isReadableFile();
	if (keep_as_is)
		return name;
	if (!nice)
		return abs;

	// FIXME UNICODE
	return to_utf8(makeRelPath(from_utf8(abs),
				   from_utf8(masterBuffer()->filePath())));
}
|
|
|
|
|
|
|
|
|
|
|
2019-03-29 14:45:55 +00:00
|
|
|
|
// Resolve the bibliography database names in \a bibfilelist into the
// paths that will actually be written into the LaTeX output, paired
// with their per-file encoding (biblatex only). Databases may be
// copied (with mangled names) into the master's temp directory; see
// the comment below for why.
vector<pair<docstring, string>> const Buffer::prepareBibFilePaths(OutputParams const & runparams,
	docstring_list const & bibfilelist,
	bool const add_extension) const
{
	// If we are processing the LaTeX file in a temp directory then
	// copy the .bib databases to this temp directory, mangling their
	// names in the process. Store this mangled name in the list of
	// all databases.
	// (We need to do all this because BibTeX *really*, *really*
	// can't handle "files with spaces" and Windows users tend to
	// use such filenames.)
	// Otherwise, store the (maybe absolute) path to the original,
	// unmangled database name.

	vector<pair<docstring, string>> res;

	// determine the export format
	string const tex_format = flavor2format(runparams.flavor);

	// check for spaces in paths
	bool found_space = false;

	for (auto const & bit : bibfilelist) {
		string utf8input = to_utf8(bit);
		string database =
			prepareFileNameForLaTeX(utf8input, ".bib", runparams.nice);
		FileName try_in_file =
			makeAbsPath(database + ".bib", filePath());
		bool not_from_texmf = try_in_file.isReadableFile();
		// If the file has not been found, try with the real file name
		// (it might come from a child in a sub-directory)
		if (!not_from_texmf) {
			try_in_file = getBibfilePath(bit);
			if (try_in_file.isReadableFile()) {
				// Check if the file is in texmf
				FileName kpsefile(findtexfile(changeExtension(utf8input, "bib"), "bib", true));
				not_from_texmf = kpsefile.empty()
					|| kpsefile.absFileName() != try_in_file.absFileName();
				if (not_from_texmf)
					// If this exists, make path relative to the master
					// FIXME Unicode
					database =
						removeExtension(prepareFileNameForLaTeX(
							to_utf8(makeRelPath(from_utf8(try_in_file.absFileName()),
									    from_utf8(filePath()))),
							".bib", runparams.nice));
			}
		}

		// Non-nice export to a temp directory: copy the database
		// there under a mangled (space-free) name.
		if (!runparams.inComment && !runparams.dryrun && !runparams.nice &&
		    not_from_texmf) {
			// mangledFileName() needs the extension
			DocFileName const in_file = DocFileName(try_in_file);
			database = removeExtension(in_file.mangledFileName());
			FileName const out_file = makeAbsPath(database + ".bib",
					masterBuffer()->temppath());
			bool const success = in_file.copyTo(out_file);
			if (!success) {
				LYXERR0("Failed to copy '" << in_file
				       << "' to '" << out_file << "'");
			}
		} else if (!runparams.inComment && runparams.nice && not_from_texmf) {
			// Nice export: keep the name, but warn about names
			// that LaTeX or DVI viewers cannot cope with.
			runparams.exportdata->addExternalFile(tex_format, try_in_file, database + ".bib");
			if (!isValidLaTeXFileName(database)) {
				frontend::Alert::warning(_("Invalid filename"),
					_("The following filename will cause troubles "
					  "when running the exported file through LaTeX: ") +
					from_utf8(database));
			}
			if (!isValidDVIFileName(database)) {
				frontend::Alert::warning(_("Problematic filename for DVI"),
					_("The following filename can cause troubles "
					  "when running the exported file through LaTeX "
					  "and opening the resulting DVI: ") +
					from_utf8(database), true);
			}
		}

		if (add_extension)
			database += ".bib";

		// FIXME UNICODE
		docstring const path = from_utf8(latex_path(database));

		if (contains(path, ' '))
			found_space = true;
		// Per-database encoding is only meaningful with biblatex.
		string enc;
		if (params().useBiblatex() && !params().bibFileEncoding(utf8input).empty())
			enc = params().bibFileEncoding(utf8input);

		// Avoid recording the same database path twice.
		bool recorded = false;
		for (auto const & pe : res) {
			if (pe.first == path) {
				recorded = true;
				break;
			}

		}
		if (!recorded)
			res.push_back(make_pair(path, enc));
	}

	// Check if there are spaces in the path and warn BibTeX users, if so.
	// (biber can cope with such paths)
	if (!prefixIs(runparams.bibtex_command, "biber")) {
		// Post this warning only once.
		static bool warned_about_spaces = false;
		if (!warned_about_spaces &&
		    runparams.nice && found_space) {
			warned_about_spaces = true;
			Alert::warning(_("Export Warning!"),
				       _("There are spaces in the paths to your BibTeX databases.\n"
					 "BibTeX will be unable to find them."));
		}
	}

	return res;
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
Fix bug #4812 (Layout in local directory lost on Save As, Copying)
The "save-as" part of the bug is fixed by extending the \textclass tag
such that, if a local layout file is used, its path relative to the
document directory is now stored together with the name. If a relative
path cannot be used, an absolute one is used but, in this case, the
document is not usable on a different platform.
The "copy" part is fixed by introducing a new \origin tag, which is
written when the file is saved. This tag stores the absolute path of
the document directory. If the document is manually copied to a
different location, the local layout file is retrieved by using
\origin (which is only updated on save).
This new tag may prove useful also for locating other files when the
document is manually moved to a different directory.
As in the original implementation the files needed for the layout
(for example, a latex class) had to be in the same directory as the
layout file, this directory has also to be added to TEXINPUTS.
2015-05-13 19:40:51 +00:00
|
|
|
|
// Path (relative to the document directory, or absolute) of a local
// layout file, empty when none is used.
string Buffer::layoutPos() const
{
	return d->layout_position;
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
void Buffer::setLayoutPos(string const & path)
{
	// Record the directory of a local layout file, stored relative to
	// the document directory. An empty path clears the setting.
	if (path.empty()) {
		d->layout_position.clear();
		return;
	}

	LATTEST(FileName::isAbsolute(path));

	string const rel =
		to_utf8(makeRelPath(from_utf8(path), from_utf8(filePath())));
	// "." denotes the document directory itself.
	d->layout_position = rel.empty() ? "." : rel;
}
|
|
|
|
|
|
|
|
|
|
|
2017-03-01 21:03:44 +00:00
|
|
|
|
// True if the buffer itself has been flagged read-only (this does not
// consider external modification; see isReadonly()).
bool Buffer::hasReadonlyFlag() const
{
	return d->read_only;
}
|
|
|
|
|
|
|
|
|
|
|
2017-03-01 21:03:44 +00:00
|
|
|
|
// A buffer is effectively read-only when flagged so, or while an
// external-modification notification is pending.
bool Buffer::isReadonly() const
{
	return hasReadonlyFlag() || notifiesExternalModification();
}
|
|
|
|
|
|
|
|
|
|
|
2007-11-30 17:41:27 +00:00
|
|
|
|
void Buffer::setParent(Buffer const * buffer)
{
	// We need to do some work here to avoid recursive parent structures.
	// This is the easy case.
	if (buffer == this) {
		LYXERR0("Ignoring attempt to set self as parent in\n" << fileName());
		return;
	}
	// Now we check parents going upward, to make sure that IF we set the
	// parent as requested, we would not generate a recursive include.
	set<Buffer const *> seen;
	for (Buffer const * anc = buffer; anc; anc = anc->parent()) {
		// insert() returns false in .second if anc was seen before,
		// i.e. the ancestor chain contains a cycle.
		if (!seen.insert(anc).second) {
			LYXERR0("Ignoring attempt to set parent of\n" <<
				fileName() <<
				"\nto " <<
				buffer->fileName() <<
				"\nbecause that would create a recursive inclusion.");
			return;
		}
	}

	// We should be safe now.
	d->setParent(buffer);
	updateMacros();
}
|
|
|
|
|
|
|
|
|
|
|
2008-11-16 21:28:06 +00:00
|
|
|
|
// The direct parent buffer, or null if this buffer has no parent.
Buffer const * Buffer::parent() const
{
	return d->parent();
}
|
2004-04-13 06:27:29 +00:00
|
|
|
|
|
|
|
|
|
|
2010-09-29 11:55:10 +00:00
|
|
|
|
// All buffers related to this one: the master first, followed by all
// of the master's descendants.
ListOfBuffers Buffer::allRelatives() const
{
	ListOfBuffers lb = masterBuffer()->getDescendants();
	lb.push_front(const_cast<Buffer *>(masterBuffer()));
	return lb;
}
|
|
|
|
|
|
|
|
|
|
|
2007-11-30 17:41:27 +00:00
|
|
|
|
Buffer const * Buffer::masterBuffer() const
{
	// Walk up the parent chain; the top-most buffer is the master.
	// A buffer with no parent is its own master.
	Buffer const * buf = this;
	while (Buffer const * up = buf->d->parent())
		buf = up;
	return buf;
}
|
|
|
|
|
|
|
|
|
|
|
2008-07-20 17:52:55 +00:00
|
|
|
|
// True if the given buffer is a direct child of this one.
bool Buffer::isChild(Buffer * child) const
{
	return d->children_positions.find(child) != d->children_positions.end();
}
|
|
|
|
|
|
|
|
|
|
|
2008-11-16 21:28:06 +00:00
|
|
|
|
DocIterator Buffer::firstChildPosition(Buffer const * child)
{
	// Position of the first inclusion of `child` in this buffer; a
	// default iterator is returned when `child` is not a child here.
	Impl::BufferPositionMap::iterator const it =
		d->children_positions.find(child);
	if (it != d->children_positions.end())
		return it->second;
	return DocIterator(this);
}
|
|
|
|
|
|
|
|
|
|
|
2010-09-29 12:17:24 +00:00
|
|
|
|
// True if this buffer includes at least one child document.
bool Buffer::hasChildren() const
{
	return !d->children_positions.empty();
}
|
|
|
|
|
|
|
|
|
|
|
2020-10-31 17:18:51 +00:00
|
|
|
|
void Buffer::collectChildren(ListOfBuffers & children, bool grand_children) const
{
	// Append every direct child (and, on request, its descendants)
	// to `children`, skipping any buffer already collected.
	for (auto const & p : d->children_positions) {
		Buffer * child = const_cast<Buffer *>(p.first);
		// No duplicates
		bool const already_there =
			find(children.begin(), children.end(), child) != children.end();
		if (already_there)
			continue;
		children.push_back(child);
		// there might be grandchildren
		if (grand_children)
			child->collectChildren(children, true);
	}
}
|
|
|
|
|
|
|
|
|
|
|
2010-09-29 13:05:57 +00:00
|
|
|
|
ListOfBuffers Buffer::getChildren() const
{
	// Direct children only (no grandchildren).
	ListOfBuffers children;
	collectChildren(children, false);
	// Make sure we have not included ourselves.
	ListOfBuffers::iterator const self =
		find(children.begin(), children.end(), this);
	if (self != children.end()) {
		LYXERR0("Recursive include detected in `" << fileName() << "'.");
		children.erase(self);
	}
	return children;
}
|
|
|
|
|
|
|
|
|
|
|
2019-12-16 04:36:17 +00:00
|
|
|
|
ListOfBuffers Buffer::getDescendants() const
{
	// Children, grandchildren, and so on, recursively.
	ListOfBuffers descendants;
	collectChildren(descendants, true);
	// Make sure we have not included ourselves.
	ListOfBuffers::iterator const self =
		find(descendants.begin(), descendants.end(), this);
	if (self != descendants.end()) {
		LYXERR0("Recursive include detected in `" << fileName() << "'.");
		descendants.erase(self);
	}
	return descendants;
}
|
|
|
|
|
|
|
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
|
// Iterator to the last element of `m` whose key is strictly less than
// `x`, or m.end() when there is none (including when `m` is empty).
template<typename M>
typename M::const_iterator greatest_below(M & m, typename M::key_type const & x)
{
	// The first element not less than x; everything before it is < x.
	typename M::const_iterator bound = m.lower_bound(x);
	// Nothing strictly below x. This also covers an empty container,
	// where begin() == end() == lower_bound(x).
	if (bound == m.begin())
		return m.end();
	--bound;
	return bound;
}
|
|
|
|
|
|
|
|
|
|
|
2010-01-25 18:39:08 +00:00
|
|
|
|
// Look up the macro definition named 'name' that is visible at document
// position 'pos'. Both macros defined in this buffer and macros coming in
// through included child documents are considered; of all candidates whose
// scope covers pos, the one defined last (closest before pos) wins.
// Returns nullptr if no visible definition exists.
MacroData const * Buffer::Impl::getBufferMacro(docstring const & name,
	DocIterator const & pos) const
{
	LYXERR(Debug::MACROS, "Searching for " << to_ascii(name) << " at " << pos);

	// if paragraphs have no macro context set, pos will be empty
	if (pos.empty())
		return nullptr;

	// we haven't found anything yet
	DocIterator bestPos = owner_->par_iterator_begin();
	MacroData const * bestData = nullptr;

	// find macro definitions for name
	NamePositionScopeMacroMap::const_iterator nameIt = macros.find(name);
	if (nameIt != macros.end()) {
		// find last definition in front of pos or at pos itself
		PositionScopeMacroMap::const_iterator it
			= greatest_below(nameIt->second, pos);
		if (it != nameIt->second.end()) {
			// Walk backwards over earlier definitions until one
			// whose scope still covers pos is found.
			while (true) {
				// scope ends behind pos?
				if (pos < it->second.scope) {
					// Looks good, remember this. If there
					// is no external macro behind this,
					// we found the right one already.
					bestPos = it->first;
					bestData = &it->second.macro;
					break;
				}

				// try previous macro if there is one
				if (it == nameIt->second.begin())
					break;
				--it;
			}
		}
	}

	// find macros in included files
	PositionScopeBufferMap::const_iterator it
		= greatest_below(position_to_children, pos);
	if (it == position_to_children.end())
		// no children before
		return bestData;

	// Walk backwards over inclusion positions; a macro from a child can
	// only beat bestData if the child is included after bestPos.
	while (true) {
		// do we know something better (i.e. later) already?
		if (it->first < bestPos )
			break;

		// scope ends behind pos?
		// For a cloned buffer the child pointer is used even without
		// checking the buffer list (the clone owns its children).
		if (pos < it->second.scope
			&& (cloned_buffer_ ||
			    theBufferList().isLoaded(it->second.buffer))) {
			// look for macro in external file
			// macro_lock prevents the child from recursing back here.
			macro_lock = true;
			MacroData const * data
				= it->second.buffer->getMacro(name, false);
			macro_lock = false;
			if (data) {
				bestPos = it->first;
				bestData = data;
				break;
			}
		}

		// try previous file if there is one
		if (it == position_to_children.begin())
			break;
		--it;
	}

	// return the best macro we have found
	return bestData;
}
|
|
|
|
|
|
|
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
|
// Resolve the macro 'name' as seen from position 'pos': first this buffer's
// own macros, then the parent document's, then (if 'global') the global
// macro table. Returns nullptr when nothing matches or a lookup is already
// in progress (recursion guard).
MacroData const * Buffer::getMacro(docstring const & name,
	DocIterator const & pos, bool global) const
{
	// Recursion guard: a lookup through a parent/child may come back here.
	if (d->macro_lock)
		return nullptr;

	// First consult the macros visible inside this buffer.
	if (MacroData const * local = d->getBufferMacro(name, pos))
		return local;

	// Then ask the master document, if any, protected against recursion.
	if (Buffer const * const parent = d->parent()) {
		d->macro_lock = true;
		MacroData const * inherited = parent->getMacro(name, *this, false);
		d->macro_lock = false;
		if (inherited)
			return inherited;
	}

	// Finally fall back to the global macro table when requested.
	if (!global)
		return nullptr;
	return MacroTable::globalMacros().get(name);
}
|
|
|
|
|
|
|
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
|
// Resolve 'name' with a lookup position past the last paragraph, so every
// macro defined anywhere in the document is in scope.
MacroData const * Buffer::getMacro(docstring const & name, bool global) const
{
	// Build a position one paragraph beyond the end of the buffer.
	DocIterator wholeBuffer = par_iterator_begin();
	wholeBuffer.pit() = wholeBuffer.lastpit() + 1;
	return getMacro(name, wholeBuffer, global);
}
|
2007-11-01 11:13:07 +00:00
|
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
|
|
2008-03-07 20:51:56 +00:00
|
|
|
|
// Resolve 'name' as it is visible to the included document 'child', i.e.
// at the position where 'child' is first included in this buffer.
MacroData const * Buffer::getMacro(docstring const & name,
	Buffer const & child, bool global) const
{
	// Find where the child buffer is included in this document.
	Impl::BufferPositionMap::iterator const posIt
		= d->children_positions.find(&child);
	// An unknown child cannot see any of our macros.
	if (posIt == d->children_positions.end())
		return nullptr;

	// Macros visible at the inclusion point are visible to the child.
	return getMacro(name, posIt->second, global);
}
|
|
|
|
|
|
|
|
|
|
|
2010-07-21 21:58:54 +00:00
|
|
|
|
// Recursively scan the text starting at 'it' (which addresses the current
// cell/paragraph) and register every macro template found, together with
// the scope in which it is valid. 'scope' is the position where the
// surrounding macro scope ends. Also records where child documents are
// included (children_positions / position_to_children).
void Buffer::Impl::updateMacros(DocIterator & it, DocIterator & scope)
{
	pit_type const lastpit = it.lastpit();

	// look for macros in each paragraph
	while (it.pit() <= lastpit) {
		Paragraph & par = it.paragraph();

		// iterate over the insets of the current paragraph
		for (auto const & insit : par.insetList()) {
			it.pos() = insit.pos;

			// is it a nested text inset?
			if (insit.inset->asInsetText()) {
				// Inset needs its own scope?
				InsetText const * itext = insit.inset->asInsetText();
				bool newScope = itext->isMacroScope();

				// scope which ends just behind the inset
				DocIterator insetScope = it;
				++insetScope.pos();

				// collect macros in inset
				it.push_back(CursorSlice(*insit.inset));
				updateMacros(it, newScope ? insetScope : scope);
				it.pop_back();
				continue;
			}

			// Tabular insets: recurse into every cell with the
			// enclosing scope (cells do not open a new one here).
			if (insit.inset->asInsetTabular()) {
				CursorSlice slice(*insit.inset);
				size_t const numcells = slice.nargs();
				for (; slice.idx() < numcells; slice.forwardIdx()) {
					it.push_back(slice);
					updateMacros(it, scope);
					it.pop_back();
				}
				continue;
			}

			// is it an external file?
			if (insit.inset->lyxCode() == INCLUDE_CODE) {
				// get buffer of external file
				InsetInclude const & incinset =
					static_cast<InsetInclude const &>(*insit.inset);
				// macro_lock keeps the load from triggering a
				// recursive macro update.
				macro_lock = true;
				Buffer * child = incinset.loadIfNeeded();
				macro_lock = false;
				if (!child)
					continue;

				// register its position, but only when it is
				// included first in the buffer
				// (map::insert does not overwrite an existing entry)
				children_positions.insert({child, it});

				// register child with its scope
				position_to_children[it] = Impl::ScopeBuffer(scope, child);
				continue;
			}

			// While exporting, remember where each math hull inset
			// sits so its location can be reported later.
			InsetMath * im = insit.inset->asInsetMath();
			if (doing_export && im) {
				InsetMathHull * hull = im->asHullInset();
				if (hull)
					hull->recordLocation(it);
			}

			// Everything below only concerns macro templates.
			if (insit.inset->lyxCode() != MATHMACRO_CODE)
				continue;

			// get macro data
			InsetMathMacroTemplate & macroTemplate =
				*insit.inset->asInsetMath()->asMacroTemplate();
			MacroContext mc(owner_, it);
			macroTemplate.updateToContext(mc);

			// valid?
			bool valid = macroTemplate.validMacro();
			// FIXME: Should be fixNameAndCheckIfValid() in fact,
			// then the BufferView's cursor will be invalid in
			// some cases which leads to crashes.
			if (!valid)
				continue;

			// register macro
			// FIXME (Abdel), I don't understand why we pass 'it' here
			// instead of 'macroTemplate' defined above... is this correct?
			macros[macroTemplate.name()][it] =
				Impl::ScopeMacro(scope, MacroData(const_cast<Buffer *>(owner_), it));
		}

		// next paragraph
		it.pit()++;
		it.pos() = 0;
	}
}
|
|
|
|
|
|
|
|
|
|
|
2010-07-21 21:58:54 +00:00
|
|
|
|
void Buffer::updateMacros() const
|
2007-12-21 20:42:46 +00:00
|
|
|
|
{
|
|
|
|
|
if (d->macro_lock)
|
|
|
|
|
return;
|
|
|
|
|
|
2007-12-21 20:43:21 +00:00
|
|
|
|
LYXERR(Debug::MACROS, "updateMacro of " << d->filename.onlyFileName());
|
2007-12-21 20:42:46 +00:00
|
|
|
|
|
|
|
|
|
// start with empty table
|
|
|
|
|
d->macros.clear();
|
|
|
|
|
d->children_positions.clear();
|
|
|
|
|
d->position_to_children.clear();
|
|
|
|
|
|
|
|
|
|
// Iterate over buffer, starting with first paragraph
|
|
|
|
|
// The scope must be bigger than any lookup DocIterator
|
|
|
|
|
// later. For the global lookup, lastpit+1 is used, hence
|
|
|
|
|
// we use lastpit+2 here.
|
|
|
|
|
DocIterator it = par_iterator_begin();
|
|
|
|
|
DocIterator outerScope = it;
|
|
|
|
|
outerScope.pit() = outerScope.lastpit() + 2;
|
2010-07-21 21:58:54 +00:00
|
|
|
|
d->updateMacros(it, outerScope);
|
2007-12-21 20:42:46 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2009-07-09 09:48:34 +00:00
|
|
|
|
// Append to 'result' the names of branch insets used in this document (and
// recursively in included children) that are not declared in the relevant
// branch list. With from_master the master's branch list is consulted
// instead of this buffer's own.
void Buffer::getUsedBranches(std::list<docstring> & result, bool const from_master) const
{
	for (Inset const & it : inset()) {
		if (it.lyxCode() == BRANCH_CODE) {
			InsetBranch const & br = static_cast<InsetBranch const &>(it);
			docstring const name = br.branch();
			// Record the branch only if it is undeclared in the
			// branch list we are checking against.
			if (!from_master && !params().branchlist().find(name))
				result.push_back(name);
			else if (from_master && !masterBuffer()->params().branchlist().find(name))
				result.push_back(name);
			continue;
		}
		if (it.lyxCode() == INCLUDE_CODE) {
			// get buffer of external file
			InsetInclude const & ins =
				static_cast<InsetInclude const &>(it);
			Buffer * child = ins.loadIfNeeded();
			if (!child)
				continue;
			// Children are always checked against the master's list.
			child->getUsedBranches(result, true);
		}
	}
	// remove duplicates
	// NOTE(review): list::unique only removes *adjacent* duplicates;
	// non-consecutive repeats of a branch name would survive — confirm
	// whether callers rely on document order or a sort should precede this.
	result.unique();
}
|
|
|
|
|
|
|
|
|
|
|
2011-05-07 11:57:08 +00:00
|
|
|
|
// Walk over every inset of the document and refresh macro instances inside
// math insets, so that they reflect the current macro definitions.
void Buffer::updateMacroInstances(UpdateType utype) const
{
	LYXERR(Debug::MACROS, "updateMacroInstances for "
		<< d->filename.onlyFileName());
	DocIterator it = doc_iterator_begin(this);
	// Position on the first inset (if any).
	it.forwardInset();
	DocIterator const end = doc_iterator_end(this);
	for (; it != end; it.forwardInset()) {
		// look for MathData cells in InsetMathNest insets
		InsetMath * minset = it.nextInset()->asInsetMath();
		if (!minset)
			continue;

		// update macro in all cells of the InsetMathNest
		idx_type n = minset->nargs();
		MacroContext mc = MacroContext(this, it);
		for (idx_type i = 0; i < n; ++i) {
			MathData & data = minset->cell(i);
			data.updateMacros(nullptr, mc, utype, 0);
		}
	}
}
|
2005-07-17 14:29:35 +00:00
|
|
|
|
|
2005-07-17 23:03:01 +00:00
|
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
|
// Collect the names of all macros reachable from this buffer: its own,
// those of loaded children, and those of the parent chain. The macro lock
// prevents infinite recursion through the parent/child graph.
void Buffer::listMacroNames(MacroNameSet & macros) const
{
	// Guard against cycles in the parent/child graph.
	if (d->macro_lock)
		return;

	d->macro_lock = true;

	// Names of all macros defined directly in this buffer.
	for (auto const & entry : d->macros)
		macros.insert(entry.first);

	// Recurse into every included child document.
	for (auto const & childPos : d->children_positions) {
		Buffer * child = const_cast<Buffer *>(childPos.first);
		// The buffer might have been closed (see #10766).
		if (theBufferList().isLoaded(child))
			child->listMacroNames(macros);
	}

	// And finally ask the master document, if there is one.
	if (Buffer const * const parent = d->parent())
		parent->listMacroNames(macros);

	d->macro_lock = false;
}
|
|
|
|
|
|
|
|
|
|
|
2008-03-26 12:55:36 +00:00
|
|
|
|
// Collect the macro definitions inherited from the parent chain that are
// visible at our inclusion point, and register LaTeX features they need.
void Buffer::listParentMacros(MacroSet & macros, LaTeXFeatures & features) const
{
	Buffer const * const parent = d->parent();
	// Nothing to collect without a master document.
	if (!parent)
		return;

	// Gather every macro name known to the parent chain.
	MacroNameSet names;
	parent->listMacroNames(names);

	// Resolve each name to the definition visible to this child.
	for (auto const & name : names) {
		MacroData const * data = parent->getMacro(name, *this, false);
		if (!data)
			continue;
		macros.insert(data);

		// we cannot access the original InsetMathMacroTemplate anymore
		// here to calls validate method. So we do its work here manually.
		// FIXME: somehow make the template accessible here.
		if (data->optionals() > 0)
			features.require("xargs");
	}
}
|
|
|
|
|
|
|
|
|
|
|
2013-03-18 23:32:30 +00:00
|
|
|
|
// Return the (mutable) list of references recorded for 'label', creating
// an empty entry on first access. The cache is owned by the master buffer.
Buffer::References & Buffer::getReferenceCache(docstring const & label)
{
	// The cache lives in the master document; delegate if we are a child.
	if (d->parent())
		return const_cast<Buffer *>(masterBuffer())->getReferenceCache(label);

	// Look up the entry, inserting an empty one when absent.
	RefCache::iterator it = d->ref_cache_.find(label);
	if (it == d->ref_cache_.end())
		it = d->ref_cache_.insert(make_pair(label, References())).first;
	return it->second;
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Read-only view of the reference-cache entry for 'label'.
Buffer::References const & Buffer::references(docstring const & label) const
{
	// Share the mutable lookup; constness is restored by the return type.
	Buffer * mutableThis = const_cast<Buffer *>(this);
	return mutableThis->getReferenceCache(label);
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Record 'inset' (and its paragraph position) as one more reference to
// 'label' in the reference cache.
void Buffer::addReference(docstring const & label, Inset * inset, ParIterator it)
{
	getReferenceCache(label).push_back(make_pair(inset, it));
}
|
|
|
|
|
|
|
|
|
|
|
2018-12-29 09:08:02 +00:00
|
|
|
|
// Register a label inset in the master buffer's label cache, together with
// its name and whether it is currently active.
void Buffer::setInsetLabel(docstring const & label, InsetLabel const * il,
	bool const active)
{
	// The cache is kept centrally in the master document.
	LabelInfo info;
	info.label = label;
	info.inset = il;
	info.active = active;
	masterBuffer()->d->label_cache_.push_back(info);
}
|
|
|
|
|
|
|
|
|
|
|
2018-12-29 09:08:02 +00:00
|
|
|
|
// Find the first cached label inset named 'label'. When 'active' is true,
// only active labels qualify; otherwise any matching label is returned.
// Returns nullptr when nothing matches.
InsetLabel const * Buffer::insetLabel(docstring const & label,
	bool const active) const
{
	for (auto const & info : masterBuffer()->d->label_cache_) {
		if (info.label != label)
			continue;
		if (!active || info.active)
			return info.inset;
	}
	return nullptr;
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// True iff an *active* label inset with this name exists in the cache.
bool Buffer::activeLabel(docstring const & label) const
{
	InsetLabel const * il = insetLabel(label, true);
	return il != nullptr;
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
void Buffer::clearReferenceCache() const
|
|
|
|
|
{
|
2018-12-31 17:27:34 +00:00
|
|
|
|
if (!d->parent()) {
|
2008-03-02 15:27:35 +00:00
|
|
|
|
d->ref_cache_.clear();
|
2018-12-31 17:27:34 +00:00
|
|
|
|
d->label_cache_.clear();
|
|
|
|
|
}
|
2008-03-02 15:27:35 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2013-04-06 16:34:26 +00:00
|
|
|
|
// Rename the bibliography key used by citation insets from 'from' to 'to',
// but only if 'from' occurs at most once in the master bibliography (an
// ambiguous key is left untouched). All changes are grouped into a single
// undo step.
void Buffer::changeRefsIfUnique(docstring const & from, docstring const & to)
{
	//FIXME: This does not work for child documents yet.
	reloadBibInfoCache();

	// Check if the label 'from' appears more than once
	vector<docstring> labels;
	for (auto const & bibit : masterBibInfo())
		labels.push_back(bibit.first);

	// Ambiguous key: refuse to rename.
	if (count(labels.begin(), labels.end(), from) > 1)
		return;

	string const paramName = "key";
	// Group every per-inset change into one undo step.
	UndoGroupHelper ugh(this);
	InsetIterator it = begin(inset());
	for (; it; ++it) {
		// Only citation insets carry the "key" parameter we rewrite.
		if (it->lyxCode() != CITE_CODE)
			continue;

		InsetCommand * inset = it->asInsetCommand();
		docstring const oldValue = inset->getParam(paramName);
		if (oldValue == from) {
			undo().recordUndo(CursorData(it));
			inset->setParam(paramName, to);
		}
	}
}
|
view-source feature, from Bo Peng <ben.bob@gmail.com>
* src/buffer.h buffer.C - getSourceCode()
* src/lyxfunc.C - open view-source dialog
* src/text3.C - change LFUN_MOUSE_RELEASE
* src/output_linuxdoc.C, src/output_docbook.C, src/output_latex.C
- intercept output
* src/outputparams.h, outputparams.C - add par_begin, par_end, dryrun
* src/insets/insetgraphics.C - add dryrun mode of file conversion
* lib/ui/stdmenus.ui - add view-source menu item under view
* Add view-source dialog, add
src/frontends/qt2/QViewSourceDialog.h, QViewSource.C, QViewSource.h, QViewSourceDialog.C
src/frontends/qt2/ui/QViewSourceDialogBase.ui
src/frontends/controllers/ControlViewSource.h ControlViewSource.C
modify
src/frontends/qt2/Makefile.dialogs, Makefile.am, Dialogs.C,
src/frontends/controllers/Makefile.am, po.POTFILES.in
git-svn-id: svn://svn.lyx.org/lyx/lyx-devel/trunk@13610 a592a061-630c-0410-9148-cb99ea01b6c8
2006-04-09 02:48:54 +00:00
|
|
|
|
|
2015-08-04 22:16:40 +00:00
|
|
|
|
// returns NULL if id-to-row conversion is unsupported
|
2016-06-02 20:58:52 +00:00
|
|
|
|
unique_ptr<TexRow> Buffer::getSourceCode(odocstream & os, string const & format,
|
|
|
|
|
pit_type par_begin, pit_type par_end,
|
|
|
|
|
OutputWhat output, bool master) const
|
view-source feature, from Bo Peng <ben.bob@gmail.com>
* src/buffer.h buffer.C - getSourceCode()
* src/lyxfunc.C - open view-source dialog
* src/text3.C - change LFUN_MOUSE_RELEASE
* src/output_linuxdoc.C, src/output_docbook.C, src/output_latex.C
- intercept output
* src/outputparams.h, outputparams.C - add par_begin, par_end, dryrun
* src/insets/insetgraphics.C - add dryrun mode of file conversion
* lib/ui/stdmenus.ui - add view-source menu item under view
* Add view-source dialog, add
src/frontends/qt2/QViewSourceDialog.h, QViewSource.C, QViewSource.h, QViewSourceDialog.C
src/frontends/qt2/ui/QViewSourceDialogBase.ui
src/frontends/controllers/ControlViewSource.h ControlViewSource.C
modify
src/frontends/qt2/Makefile.dialogs, Makefile.am, Dialogs.C,
src/frontends/controllers/Makefile.am, po.POTFILES.in
git-svn-id: svn://svn.lyx.org/lyx/lyx-devel/trunk@13610 a592a061-630c-0410-9148-cb99ea01b6c8
2006-04-09 02:48:54 +00:00
|
|
|
|
{
|
2016-06-02 20:58:52 +00:00
|
|
|
|
unique_ptr<TexRow> texrow;
|
2007-03-18 10:59:16 +00:00
|
|
|
|
OutputParams runparams(¶ms().encoding());
|
view-source feature, from Bo Peng <ben.bob@gmail.com>
* src/buffer.h buffer.C - getSourceCode()
* src/lyxfunc.C - open view-source dialog
* src/text3.C - change LFUN_MOUSE_RELEASE
* src/output_linuxdoc.C, src/output_docbook.C, src/output_latex.C
- intercept output
* src/outputparams.h, outputparams.C - add par_begin, par_end, dryrun
* src/insets/insetgraphics.C - add dryrun mode of file conversion
* lib/ui/stdmenus.ui - add view-source menu item under view
* Add view-source dialog, add
src/frontends/qt2/QViewSourceDialog.h, QViewSource.C, QViewSource.h, QViewSourceDialog.C
src/frontends/qt2/ui/QViewSourceDialogBase.ui
src/frontends/controllers/ControlViewSource.h ControlViewSource.C
modify
src/frontends/qt2/Makefile.dialogs, Makefile.am, Dialogs.C,
src/frontends/controllers/Makefile.am, po.POTFILES.in
git-svn-id: svn://svn.lyx.org/lyx/lyx-devel/trunk@13610 a592a061-630c-0410-9148-cb99ea01b6c8
2006-04-09 02:48:54 +00:00
|
|
|
|
runparams.nice = true;
|
2011-05-13 19:39:56 +00:00
|
|
|
|
runparams.flavor = params().getOutputFlavor(format);
|
2007-01-15 22:49:14 +00:00
|
|
|
|
runparams.linelen = lyxrc.plaintext_linelen;
|
view-source feature, from Bo Peng <ben.bob@gmail.com>
* src/buffer.h buffer.C - getSourceCode()
* src/lyxfunc.C - open view-source dialog
* src/text3.C - change LFUN_MOUSE_RELEASE
* src/output_linuxdoc.C, src/output_docbook.C, src/output_latex.C
- intercept output
* src/outputparams.h, outputparams.C - add par_begin, par_end, dryrun
* src/insets/insetgraphics.C - add dryrun mode of file conversion
* lib/ui/stdmenus.ui - add view-source menu item under view
* Add view-source dialog, add
src/frontends/qt2/QViewSourceDialog.h, QViewSource.C, QViewSource.h, QViewSourceDialog.C
src/frontends/qt2/ui/QViewSourceDialogBase.ui
src/frontends/controllers/ControlViewSource.h ControlViewSource.C
modify
src/frontends/qt2/Makefile.dialogs, Makefile.am, Dialogs.C,
src/frontends/controllers/Makefile.am, po.POTFILES.in
git-svn-id: svn://svn.lyx.org/lyx/lyx-devel/trunk@13610 a592a061-630c-0410-9148-cb99ea01b6c8
2006-04-09 02:48:54 +00:00
|
|
|
|
// No side effect of file copying and image conversion
|
|
|
|
|
runparams.dryrun = true;
|
|
|
|
|
|
2018-05-04 14:37:21 +00:00
|
|
|
|
// Some macros rely on font encoding
|
|
|
|
|
runparams.main_fontenc = params().main_font_encoding();
|
|
|
|
|
|
2021-01-06 03:28:15 +00:00
|
|
|
|
// Use the right wrapping for the comment at the beginning of the generated
|
|
|
|
|
// snippet, so that it is either valid LaTeX or valid XML (including HTML and DocBook).
|
|
|
|
|
docstring comment_start = from_ascii("% ");
|
|
|
|
|
docstring comment_end = from_ascii("");
|
|
|
|
|
if (runparams.flavor == Flavor::Html || runparams.flavor == Flavor::DocBook5) {
|
|
|
|
|
comment_start = from_ascii("<!-- ");
|
|
|
|
|
comment_end = from_ascii(" -->");
|
|
|
|
|
}
|
|
|
|
|
|
2011-10-29 21:00:23 +00:00
|
|
|
|
if (output == CurrentParagraph) {
|
2006-08-04 13:59:12 +00:00
|
|
|
|
runparams.par_begin = par_begin;
|
|
|
|
|
runparams.par_end = par_end;
|
2008-03-07 20:51:56 +00:00
|
|
|
|
if (par_begin + 1 == par_end) {
|
2021-01-06 03:28:15 +00:00
|
|
|
|
os << comment_start
|
2007-08-12 18:58:59 +00:00
|
|
|
|
<< bformat(_("Preview source code for paragraph %1$d"), par_begin)
|
2021-01-06 03:28:15 +00:00
|
|
|
|
<< comment_end
|
2007-05-25 09:20:35 +00:00
|
|
|
|
<< "\n\n";
|
2008-03-07 20:51:56 +00:00
|
|
|
|
} else {
|
2021-01-06 03:28:15 +00:00
|
|
|
|
os << comment_start
|
2007-05-25 09:20:35 +00:00
|
|
|
|
<< bformat(_("Preview source code from paragraph %1$s to %2$s"),
|
|
|
|
|
convert<docstring>(par_begin),
|
|
|
|
|
convert<docstring>(par_end - 1))
|
2021-01-06 03:28:15 +00:00
|
|
|
|
<< comment_end
|
2007-05-25 09:20:35 +00:00
|
|
|
|
<< "\n\n";
|
2008-03-07 20:51:56 +00:00
|
|
|
|
}
|
2006-08-04 13:59:12 +00:00
|
|
|
|
// output paragraphs
|
2020-11-30 22:00:40 +00:00
|
|
|
|
if (runparams.flavor == Flavor::LyX) {
|
2012-06-05 02:01:26 +00:00
|
|
|
|
Paragraph const & par = text().paragraphs()[par_begin];
|
|
|
|
|
ostringstream ods;
|
|
|
|
|
depth_type dt = par.getDepth();
|
|
|
|
|
par.write(ods, params(), dt);
|
|
|
|
|
os << from_utf8(ods.str());
|
2020-11-30 22:00:40 +00:00
|
|
|
|
} else if (runparams.flavor == Flavor::Html) {
|
2019-05-09 23:35:40 +00:00
|
|
|
|
XMLStream xs(os);
|
2011-12-06 22:17:06 +00:00
|
|
|
|
setMathFlavor(runparams);
|
2010-12-07 08:36:42 +00:00
|
|
|
|
xhtmlParagraphs(text(), *this, xs, runparams);
|
2020-11-30 22:00:40 +00:00
|
|
|
|
} else if (runparams.flavor == Flavor::Text) {
|
2015-02-20 14:53:19 +00:00
|
|
|
|
bool dummy = false;
|
2012-03-09 22:24:20 +00:00
|
|
|
|
// FIXME Handles only one paragraph, unlike the others.
|
|
|
|
|
// Probably should have some routine with a signature like them.
|
|
|
|
|
writePlaintextParagraph(*this,
|
|
|
|
|
text().paragraphs()[par_begin], os, runparams, dummy);
|
2020-11-30 22:00:40 +00:00
|
|
|
|
} else if (runparams.flavor == Flavor::DocBook5) {
|
2020-06-08 21:27:49 +00:00
|
|
|
|
XMLStream xs{os};
|
|
|
|
|
docbookParagraphs(text(), *this, xs, runparams);
|
Introduce a wrapper class for odocstream to help ensuring that no
blank lines may be inadvertently output. This is achieved by using two
special iomanip-like variables (breakln and safebreakln) in the lyx::
namespace. When they are inserted in the stream, a newline is output
only if not already at the beginning of a line. The difference between
breakln and safebreakln is that, if needed, the former outputs '\n'
and the latter "%\n".
In future, the new class will also be used for counting the number of
newlines issued. Even if the infractrure for doing that is already in
place, the counting is essentially still done the old way.
There are still places in the code where the functionality of the
class could be used, most probably. ATM, it is used for InsetTabular,
InsetListings, InsetFloat, and InsetText.
The Comment and GreyedOut insets required a special treatment and a
new InsetLayout parameter (Display) has been introduced. The default
for Display is "true", meaning that the corresponding latex
environment is of "display" type, i.e., it stands on its own, whereas
"false" means that the contents appear inline with the text. The
latter is the case for both Comment and GreyedOut insets.
Mostly, the only visible effects on latex exports should be the
disappearing of some redundant % chars and the appearing/disappearing
of null {} latex groups after a comment or lyxgreyedout environments
(they are related to the presence or absence of a space immediately
after those environments), as well as the fact that math environments
are now started on their own lines.
As a last thing, only the latex code between \begin{document} and
\end{document} goes through the new class, the preamble being directly
output through odocstream, as usual.
git-svn-id: svn://svn.lyx.org/lyx/lyx-devel/trunk@37360 a592a061-630c-0410-9148-cb99ea01b6c8
2011-01-29 02:41:13 +00:00
|
|
|
|
} else {
|
2012-10-03 08:41:07 +00:00
|
|
|
|
// If we are previewing a paragraph, even if this is the
|
|
|
|
|
// child of some other buffer, let's cut the link here,
|
|
|
|
|
// so that no concurring settings from the master
|
|
|
|
|
// (e.g. branch state) interfere (see #8101).
|
2012-10-05 12:30:20 +00:00
|
|
|
|
if (!master)
|
|
|
|
|
d->ignore_parent = true;
|
2012-06-28 16:15:43 +00:00
|
|
|
|
// We need to validate the Buffer params' features here
|
|
|
|
|
// in order to know if we should output polyglossia
|
|
|
|
|
// macros (instead of babel macros)
|
|
|
|
|
LaTeXFeatures features(*this, params(), runparams);
|
2016-10-31 17:46:06 +00:00
|
|
|
|
validate(features);
|
2012-06-28 16:15:43 +00:00
|
|
|
|
runparams.use_polyglossia = features.usePolyglossia();
|
2020-04-05 13:23:22 +00:00
|
|
|
|
runparams.use_hyperref = features.isRequired("hyperref");
|
2008-10-16 07:43:46 +00:00
|
|
|
|
// latex or literate
|
2016-06-19 02:39:38 +00:00
|
|
|
|
otexstream ots(os);
|
|
|
|
|
// output above
|
|
|
|
|
ots.texrow().newlines(2);
|
2012-10-03 08:41:07 +00:00
|
|
|
|
// the real stuff
|
2011-02-10 20:02:48 +00:00
|
|
|
|
latexParagraphs(*this, text(), ots, runparams);
|
2016-06-19 02:39:38 +00:00
|
|
|
|
texrow = ots.releaseTexRow();
|
2012-10-03 08:41:07 +00:00
|
|
|
|
|
|
|
|
|
// Restore the parenthood
|
2012-10-05 12:30:20 +00:00
|
|
|
|
if (!master)
|
|
|
|
|
d->ignore_parent = false;
|
Introduce a wrapper class for odocstream to help ensuring that no
blank lines may be inadvertently output. This is achieved by using two
special iomanip-like variables (breakln and safebreakln) in the lyx::
namespace. When they are inserted in the stream, a newline is output
only if not already at the beginning of a line. The difference between
breakln and safebreakln is that, if needed, the former outputs '\n'
and the latter "%\n".
In future, the new class will also be used for counting the number of
newlines issued. Even if the infractrure for doing that is already in
place, the counting is essentially still done the old way.
There are still places in the code where the functionality of the
class could be used, most probably. ATM, it is used for InsetTabular,
InsetListings, InsetFloat, and InsetText.
The Comment and GreyedOut insets required a special treatment and a
new InsetLayout parameter (Display) has been introduced. The default
for Display is "true", meaning that the corresponding latex
environment is of "display" type, i.e., it stands on its own, whereas
"false" means that the contents appear inline with the text. The
latter is the case for both Comment and GreyedOut insets.
Mostly, the only visible effects on latex exports should be the
disappearing of some redundant % chars and the appearing/disappearing
of null {} latex groups after a comment or lyxgreyedout environments
(they are related to the presence or absence of a space immediately
after those environments), as well as the fact that math environments
are now started on their own lines.
As a last thing, only the latex code between \begin{document} and
\end{document} goes through the new class, the preamble being directly
output through odocstream, as usual.
git-svn-id: svn://svn.lyx.org/lyx/lyx-devel/trunk@37360 a592a061-630c-0410-9148-cb99ea01b6c8
2011-01-29 02:41:13 +00:00
|
|
|
|
}
|
2011-10-29 21:00:23 +00:00
|
|
|
|
} else {
|
2021-01-06 03:28:15 +00:00
|
|
|
|
os << comment_start;
|
2011-12-03 22:15:11 +00:00
|
|
|
|
if (output == FullSource)
|
2011-10-29 21:00:23 +00:00
|
|
|
|
os << _("Preview source code");
|
|
|
|
|
else if (output == OnlyPreamble)
|
|
|
|
|
os << _("Preview preamble");
|
|
|
|
|
else if (output == OnlyBody)
|
|
|
|
|
os << _("Preview body");
|
2021-01-06 03:28:15 +00:00
|
|
|
|
os << comment_end;
|
2011-10-29 21:00:23 +00:00
|
|
|
|
os << "\n\n";
|
2020-11-30 22:00:40 +00:00
|
|
|
|
if (runparams.flavor == Flavor::LyX) {
|
2012-06-05 02:01:26 +00:00
|
|
|
|
ostringstream ods;
|
|
|
|
|
if (output == FullSource)
|
|
|
|
|
write(ods);
|
|
|
|
|
else if (output == OnlyPreamble)
|
Fix bug #4812 (Layout in local directory lost on Save As, Copying)
The "save-as" part of the bug is fixed by extending the \textclass tag
such that, if a local layout file is used, its path relative to the
document directory is now stored together with the name. If a relative
path cannot be used, an absolute one is used but, in this case, the
document is not usable on a different platform.
The "copy" part is fixed by introducing a new \origin tag, which is
written when the file is saved. This tag stores the absolute path of
the document directory. If the document is manually copied to a
different location, the local layout file is retrieved by using
\origin (which is only updated on save).
This new tag may prove useful also for locating other files when the
document is manually moved to a different directory.
As in the original implementation the files needed for the layout
(for example, a latex class) had to be in the same directory as the
layout file, this directory has also to be added to TEXINPUTS.
2015-05-13 19:40:51 +00:00
|
|
|
|
params().writeFile(ods, this);
|
2012-06-05 02:01:26 +00:00
|
|
|
|
else if (output == OnlyBody)
|
|
|
|
|
text().write(ods);
|
|
|
|
|
os << from_utf8(ods.str());
|
2020-11-30 22:00:40 +00:00
|
|
|
|
} else if (runparams.flavor == Flavor::Html) {
|
2011-10-29 21:00:23 +00:00
|
|
|
|
writeLyXHTMLSource(os, runparams, output);
|
2020-11-30 22:00:40 +00:00
|
|
|
|
} else if (runparams.flavor == Flavor::Text) {
|
2021-01-06 03:28:15 +00:00
|
|
|
|
if (output == OnlyPreamble)
|
2012-09-20 11:35:53 +00:00
|
|
|
|
os << "% "<< _("Plain text does not have a preamble.");
|
2021-01-06 03:28:15 +00:00
|
|
|
|
else
|
2012-04-28 01:17:08 +00:00
|
|
|
|
writePlaintextFile(*this, os, runparams);
|
2020-11-30 22:00:40 +00:00
|
|
|
|
} else if (runparams.flavor == Flavor::DocBook5) {
|
2020-06-08 21:27:49 +00:00
|
|
|
|
writeDocBookSource(os, runparams, output);
|
2012-04-28 01:14:25 +00:00
|
|
|
|
} else {
|
2011-10-29 21:00:23 +00:00
|
|
|
|
// latex or literate
|
2016-06-19 02:39:38 +00:00
|
|
|
|
otexstream ots(os);
|
|
|
|
|
// output above
|
|
|
|
|
ots.texrow().newlines(2);
|
2012-10-05 12:30:20 +00:00
|
|
|
|
if (master)
|
|
|
|
|
runparams.is_child = true;
|
2017-05-01 23:36:23 +00:00
|
|
|
|
updateBuffer();
|
2011-10-29 21:00:23 +00:00
|
|
|
|
writeLaTeXSource(ots, string(), runparams, output);
|
2016-06-19 02:39:38 +00:00
|
|
|
|
texrow = ots.releaseTexRow();
|
2011-10-29 21:00:23 +00:00
|
|
|
|
}
|
2006-08-04 13:59:12 +00:00
|
|
|
|
}
|
2015-08-04 22:16:40 +00:00
|
|
|
|
return texrow;
|
view-source feature, from Bo Peng <ben.bob@gmail.com>
* src/buffer.h buffer.C - getSourceCode()
* src/lyxfunc.C - open view-source dialog
* src/text3.C - change LFUN_MOUSE_RELEASE
* src/output_linuxdoc.C, src/output_docbook.C, src/output_latex.C
- intercept output
* src/outputparams.h, outputparams.C - add par_begin, par_end, dryrun
* src/insets/insetgraphics.C - add dryrun mode of file conversion
* lib/ui/stdmenus.ui - add view-source menu item under view
* Add view-source dialog, add
src/frontends/qt2/QViewSourceDialog.h, QViewSource.C, QViewSource.h, QViewSourceDialog.C
src/frontends/qt2/ui/QViewSourceDialogBase.ui
src/frontends/controllers/ControlViewSource.h ControlViewSource.C
modify
src/frontends/qt2/Makefile.dialogs, Makefile.am, Dialogs.C,
src/frontends/controllers/Makefile.am, po.POTFILES.in
git-svn-id: svn://svn.lyx.org/lyx/lyx-devel/trunk@13610 a592a061-630c-0410-9148-cb99ea01b6c8
2006-04-09 02:48:54 +00:00
|
|
|
|
}
|
|
|
|
|
|
2006-07-15 22:43:37 +00:00
|
|
|
|
|
2011-06-12 18:03:32 +00:00
|
|
|
|
// Return the error list cached under the given type key; an empty
// list is created on first access (std::map::operator[] semantics).
ErrorList & Buffer::errorList(string const & type) const
|
2011-06-08 00:12:52 +00:00
|
|
|
|
{
|
|
|
|
|
	return d->errorLists[type];
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2008-09-30 09:50:54 +00:00
|
|
|
|
// Forward a table-of-contents item update to the GUI delegate,
// if one is attached (no-op otherwise).
void Buffer::updateTocItem(std::string const & type,
|
|
|
|
|
	DocIterator const & dit) const
|
|
|
|
|
{
|
2010-01-25 14:32:39 +00:00
|
|
|
|
	if (d->gui_)
|
|
|
|
|
|
		d->gui_->updateTocItem(type, dit);
|
2008-09-30 09:50:54 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2007-10-02 18:27:20 +00:00
|
|
|
|
// Notify the GUI delegate (if any) that the document structure changed.
void Buffer::structureChanged() const
|
|
|
|
|
{
|
2010-01-25 14:32:39 +00:00
|
|
|
|
	if (d->gui_)
|
|
|
|
|
|
		d->gui_->structureChanged();
|
2007-10-02 18:27:20 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2009-06-21 12:26:41 +00:00
|
|
|
|
// Ask the GUI delegate (if any) to show the error list named by 'err';
// 'from_master' is passed through unchanged to the delegate.
void Buffer::errors(string const & err, bool from_master) const
|
2007-10-02 18:27:20 +00:00
|
|
|
|
{
|
2010-01-25 14:32:39 +00:00
|
|
|
|
	if (d->gui_)
|
|
|
|
|
|
		d->gui_->errors(err, from_master);
|
2007-10-02 18:27:20 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Forward a status message to the GUI delegate, if one is attached.
void Buffer::message(docstring const & msg) const
|
|
|
|
|
{
|
2010-01-25 14:32:39 +00:00
|
|
|
|
	if (d->gui_)
|
|
|
|
|
|
		d->gui_->message(msg);
|
2007-10-02 18:27:20 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2007-10-21 10:50:56 +00:00
|
|
|
|
// Forward the busy on/off state to the GUI delegate, if any.
void Buffer::setBusy(bool on) const
|
2007-10-02 18:27:20 +00:00
|
|
|
|
{
|
2010-01-25 14:32:39 +00:00
|
|
|
|
	if (d->gui_)
|
|
|
|
|
|
		d->gui_->setBusy(on);
|
2007-10-02 18:27:20 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Ask the work-area list (d->wa_, not the GUI delegate) to refresh
// window titles, if a work area is attached.
void Buffer::updateTitles() const
|
|
|
|
|
{
|
2007-11-30 17:46:49 +00:00
|
|
|
|
	if (d->wa_)
|
|
|
|
|
|
		d->wa_->updateTitles();
|
2007-10-02 18:27:20 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Forward an autosave-timer reset request to the GUI delegate, if any.
void Buffer::resetAutosaveTimers() const
|
|
|
|
|
{
|
2010-01-25 14:32:39 +00:00
|
|
|
|
	if (d->gui_)
|
|
|
|
|
|
		d->gui_->resetAutosaveTimers();
|
2007-10-02 18:27:20 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2009-02-19 00:29:04 +00:00
|
|
|
|
// True iff a GUI delegate is currently attached to this buffer
// (pointer implicitly converted to bool).
bool Buffer::hasGuiDelegate() const
|
|
|
|
|
{
|
2010-01-25 14:32:39 +00:00
|
|
|
|
	return d->gui_;
|
2009-02-19 00:29:04 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2007-10-02 18:27:20 +00:00
|
|
|
|
// Attach (or detach, with nullptr) the GUI delegate that receives
// the notifications forwarded by the methods above.
void Buffer::setGuiDelegate(frontend::GuiBufferDelegate * gui)
|
|
|
|
|
{
|
2010-01-25 14:32:39 +00:00
|
|
|
|
	d->gui_ = gui;
|
2007-10-02 18:27:20 +00:00
|
|
|
|
}
|
|
|
|
|
|
2007-10-03 11:00:18 +00:00
|
|
|
|
|
2010-10-25 12:31:22 +00:00
|
|
|
|
// Name of the emergency-save file: the document's absolute file name
// with ".emergency" appended.
FileName Buffer::getEmergencyFileName() const
|
|
|
|
|
{
|
2010-11-05 21:21:01 +00:00
|
|
|
|
	return FileName(d->filename.absFileName() + ".emergency");
|
2010-10-25 12:31:22 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2010-04-21 01:19:33 +00:00
|
|
|
|
// Compute the autosave file name "#<name>#", placed in the backup dir
// (or default document path) for unnamed documents, falling back to
// filePath() when that directory is unset or missing.
FileName Buffer::getAutosaveFileName() const
|
2009-04-04 03:13:46 +00:00
|
|
|
|
{
|
2009-05-03 10:24:12 +00:00
|
|
|
|
	// if the document is unnamed try to save in the backup dir, else
|
2011-12-03 22:15:11 +00:00
|
|
|
|
	// in the default document path, and as a last try in the filePath,
|
2009-05-03 10:24:12 +00:00
|
|
|
|
	// which will most often be the temporary directory
|
|
|
|
|
	string fpath;
|
|
|
|
|
	if (isUnnamed())
|
|
|
|
|
		fpath = lyxrc.backupdir_path.empty() ? lyxrc.document_path
|
|
|
|
|
			: lyxrc.backupdir_path;
|
|
|
|
|
	if (!isUnnamed() || fpath.empty() || !FileName(fpath).exists())
|
|
|
|
|
		fpath = filePath();
|
|
|
|
|
|
2010-11-05 21:21:01 +00:00
|
|
|
|
	string const fname = "#" + d->filename.onlyFileName() + "#";
|
2010-10-25 12:31:22 +00:00
|
|
|
|
|
2010-11-05 21:21:01 +00:00
|
|
|
|
	return makeAbsPath(fname, fpath);
|
2009-04-04 03:13:46 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Delete the autosave file for this document, if it exists.
void Buffer::removeAutosaveFile() const
|
|
|
|
|
{
|
2010-04-21 01:19:33 +00:00
|
|
|
|
	FileName const f = getAutosaveFileName();
|
2009-04-04 03:13:46 +00:00
|
|
|
|
	if (f.exists())
|
|
|
|
|
|
		f.removeFile();
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2017-10-16 20:22:53 +00:00
|
|
|
|
// Rename an existing autosave file ('oldauto', e.g. from before a
// save-as) to the current autosave name; logs on failure.
void Buffer::moveAutosaveFile(FileName const & oldauto) const
|
2009-05-03 10:21:21 +00:00
|
|
|
|
{
|
2010-04-21 01:19:33 +00:00
|
|
|
|
	FileName const newauto = getAutosaveFileName();
|
2010-02-12 23:01:36 +00:00
|
|
|
|
	// refresh cached file-status info before the exists() check
	oldauto.refresh();
|
|
|
|
|
	if (newauto != oldauto && oldauto.exists())
|
2010-02-12 23:03:43 +00:00
|
|
|
|
		if (!oldauto.moveTo(newauto))
|
2010-02-12 23:01:36 +00:00
|
|
|
|
			LYXERR0("Unable to move autosave file `" << oldauto << "'!");
|
2009-05-03 10:21:21 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2011-12-03 22:15:11 +00:00
|
|
|
|
// Write an autosave copy of the document. Skips work (returns true)
// when the buffer is already clean or read-only. Writes to a temp
// file first and moves it into place; falls back to writing the
// autosave file directly if that fails. Requires a cloned buffer
// (assumed to run in a separate thread).
bool Buffer::autoSave() const
|
2007-10-03 11:00:18 +00:00
|
|
|
|
{
|
2010-11-17 02:18:12 +00:00
|
|
|
|
	Buffer const * buf = d->cloned_buffer_ ? d->cloned_buffer_ : this;
|
2017-03-01 21:03:44 +00:00
|
|
|
|
	if (buf->d->bak_clean || hasReadonlyFlag())
|
2010-11-17 02:18:12 +00:00
|
|
|
|
		return true;
|
2007-10-03 11:00:18 +00:00
|
|
|
|
|
|
|
|
|
	message(_("Autosaving current document..."));
|
2010-11-17 02:18:12 +00:00
|
|
|
|
	buf->d->bak_clean = true;
|
2011-12-03 22:15:11 +00:00
|
|
|
|
|
2010-11-17 02:18:12 +00:00
|
|
|
|
	FileName const fname = getAutosaveFileName();
|
2012-11-04 15:50:56 +00:00
|
|
|
|
	LASSERT(d->cloned_buffer_, return false);
|
|
|
|
|
|
|
|
|
|
	// If this buffer is cloned, we assume that
|
|
|
|
|
	// we are running in a separate thread already.
|
2014-06-09 11:05:50 +00:00
|
|
|
|
	TempFile tempfile("lyxautoXXXXXX.lyx");
|
|
|
|
|
	tempfile.setAutoRemove(false);
|
|
|
|
|
	FileName const tmp_ret = tempfile.name();
|
2012-11-04 15:50:56 +00:00
|
|
|
|
	if (!tmp_ret.empty()) {
|
|
|
|
|
		writeFile(tmp_ret);
|
|
|
|
|
		// assume successful write of tmp_ret
|
|
|
|
|
		if (tmp_ret.moveTo(fname))
|
|
|
|
|
			return true;
|
2010-11-17 02:18:12 +00:00
|
|
|
|
	}
|
2012-11-04 15:50:56 +00:00
|
|
|
|
	// failed to write/rename tmp_ret so try writing direct
|
|
|
|
|
	return writeFile(fname);
|
2007-10-03 11:00:18 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2010-07-21 21:51:33 +00:00
|
|
|
|
// Set the "currently exporting" flag on this buffer and propagate it
// to all descendant (child) buffers.
void Buffer::setExportStatus(bool e) const
|
|
|
|
|
{
|
2011-12-03 22:15:11 +00:00
|
|
|
|
	d->doing_export = e;
|
2019-12-16 04:36:17 +00:00
|
|
|
|
	ListOfBuffers clist = getDescendants();
|
2017-11-05 02:15:01 +00:00
|
|
|
|
	for (auto const & bit : clist)
|
|
|
|
|
|
		bit->d->doing_export = e;
|
2010-07-21 21:51:33 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// True while an export of this buffer is in progress
// (see setExportStatus).
bool Buffer::isExporting() const
|
|
|
|
|
{
|
|
|
|
|
	return d->doing_export;
|
|
|
|
|
}
|
|
|
|
|
|
2009-04-07 05:01:08 +00:00
|
|
|
|
|
2011-10-30 08:53:20 +00:00
|
|
|
|
// Convenience overload: export without reporting the result file name
// back to the caller; delegates to the three-argument overload.
Buffer::ExportStatus Buffer::doExport(string const & target, bool put_in_tempdir)
|
|
|
|
|
	const
|
|
|
|
|
{
|
|
|
|
|
	string result_file;
|
|
|
|
|
	return doExport(target, put_in_tempdir, result_file);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Export driver: when unincluded children must be maintained and the
// document restricts \includeonly, export twice — first with all
// children included, then with the included children only. The result
// of the second (normal) pass is what callers receive.
Buffer::ExportStatus Buffer::doExport(string const & target, bool put_in_tempdir,
|
|
|
|
|
	string & result_file) const
|
2011-10-27 15:50:50 +00:00
|
|
|
|
{
|
|
|
|
|
	bool const update_unincluded =
|
2020-03-13 14:46:35 +00:00
|
|
|
|
		params().maintain_unincluded_children != BufferParams::CM_None
|
2011-10-27 15:50:50 +00:00
|
|
|
|
		&& !params().getIncludedChildren().empty();
|
2011-10-30 08:53:20 +00:00
|
|
|
|
|
|
|
|
|
	// (1) export with all included children (omit \includeonly)
|
2011-12-03 22:15:11 +00:00
|
|
|
|
	if (update_unincluded) {
|
|
|
|
|
		ExportStatus const status =
|
2011-10-30 08:53:20 +00:00
|
|
|
|
			doExport(target, put_in_tempdir, true, result_file);
|
|
|
|
|
		if (status != ExportSuccess)
|
|
|
|
|
			return status;
|
|
|
|
|
	}
|
|
|
|
|
	// (2) export with included children only
|
|
|
|
|
	return doExport(target, put_in_tempdir, false, result_file);
|
2011-10-27 15:50:50 +00:00
|
|
|
|
}
|
|
|
|
|
|
2011-10-27 20:00:11 +00:00
|
|
|
|
|
2011-12-06 22:17:06 +00:00
|
|
|
|
// Translate the buffer's HTML math-output preference into the
// corresponding OutputParams math flavor for HTML export.
void Buffer::setMathFlavor(OutputParams & op) const
|
|
|
|
|
{
|
|
|
|
|
	switch (params().html_math_output) {
|
|
|
|
|
	case BufferParams::MathML:
|
|
|
|
|
		op.math_flavor = OutputParams::MathAsMathML;
|
|
|
|
|
		break;
|
|
|
|
|
	case BufferParams::HTML:
|
|
|
|
|
		op.math_flavor = OutputParams::MathAsHTML;
|
|
|
|
|
		break;
|
|
|
|
|
	case BufferParams::Images:
|
|
|
|
|
		op.math_flavor = OutputParams::MathAsImages;
|
|
|
|
|
		break;
|
|
|
|
|
	case BufferParams::LaTeX:
|
|
|
|
|
		op.math_flavor = OutputParams::MathAsLaTeX;
|
|
|
|
|
		break;
|
|
|
|
|
	}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2011-10-27 20:00:06 +00:00
|
|
|
|
Buffer::ExportStatus Buffer::doExport(string const & target, bool put_in_tempdir,
|
2010-01-10 13:25:41 +00:00
|
|
|
|
bool includeall, string & result_file) const
|
2007-10-20 10:51:13 +00:00
|
|
|
|
{
|
2011-10-27 20:00:02 +00:00
|
|
|
|
LYXERR(Debug::FILES, "target=" << target);
|
2011-09-15 01:07:49 +00:00
|
|
|
|
OutputParams runparams(¶ms().encoding());
|
|
|
|
|
string format = target;
|
2011-10-06 23:23:45 +00:00
|
|
|
|
string dest_filename;
|
2011-09-15 01:07:49 +00:00
|
|
|
|
size_t pos = target.find(' ');
|
|
|
|
|
if (pos != string::npos) {
|
2011-10-06 23:23:45 +00:00
|
|
|
|
dest_filename = target.substr(pos + 1, target.length() - pos - 1);
|
2011-09-15 01:07:49 +00:00
|
|
|
|
format = target.substr(0, pos);
|
2017-03-09 22:35:27 +00:00
|
|
|
|
if (format == "default")
|
2017-03-07 08:09:42 +00:00
|
|
|
|
format = params().getDefaultOutputFormat();
|
2011-10-06 23:23:45 +00:00
|
|
|
|
runparams.export_folder = FileName(dest_filename).onlyPath().realPath();
|
|
|
|
|
FileName(dest_filename).onlyPath().createPath();
|
|
|
|
|
LYXERR(Debug::FILES, "format=" << format << ", dest_filename=" << dest_filename << ", export_folder=" << runparams.export_folder);
|
2011-09-15 01:07:49 +00:00
|
|
|
|
}
|
2010-07-21 21:51:33 +00:00
|
|
|
|
MarkAsExporting exporting(this);
|
2007-10-20 10:51:13 +00:00
|
|
|
|
string backend_format;
|
2020-11-30 22:00:40 +00:00
|
|
|
|
runparams.flavor = Flavor::LaTeX;
|
2007-10-20 10:51:13 +00:00
|
|
|
|
runparams.linelen = lyxrc.plaintext_linelen;
|
2010-01-10 13:25:41 +00:00
|
|
|
|
runparams.includeall = includeall;
|
2011-05-13 19:39:56 +00:00
|
|
|
|
vector<string> backs = params().backends();
|
2011-01-22 09:30:03 +00:00
|
|
|
|
Converters converters = theConverters();
|
2012-05-22 12:23:27 +00:00
|
|
|
|
bool need_nice_file = false;
|
2007-10-20 10:51:13 +00:00
|
|
|
|
if (find(backs.begin(), backs.end(), format) == backs.end()) {
|
|
|
|
|
// Get shortest path to format
|
2011-01-22 09:30:03 +00:00
|
|
|
|
converters.buildGraph();
|
2007-10-20 10:51:13 +00:00
|
|
|
|
Graph::EdgePath path;
|
2017-11-05 02:15:01 +00:00
|
|
|
|
for (string const & sit : backs) {
|
|
|
|
|
Graph::EdgePath p = converters.getPath(sit, format);
|
2007-10-20 10:51:13 +00:00
|
|
|
|
if (!p.empty() && (path.empty() || p.size() < path.size())) {
|
2017-11-05 02:15:01 +00:00
|
|
|
|
backend_format = sit;
|
2007-10-20 10:51:13 +00:00
|
|
|
|
path = p;
|
|
|
|
|
}
|
|
|
|
|
}
|
2009-12-18 22:51:06 +00:00
|
|
|
|
if (path.empty()) {
|
|
|
|
|
if (!put_in_tempdir) {
|
|
|
|
|
// Only show this alert if this is an export to a non-temporary
|
|
|
|
|
// file (not for previewing).
|
2019-04-03 15:58:20 +00:00
|
|
|
|
docstring s = bformat(_("No information for exporting the format %1$s."),
|
2020-08-29 09:50:43 +00:00
|
|
|
|
translateIfPossible(theFormats().prettyName(format)));
|
2019-03-16 11:43:50 +00:00
|
|
|
|
if (format == "pdf4")
|
2019-04-03 15:58:20 +00:00
|
|
|
|
s += "\n"
|
|
|
|
|
+ bformat(_("Hint: use non-TeX fonts or set input encoding "
|
2019-07-12 10:46:50 +00:00
|
|
|
|
" to '%1$s'"), from_utf8(encodings.fromLyXName("ascii")->guiName()));
|
2019-04-03 15:58:20 +00:00
|
|
|
|
Alert::error(_("Couldn't export file"), s);
|
2009-12-18 22:51:06 +00:00
|
|
|
|
}
|
2011-10-27 20:00:06 +00:00
|
|
|
|
return ExportNoPathToFormat;
|
2007-10-20 10:51:13 +00:00
|
|
|
|
}
|
2012-04-11 16:19:11 +00:00
|
|
|
|
runparams.flavor = converters.getFlavor(path, this);
|
2018-04-08 17:02:01 +00:00
|
|
|
|
runparams.hyperref_driver = converters.getHyperrefDriver(path);
|
2017-11-05 02:15:01 +00:00
|
|
|
|
for (auto const & edge : path)
|
|
|
|
|
if (theConverters().get(edge).nice()) {
|
2012-05-22 12:23:27 +00:00
|
|
|
|
need_nice_file = true;
|
|
|
|
|
break;
|
|
|
|
|
}
|
2009-12-18 22:51:06 +00:00
|
|
|
|
|
2007-10-20 10:51:13 +00:00
|
|
|
|
} else {
|
|
|
|
|
backend_format = format;
|
2011-10-06 23:23:45 +00:00
|
|
|
|
LYXERR(Debug::FILES, "backend_format=" << backend_format);
|
2007-10-20 10:51:13 +00:00
|
|
|
|
// FIXME: Don't hardcode format names here, but use a flag
|
|
|
|
|
if (backend_format == "pdflatex")
|
2020-11-30 22:00:40 +00:00
|
|
|
|
runparams.flavor = Flavor::PdfLaTeX;
|
2010-11-23 16:07:42 +00:00
|
|
|
|
else if (backend_format == "luatex")
|
2020-11-30 22:00:40 +00:00
|
|
|
|
runparams.flavor = Flavor::LuaTeX;
|
2011-08-10 02:23:44 +00:00
|
|
|
|
else if (backend_format == "dviluatex")
|
2020-11-30 22:00:40 +00:00
|
|
|
|
runparams.flavor = Flavor::DviLuaTeX;
|
2010-11-23 11:53:27 +00:00
|
|
|
|
else if (backend_format == "xetex")
|
2020-11-30 22:00:40 +00:00
|
|
|
|
runparams.flavor = Flavor::XeTeX;
|
2007-10-20 10:51:13 +00:00
|
|
|
|
}
|
|
|
|
|
|
2011-10-06 23:23:45 +00:00
|
|
|
|
string filename = latexName(false);
|
|
|
|
|
filename = addName(temppath(), filename);
|
|
|
|
|
filename = changeExtension(filename,
|
2017-03-14 03:33:40 +00:00
|
|
|
|
theFormats().extension(backend_format));
|
2011-10-06 23:23:45 +00:00
|
|
|
|
LYXERR(Debug::FILES, "filename=" << filename);
|
2007-10-20 10:51:13 +00:00
|
|
|
|
|
|
|
|
|
// Plain text backend
|
2009-11-14 12:54:12 +00:00
|
|
|
|
if (backend_format == "text") {
|
2020-11-30 22:00:40 +00:00
|
|
|
|
runparams.flavor = Flavor::Text;
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
try {
|
|
|
|
|
writePlaintextFile(*this, FileName(filename), runparams);
|
|
|
|
|
}
|
|
|
|
|
catch (ConversionException const &) { return ExportCancel; }
|
2009-11-14 12:54:12 +00:00
|
|
|
|
}
|
2009-10-25 01:48:14 +00:00
|
|
|
|
// HTML backend
|
|
|
|
|
else if (backend_format == "xhtml") {
|
2020-11-30 22:00:40 +00:00
|
|
|
|
runparams.flavor = Flavor::Html;
|
2011-12-06 22:17:06 +00:00
|
|
|
|
setMathFlavor(runparams);
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
if (makeLyXHTMLFile(FileName(filename), runparams) == ExportKilled)
|
|
|
|
|
return ExportKilled;
|
2010-11-28 13:49:14 +00:00
|
|
|
|
} else if (backend_format == "lyx")
|
2007-10-20 10:51:13 +00:00
|
|
|
|
writeFile(FileName(filename));
|
2020-06-08 21:27:49 +00:00
|
|
|
|
// DocBook backend
|
|
|
|
|
else if (backend_format == "docbook5") {
|
2020-11-30 22:00:40 +00:00
|
|
|
|
runparams.flavor = Flavor::DocBook5;
|
2020-10-03 11:28:26 +00:00
|
|
|
|
runparams.nice = false;
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
if (makeDocBookFile(FileName(filename), runparams) == ExportKilled)
|
|
|
|
|
return ExportKilled;
|
2007-10-20 10:51:13 +00:00
|
|
|
|
}
|
|
|
|
|
// LaTeX backend
|
2012-05-22 12:23:27 +00:00
|
|
|
|
else if (backend_format == format || need_nice_file) {
|
2007-10-20 10:51:13 +00:00
|
|
|
|
runparams.nice = true;
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
ExportStatus const retval =
|
|
|
|
|
makeLaTeXFile(FileName(filename), string(), runparams);
|
|
|
|
|
if (retval == ExportKilled)
|
|
|
|
|
return ExportKilled;
|
2012-03-17 18:00:25 +00:00
|
|
|
|
if (d->cloned_buffer_)
|
|
|
|
|
d->cloned_buffer_->d->errorLists["Export"] = d->errorLists["Export"];
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
if (retval != ExportSuccess)
|
|
|
|
|
return retval;
|
2007-10-20 10:51:13 +00:00
|
|
|
|
} else if (!lyxrc.tex_allows_spaces
|
2007-12-12 19:57:42 +00:00
|
|
|
|
&& contains(filePath(), ' ')) {
|
2007-10-20 10:51:13 +00:00
|
|
|
|
Alert::error(_("File name error"),
|
2018-04-25 15:52:21 +00:00
|
|
|
|
bformat(_("The directory path to the document\n%1$s\n"
|
|
|
|
|
"contains spaces, but your TeX installation does "
|
2018-04-25 22:43:49 +00:00
|
|
|
|
"not allow them. You should save the file to a directory "
|
2019-02-26 02:56:40 +00:00
|
|
|
|
"whose name does not contain spaces."), from_utf8(filePath())));
|
2011-10-27 20:00:06 +00:00
|
|
|
|
return ExportTexPathHasSpaces;
|
2007-10-20 10:51:13 +00:00
|
|
|
|
} else {
|
|
|
|
|
runparams.nice = false;
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
ExportStatus const retval =
|
|
|
|
|
makeLaTeXFile(FileName(filename), filePath(), runparams);
|
|
|
|
|
if (retval == ExportKilled)
|
|
|
|
|
return ExportKilled;
|
2012-03-17 18:00:25 +00:00
|
|
|
|
if (d->cloned_buffer_)
|
|
|
|
|
d->cloned_buffer_->d->errorLists["Export"] = d->errorLists["Export"];
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
if (retval != ExportSuccess)
|
2011-10-27 20:00:06 +00:00
|
|
|
|
return ExportError;
|
2007-10-20 10:51:13 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
string const error_type = (format == "program")
|
2011-05-13 19:39:56 +00:00
|
|
|
|
? "Build" : params().bufferFormat();
|
2008-07-14 07:16:00 +00:00
|
|
|
|
ErrorList & error_list = d->errorLists[error_type];
|
2017-03-14 03:33:40 +00:00
|
|
|
|
string const ext = theFormats().extension(format);
|
2007-10-20 10:51:13 +00:00
|
|
|
|
FileName const tmp_result_file(changeExtension(filename, ext));
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
Converters::RetVal const retval =
|
2020-03-13 10:49:07 +00:00
|
|
|
|
converters.convert(this, FileName(filename), tmp_result_file,
|
|
|
|
|
FileName(absFileName()), backend_format, format,
|
|
|
|
|
error_list, Converters::none, includeall);
|
Fix some problems with background cancellation.
The problem was that, if we killed export when some graphic was
being converted, or some external template was being handled, it
would only cancel that process, and then it would just continue.
To deal with that, we need to do a few things:
1. Modify the return values for some of the Converters routines,
and similarly the routines for external templates, so we can
tell when something has been canceled.
2. Throw an exception from InsetGraphics or InsetExternal when this
has happened during export, but ONLY when the Buffer is a clone.
We shouldn't be able to 'cancel' export when we're, say, generating
code for the preview pane, but this keeps us on the safe side..
The exception then has to be caught, obviously, back in the export
routines in Buffer.
Probably Coverity will be unhappy about something here, but I'll
deal with that problem as it arises.
2018-02-20 03:43:44 +00:00
|
|
|
|
if (retval == Converters::KILLED)
|
|
|
|
|
return ExportCancel;
|
|
|
|
|
bool success = (retval == Converters::SUCCESS);
|
2010-03-13 11:39:50 +00:00
|
|
|
|
|
|
|
|
|
// Emit the signal to show the error list or copy it back to the
|
2011-02-13 01:34:00 +00:00
|
|
|
|
// cloned Buffer so that it can be emitted afterwards.
|
2009-06-21 12:26:41 +00:00
|
|
|
|
if (format != backend_format) {
|
2013-11-12 19:52:35 +00:00
|
|
|
|
if (runparams.silent)
|
|
|
|
|
error_list.clear();
|
|
|
|
|
else if (d->cloned_buffer_)
|
2011-12-03 22:15:11 +00:00
|
|
|
|
d->cloned_buffer_->d->errorLists[error_type] =
|
2010-03-13 11:39:50 +00:00
|
|
|
|
d->errorLists[error_type];
|
2013-11-12 19:52:35 +00:00
|
|
|
|
else
|
2010-03-13 11:39:50 +00:00
|
|
|
|
errors(error_type);
|
2009-06-21 12:26:41 +00:00
|
|
|
|
// also to the children, in case of master-buffer-view
|
2019-12-16 04:36:17 +00:00
|
|
|
|
ListOfBuffers clist = getDescendants();
|
2017-11-05 02:15:01 +00:00
|
|
|
|
for (auto const & bit : clist) {
|
2013-11-12 19:52:35 +00:00
|
|
|
|
if (runparams.silent)
|
2017-11-05 02:15:01 +00:00
|
|
|
|
bit->d->errorLists[error_type].clear();
|
2013-11-12 19:52:35 +00:00
|
|
|
|
else if (d->cloned_buffer_) {
|
2011-02-12 20:24:09 +00:00
|
|
|
|
// Enable reverse search by copying back the
|
|
|
|
|
// texrow object to the cloned buffer.
|
|
|
|
|
// FIXME: this is not thread safe.
|
2017-11-05 02:15:01 +00:00
|
|
|
|
bit->d->cloned_buffer_->d->texrow = bit->d->texrow;
|
|
|
|
|
bit->d->cloned_buffer_->d->errorLists[error_type] =
|
|
|
|
|
bit->d->errorLists[error_type];
|
2010-03-13 11:39:50 +00:00
|
|
|
|
} else
|
2017-11-05 02:15:01 +00:00
|
|
|
|
bit->errors(error_type, true);
|
2010-03-13 11:39:50 +00:00
|
|
|
|
}
|
2009-06-21 12:26:41 +00:00
|
|
|
|
}
|
2007-10-20 10:51:13 +00:00
|
|
|
|
|
2010-01-08 09:00:28 +00:00
|
|
|
|
if (d->cloned_buffer_) {
|
|
|
|
|
// Enable reverse dvi or pdf to work by copying back the texrow
|
|
|
|
|
// object to the cloned buffer.
|
|
|
|
|
// FIXME: There is a possibility of concurrent access to texrow
|
|
|
|
|
// here from the main GUI thread that should be securized.
|
|
|
|
|
d->cloned_buffer_->d->texrow = d->texrow;
|
2017-12-16 04:20:57 +00:00
|
|
|
|
string const err_type = params().bufferFormat();
|
|
|
|
|
d->cloned_buffer_->d->errorLists[error_type] = d->errorLists[err_type];
|
2010-01-08 09:00:28 +00:00
|
|
|
|
}
|
|
|
|
|
|
2010-03-13 11:39:50 +00:00
|
|
|
|
|
2008-01-14 14:09:20 +00:00
|
|
|
|
if (put_in_tempdir) {
|
2010-04-21 01:19:09 +00:00
|
|
|
|
result_file = tmp_result_file.absFileName();
|
2015-03-18 20:51:12 +00:00
|
|
|
|
return success ? ExportSuccess : ExportConverterError;
|
2008-01-14 14:09:20 +00:00
|
|
|
|
}
|
|
|
|
|
|
2011-10-06 23:23:45 +00:00
|
|
|
|
if (dest_filename.empty())
|
|
|
|
|
result_file = changeExtension(d->exportFileName().absFileName(), ext);
|
|
|
|
|
else
|
|
|
|
|
result_file = dest_filename;
|
2008-01-14 14:09:20 +00:00
|
|
|
|
// We need to copy referenced files (e. g. included graphics
|
|
|
|
|
// if format == "dvi") to the result dir.
|
2019-09-12 00:54:51 +00:00
|
|
|
|
vector<ExportedFile> const extfiles =
|
2008-01-14 14:09:20 +00:00
|
|
|
|
runparams.exportdata->externalFiles(format);
|
2011-09-15 01:07:49 +00:00
|
|
|
|
string const dest = runparams.export_folder.empty() ?
|
|
|
|
|
onlyPath(result_file) : runparams.export_folder;
|
2010-04-20 16:49:49 +00:00
|
|
|
|
bool use_force = use_gui ? lyxrc.export_overwrite == ALL_FILES
|
|
|
|
|
: force_overwrite == ALL_FILES;
|
|
|
|
|
CopyStatus status = use_force ? FORCE : SUCCESS;
|
2011-12-03 22:15:11 +00:00
|
|
|
|
|
2019-09-12 00:54:51 +00:00
|
|
|
|
for (ExportedFile const & exp : extfiles) {
|
2017-11-05 02:15:01 +00:00
|
|
|
|
if (status == CANCEL) {
|
|
|
|
|
message(_("Document export cancelled."));
|
|
|
|
|
return ExportCancel;
|
|
|
|
|
}
|
|
|
|
|
string const fmt = theFormats().getFormatFromFile(exp.sourceName);
|
|
|
|
|
string fixedName = exp.exportName;
|
2011-09-15 01:07:49 +00:00
|
|
|
|
if (!runparams.export_folder.empty()) {
|
|
|
|
|
// Relative pathnames starting with ../ will be sanitized
|
|
|
|
|
// if exporting to a different folder
|
|
|
|
|
while (fixedName.substr(0, 3) == "../")
|
|
|
|
|
fixedName = fixedName.substr(3, fixedName.length() - 3);
|
|
|
|
|
}
|
|
|
|
|
FileName fixedFileName = makeAbsPath(fixedName, dest);
|
|
|
|
|
fixedFileName.onlyPath().createPath();
|
2017-11-05 02:15:01 +00:00
|
|
|
|
status = copyFile(fmt, exp.sourceName,
|
2011-09-15 01:07:49 +00:00
|
|
|
|
fixedFileName,
|
2017-11-05 02:15:01 +00:00
|
|
|
|
exp.exportName, status == FORCE,
|
2011-09-15 01:07:49 +00:00
|
|
|
|
runparams.export_folder.empty());
|
2008-01-14 14:09:20 +00:00
|
|
|
|
}
|
2010-01-21 18:12:23 +00:00
|
|
|
|
|
2011-12-03 22:15:11 +00:00
|
|
|
|
|
2011-10-30 19:52:27 +00:00
|
|
|
|
if (tmp_result_file.exists()) {
|
2008-01-14 14:09:20 +00:00
|
|
|
|
// Finally copy the main file
|
2010-04-20 16:49:49 +00:00
|
|
|
|
use_force = use_gui ? lyxrc.export_overwrite != NO_FILES
|
|
|
|
|
: force_overwrite != NO_FILES;
|
|
|
|
|
if (status == SUCCESS && use_force)
|
2010-04-19 23:53:23 +00:00
|
|
|
|
status = FORCE;
|
2008-01-14 14:09:20 +00:00
|
|
|
|
status = copyFile(format, tmp_result_file,
|
|
|
|
|
FileName(result_file), result_file,
|
|
|
|
|
status == FORCE);
|
2011-10-30 19:52:27 +00:00
|
|
|
|
if (status == CANCEL) {
|
|
|
|
|
message(_("Document export cancelled."));
|
|
|
|
|
return ExportCancel;
|
|
|
|
|
} else {
|
|
|
|
|
message(bformat(_("Document exported as %1$s "
|
|
|
|
|
"to file `%2$s'"),
|
2020-08-29 09:50:43 +00:00
|
|
|
|
translateIfPossible(theFormats().prettyName(format)),
|
2011-10-30 19:52:27 +00:00
|
|
|
|
makeDisplayPath(result_file)));
|
|
|
|
|
}
|
2008-01-14 14:09:20 +00:00
|
|
|
|
} else {
|
|
|
|
|
// This must be a dummy converter like fax (bug 1888)
|
|
|
|
|
message(bformat(_("Document exported as %1$s"),
|
2020-08-29 09:50:43 +00:00
|
|
|
|
translateIfPossible(theFormats().prettyName(format))));
|
2007-10-20 10:51:13 +00:00
|
|
|
|
}
|
|
|
|
|
|
2015-03-18 20:51:12 +00:00
|
|
|
|
return success ? ExportSuccess : ExportConverterError;
|
2007-10-20 10:51:13 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2011-10-27 20:00:11 +00:00
|
|
|
|
// Convenience overload: derive from the document settings whether
// non-included children must also be exported, then delegate to
// preview(format, includeall).
Buffer::ExportStatus Buffer::preview(string const & format) const
{
	BufferParams const & bp = params();
	bool const maintain_children =
		bp.maintain_unincluded_children != BufferParams::CM_None;
	bool const has_included = !bp.getIncludedChildren().empty();
	return preview(format, maintain_children && has_included);
}
|
|
|
|
|
|
2014-11-17 21:07:38 +00:00
|
|
|
|
|
2011-10-27 20:00:08 +00:00
|
|
|
|
// Export the document to \p format and open the result in a viewer.
// With \p includeall, a first pass exports with all children included
// (omitting \includeonly) before the normal export pass.
Buffer::ExportStatus Buffer::preview(string const & format, bool includeall) const
{
	MarkAsExporting exporting(this);
	string output_file;

	// Pass 1: if requested, export with every child included
	// (i.e. ignoring the \includeonly list).
	if (includeall) {
		ExportStatus const all_status =
			doExport(format, true, true, output_file);
		if (all_status != ExportSuccess)
			return all_status;
	}

	// Pass 2: export honouring the \includeonly list.
	ExportStatus const status = doExport(format, true, false, output_file);
	FileName const generated(output_file);

	// Record the preview state on the GUI-side buffer: when we are a
	// clone, that is the buffer we were cloned from.
	Impl * gui_impl = isClone() ? d->cloned_buffer_->d : d;
	gui_impl->preview_file_ = generated;
	gui_impl->preview_format_ = format;
	gui_impl->require_fresh_start_ = (status != ExportSuccess);

	if (status != ExportSuccess)
		return status;

	if (generated.exists()) {
		bool const shown = theFormats().view(*this, generated, format);
		return shown ? PreviewSuccess : PreviewError;
	}

	// Export reported success but produced no output file?
	// Probably a bug in error detection.
	LATTEST(status != ExportSuccess);
	return status;
}
|
|
|
|
|
|
|
|
|
|
|
2010-11-05 21:21:01 +00:00
|
|
|
|
// Try to retrieve a missing document file from version control.
// Returns ReadFileNotFound when VC knows nothing about the file,
// ReadVCError when retrieval produced no readable file, ReadSuccess
// otherwise.
Buffer::ReadStatus Buffer::extractFromVC()
{
	if (!LyXVC::file_not_found_hook(d->filename))
		return ReadFileNotFound;
	return d->filename.isReadableFile() ? ReadSuccess : ReadVCError;
}
|
2007-10-21 10:50:56 +00:00
|
|
|
|
|
|
|
|
|
|
2010-11-05 21:21:01 +00:00
|
|
|
|
// If an emergency save newer than the document exists, offer to recover
// it. Returns ReadFileNotFound when there is nothing to recover,
// ReadSuccess/ReadEmergencyFailure after a recovery attempt,
// ReadOriginal when the user chose the original file, ReadCancel
// otherwise.
Buffer::ReadStatus Buffer::loadEmergency()
{
	// Only relevant if an emergency save exists and postdates the file.
	FileName const emergencyFile = getEmergencyFileName();
	bool const relevant = emergencyFile.exists()
		&& emergencyFile.lastModified() > d->filename.lastModified();
	if (!relevant)
		return ReadFileNotFound;

	docstring const file = makeDisplayPath(d->filename.absFileName(), 20);
	docstring const text = bformat(_("An emergency save of the document "
		"%1$s exists.\n\nRecover emergency save?"), file);

	int const choice = Alert::prompt(_("Load emergency save?"), text,
		0, 2, _("&Recover"), _("&Load Original"), _("&Cancel"));

	switch (choice) {
	case 0: {
		// Recover: load the emergency file in place of the document.
		ReadStatus const load_status = loadThisLyXFile(emergencyFile);
		bool const recovered = (load_status == ReadSuccess);
		docstring msg;
		if (recovered) {
			if (hasReadonlyFlag()) {
				Alert::warning(_("File is read-only"),
					bformat(_("An emergency file is successfully loaded, "
					"but the original file %1$s is marked read-only. "
					"Please make sure to save the document as a different "
					"file."), from_utf8(d->filename.absFileName())));
			}
			markDirty();
			lyxvc().file_found_hook(d->filename);
			msg = _("Document was successfully recovered.");
		} else
			msg = _("Document was NOT successfully recovered.");
		msg += "\n\n" + bformat(_("Remove emergency file now?\n(%1$s)"),
			makeDisplayPath(emergencyFile.absFileName()));

		int const remove_choice =
			Alert::prompt(_("Delete emergency file?"), msg, 1, 1,
				_("&Remove"), _("&Keep"));
		if (remove_choice == 0) {
			emergencyFile.removeFile();
			if (recovered)
				Alert::warning(_("Emergency file deleted"),
					_("Do not forget to save your file now!"), true);
		}
		return recovered ? ReadSuccess : ReadEmergencyFailure;
	}

	case 1: {
		// Load the original; ask what to do with the emergency file.
		int const remove_choice =
			Alert::prompt(_("Delete emergency file?"),
				_("Remove emergency file now?"), 1, 1,
				_("&Remove"), _("&Keep"));
		if (remove_choice == 0)
			emergencyFile.removeFile();
		else {
			// The emergency file is kept: rename it so we are not asked
			// about it again on the next load. See bug #11464.
			string const base = emergencyFile.absFileName();
			FileName candidate;
			bool rename_failed = true;
			// Surely we can find one in 100 tries?
			for (int i = 1; i < 100; ++i) {
				candidate.set(base + to_string(i) + ".lyx");
				if (!candidate.exists()) {
					rename_failed = false;
					break;
				}
			}
			// renameTo returns true on success, so invert it to keep
			// the failure flag.
			if (!rename_failed)
				rename_failed = !emergencyFile.renameTo(candidate);
			if (rename_failed) {
				Alert::warning(_("Can't rename emergency file!"),
					_("LyX was unable to rename the emergency file. "
					"You should do so manually. Otherwise, you will be "
					"asked about it again the next time you try to load "
					"this file, and may over-write your own work."));
			} else {
				Alert::warning(_("Emergency File Renames"),
					bformat(_("Emergency file renamed as:\n %1$s"),
					from_utf8(candidate.onlyFileName())));
			}
		}
		return ReadOriginal;
	}

	default:
		break;
	}
	return ReadCancel;
}
|
2007-10-21 10:50:56 +00:00
|
|
|
|
|
|
|
|
|
|
2010-11-05 21:21:01 +00:00
|
|
|
|
// If an autosave file newer than the document exists, offer to load it.
// Returns ReadFileNotFound when there is no newer autosave,
// ReadSuccess/ReadAutosaveFailure after a load attempt, ReadOriginal
// when the user chose the original file, ReadCancel otherwise.
Buffer::ReadStatus Buffer::loadAutosave()
{
	// Only relevant if the autosave file exists and is newer.
	FileName const autosaveFile = getAutosaveFileName();
	if (!autosaveFile.exists()
		  || autosaveFile.lastModified() <= d->filename.lastModified())
		return ReadFileNotFound;

	docstring const file = makeDisplayPath(d->filename.absFileName(), 20);
	docstring const text = bformat(_("The backup of the document %1$s "
		"is newer.\n\nLoad the backup instead?"), file);
	int const choice = Alert::prompt(_("Load backup?"), text, 0, 2,
		_("&Load backup"), _("Load &original"), _("&Cancel"));

	switch (choice) {
	case 0: {
		ReadStatus const load_status = loadThisLyXFile(autosaveFile);
		// the file is not saved if we load the autosave file.
		if (load_status != ReadSuccess)
			return ReadAutosaveFailure;
		if (hasReadonlyFlag()) {
			Alert::warning(_("File is read-only"),
				bformat(_("A backup file is successfully loaded, "
				"but the original file %1$s is marked read-only. "
				"Please make sure to save the document as a "
				"different file."),
				from_utf8(d->filename.absFileName())));
		}
		markDirty();
		lyxvc().file_found_hook(d->filename);
		return ReadSuccess;
	}
	case 1:
		// the user chose the original: drop the autosave
		autosaveFile.removeFile();
		return ReadOriginal;
	default:
		break;
	}
	return ReadCancel;
}
|
|
|
|
|
|
|
|
|
|
|
2010-11-05 21:21:01 +00:00
|
|
|
|
// Load the document, trying in order: version-control retrieval (when
// the file is missing), an emergency save, an autosave, and finally the
// file itself.
Buffer::ReadStatus Buffer::loadLyXFile()
{
	if (!d->filename.isReadableFile()) {
		// perhaps version control can produce the file for us
		ReadStatus const vc_status = extractFromVC();
		if (vc_status != ReadSuccess)
			return vc_status;
	}

	ReadStatus const emerg_status = loadEmergency();
	if (emerg_status == ReadSuccess || emerg_status == ReadCancel)
		return emerg_status;

	ReadStatus const autosave_status = loadAutosave();
	if (autosave_status == ReadSuccess || autosave_status == ReadCancel)
		return autosave_status;

	return loadThisLyXFile(d->filename);
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Read the given file into this buffer. Kept as a separate entry point
// so callers can load a specific copy (emergency or autosave file)
// rather than the document's own file.
Buffer::ReadStatus Buffer::loadThisLyXFile(FileName const & fn)
{
	ReadStatus const status = readFile(fn);
	return status;
}
|
|
|
|
|
|
|
|
|
|
|
2019-06-07 14:47:04 +00:00
|
|
|
|
// Convert a range of TeX errors into ErrorItems, resolving each error's
// TeX line number to a document position (possibly in a child document)
// via the stored texrow data.
// \param err       iterator to the first TeX error to process
// \param end       past-the-end iterator of the error range
// \param errorList receives one ErrorItem per error
void Buffer::Impl::traverseErrors(TeXErrors::Errors::const_iterator err, TeXErrors::Errors::const_iterator end, ErrorList & errorList) const
{
	for (; err != end; ++err) {
		// Document positions corresponding to the error line. Named
		// entry_start/entry_end so as not to shadow the 'end' parameter
		// used in the loop condition above.
		TexRow::TextEntry entry_start = TexRow::text_none;
		TexRow::TextEntry entry_end = TexRow::text_none;
		int const errorRow = err->error_in_line;
		Buffer const * buf = nullptr;
		Impl const * p = this;
		if (err->child_name.empty())
			tie(entry_start, entry_end) = p->texrow.getEntriesFromRow(errorRow);
		else {
			// The error occurred in a child: find which one by comparing
			// against each descendant's mangled .tex file name.
			for (Buffer const * child : owner_->getDescendants()) {
				string const child_name =
					DocFileName(changeExtension(child->absFileName(), "tex")).
						mangledFileName();
				if (err->child_name != child_name)
					continue;
				tie(entry_start, entry_end) =
					child->d->texrow.getEntriesFromRow(errorRow);
				if (!TexRow::isNone(entry_start)) {
					// When working on a clone, report against the
					// GUI-side (original) child buffer.
					buf = this->cloned_buffer_
						? child->d->cloned_buffer_->d->owner_
						: child->d->owner_;
					p = child->d;
					break;
				}
			}
		}
		errorList.push_back(ErrorItem(err->error_desc, err->error_text,
			entry_start, entry_end, buf));
	}
}
|
|
|
|
|
|
2008-04-20 03:08:11 +00:00
|
|
|
|
|
2019-06-07 14:47:04 +00:00
|
|
|
|
// Append an ErrorItem to \p errorList for every LaTeX error in \p terr.
void Buffer::bufferErrors(TeXErrors const & terr, ErrorList & errorList) const
{
	d->traverseErrors(terr.begin(), terr.end(), errorList);
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Append an ErrorItem to \p errorList for every reference issue in
// \p terr (the undefined/multiply-defined reference range).
void Buffer::bufferRefs(TeXErrors const & terr, ErrorList & errorList) const
{
	d->traverseErrors(terr.begin_ref(), terr.end_ref(), errorList);
}
|
|
|
|
|
|
|
|
|
|
|
2010-03-03 22:13:45 +00:00
|
|
|
|
// Recompute labels, counters, TOC and caches for this buffer and, with
// scope == UpdateMaster, for the whole master/child document tree.
void Buffer::updateBuffer(UpdateScope scope, UpdateType utype) const
{
	LBUFERR(!text().paragraphs().empty());

	// Child documents use the text class of their master.
	Buffer const * const mbuf = masterBuffer();
	DocumentClass const & tclass = mbuf->params().documentClass();

	docstring_list prior_bibfiles;
	// Only the top-level Buffer does this; a previewed child with an
	// ignored parent counts as top-level here too.
	if (mbuf == this && !d->ignore_parent) {
		tclass.counters().reset(from_ascii("bibitem"));
		reloadBibInfoCache();
		// The bibfiles cache is re-read as we traverse below; remember
		// its old contents so we can tell afterwards whether it changed
		// and the bibinfo cache needs refreshing.
		prior_bibfiles = d->bibfiles_cache_;
		d->bibfiles_cache_.clear();
	}

	// Children waiting to be updated. When the recursive call through
	// the master returns, entries still present were not reached via an
	// InsetInclude and must still be updated.
	static std::set<Buffer const *> pending_children;
	if (scope == UpdateMaster) {
		// A child document delegates to its master first.
		if (mbuf != this) {
			pending_children.insert(this);
			mbuf->updateBuffer(UpdateMaster, utype);
			// If the master buffer has no gui associated with it, the
			// TocModel is not refreshed during the updateBuffer call and
			// TocModel::toc_ is invalid (bug 5699). The same happens when
			// the master is open in a different window; this test catches
			// both possibilities.
			// See: https://marc.info/?l=lyx-devel&m=138590578911716&w=2
			// Remaining problem: a further child open in yet a third
			// window does not get its TOC updated, so a more general
			// solution is needed at some point.
			if (mbuf->d->gui_ != d->gui_)
				structureChanged();

			// If the master already reached us through an InsetInclude,
			// we were removed from the set and are done.
			if (pending_children.find(this) == pending_children.end())
				return;
		}

		// restart all counters in the master
		tclass.counters().reset();
	}

	// this buffer is updated below
	pending_children.erase(this);

	// refresh all caches
	clearReferenceCache();
	updateMacros();

	Buffer & self = const_cast<Buffer &>(*this);
	// When reloading we may have a dangling TOC, in effect; reset it now
	// even though it is reset again when the TOC is rebuilt later.
	self.tocBackend().reset();

	// the actual traversal
	ParIterator pit = self.par_iterator_begin();
	if (scope == UpdateMaster)
		clearIncludeList();
	updateBuffer(pit, utype);

	// If this document has siblings, the TocBackend is updated later (in
	// InsetInclude::addToToc) so that later siblings are current when,
	// e.g., the broken-reference status is computed.
	if (mbuf != this)
		return;

	// A changed set of bibfiles invalidates the bibinfo cache.
	docstring_list cur_bibfiles = d->bibfiles_cache_;
	// Sorting both lists lets us compare them as sets.
	sort(cur_bibfiles.begin(), cur_bibfiles.end());
	sort(prior_bibfiles.begin(), prior_bibfiles.end());
	if (prior_bibfiles != cur_bibfiles) {
		LYXERR(Debug::FILES, "Reloading bibinfo cache.");
		invalidateBibinfoCache();
		reloadBibInfoCache();
		// The label pass above relied on a bibinfo cache that turned out
		// to be stale, so do it all again. Strictly only the citation
		// labels need recomputing; a dedicated UpdateType could signal
		// that if it ever proved necessary.
		pit = self.par_iterator_begin();
		// counters and references are redone as well
		tclass.counters().reset();
		clearReferenceCache();
		// we should not need to do this again?
		// updateMacros();
		updateBuffer(pit, utype);
		// this will already have been done by reloadBibInfoCache();
		// d->bibinfo_cache_valid_ = true;
	}
	else {
		LYXERR(Debug::FILES, "Bibfiles unchanged.");
		// the other branch sets this via reloadBibInfoCache
		d->bibinfo_cache_valid_ = true;
	}
	d->cite_labels_valid_ = true;
	/// FIXME: Perf
	clearIncludeList();
	self.tocBackend().update(true, utype);
	if (scope == UpdateMaster)
		self.structureChanged();
	d->need_update = false;
}
|
|
|
|
|
|
2008-11-16 17:49:00 +00:00
|
|
|
|
|
|
|
|
|
// Compute the text nesting depth at the given position: the sum over
// all non-math levels of the iterator of each paragraph's depth plus
// one for the nesting itself, minus one for the outermost inset.
static depth_type getDepth(DocIterator const & it)
{
	depth_type depth = 0;
	for (size_t level = 0; level < it.depth(); ++level) {
		auto const & slice = it[level];
		if (!slice.inset().inMathed())
			depth += slice.paragraph().getDepth() + 1;
	}
	// The outermost inset does not count. There should be at least one
	// non-math inset, so depth should never be 0 -- but assert it,
	// just in case.
	LATTEST(depth > 0);
	// coverity[INTEGER_OVERFLOW]
	return depth - 1;
}
|
|
|
|
|
|
|
|
|
|
// Compute the item depth of an itemize/enumerate paragraph by scanning
// backwards for the closest previous paragraph with the same label type
// that is not nested more deeply. Returns 0 for other label types.
static depth_type getItemDepth(ParIterator const & it)
{
	Paragraph const & cur_par = *it;
	LabelType const ltype = cur_par.layout().labeltype;

	if (ltype != LABEL_ENUMERATE && ltype != LABEL_ITEMIZE)
		return 0;

	// lowest depth encountered so far while walking backwards
	depth_type shallowest = getDepth(it);
	ParIterator walker = it;
	while (true) {
		if (walker.pit())
			--walker.top().pit();
		else {
			// start of a nested inset: continue in the outer paragraph
			walker.pop_back();
			if (walker.empty()) {
				// start of document: nothing more to find
				return 0;
			}
		}

		// Look for the first paragraph with the same label type that
		// is not nested more deeply.
		Paragraph & prev = *walker;
		depth_type const prev_depth = getDepth(walker);
		if (ltype == prev.layout().labeltype) {
			if (prev_depth < shallowest)
				return prev.itemdepth + 1;
			if (prev_depth == shallowest)
				return prev.itemdepth;
		}
		shallowest = min(shallowest, prev_depth);
		// small optimization: nothing shallower than depth 0 can
		// follow, so stop early
		if (prev_depth == 0)
			return 0;
	}
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Decide whether the enumeration counter must be reset for this
// paragraph: true when the nearest preceding paragraph at the same or
// lesser depth has a different layout, or when we are at the start of
// a nested inset.
static bool needEnumCounterReset(ParIterator const & it)
{
	Paragraph const & par = *it;
	LASSERT(par.layout().labeltype == LABEL_ENUMERATE, return false);
	depth_type const cur_depth = par.getDepth();
	ParIterator walker = it;
	while (walker.pit()) {
		--walker.top().pit();
		Paragraph const & prev = *walker;
		if (prev.getDepth() <= cur_depth)
			return prev.layout().name() != par.layout().name();
	}
	// start of nested inset: reset
	return true;
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// set the label of a paragraph. This includes the counters.
|
2010-01-25 18:39:08 +00:00
|
|
|
|
void Buffer::Impl::setLabel(ParIterator & it, UpdateType utype) const
|
2008-11-16 17:49:00 +00:00
|
|
|
|
{
|
2010-01-25 18:39:08 +00:00
|
|
|
|
BufferParams const & bp = owner_->masterBuffer()->params();
|
2008-11-16 17:49:00 +00:00
|
|
|
|
DocumentClass const & textclass = bp.documentClass();
|
|
|
|
|
Paragraph & par = it.paragraph();
|
|
|
|
|
Layout const & layout = par.layout();
|
|
|
|
|
Counters & counters = textclass.counters();
|
|
|
|
|
|
|
|
|
|
if (par.params().startOfAppendix()) {
|
2012-07-21 18:14:12 +00:00
|
|
|
|
// We want to reset the counter corresponding to toplevel sectioning
|
|
|
|
|
Layout const & lay = textclass.getTOCLayout();
|
|
|
|
|
docstring const cnt = lay.counter;
|
|
|
|
|
if (!cnt.empty())
|
|
|
|
|
counters.reset(cnt);
|
2008-11-16 17:49:00 +00:00
|
|
|
|
counters.appendix(true);
|
|
|
|
|
}
|
|
|
|
|
par.params().appendix(counters.appendix());
|
|
|
|
|
|
|
|
|
|
// Compute the item depth of the paragraph
|
|
|
|
|
par.itemdepth = getItemDepth(it);
|
|
|
|
|
|
2011-06-20 15:19:16 +00:00
|
|
|
|
if (layout.margintype == MARGIN_MANUAL) {
|
2008-11-16 17:49:00 +00:00
|
|
|
|
if (par.params().labelWidthString().empty())
|
2009-07-12 21:48:50 +00:00
|
|
|
|
par.params().labelWidthString(par.expandLabel(layout, bp));
|
2011-06-20 15:19:16 +00:00
|
|
|
|
} else if (layout.latextype == LATEX_BIB_ENVIRONMENT) {
|
|
|
|
|
// we do not need to do anything here, since the empty case is
|
|
|
|
|
// handled during export.
|
2008-11-16 17:49:00 +00:00
|
|
|
|
} else {
|
|
|
|
|
par.params().labelWidthString(docstring());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
switch(layout.labeltype) {
|
|
|
|
|
case LABEL_ITEMIZE: {
|
|
|
|
|
// At some point of time we should do something more
|
|
|
|
|
// clever here, like:
|
|
|
|
|
// par.params().labelString(
|
|
|
|
|
// bp.user_defined_bullet(par.itemdepth).getText());
|
|
|
|
|
// for now, use a simple hardcoded label
|
|
|
|
|
docstring itemlabel;
|
|
|
|
|
switch (par.itemdepth) {
|
|
|
|
|
case 0:
|
2019-04-03 10:03:24 +00:00
|
|
|
|
// • U+2022 BULLET
|
2008-11-16 17:49:00 +00:00
|
|
|
|
itemlabel = char_type(0x2022);
|
|
|
|
|
break;
|
|
|
|
|
case 1:
|
2019-04-03 10:03:24 +00:00
|
|
|
|
// – U+2013 EN DASH
|
2008-11-16 17:49:00 +00:00
|
|
|
|
itemlabel = char_type(0x2013);
|
|
|
|
|
break;
|
|
|
|
|
case 2:
|
2019-04-03 10:03:24 +00:00
|
|
|
|
// ∗ U+2217 ASTERISK OPERATOR
|
2008-11-16 17:49:00 +00:00
|
|
|
|
itemlabel = char_type(0x2217);
|
|
|
|
|
break;
|
|
|
|
|
case 3:
|
2019-04-03 10:03:24 +00:00
|
|
|
|
// · U+00B7 MIDDLE DOT
|
|
|
|
|
itemlabel = char_type(0x00b7);
|
2008-11-16 17:49:00 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
par.params().labelString(itemlabel);
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
case LABEL_ENUMERATE: {
|
2009-06-02 16:02:29 +00:00
|
|
|
|
docstring enumcounter = layout.counter.empty() ? from_ascii("enum") : layout.counter;
|
2008-11-16 17:49:00 +00:00
|
|
|
|
|
|
|
|
|
switch (par.itemdepth) {
|
|
|
|
|
case 2:
|
|
|
|
|
enumcounter += 'i';
|
2017-08-12 07:24:01 +00:00
|
|
|
|
// fall through
|
2008-11-16 17:49:00 +00:00
|
|
|
|
case 1:
|
|
|
|
|
enumcounter += 'i';
|
2017-08-12 07:24:01 +00:00
|
|
|
|
// fall through
|
2008-11-16 17:49:00 +00:00
|
|
|
|
case 0:
|
|
|
|
|
enumcounter += 'i';
|
|
|
|
|
break;
|
|
|
|
|
case 3:
|
|
|
|
|
enumcounter += "iv";
|
|
|
|
|
break;
|
|
|
|
|
default:
|
|
|
|
|
// not a valid enumdepth...
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
|
2017-10-27 15:50:51 +00:00
|
|
|
|
if (needEnumCounterReset(it)) {
|
2020-11-02 22:36:58 +00:00
|
|
|
|
// Increase the parent counter?
|
|
|
|
|
if (layout.stepparentcounter)
|
2020-11-02 22:03:42 +00:00
|
|
|
|
counters.stepParent(enumcounter, utype);
|
2017-10-27 15:50:51 +00:00
|
|
|
|
// Maybe we have to reset the enumeration counter.
|
|
|
|
|
if (!layout.resumecounter)
|
|
|
|
|
counters.reset(enumcounter);
|
|
|
|
|
}
|
2010-01-20 19:42:12 +00:00
|
|
|
|
counters.step(enumcounter, utype);
|
2008-11-16 17:49:00 +00:00
|
|
|
|
|
2009-07-12 20:09:53 +00:00
|
|
|
|
string const & lang = par.getParLanguage(bp)->code();
|
|
|
|
|
par.params().labelString(counters.theCounter(enumcounter, lang));
|
2008-11-16 17:49:00 +00:00
|
|
|
|
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
case LABEL_SENSITIVE: {
|
|
|
|
|
string const & type = counters.current_float();
|
|
|
|
|
docstring full_label;
|
|
|
|
|
if (type.empty())
|
2010-01-25 18:39:08 +00:00
|
|
|
|
full_label = owner_->B_("Senseless!!! ");
|
2008-11-16 17:49:00 +00:00
|
|
|
|
else {
|
2010-01-25 18:39:08 +00:00
|
|
|
|
docstring name = owner_->B_(textclass.floats().getType(type).name());
|
2008-11-16 17:49:00 +00:00
|
|
|
|
if (counters.hasCounter(from_utf8(type))) {
|
2009-07-12 20:09:53 +00:00
|
|
|
|
string const & lang = par.getParLanguage(bp)->code();
|
2010-01-20 19:42:12 +00:00
|
|
|
|
counters.step(from_utf8(type), utype);
|
2011-12-03 22:15:11 +00:00
|
|
|
|
full_label = bformat(from_ascii("%1$s %2$s:"),
|
|
|
|
|
name,
|
2009-07-12 20:09:53 +00:00
|
|
|
|
counters.theCounter(from_utf8(type), lang));
|
2008-11-16 17:49:00 +00:00
|
|
|
|
} else
|
2011-12-03 22:15:11 +00:00
|
|
|
|
full_label = bformat(from_ascii("%1$s #:"), name);
|
2008-11-16 17:49:00 +00:00
|
|
|
|
}
|
2011-12-03 22:15:11 +00:00
|
|
|
|
par.params().labelString(full_label);
|
2008-11-16 17:49:00 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
case LABEL_NO_LABEL:
|
|
|
|
|
par.params().labelString(docstring());
|
|
|
|
|
break;
|
|
|
|
|
|
2013-02-09 16:13:01 +00:00
|
|
|
|
case LABEL_ABOVE:
|
|
|
|
|
case LABEL_CENTERED:
|
2013-02-09 16:09:56 +00:00
|
|
|
|
case LABEL_STATIC: {
|
|
|
|
|
docstring const & lcounter = layout.counter;
|
|
|
|
|
if (!lcounter.empty()) {
|
|
|
|
|
if (layout.toclevel <= bp.secnumdepth
|
|
|
|
|
&& (layout.latextype != LATEX_ENVIRONMENT
|
|
|
|
|
|| it.text()->isFirstInSequence(it.pit()))) {
|
|
|
|
|
if (counters.hasCounter(lcounter))
|
|
|
|
|
counters.step(lcounter, utype);
|
|
|
|
|
par.params().labelString(par.expandLabel(layout, bp));
|
|
|
|
|
} else
|
|
|
|
|
par.params().labelString(docstring());
|
|
|
|
|
} else
|
|
|
|
|
par.params().labelString(par.expandLabel(layout, bp));
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
case LABEL_MANUAL:
|
2008-11-16 17:49:00 +00:00
|
|
|
|
case LABEL_BIBLIO:
|
2009-07-12 21:48:50 +00:00
|
|
|
|
par.params().labelString(par.expandLabel(layout, bp));
|
2008-11-16 17:49:00 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2020-03-06 12:23:01 +00:00
|
|
|
|
void Buffer::updateBuffer(ParIterator & parit, UpdateType utype, bool const deleted) const
{
	// Register ourselves on the master's include stack for cycle detection.
	pushIncludedBuffer(this);
	// LASSERT: Is it safe to continue here, or should we just return?
	LASSERT(parit.pit() == 0, /**/);

	// Record the position of this text in the buffer so that macros
	// occurring in it can be resolved.
	parit.text()->setMacrocontextPosition(parit);

	depth_type max_depth = 0;
	pit_type const last_pit = parit.lastpit();
	bool any_change = false;
	for ( ; parit.pit() <= last_pit ; ++parit.pit()) {
		// Clamp the paragraph depth if it exceeds what is allowed here.
		if (parit->params().depth() > max_depth) {
			/** FIXME: this function is const, but
			 * nevertheless it modifies the buffer. To be
			 * cleaner, one should modify the buffer in
			 * another function, which is actually
			 * non-const. This would however be costly in
			 * terms of code duplication.
			 */
			CursorData(parit).recordUndo();
			parit->params().depth(max_depth);
		}
		max_depth = parit->getMaxDepthAfter();

		if (utype == OutputUpdate) {
			// Track the active counters. This must happen on the
			// master buffer, since the local buffer isn't tracking
			// anything.
			masterBuffer()->params().documentClass().counters().
					setActiveLayout(parit->layout());
		}

		// Compute the label/counter for this paragraph.
		d->setLabel(parit, utype);

		// Recurse into the insets of the paragraph.
		for (auto const & item : parit->insetList()) {
			parit.pos() = item.pos;
			item.inset->updateBuffer(parit, utype,
				deleted || parit->isDeleted(item.pos));
			any_change |= item.inset->isChanged();
		}

		// Does this paragraph itself carry tracked changes?
		any_change |= parit->isChanged();
	}

	// Propagate the change indicator to the inset (or the cell that the
	// iterator points to, if applicable).
	parit.text()->inset().isChanged(any_change);
	popIncludedBuffer();
}
|
|
|
|
|
|
2009-03-28 19:02:49 +00:00
|
|
|
|
|
2021-01-28 09:10:18 +00:00
|
|
|
|
void Buffer::forceUpdate() const
|
|
|
|
|
{
|
|
|
|
|
d->need_update = true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
bool Buffer::needUpdate() const
|
|
|
|
|
{
|
|
|
|
|
return d->need_update;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2009-04-04 16:40:47 +00:00
|
|
|
|
// Spell-check the document from "from" towards "to" (or to the document
// end if "to" is empty). Stops at the first misspelled word, filling in
// word_lang and suggestions. Returns the number of words inspected.
int Buffer::spellCheck(DocIterator & from, DocIterator & to,
	WordLangTuple & word_lang, docstring_list & suggestions) const
{
	int nwords = 0;
	WordLangTuple candidate;
	suggestions.clear();
	word_lang = WordLangTuple();
	bool const to_end = to.empty();
	DocIterator const end = to_end ? doc_iterator_end(this) : to;
	// Walk the document forward one position at a time.
	for (; from != end; from.forwardPos()) {
		// Step out of any inset with spell checking disabled.
		while (!from.allowSpellCheck()) {
			from.pop_back();
			from.pos()++;
		}
		// "from" may have moved to the very end of the document above;
		// bail out here, otherwise LyX would crash later.
		if (from.atEnd() || (!to_end && from >= end))
			break;
		to = from;
		from.paragraph().spellCheck();
		SpellChecker::Result result =
			from.paragraph().spellCheck(from.pos(), to.pos(), candidate, suggestions);
		if (SpellChecker::misspelled(result)) {
			word_lang = candidate;
			break;
		}
		// Only count progress when we actually advanced; otherwise the
		// word count would be wrong.
		if (from != to) {
			from = to;
			++nwords;
		}
	}
	return nwords;
}
|
|
|
|
|
|
2010-01-08 02:15:56 +00:00
|
|
|
|
|
2012-01-12 07:03:24 +00:00
|
|
|
|
// Recompute word/character/blank counts for the range [from, to).
// When skipNoOutput is true, insets that produce no output are ignored.
void Buffer::Impl::updateStatistics(DocIterator & from, DocIterator & to, bool skipNoOutput)
{
	bool in_word = false;
	word_count_ = 0;
	char_count_ = 0;
	blank_count_ = 0;

	for (DocIterator cur = from; cur != to && !cur.atEnd();) {
		// Only text cells contribute to the statistics.
		if (!cur.inTexted()) {
			cur.forwardPos();
			continue;
		}

		Paragraph const & par = cur.paragraph();
		pos_type const pos = cur.pos();

		// Copied and adapted from isWordSeparator() in Paragraph
		if (pos == cur.lastpos()) {
			// End of paragraph always terminates a word.
			in_word = false;
		} else {
			Inset const * ins = par.getInset(pos);
			if (ins && skipNoOutput && !ins->producesOutput()) {
				// Skip this inset entirely.
				++cur.top().pos();
				// Stop if the end of the range was skipped over.
				if (!to.atEnd() && cur >= to)
					break;
				continue;
			} else if (!par.isDeleted(pos)) {
				if (par.isWordSeparator(pos))
					in_word = false;
				else if (!in_word) {
					++word_count_;
					in_word = true;
				}
				if (ins && ins->isLetter()) {
					// Letter-like insets count by their textual length.
					odocstringstream os;
					ins->toString(os);
					char_count_ += os.str().length();
				}
				else if (ins && ins->isSpace())
					++blank_count_;
				else if (ins) {
					// Other insets report their own counts.
					pair<int, int> counts = ins->isWords();
					char_count_ += counts.first;
					word_count_ += counts.second;
					in_word = false;
				}
				else {
					char_type const ch = par.getChar(pos);
					if (isPrintableNonspace(ch))
						++char_count_;
					else if (isSpace(ch))
						++blank_count_;
				}
			}
		}
		cur.forwardPos();
	}
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Public forwarder to the implementation's statistics computation.
void Buffer::updateStatistics(DocIterator & from, DocIterator & to, bool skipNoOutput) const
{
	d->updateStatistics(from, to, skipNoOutput);
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
int Buffer::wordCount() const
|
|
|
|
|
{
|
|
|
|
|
return d->wordCount();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
int Buffer::charCount(bool with_blanks) const
|
|
|
|
|
{
|
|
|
|
|
return d->charCount(with_blanks);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2020-01-12 19:09:41 +00:00
|
|
|
|
bool Buffer::areChangesPresent() const
|
|
|
|
|
{
|
|
|
|
|
return inset().isChanged();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2013-10-04 13:01:42 +00:00
|
|
|
|
// Re-read the document from disk, refreshing previews, titles and
// the undo stack. Returns the read status of the reload attempt.
Buffer::ReadStatus Buffer::reload()
{
	setBusy(true);
	// c.f. bug https://www.lyx.org/trac/ticket/6587
	removeAutosaveFile();
	// e.g., read-only status could have changed due to version control
	d->filename.refresh();
	docstring const display_name = makeDisplayPath(d->filename.absFileName());

	// Clear the parent; this will get reset if need be.
	d->setParent(nullptr);
	ReadStatus const status = loadLyXFile();
	if (status == ReadSuccess) {
		updateBuffer();
		changed(true);
		updateTitles();
		markClean();
		message(bformat(_("Document %1$s reloaded."), display_name));
		// A reloaded document starts with a fresh undo history.
		d->undo_.clear();
	} else {
		message(bformat(_("Could not reload document %1$s."), display_name));
	}
	setBusy(false);
	removePreviews();
	updatePreviews();
	errors("Parse");
	return status;
}
|
|
|
|
|
|
|
|
|
|
|
2010-11-07 21:26:02 +00:00
|
|
|
|
// Save the buffer under a new file name. On failure the previous
// name and unnamed state are restored. Returns true on success.
bool Buffer::saveAs(FileName const & fn)
{
	// Remember the current state so we can roll back on failure.
	FileName const previous_name = fileName();
	FileName const previous_auto = getAutosaveFileName();
	bool const previous_unnamed = isUnnamed();
	bool saved_ok = true;
	d->old_position = filePath();

	setFileName(fn);
	markDirty();
	setUnnamed(false);

	if (save()) {
		// Bring the autosave file with us, just in case.
		moveAutosaveFile(previous_auto);
		// Validate version control data and correct the buffer title.
		lyxvc().file_found_hook(fileName());
		updateTitles();
		// The file has now been saved to the new location, so check
		// that the locations of child buffers are still valid.
		checkChildBuffers();
		checkMasterBuffer();
	} else {
		// Save failed: restore the old filename and unnamed state.
		setFileName(previous_name);
		setUnnamed(previous_unnamed);
		saved_ok = false;
	}

	d->old_position.clear();
	return saved_ok;
}
|
|
|
|
|
|
|
|
|
|
|
2010-03-09 03:25:47 +00:00
|
|
|
|
void Buffer::checkChildBuffers()
|
|
|
|
|
{
|
2017-11-05 02:15:01 +00:00
|
|
|
|
for (auto const & bit : d->children_positions) {
|
|
|
|
|
DocIterator dit = bit.second;
|
|
|
|
|
Buffer * cbuf = const_cast<Buffer *>(bit.first);
|
2010-03-09 03:25:47 +00:00
|
|
|
|
if (!cbuf || !theBufferList().isLoaded(cbuf))
|
|
|
|
|
continue;
|
|
|
|
|
Inset * inset = dit.nextInset();
|
|
|
|
|
LASSERT(inset && inset->lyxCode() == INCLUDE_CODE, continue);
|
|
|
|
|
InsetInclude * inset_inc = static_cast<InsetInclude *>(inset);
|
|
|
|
|
docstring const & incfile = inset_inc->getParam("filename");
|
|
|
|
|
string oldloc = cbuf->absFileName();
|
|
|
|
|
string newloc = makeAbsPath(to_utf8(incfile),
|
2010-04-21 01:19:09 +00:00
|
|
|
|
onlyPath(absFileName())).absFileName();
|
2010-03-09 03:25:47 +00:00
|
|
|
|
if (oldloc == newloc)
|
|
|
|
|
continue;
|
|
|
|
|
// the location of the child file is incorrect.
|
2020-02-19 00:27:12 +00:00
|
|
|
|
cbuf->setParent(nullptr);
|
|
|
|
|
inset_inc->setChildBuffer(nullptr);
|
2010-03-09 03:25:47 +00:00
|
|
|
|
}
|
|
|
|
|
// invalidate cache of children
|
|
|
|
|
d->children_positions.clear();
|
|
|
|
|
d->position_to_children.clear();
|
|
|
|
|
}
|
|
|
|
|
|
2012-05-19 08:16:28 +00:00
|
|
|
|
|
|
|
|
|
// If a child has been saved under a different name/path, it might have been
|
|
|
|
|
// orphaned. Therefore the master needs to be reset (bug 8161).
|
|
|
|
|
void Buffer::checkMasterBuffer()
|
|
|
|
|
{
|
|
|
|
|
Buffer const * const master = masterBuffer();
|
|
|
|
|
if (master == this)
|
|
|
|
|
return;
|
|
|
|
|
|
|
|
|
|
// necessary to re-register the child (bug 5873)
|
|
|
|
|
// FIXME: clean up updateMacros (here, only
|
|
|
|
|
// child registering is needed).
|
|
|
|
|
master->updateMacros();
|
|
|
|
|
// (re)set master as master buffer, but only
|
|
|
|
|
// if we are a real child
|
|
|
|
|
if (master->isChild(this))
|
|
|
|
|
setParent(master);
|
|
|
|
|
else
|
2020-02-19 00:27:12 +00:00
|
|
|
|
setParent(nullptr);
|
2012-05-19 08:16:28 +00:00
|
|
|
|
}
|
|
|
|
|
|
2015-05-03 21:55:19 +00:00
|
|
|
|
|
2015-05-05 20:29:22 +00:00
|
|
|
|
// Compute the path of an included file relative to the (possibly new)
// buffer location, used after "save as". Falls back to returning the
// name unchanged when no adjustment is needed or possible.
string Buffer::includedFilePath(string const & name, string const & ext) const
{
	if (d->old_position.empty() ||
	    equivalent(FileName(d->old_position), FileName(filePath())))
		return name;

	bool const is_absolute = FileName::isAbsolute(name);
	// Both old_position and filePath() end with a path separator.
	string abs_name = is_absolute ? name : d->old_position + name;

	// If old_position is set to origin, we need to do the equivalent of
	// getReferencedFileName() (see readDocument()).
	if (!is_absolute && d->old_position == params().origin) {
		FileName const candidate(addExtension(filePath() + name, ext));
		if (candidate.exists())
			abs_name = filePath() + name;
	}

	// If the file does not exist, leave the name untouched.
	if (!FileName(addExtension(abs_name, ext)).exists())
		return name;

	if (is_absolute)
		return to_utf8(makeRelPath(from_utf8(name), from_utf8(filePath())));

	return to_utf8(makeRelPath(from_utf8(FileName(abs_name).realPath()),
				from_utf8(filePath())));
}
|
|
|
|
|
|
2016-01-30 23:14:36 +00:00
|
|
|
|
|
2017-03-05 19:12:07 +00:00
|
|
|
|
void Buffer::Impl::refreshFileMonitor()
|
|
|
|
|
{
|
2017-05-12 18:59:24 +00:00
|
|
|
|
if (file_monitor_ && file_monitor_->filename() == filename.absFileName()) {
|
|
|
|
|
file_monitor_->refresh();
|
|
|
|
|
return;
|
|
|
|
|
}
|
2017-03-05 19:12:07 +00:00
|
|
|
|
|
|
|
|
|
// The previous file monitor is invalid
|
|
|
|
|
// This also destroys the previous file monitor and all its connections
|
|
|
|
|
file_monitor_ = FileSystemWatcher::monitor(filename);
|
|
|
|
|
// file_monitor_ will be destroyed with *this, so it is not going to call a
|
|
|
|
|
// destroyed object method.
|
2017-06-05 20:04:07 +00:00
|
|
|
|
file_monitor_->connect([this](bool exists) {
|
|
|
|
|
fileExternallyModified(exists);
|
|
|
|
|
});
|
2017-03-05 19:12:07 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2017-06-10 19:51:44 +00:00
|
|
|
|
void Buffer::Impl::fileExternallyModified(bool const exists)
|
2017-03-05 19:12:07 +00:00
|
|
|
|
{
|
2017-06-10 21:45:57 +00:00
|
|
|
|
// ignore notifications after our own saving operations
|
2017-06-05 20:04:07 +00:00
|
|
|
|
if (checksum_ == filename.checksum()) {
|
|
|
|
|
LYXERR(Debug::FILES, "External modification but "
|
|
|
|
|
"checksum unchanged: " << filename);
|
|
|
|
|
return;
|
2017-05-12 23:00:30 +00:00
|
|
|
|
}
|
2017-06-05 20:04:07 +00:00
|
|
|
|
// If the file has been deleted, only mark the file as dirty since it is
|
|
|
|
|
// pointless to prompt for reloading. If later a file is moved into this
|
|
|
|
|
// location, then the externally modified warning will appear then.
|
|
|
|
|
if (exists)
|
|
|
|
|
externally_modified_ = true;
|
2017-06-10 19:51:44 +00:00
|
|
|
|
// Update external modification notification.
|
|
|
|
|
// Dirty buffers must be visible at all times.
|
|
|
|
|
if (wa_ && wa_->unhide(owner_))
|
2017-03-05 19:12:07 +00:00
|
|
|
|
wa_->updateTitles();
|
2017-06-10 19:51:44 +00:00
|
|
|
|
else
|
|
|
|
|
// Unable to unhide the buffer (e.g. no GUI or not current View)
|
|
|
|
|
lyx_clean = true;
|
2017-03-05 19:12:07 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
bool Buffer::notifiesExternalModification() const
|
|
|
|
|
{
|
2017-06-05 20:04:07 +00:00
|
|
|
|
return d->externally_modified_;
|
2017-03-05 19:12:07 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
void Buffer::clearExternalModification() const
|
|
|
|
|
{
|
2017-06-05 20:04:07 +00:00
|
|
|
|
d->externally_modified_ = false;
|
|
|
|
|
if (d->wa_)
|
|
|
|
|
d->wa_->updateTitles();
|
2017-03-05 19:12:07 +00:00
|
|
|
|
}
|
|
|
|
|
|
2016-01-30 23:14:36 +00:00
|
|
|
|
|
2020-04-25 21:34:27 +00:00
|
|
|
|
// Push a buffer onto the master's include stack (used to detect
// include cycles during updateBuffer).
// \param buf the buffer being entered; stored on the master buffer.
void Buffer::pushIncludedBuffer(Buffer const * buf) const
{
	// The include stack lives on the master buffer.
	masterBuffer()->d->include_list_.push_back(buf);
	if (lyxerr.debugging(Debug::FILES)) {
		LYXERR0("Pushed. Stack now:");
		// We just pushed, so the stack cannot be empty here; the former
		// "EMPTY!" branch was unreachable and has been removed.
		for (auto const & b : masterBuffer()->d->include_list_)
			LYXERR0(b->fileName());
	}
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
void Buffer::popIncludedBuffer() const
|
|
|
|
|
{
|
|
|
|
|
masterBuffer()->d->include_list_.pop_back();
|
|
|
|
|
if (lyxerr.debugging(Debug::FILES)) {
|
|
|
|
|
LYXERR0("Popped. Stack now:");
|
|
|
|
|
if (masterBuffer()->d->include_list_.empty())
|
|
|
|
|
LYXERR0("EMPTY!");
|
|
|
|
|
else
|
|
|
|
|
for (auto const & b : masterBuffer()->d->include_list_)
|
|
|
|
|
LYXERR0(b->fileName());
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Whether buf is currently on the master's include stack, i.e. whether
// including it again would create a cycle.
bool Buffer::isBufferIncluded(Buffer const * buf) const
{
	if (!buf)
		return false;
	list<Buffer const *> const & stack = masterBuffer()->d->include_list_;
	if (lyxerr.debugging(Debug::FILES)) {
		LYXERR0("Checking for " << buf->fileName() << ". Stack now:");
		if (stack.empty())
			LYXERR0("EMPTY!");
		else
			for (auto const & b : stack)
				LYXERR0(b->fileName());
	}
	return find(stack.begin(), stack.end(), buf) != stack.end();
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
void Buffer::clearIncludeList() const
|
|
|
|
|
{
|
|
|
|
|
LYXERR(Debug::FILES, "Clearing include list for " << fileName());
|
|
|
|
|
d->include_list_.clear();
|
|
|
|
|
}
|
|
|
|
|
|
2006-10-21 00:16:43 +00:00
|
|
|
|
} // namespace lyx
|