/**
 * \file Buffer.cpp
 * This file is part of LyX, the document processor.
 * Licence details can be found in the file COPYING.
 *
 * \author Lars Gullik Bjønnes
 * \author Stefan Schimanski
 *
 * Full author contact details are available in file CREDITS.
 */

#include <config.h>

#include "Buffer.h"

#include "Author.h"
#include "LayoutFile.h"
#include "BiblioInfo.h"
#include "BranchList.h"
#include "buffer_funcs.h"
#include "BufferList.h"
#include "BufferParams.h"
#include "Bullet.h"
#include "Chktex.h"
#include "Converter.h"
#include "Counters.h"
#include "DispatchResult.h"
#include "DocIterator.h"
#include "Encoding.h"
#include "ErrorList.h"
#include "Exporter.h"
#include "Format.h"
#include "FuncRequest.h"
#include "FuncStatus.h"
#include "IndicesList.h"
#include "InsetIterator.h"
#include "InsetList.h"
#include "Language.h"
#include "LaTeXFeatures.h"
#include "LaTeX.h"
#include "Layout.h"
#include "Lexer.h"
#include "LyXAction.h"
#include "LyX.h"
#include "LyXRC.h"
#include "LyXVC.h"
#include "output_docbook.h"
#include "output.h"
#include "output_latex.h"
#include "output_xhtml.h"
#include "output_plaintext.h"
#include "Paragraph.h"
#include "ParagraphParameters.h"
#include "ParIterator.h"
#include "PDFOptions.h"
#include "SpellChecker.h"
#include "sgml.h"
#include "TexRow.h"
#include "TexStream.h"
#include "Text.h"
#include "TextClass.h"
#include "TocBackend.h"
#include "Undo.h"
#include "VCBackend.h"
#include "version.h"
#include "WordLangTuple.h"
#include "WordList.h"

#include "insets/InsetBibitem.h"
#include "insets/InsetBibtex.h"
#include "insets/InsetBranch.h"
#include "insets/InsetInclude.h"
#include "insets/InsetTabular.h"
#include "insets/InsetText.h"

#include "mathed/InsetMathHull.h"
#include "mathed/MacroTable.h"
#include "mathed/MathMacroTemplate.h"
#include "mathed/MathSupport.h"

#include "frontends/alert.h"
#include "frontends/Delegates.h"
#include "frontends/WorkAreaManager.h"

#include "graphics/Previews.h"

#include "support/lassert.h"
#include "support/convert.h"
#include "support/debug.h"
#include "support/docstring_list.h"
#include "support/ExceptionMessage.h"
#include "support/FileName.h"
#include "support/FileNameList.h"
#include "support/filetools.h"
#include "support/ForkedCalls.h"
#include "support/gettext.h"
#include "support/gzstream.h"
#include "support/lstrings.h"
#include "support/lyxalgo.h"
#include "support/os.h"
#include "support/Package.h"
#include "support/Path.h"
#include "support/Systemcall.h"
#include "support/textutils.h"
#include "support/types.h"

#include "support/bind.h"
#include "support/shared_ptr.h"

#include <algorithm>
#include <fstream>
#include <iomanip>
#include <map>
#include <set>
#include <sstream>
#include <stack>
#include <vector>

using namespace std;
using namespace lyx::support;

namespace lyx {

namespace Alert = frontend::Alert;
namespace os = support::os;

namespace {

// Do not remove the comment below, so we get merge conflict in
// independent branches. Instead add your own.
int const LYX_FORMAT = 404; // rgh: refstyle

typedef map<string, bool> DepClean;
typedef map<docstring, pair<InsetLabel const *, Buffer::References> > RefCache;


void showPrintError(string const & name)
{
	docstring str = bformat(_("Could not print the document %1$s.\n"
			"Check that your printer is set up correctly."),
			makeDisplayPath(name, 50));
	Alert::error(_("Print document failed"), str);
}

} // namespace anon


class Buffer::Impl
{
public:
	Impl(Buffer * owner, FileName const & file, bool readonly, Buffer const * cloned_buffer);

	~Impl()
	{
		if (wa_) {
			wa_->closeAll();
			delete wa_;
		}
		delete inset;
	}

	/// search for macro in local (buffer) table or in children
	MacroData const * getBufferMacro(docstring const & name,
		DocIterator const & pos) const;

	/// Update macro table starting with position of it \param it in some
	/// text inset.
	void updateMacros(DocIterator & it, DocIterator & scope);
	///
	void setLabel(ParIterator & it, UpdateType utype) const;

	/** If we have branches that use the file suffix
	    feature, return the file name with suffix appended.
	*/
	support::FileName exportFileName() const;

	Buffer * owner_;

	BufferParams params;
	LyXVC lyxvc;
	FileName temppath;
	mutable TexRow texrow;

	/// need to regenerate .tex?
	DepClean dep_clean;

	/// is save needed?
	mutable bool lyx_clean;

	/// is autosave needed?
	mutable bool bak_clean;

	/// is this an unnamed file (New...)?
	bool unnamed;

	/// buffer is r/o
	bool read_only;

	/// name of the file the buffer is associated with.
	FileName filename;

	/** Set to true only when the file is fully loaded.
	 *  Used to prevent the premature generation of previews
	 *  and by the citation inset.
	 */
	bool file_fully_loaded;

	///
	mutable TocBackend toc_backend;

	/// macro tables
	typedef pair<DocIterator, MacroData> ScopeMacro;
	typedef map<DocIterator, ScopeMacro> PositionScopeMacroMap;
	typedef map<docstring, PositionScopeMacroMap> NamePositionScopeMacroMap;
	/// map from the macro name to the position map,
	/// which maps the macro definition position to the scope and the MacroData.
	NamePositionScopeMacroMap macros;
	/// This seems to change the way Buffer::getMacro() works
	mutable bool macro_lock;

	/// positions of child buffers in the buffer
	typedef map<Buffer const * const, DocIterator> BufferPositionMap;
	typedef pair<DocIterator, Buffer const *> ScopeBuffer;
	typedef map<DocIterator, ScopeBuffer> PositionScopeBufferMap;
	/// position of children buffers in this buffer
	BufferPositionMap children_positions;
	/// map from children inclusion positions to their scope and their buffer
	PositionScopeBufferMap position_to_children;

	/// Container for all sorts of Buffer-dependent errors.
	map<string, ErrorList> errorLists;

	/// timestamp and checksum used to test if the file has been externally
	/// modified. (Used to properly enable 'File->Revert to saved', bug 4114).
	time_t timestamp_;
	unsigned long checksum_;

	///
	frontend::WorkAreaManager * wa_;
	///
	frontend::GuiBufferDelegate * gui_;

	///
	Undo undo_;

	/// A cache for the bibfiles (including bibfiles of loaded child
	/// documents), needed for appropriate update of natbib labels.
	mutable support::FileNameList bibfiles_cache_;

	// FIXME The caching mechanism could be improved. At present, we have a
	// cache for each Buffer, that caches all the bibliography info for that
	// Buffer. A more efficient solution would be to have a global cache per
	// file, and then to construct the Buffer's bibinfo from that.
	/// A cache for bibliography info
	mutable BiblioInfo bibinfo_;
	/// whether the bibinfo cache is valid
	mutable bool bibinfo_cache_valid_;
	/// whether the bibfile cache is valid
	mutable bool bibfile_cache_valid_;
	/// Cache of timestamps of .bib files
	map<FileName, time_t> bibfile_status_;

	mutable RefCache ref_cache_;

	/// our Text that should be wrapped in an InsetText
	InsetText * inset;

	/// This is here to force the test to be done whenever parent_buffer
	/// is accessed.
	Buffer const * parent() const {
		// if parent_buffer is not loaded, then it has been unloaded,
		// which means that parent_buffer is an invalid pointer. So we
		// set it to null in that case.
		// however, the BufferList doesn't know about cloned buffers, so
		// they will always be regarded as unloaded. in that case, we hope
		// for the best.
		if (!cloned_buffer_ && !theBufferList().isLoaded(parent_buffer))
			parent_buffer = 0;
		return parent_buffer;
	}

	///
	void setParent(Buffer const * pb) {
		if (parent_buffer == pb)
			// nothing to do
			return;
		if (!cloned_buffer_ && parent_buffer && pb)
			LYXERR0("Warning: a buffer should not have two parents!");
		parent_buffer = pb;
		if (!cloned_buffer_ && parent_buffer) {
			parent_buffer->invalidateBibfileCache();
			parent_buffer->invalidateBibinfoCache();
		}
	}

	/// If non-zero, this buffer is a clone of the existing buffer \p cloned_buffer_
	/// This one is useful for preview detached in a thread.
	Buffer const * cloned_buffer_;
	/// are we in the process of exporting this buffer?
	mutable bool doing_export;

private:
	/// So we can force access via the accessors.
	mutable Buffer const * parent_buffer;

};
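
// Note on the macro tables above (descriptive sketch only, no behavioural
// change): Impl::macros is keyed by macro name, and each entry maps a macro
// definition position to a ScopeMacro, i.e. roughly
//     macros[name][definition_position] = pair(scope, MacroData)
// where the first DocIterator presumably marks the end of the definition's
// visibility, so a lookup can pick the definition whose scope covers a given
// position (see getBufferMacro above).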


/// Creates the per buffer temporary directory
static FileName createBufferTmpDir()
{
	static int count;
	// We are in our own directory.  Why bother to mangle name?
	// In fact I wrote this code to circumvent a problematic behaviour
	// (bug?) of EMX mkstemp().
	FileName tmpfl(package().temp_dir().absFileName() + "/lyx_tmpbuf" +
		convert<string>(count++));

	if (!tmpfl.createDirectory(0777)) {
		throw ExceptionMessage(WarningException, _("Disk Error: "), bformat(
			_("LyX could not create the temporary directory '%1$s' (Disk is full maybe?)"),
			from_utf8(tmpfl.absFileName())));
	}
	return tmpfl;
}
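
// Illustrative note (the exact paths are an assumption, not taken from this
// file): successive buffers get directories named lyx_tmpbuf0, lyx_tmpbuf1,
// ... inside whatever directory package().temp_dir() resolves to.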


Buffer::Impl::Impl(Buffer * owner, FileName const & file, bool readonly_,
	Buffer const * cloned_buffer)
	: owner_(owner), lyx_clean(true), bak_clean(true), unnamed(false),
	  read_only(readonly_), filename(file), file_fully_loaded(false),
	  toc_backend(owner), macro_lock(false), timestamp_(0),
	  checksum_(0), wa_(0), gui_(0), undo_(*owner), bibinfo_cache_valid_(false),
	  bibfile_cache_valid_(false), cloned_buffer_(cloned_buffer),
	  doing_export(false), parent_buffer(0)
{
	if (!cloned_buffer_) {
		temppath = createBufferTmpDir();
		lyxvc.setBuffer(owner_);
		if (use_gui)
			wa_ = new frontend::WorkAreaManager;
		return;
	}
	temppath = cloned_buffer_->d->temppath;
	file_fully_loaded = true;
	params = cloned_buffer_->d->params;
	bibfiles_cache_ = cloned_buffer_->d->bibfiles_cache_;
	bibinfo_ = cloned_buffer_->d->bibinfo_;
	bibinfo_cache_valid_ = cloned_buffer_->d->bibinfo_cache_valid_;
	bibfile_cache_valid_ = cloned_buffer_->d->bibfile_cache_valid_;
	bibfile_status_ = cloned_buffer_->d->bibfile_status_;
}


Buffer::Buffer(string const & file, bool readonly, Buffer const * cloned_buffer)
	: d(new Impl(this, FileName(file), readonly, cloned_buffer))
{
	LYXERR(Debug::INFO, "Buffer::Buffer()");
	if (cloned_buffer) {
		d->inset = new InsetText(*cloned_buffer->d->inset);
		d->inset->setBuffer(*this);
		// FIXME: optimize this loop somewhat, maybe by creating a new
		// general recursive Inset::setId().
		DocIterator it = doc_iterator_begin(this);
		DocIterator cloned_it = doc_iterator_begin(cloned_buffer);
		for (; !it.atEnd(); it.forwardPar(), cloned_it.forwardPar())
			it.paragraph().setId(cloned_it.paragraph().id());
	} else
		d->inset = new InsetText(this);
	d->inset->setAutoBreakRows(true);
	d->inset->getText(0)->setMacrocontextPosition(par_iterator_begin());
}


Buffer::~Buffer()
{
	LYXERR(Debug::INFO, "Buffer::~Buffer()");
	// here the buffer should take care that it is
	// saved properly, before it goes into the void.

	// GuiView already destroyed
	d->gui_ = 0;

	if (isInternal()) {
		// No need to do additional cleanups for internal buffer.
		delete d;
		return;
	}

	// loop over children
	Impl::BufferPositionMap::iterator it = d->children_positions.begin();
	Impl::BufferPositionMap::iterator end = d->children_positions.end();
	for (; it != end; ++it) {
		Buffer * child = const_cast<Buffer *>(it->first);
		if (d->cloned_buffer_)
			delete child;
		// The child buffer might have been closed already.
		else if (theBufferList().isLoaded(child))
			theBufferList().releaseChild(this, child);
	}

	if (!isClean()) {
		docstring msg = _("LyX attempted to close a document that had unsaved changes!\n");
		msg += emergencyWrite();
		Alert::warning(_("Attempting to close changed document!"), msg);
	}

	// clear references to children in macro tables
	d->children_positions.clear();
	d->position_to_children.clear();

	if (!d->cloned_buffer_ && !d->temppath.destroyDirectory()) {
		Alert::warning(_("Could not remove temporary directory"),
			bformat(_("Could not remove the temporary directory %1$s"),
			from_utf8(d->temppath.absFileName())));
	}

	// Remove any previewed LaTeX snippets associated with this buffer.
	if (!isClone())
		thePreviews().removeLoader(*this);

	delete d;
}


Buffer * Buffer::clone() const
{
	Buffer * buffer_clone = new Buffer(fileName().absFileName(), false, this);
	buffer_clone->d->macro_lock = true;
	buffer_clone->d->children_positions.clear();
	// FIXME (Abdel 09/01/2010): this is too complicated. The whole children_positions and
	// math macro caches need to be rethought and simplified.
	// I am not sure whether we should handle Buffer cloning here or in BufferList.
	// Right now BufferList knows nothing about buffer clones.
	Impl::BufferPositionMap::iterator it = d->children_positions.begin();
	Impl::BufferPositionMap::iterator end = d->children_positions.end();
	for (; it != end; ++it) {
		DocIterator dit = it->second.clone(buffer_clone);
		dit.setBuffer(buffer_clone);
		Buffer * child = const_cast<Buffer *>(it->first);
		Buffer * child_clone = child->clone();
		Inset * inset = dit.nextInset();
		LASSERT(inset && inset->lyxCode() == INCLUDE_CODE, continue);
		InsetInclude * inset_inc = static_cast<InsetInclude *>(inset);
		inset_inc->setChildBuffer(child_clone);
		child_clone->d->setParent(buffer_clone);
		buffer_clone->setChild(dit, child_clone);
	}
	buffer_clone->d->macro_lock = false;
	return buffer_clone;
}


bool Buffer::isClone() const
{
	return d->cloned_buffer_;
}


void Buffer::changed(bool update_metrics) const
{
	if (d->wa_)
		d->wa_->redrawAll(update_metrics);
}


frontend::WorkAreaManager & Buffer::workAreaManager() const
{
	LASSERT(d->wa_, /**/);
	return *d->wa_;
}


Text & Buffer::text() const
{
	return d->inset->text();
}


Inset & Buffer::inset() const
{
	return *d->inset;
}


BufferParams & Buffer::params()
{
	return d->params;
}


BufferParams const & Buffer::params() const
{
	return d->params;
}


ParagraphList & Buffer::paragraphs()
{
	return text().paragraphs();
}


ParagraphList const & Buffer::paragraphs() const
{
	return text().paragraphs();
}


LyXVC & Buffer::lyxvc()
{
	return d->lyxvc;
}


LyXVC const & Buffer::lyxvc() const
{
	return d->lyxvc;
}


string const Buffer::temppath() const
{
	return d->temppath.absFileName();
}


TexRow & Buffer::texrow()
{
	return d->texrow;
}


TexRow const & Buffer::texrow() const
{
	return d->texrow;
}


TocBackend & Buffer::tocBackend() const
{
	return d->toc_backend;
}


Undo & Buffer::undo()
{
	return d->undo_;
}


void Buffer::setChild(DocIterator const & dit, Buffer * child)
{
	d->children_positions[child] = dit;
}


string Buffer::latexName(bool const no_path) const
{
	FileName latex_name =
		makeLatexName(d->exportFileName());
	return no_path ? latex_name.onlyFileName()
		: latex_name.absFileName();
}


FileName Buffer::Impl::exportFileName() const
{
	docstring const branch_suffix =
		params.branchlist().getFileNameSuffix();
	if (branch_suffix.empty())
		return filename;

	string const name = filename.onlyFileNameWithoutExt()
		+ to_utf8(branch_suffix);
	FileName res(filename.onlyPath().absFileName() + "/" + name);
	res.changeExtension(filename.extension());

	return res;
}
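
// Illustrative example (file names are hypothetical): if the branch list
// yields the suffix "-print" for a document /home/user/thesis.lyx, the name
// used for export becomes /home/user/thesis-print.lyx; with no suffix the
// plain file name is returned unchanged.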


string Buffer::logName(LogType * type) const
{
	string const filename = latexName(false);

	if (filename.empty()) {
		if (type)
			*type = latexlog;
		return string();
	}

	string const path = temppath();

	FileName const fname(addName(temppath(),
		onlyFileName(changeExtension(filename,
			".log"))));

	// FIXME: how do we know this is the name of the build log?
	FileName const bname(
		addName(path, onlyFileName(
			changeExtension(filename,
				formats.extension(bufferFormat()) + ".out"))));

	// Also consider the master buffer log file
	FileName masterfname = fname;
	LogType mtype;
	if (masterBuffer() != this) {
		string const mlogfile = masterBuffer()->logName(&mtype);
		masterfname = FileName(mlogfile);
	}

	// If no Latex log or Build log is newer, show Build log
	if (bname.exists() &&
	    ((!fname.exists() && !masterfname.exists())
	    || (fname.lastModified() < bname.lastModified()
	    && masterfname.lastModified() < bname.lastModified()))) {
		LYXERR(Debug::FILES, "Log name calculated as: " << bname);
		if (type)
			*type = buildlog;
		return bname.absFileName();
	// If we have a newer master file log or only a master log, show this
	} else if (fname != masterfname
		   && (!fname.exists() && (masterfname.exists()
		   || fname.lastModified() < masterfname.lastModified()))) {
		LYXERR(Debug::FILES, "Log name calculated as: " << masterfname);
		if (type)
			*type = mtype;
		return masterfname.absFileName();
	}
	LYXERR(Debug::FILES, "Log name calculated as: " << fname);
	if (type)
		*type = latexlog;
	return fname.absFileName();
}


void Buffer::setReadonly(bool const flag)
{
	if (d->read_only != flag) {
		d->read_only = flag;
		changed(false);
	}
}


void Buffer::setFileName(string const & newfile)
{
	d->filename = makeAbsPath(newfile);
	setReadonly(d->filename.isReadOnly());
	updateTitles();
}


int Buffer::readHeader(Lexer & lex)
{
	int unknown_tokens = 0;
	int line = -1;
	int begin_header_line = -1;

	// Initialize parameters that may be/go lacking in header:
	params().branchlist().clear();
	params().preamble.erase();
	params().options.erase();
	params().master.erase();
	params().float_placement.erase();
	params().paperwidth.erase();
	params().paperheight.erase();
	params().leftmargin.erase();
	params().rightmargin.erase();
	params().topmargin.erase();
	params().bottommargin.erase();
	params().headheight.erase();
	params().headsep.erase();
	params().footskip.erase();
	params().columnsep.erase();
	params().fontsCJK.erase();
	params().listings_params.clear();
	params().clearLayoutModules();
	params().clearRemovedModules();
	params().clearIncludedChildren();
	params().pdfoptions().clear();
	params().indiceslist().clear();
	params().backgroundcolor = lyx::rgbFromHexName("#ffffff");
	params().isbackgroundcolor = false;
	params().fontcolor = lyx::rgbFromHexName("#000000");
	params().isfontcolor = false;
	params().notefontcolor = lyx::rgbFromHexName("#cccccc");
	params().boxbgcolor = lyx::rgbFromHexName("#ff0000");
	params().html_latex_start.clear();
	params().html_latex_end.clear();
	params().html_math_img_scale = 1.0;
	params().output_sync_macro.erase();

	for (int i = 0; i < 4; ++i) {
		params().user_defined_bullet(i) = ITEMIZE_DEFAULTS[i];
		params().temp_bullet(i) = ITEMIZE_DEFAULTS[i];
	}

	ErrorList & errorList = d->errorLists["Parse"];

	while (lex.isOK()) {
		string token;
		lex >> token;

		if (token.empty())
			continue;

		if (token == "\\end_header")
			break;

		++line;
		if (token == "\\begin_header") {
			begin_header_line = line;
			continue;
		}

		LYXERR(Debug::PARSER, "Handling document header token: `"
				      << token << '\'');

		string unknown = params().readToken(lex, token, d->filename.onlyPath());
		if (!unknown.empty()) {
			if (unknown[0] != '\\' && token == "\\textclass") {
				Alert::warning(_("Unknown document class"),
				       bformat(_("Using the default document class, because the "
						 "class %1$s is unknown."), from_utf8(unknown)));
			} else {
				++unknown_tokens;
				docstring const s = bformat(_("Unknown token: "
							      "%1$s %2$s\n"),
							    from_utf8(token),
							    lex.getDocString());
				errorList.push_back(ErrorItem(_("Document header error"),
					s, -1, 0, 0));
			}
		}
	}
	if (begin_header_line) {
		docstring const s = _("\\begin_header is missing");
		errorList.push_back(ErrorItem(_("Document header error"),
			s, -1, 0, 0));
	}

	params().makeDocumentClass();

	return unknown_tokens;
}


// Uwe C. Schroeder
// changed to be public and have one parameter
// Returns true if "\end_document" is not read (Asger)
bool Buffer::readDocument(Lexer & lex)
{
	ErrorList & errorList = d->errorLists["Parse"];
	errorList.clear();

	// remove dummy empty par
	paragraphs().clear();

	if (!lex.checkFor("\\begin_document")) {
		docstring const s = _("\\begin_document is missing");
		errorList.push_back(ErrorItem(_("Document header error"),
			s, -1, 0, 0));
	}

	readHeader(lex);

	if (params().outputChanges) {
		bool dvipost    = LaTeXFeatures::isAvailable("dvipost");
		bool xcolorulem = LaTeXFeatures::isAvailable("ulem") &&
				  LaTeXFeatures::isAvailable("xcolor");

		if (!dvipost && !xcolorulem) {
			Alert::warning(_("Changes not shown in LaTeX output"),
				       _("Changes will not be highlighted in LaTeX output, "
					 "because neither dvipost nor xcolor/ulem are installed.\n"
					 "Please install these packages or redefine "
					 "\\lyxadded and \\lyxdeleted in the LaTeX preamble."));
		} else if (!xcolorulem) {
			Alert::warning(_("Changes not shown in LaTeX output"),
				       _("Changes will not be highlighted in LaTeX output "
					 "when using pdflatex, because xcolor and ulem are not installed.\n"
					 "Please install both packages or redefine "
					 "\\lyxadded and \\lyxdeleted in the LaTeX preamble."));
		}
	}

	if (!params().master.empty()) {
		FileName const master_file = makeAbsPath(params().master,
			   onlyPath(absFileName()));
		if (isLyXFileName(master_file.absFileName())) {
			Buffer * master =
				checkAndLoadLyXFile(master_file, true);
			if (master) {
				// necessary e.g. after a reload
				// to re-register the child (bug 5873)
				// FIXME: clean up updateMacros (here, only
				// child registering is needed).
				master->updateMacros();
				// set master as master buffer, but only
				// if we are a real child
				if (master->isChild(this))
					setParent(master);
				// if the master is not fully loaded
				// it is probably just loading this
				// child. No warning needed then.
				else if (master->isFullyLoaded())
					LYXERR0("The master '"
						<< params().master
						<< "' assigned to this document ("
						<< absFileName()
						<< ") does not include "
						"this document. Ignoring the master assignment.");
			}
		}
	}

	// assure we have a default index
	params().indiceslist().addDefault(B_("Index"));

	// read main text
	bool const res = text().read(lex, errorList, d->inset);

	usermacros.clear();
	updateMacros();
	updateMacroInstances();
	return res;
}


bool Buffer::readString(string const & s)
{
	params().compressed = false;

	Lexer lex;
	istringstream is(s);
	lex.setStream(is);
	FileName const fn = FileName::tempName("Buffer_readString");

	int file_format;
	ReadStatus const ret_plf = parseLyXFormat(lex, fn, file_format);
	if (ret_plf != ReadSuccess)
		return ret_plf;

	if (file_format != LYX_FORMAT) {
		// We need to call lyx2lyx, so write the input to a file
		ofstream os(fn.toFilesystemEncoding().c_str());
		os << s;
		os.close();
		// lyxvc in readFile
		return readFile(fn) == ReadSuccess;
	}

	if (readDocument(lex))
		return false;
	return true;
}


Buffer::ReadStatus Buffer::readFile(FileName const & fn)
{
	FileName fname(fn);
	Lexer lex;
	lex.setFile(fname);

	int file_format;
	ReadStatus const ret_plf = parseLyXFormat(lex, fn, file_format);
	if (ret_plf != ReadSuccess)
		return ret_plf;

	if (file_format != LYX_FORMAT) {
		FileName tmpFile;
		ReadStatus const ret_clf = convertLyXFormat(fn, tmpFile, file_format);
		if (ret_clf != ReadSuccess)
			return ret_clf;
		return readFile(tmpFile);
	}

	if (readDocument(lex)) {
		Alert::error(_("Document format failure"),
			bformat(_("%1$s ended unexpectedly, which means"
				" that it is probably corrupted."),
				from_utf8(fn.absFileName())));
		return ReadDocumentFailure;
	}

	d->file_fully_loaded = true;
	// InsetInfo needs to know if file is under VCS
	lyxvc().file_found_hook(fn);
	d->read_only = !fname.isWritable();
	params().compressed = fname.isZippedFile();
	return ReadSuccess;
}
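
// Reading flow, in outline (a descriptive sketch of the code above, not new
// behaviour): parseLyXFormat() reads the "\lyxformat" token; if the file's
// format differs from LYX_FORMAT, convertLyXFormat() runs lyx2lyx into a
// temporary file and readFile() recurses on the converted copy; otherwise
// readDocument() parses the header and the main text directly.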


bool Buffer::isFullyLoaded() const
{
	return d->file_fully_loaded;
}


void Buffer::setFullyLoaded(bool value)
{
	d->file_fully_loaded = value;
}


Buffer::ReadStatus Buffer::parseLyXFormat(Lexer & lex,
	FileName const & fn, int & file_format) const
{
	if (!lex.checkFor("\\lyxformat")) {
		Alert::error(_("Document format failure"),
			bformat(_("%1$s is not a readable LyX document."),
				from_utf8(fn.absFileName())));
		return ReadNoLyXFormat;
	}

	string tmp_format;
	lex >> tmp_format;

	// LyX formats 217 and earlier were written as 2.17. This corresponds
	// to files from LyX versions < 1.1.6.3. We just remove the dot in
	// these cases. See also: www.lyx.org/trac/changeset/1313.
	size_t dot = tmp_format.find_first_of(".,");
	if (dot != string::npos)
		tmp_format.erase(dot, 1);

	file_format = convert<int>(tmp_format);
	return ReadSuccess;
}
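
// Example of the token handled above (values illustrative): a current file
// begins with "\lyxformat 404", giving file_format == 404; a very old file
// may begin with "\lyxformat 2.17", and dropping the dot turns that into the
// integer format 217.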


Buffer::ReadStatus Buffer::convertLyXFormat(FileName const & fn,
	FileName & tmpfile, int from_format)
{
	tmpfile = FileName::tempName("Buffer_convertLyXFormat");
	if (tmpfile.empty()) {
		Alert::error(_("Conversion failed"),
			bformat(_("%1$s is from a different"
				" version of LyX, but a temporary"
				" file for converting it could"
				" not be created."),
				from_utf8(fn.absFileName())));
		return LyX2LyXNoTempFile;
	}

	FileName const lyx2lyx = libFileSearch("lyx2lyx", "lyx2lyx");
	if (lyx2lyx.empty()) {
		Alert::error(_("Conversion script not found"),
			bformat(_("%1$s is from a different"
				" version of LyX, but the"
				" conversion script lyx2lyx"
				" could not be found."),
				from_utf8(fn.absFileName())));
		return LyX2LyXNotFound;
	}

	// Run lyx2lyx:
	//   $python$ "$lyx2lyx$" -t $LYX_FORMAT$ -o "$tempfile$" "$filetoread$"
	ostringstream command;
	command << os::python()
		<< ' ' << quoteName(lyx2lyx.toFilesystemEncoding())
		<< " -t " << convert<string>(LYX_FORMAT)
		<< " -o " << quoteName(tmpfile.toFilesystemEncoding())
		<< ' ' << quoteName(fn.toSafeFilesystemEncoding());
	string const command_str = command.str();

	LYXERR(Debug::INFO, "Running '" << command_str << '\'');

	cmd_ret const ret = runCommand(command_str);
	if (ret.first != 0) {
		if (from_format < LYX_FORMAT) {
			Alert::error(_("Conversion script failed"),
				bformat(_("%1$s is from an older version"
					" of LyX, but the lyx2lyx script"
					" failed to convert it."),
					from_utf8(fn.absFileName())));
			return LyX2LyXOlderFormat;
		} else {
			Alert::error(_("Conversion script failed"),
				bformat(_("%1$s is from a newer version"
					" of LyX, but the lyx2lyx script"
					" failed to convert it."),
					from_utf8(fn.absFileName())));
			return LyX2LyXNewerFormat;
		}
	}
	return ReadSuccess;
}
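
// For illustration only (all paths hypothetical), the assembled command line
// typically looks like
//   python "/usr/share/lyx/lyx2lyx/lyx2lyx" -t 404 -o "/tmp/Buffer_convertLyXFormatXXXXXX.lyx" "old.lyx"
// i.e. lyx2lyx rewrites the old file into the current LYX_FORMAT in a
// temporary file, which readFile() then parses.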
|
|
|
|
|
|
|
|
|
2010-02-09 17:06:40 +00:00
|
|
|
// Should probably be moved to somewhere else: BufferView? GuiView?
|
2000-03-20 14:49:54 +00:00
|
|
|
bool Buffer::save() const
|
2000-02-22 00:36:17 +00:00
|
|
|
{
|
2010-03-26 23:15:37 +00:00
|
|
|
// ask if the disk file has been externally modified (use checksum method)
|
|
|
|
if (fileName().exists() && isExternallyModified(checksum_method)) {
|
|
|
|
docstring const file = makeDisplayPath(absFileName(), 20);
|
|
|
|
docstring text = bformat(_("Document %1$s has been externally modified. Are you sure "
|
|
|
|
"you want to overwrite this file?"), file);
|
|
|
|
int const ret = Alert::prompt(_("Overwrite modified file?"),
|
|
|
|
text, 1, 1, _("&Overwrite"), _("&Cancel"));
|
|
|
|
if (ret == 1)
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2000-02-22 00:36:17 +00:00
|
|
|
// We don't need autosaves in the immediate future. (Asger)
|
|
|
|
resetAutosaveTimers();
|
|
|
|
|
2007-01-14 17:31:15 +00:00
|
|
|
FileName backupName;
|
|
|
|
bool madeBackup = false;
|
|
|
|
|
2006-11-30 15:21:23 +00:00
|
|
|
// make a backup if the file already exists
|
2007-11-03 18:30:05 +00:00
|
|
|
if (lyxrc.make_backup && fileName().exists()) {
|
2007-10-20 10:03:45 +00:00
|
|
|
backupName = FileName(absFileName() + '~');
|
2007-10-21 17:57:13 +00:00
|
|
|
if (!lyxrc.backupdir_path.empty()) {
|
|
|
|
string const mangledName =
|
2010-04-21 01:19:09 +00:00
|
|
|
subst(subst(backupName.absFileName(), '/', '!'), ':', '!');
|
2007-01-14 17:31:15 +00:00
|
|
|
backupName = FileName(addName(lyxrc.backupdir_path,
|
2007-10-21 17:57:13 +00:00
|
|
|
mangledName));
|
|
|
|
}
|
2010-05-18 09:45:25 +00:00
|
|
|
// do not copy because of #6587
|
2010-03-26 23:15:37 +00:00
|
|
|
if (fileName().moveTo(backupName)) {
|
2007-01-14 17:31:15 +00:00
|
|
|
madeBackup = true;
|
2007-11-07 19:52:11 +00:00
|
|
|
} else {
|
2006-09-11 08:54:10 +00:00
|
|
|
Alert::error(_("Backup failure"),
|
2007-05-28 22:27:45 +00:00
|
|
|
bformat(_("Cannot create backup file %1$s.\n"
|
|
|
|
"Please check whether the directory exists and is writeable."),
|
2010-04-21 01:19:09 +00:00
|
|
|
from_utf8(backupName.absFileName())));
|
2007-11-15 20:04:51 +00:00
|
|
|
//LYXERR(Debug::DEBUG, "Fs error: " << fe.what());
|
2000-02-22 00:36:17 +00:00
|
|
|
}
|
|
|
|
}
|
2002-03-21 16:55:34 +00:00
|
|
|
|
2007-11-30 17:46:49 +00:00
|
|
|
if (writeFile(d->filename)) {
|
2002-08-04 23:11:50 +00:00
|
|
|
markClean();
|
2007-01-14 17:31:15 +00:00
|
|
|
return true;
|
2000-02-22 00:36:17 +00:00
|
|
|
} else {
|
|
|
|
// Saving failed, so the backup is not really a backup: restore it
|
2007-01-14 17:31:15 +00:00
|
|
|
if (madeBackup)
|
2007-12-14 18:16:25 +00:00
|
|
|
backupName.moveTo(d->filename);
|
2000-02-22 00:36:17 +00:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2006-11-30 16:59:50 +00:00
|
|
|
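// Write the buffer to the given file in native LyX format, using a
// gzipped stream when the document is marked as compressed. Returns
// false if the buffer is read-only and fname is its own file, or if
// writing fails.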
bool Buffer::writeFile(FileName const & fname) const
|
1999-09-27 18:44:28 +00:00
|
|
|
{
|
2007-11-30 17:46:49 +00:00
|
|
|
if (d->read_only && fname == d->filename)
|
1999-09-27 18:44:28 +00:00
|
|
|
return false;
|
|
|
|
|
2004-07-25 00:04:42 +00:00
|
|
|
bool retval = false;
|
2003-07-27 23:40:08 +00:00
|
|
|
|
2007-12-04 22:21:25 +00:00
|
|
|
docstring const str = bformat(_("Saving document %1$s..."),
|
2010-04-21 01:19:09 +00:00
|
|
|
makeDisplayPath(fname.absFileName()));
|
2007-12-04 22:21:25 +00:00
|
|
|
message(str);
|
|
|
|
|
2010-02-22 19:34:25 +00:00
|
|
|
string const encoded_fname = fname.toSafeFilesystemEncoding(os::CREATE);
|
|
|
|
|
2003-09-09 09:47:59 +00:00
|
|
|
if (params().compressed) {
|
2010-02-22 19:34:25 +00:00
|
|
|
gz::ogzstream ofs(encoded_fname.c_str(), ios::out|ios::trunc);
|
2007-12-04 22:21:25 +00:00
|
|
|
retval = ofs && write(ofs);
|
2003-07-28 14:40:29 +00:00
|
|
|
} else {
|
2010-02-22 19:34:25 +00:00
|
|
|
ofstream ofs(encoded_fname.c_str(), ios::out|ios::trunc);
|
2007-12-04 22:21:25 +00:00
|
|
|
retval = ofs && write(ofs);
|
|
|
|
}
|
2003-07-27 23:40:08 +00:00
|
|
|
|
2007-12-04 22:21:25 +00:00
|
|
|
if (!retval) {
|
2008-03-16 13:05:42 +00:00
|
|
|
message(str + _(" could not write file!"));
|
2007-12-04 22:21:25 +00:00
|
|
|
return false;
|
2003-07-28 14:40:29 +00:00
|
|
|
}
|
2000-09-14 17:53:12 +00:00
|
|
|
|
2010-03-26 23:15:37 +00:00
|
|
|
// see bug 6587
|
|
|
|
// removeAutosaveFile();
|
2008-01-06 16:21:57 +00:00
|
|
|
|
2007-12-04 22:21:25 +00:00
|
|
|
saveCheckSum(d->filename);
|
|
|
|
message(str + _(" done."));
|
|
|
|
|
|
|
|
return true;
|
2003-07-27 23:40:08 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-08-15 16:34:54 +00:00
|
|
|
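// Try to save an unclean buffer to "<name>.emergency": first next to the
// document (unless it is unnamed), then in the user's home directory,
// then in the temporary directory. Returns a report suitable for display.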
docstring Buffer::emergencyWrite()
|
|
|
|
{
|
|
|
|
// No need to save if the buffer has not changed.
|
|
|
|
if (isClean())
|
|
|
|
return docstring();
|
|
|
|
|
2010-04-21 01:19:28 +00:00
|
|
|
string const doc = isUnnamed() ? onlyFileName(absFileName()) : absFileName();
|
2009-08-15 16:34:54 +00:00
|
|
|
|
|
|
|
docstring user_message = bformat(
|
|
|
|
_("LyX: Attempting to save document %1$s\n"), from_utf8(doc));
|
|
|
|
|
|
|
|
// We try to save three places:
|
|
|
|
// 1) Same place as document. Unless it is an unnamed doc.
|
|
|
|
if (!isUnnamed()) {
|
|
|
|
string s = absFileName();
|
|
|
|
s += ".emergency";
|
|
|
|
LYXERR0(" " << s);
|
|
|
|
if (writeFile(FileName(s))) {
|
|
|
|
markClean();
|
2009-08-15 20:47:46 +00:00
|
|
|
user_message += bformat(_(" Saved to %1$s. Phew.\n"), from_utf8(s));
|
2009-08-15 16:34:54 +00:00
|
|
|
return user_message;
|
|
|
|
} else {
|
|
|
|
user_message += _(" Save failed! Trying again...\n");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// 2) In HOME directory.
|
2010-04-21 01:19:09 +00:00
|
|
|
string s = addName(package().home_dir().absFileName(), absFileName());
|
2009-08-15 16:34:54 +00:00
|
|
|
s += ".emergency";
|
|
|
|
lyxerr << ' ' << s << endl;
|
|
|
|
if (writeFile(FileName(s))) {
|
|
|
|
markClean();
|
2009-08-15 20:47:46 +00:00
|
|
|
user_message += bformat(_(" Saved to %1$s. Phew.\n"), from_utf8(s));
|
2009-08-15 16:34:54 +00:00
|
|
|
return user_message;
|
|
|
|
}
|
|
|
|
|
|
|
|
user_message += _(" Save failed! Trying yet again...\n");
|
|
|
|
|
|
|
|
// 3) In "/tmp" directory.
|
|
|
|
// MakeAbsPath to prepend the current
|
|
|
|
// drive letter on OS/2
|
2010-04-21 01:19:09 +00:00
|
|
|
s = addName(package().temp_dir().absFileName(), absFileName());
|
2009-08-15 16:34:54 +00:00
|
|
|
s += ".emergency";
|
|
|
|
lyxerr << ' ' << s << endl;
|
|
|
|
if (writeFile(FileName(s))) {
|
|
|
|
markClean();
|
2009-08-15 20:47:46 +00:00
|
|
|
user_message += bformat(_(" Saved to %1$s. Phew.\n"), from_utf8(s));
|
2009-08-15 16:34:54 +00:00
|
|
|
return user_message;
|
|
|
|
}
|
|
|
|
|
|
|
|
user_message += _(" Save failed! Bummer. Document is lost.");
|
|
|
|
// Don't try again.
|
|
|
|
markClean();
|
|
|
|
return user_message;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2007-01-13 18:29:50 +00:00
|
|
|
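// Serialize the document (header and body) to ofs in LyX file format.
// Returns false if the stream reports an error afterwards.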
bool Buffer::write(ostream & ofs) const
|
2003-07-27 23:40:08 +00:00
|
|
|
{
|
2000-09-14 17:53:12 +00:00
|
|
|
#ifdef HAVE_LOCALE
|
|
|
|
// Use the standard "C" locale for file output.
|
2007-12-12 19:28:07 +00:00
|
|
|
ofs.imbue(locale::classic());
|
2000-09-14 17:53:12 +00:00
|
|
|
#endif
|
|
|
|
|
2003-09-09 09:47:59 +00:00
|
|
|
// The top of the file should not be written by params().
|
1999-09-27 18:44:28 +00:00
|
|
|
|
|
|
|
// write out a comment at the top of the file
|
2003-07-28 14:09:05 +00:00
|
|
|
ofs << "#LyX " << lyx_version
|
2001-02-14 19:22:41 +00:00
|
|
|
<< " created this file. For more info see http://www.lyx.org/\n"
|
2004-08-14 18:41:27 +00:00
|
|
|
<< "\\lyxformat " << LYX_FORMAT << "\n"
|
|
|
|
<< "\\begin_document\n";
|
2001-02-14 19:22:41 +00:00
|
|
|
|
2007-07-09 20:52:34 +00:00
|
|
|
/// For each author, set 'used' to true if there is a change
|
|
|
|
/// by this author in the document; otherwise set it to 'false'.
|
|
|
|
AuthorList::Authors::const_iterator a_it = params().authors().begin();
|
|
|
|
AuthorList::Authors::const_iterator a_end = params().authors().end();
|
|
|
|
for (; a_it != a_end; ++a_it)
|
2009-07-23 20:08:05 +00:00
|
|
|
a_it->setUsed(false);
|
2007-07-09 20:52:34 +00:00
|
|
|
|
2008-01-12 21:38:51 +00:00
|
|
|
ParIterator const end = const_cast<Buffer *>(this)->par_iterator_end();
|
|
|
|
ParIterator it = const_cast<Buffer *>(this)->par_iterator_begin();
|
2007-07-09 20:52:34 +00:00
|
|
|
for ( ; it != end; ++it)
|
|
|
|
it->checkAuthors(params().authors());
|
|
|
|
|
2003-12-02 12:39:14 +00:00
|
|
|
// now write out the buffer parameters.
|
2004-08-16 11:27:51 +00:00
|
|
|
ofs << "\\begin_header\n";
|
2003-09-09 09:47:59 +00:00
|
|
|
params().writeFile(ofs);
|
2003-03-12 02:39:12 +00:00
|
|
|
ofs << "\\end_header\n";
|
2004-10-05 10:11:42 +00:00
|
|
|
|
2003-12-02 12:39:14 +00:00
|
|
|
// write the text
|
2004-08-14 18:41:27 +00:00
|
|
|
ofs << "\n\\begin_body\n";
|
2009-08-09 15:29:34 +00:00
|
|
|
text().write(ofs);
|
2004-08-14 18:41:27 +00:00
|
|
|
ofs << "\n\\end_body\n";
|
1999-09-27 18:44:28 +00:00
|
|
|
|
|
|
|
// Write marker that shows file is complete
|
2004-08-14 18:41:27 +00:00
|
|
|
ofs << "\\end_document" << endl;
|
2000-10-13 12:20:38 +00:00
|
|
|
|
2003-07-27 23:40:08 +00:00
|
|
|
// Shouldn't really be needed....
|
|
|
|
//ofs.close();
|
2000-10-13 12:20:38 +00:00
|
|
|
|
1999-12-07 00:44:53 +00:00
|
|
|
// how to check if close went ok?
|
2000-10-13 12:20:38 +00:00
|
|
|
// Following is an attempt... (BE 20001011)
|
2002-03-21 16:55:34 +00:00
|
|
|
|
2000-10-13 12:20:38 +00:00
|
|
|
// good() returns false if any error occurred, including some
|
|
|
|
// formatting error.
|
|
|
|
// bad() returns true if something bad happened in the buffer,
|
|
|
|
// which should include file system full errors.
|
|
|
|
|
|
|
|
bool status = true;
|
2004-02-11 14:45:44 +00:00
|
|
|
if (!ofs) {
|
2000-10-13 12:20:38 +00:00
|
|
|
status = false;
|
2004-02-11 14:45:44 +00:00
|
|
|
lyxerr << "File was not closed properly." << endl;
|
2000-10-13 12:20:38 +00:00
|
|
|
}
|
2002-03-21 16:55:34 +00:00
|
|
|
|
2000-10-13 12:20:38 +00:00
|
|
|
return status;
|
1999-09-27 18:44:28 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2006-12-04 15:46:57 +00:00
|
|
|
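// Export the buffer as a LaTeX file: open fname with the document encoding,
// delegate to writeLaTeXSource() and report any encoding or conversion
// problems through the "Export" error list. Illustrative call, mirroring
// the one in runChktex() below (file name assumed):
//   OutputParams rp(&params().encoding());
//   rp.flavor = OutputParams::LATEX;
//   rp.nice = false;
//   makeLaTeXFile(FileName(name), filePath(), rp);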
bool Buffer::makeLaTeXFile(FileName const & fname,
|
2000-03-06 02:42:40 +00:00
|
|
|
string const & original_path,
|
2009-04-06 06:58:30 +00:00
|
|
|
OutputParams const & runparams_in,
|
2007-11-30 17:41:27 +00:00
|
|
|
bool output_preamble, bool output_body) const
|
2000-03-06 02:42:40 +00:00
|
|
|
{
|
2009-04-06 06:58:30 +00:00
|
|
|
OutputParams runparams = runparams_in;
|
|
|
|
if (params().useXetex)
|
|
|
|
runparams.flavor = OutputParams::XETEX;
|
|
|
|
|
2007-03-25 16:31:16 +00:00
|
|
|
string const encoding = runparams.encoding->iconvName();
|
2007-11-15 20:04:51 +00:00
|
|
|
LYXERR(Debug::LATEX, "makeLaTeXFile encoding: " << encoding << "...");
|
2002-03-21 16:55:34 +00:00
|
|
|
|
2008-11-16 12:21:29 +00:00
|
|
|
ofdocstream ofs;
|
2007-12-05 13:56:53 +00:00
|
|
|
try { ofs.reset(encoding); }
|
|
|
|
catch (iconv_codecvt_facet_exception & e) {
|
|
|
|
lyxerr << "Caught iconv exception: " << e.what() << endl;
|
|
|
|
Alert::error(_("Iconv software exception Detected"), bformat(_("Please "
|
|
|
|
"verify that the support software for your encoding (%1$s) is "
|
|
|
|
"properly installed"), from_ascii(encoding)));
|
|
|
|
return false;
|
|
|
|
}
|
2003-11-05 12:06:20 +00:00
|
|
|
if (!openFileWrite(ofs, fname))
|
2006-10-26 15:01:45 +00:00
|
|
|
return false;
|
2002-03-21 16:55:34 +00:00
|
|
|
|
2007-08-12 14:54:54 +00:00
|
|
|
//TexStream ts(ofs.rdbuf(), &texrow());
|
2007-12-18 17:51:20 +00:00
|
|
|
ErrorList & errorList = d->errorLists["Export"];
|
|
|
|
errorList.clear();
|
2007-05-16 10:39:41 +00:00
|
|
|
bool failed_export = false;
|
2006-10-26 15:01:45 +00:00
|
|
|
try {
|
2007-11-30 17:46:49 +00:00
|
|
|
d->texrow.reset();
|
2006-10-26 15:01:45 +00:00
|
|
|
writeLaTeXSource(ofs, original_path,
|
2003-07-26 21:37:10 +00:00
|
|
|
runparams, output_preamble, output_body);
|
2006-10-26 15:01:45 +00:00
|
|
|
}
|
2007-12-18 17:51:20 +00:00
|
|
|
catch (EncodingException & e) {
|
2007-12-24 13:55:01 +00:00
|
|
|
odocstringstream ods;
|
|
|
|
ods.put(e.failed_char);
|
|
|
|
ostringstream oss;
|
|
|
|
oss << "0x" << hex << e.failed_char << dec;
|
|
|
|
docstring msg = bformat(_("Could not find LaTeX command for character '%1$s'"
|
|
|
|
" (code point %2$s)"),
|
|
|
|
ods.str(), from_utf8(oss.str()));
|
2007-12-18 17:51:20 +00:00
|
|
|
errorList.push_back(ErrorItem(msg, _("Some characters of your document are probably not "
|
|
|
|
"representable in the chosen encoding.\n"
|
|
|
|
"Changing the document encoding to utf8 could help."),
|
|
|
|
e.par_id, e.pos, e.pos + 1));
|
2008-08-01 17:57:01 +00:00
|
|
|
failed_export = true;
|
2007-12-18 17:51:20 +00:00
|
|
|
}
|
2007-05-16 10:39:41 +00:00
|
|
|
catch (iconv_codecvt_facet_exception & e) {
|
2007-12-18 17:51:20 +00:00
|
|
|
errorList.push_back(ErrorItem(_("iconv conversion failed"),
|
|
|
|
_(e.what()), -1, 0, 0));
|
2007-05-16 10:39:41 +00:00
|
|
|
failed_export = true;
|
|
|
|
}
|
2007-12-12 19:28:07 +00:00
|
|
|
catch (exception const & e) {
|
2007-12-18 17:51:20 +00:00
|
|
|
errorList.push_back(ErrorItem(_("conversion failed"),
|
|
|
|
_(e.what()), -1, 0, 0));
|
2007-05-16 10:39:41 +00:00
|
|
|
failed_export = true;
|
|
|
|
}
|
|
|
|
catch (...) {
|
|
|
|
lyxerr << "Caught some really weird exception..." << endl;
|
2008-07-14 08:35:00 +00:00
|
|
|
lyx_exit(1);
|
2006-10-26 15:01:45 +00:00
|
|
|
}
|
2002-07-05 19:21:29 +00:00
|
|
|
|
|
|
|
ofs.close();
|
2006-10-26 15:01:45 +00:00
|
|
|
if (ofs.fail()) {
|
2007-05-16 10:39:41 +00:00
|
|
|
failed_export = true;
|
2004-02-11 14:45:44 +00:00
|
|
|
lyxerr << "File '" << fname << "' was not closed properly." << endl;
|
2007-05-16 10:39:41 +00:00
|
|
|
}
|
|
|
|
|
2007-12-18 17:51:20 +00:00
|
|
|
errors("Export");
|
|
|
|
return !failed_export;
|
2002-07-05 19:21:29 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2006-10-19 16:51:30 +00:00
|
|
|
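// Write the LaTeX source to os. Depending on output_preamble and
// output_body this produces a complete document or only a fragment
// suitable for inclusion from a master document.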
void Buffer::writeLaTeXSource(odocstream & os,
|
2002-07-05 19:21:29 +00:00
|
|
|
string const & original_path,
|
2003-11-05 12:06:20 +00:00
|
|
|
OutputParams const & runparams_in,
|
2007-11-30 17:41:27 +00:00
|
|
|
bool const output_preamble, bool const output_body) const
|
2002-07-05 19:21:29 +00:00
|
|
|
{
|
2008-03-06 18:13:04 +00:00
|
|
|
// The child documents, if any, shall be already loaded at this point.
|
|
|
|
|
2003-11-05 12:06:20 +00:00
|
|
|
OutputParams runparams = runparams_in;
|
2002-03-07 16:03:36 +00:00
|
|
|
|
2008-09-26 15:53:15 +00:00
|
|
|
// Classify the unicode characters appearing in math insets
|
|
|
|
Encodings::initUnicodeMath(*this);
|
|
|
|
|
2000-03-06 02:42:40 +00:00
|
|
|
// validate the buffer.
|
2007-11-15 20:04:51 +00:00
|
|
|
LYXERR(Debug::LATEX, " Validating buffer...");
|
2006-03-28 18:49:46 +00:00
|
|
|
LaTeXFeatures features(*this, params(), runparams);
|
2000-03-06 02:42:40 +00:00
|
|
|
validate(features);
|
2007-11-15 20:04:51 +00:00
|
|
|
LYXERR(Debug::LATEX, " Buffer validation done.");
|
2002-03-21 16:55:34 +00:00
|
|
|
|
|
|
|
// The starting paragraph of the coming rows is the
|
2000-03-06 02:42:40 +00:00
|
|
|
// first paragraph of the document. (Asger)
|
2003-07-26 21:37:10 +00:00
|
|
|
if (output_preamble && runparams.nice) {
|
2003-07-28 14:09:05 +00:00
|
|
|
os << "%% LyX " << lyx_version << " created this file. "
|
2000-03-06 02:42:40 +00:00
|
|
|
"For more info, see http://www.lyx.org/.\n"
|
|
|
|
"%% Do not edit unless you really know what "
|
|
|
|
"you are doing.\n";
|
2007-11-30 17:46:49 +00:00
|
|
|
d->texrow.newline();
|
|
|
|
d->texrow.newline();
|
2000-03-06 02:42:40 +00:00
|
|
|
}
|
2007-11-15 20:04:51 +00:00
|
|
|
LYXERR(Debug::INFO, "lyx document header finished");
|
2008-03-26 12:55:36 +00:00
|
|
|
|
|
|
|
// Don't move this behind the parent_buffer=0 code below,
|
|
|
|
// because then the macros will not get the right "redefinition"
|
|
|
|
// flag as they don't see the parent macros which are output before.
|
|
|
|
updateMacros();
|
2008-08-01 17:57:01 +00:00
|
|
|
|
2008-03-26 12:55:36 +00:00
|
|
|
// fold macros if possible, still with parent buffer as the
|
|
|
|
// macros will be put in the prefix anyway.
|
|
|
|
updateMacroInstances();
|
2008-08-01 17:57:01 +00:00
|
|
|
|
2000-03-06 02:42:40 +00:00
|
|
|
// There are a few differences between nice LaTeX and usual files:
|
2002-03-21 16:55:34 +00:00
|
|
|
// usual is \batchmode and has a
|
2000-03-06 02:42:40 +00:00
|
|
|
// special input@path to allow the inclusion of figures
|
|
|
|
// with either \input or \includegraphics (what figinsets do).
|
2002-08-09 00:42:12 +00:00
|
|
|
// input@path is set when the actual parameter
|
2000-03-06 02:42:40 +00:00
|
|
|
// original_path is set. This is done for the usual tex file, but not
|
|
|
|
// for the nice latex file. (Matthias 250696)
|
2004-03-25 10:12:44 +00:00
|
|
|
// Note that input@path is only needed for something the user does
|
|
|
|
// in the preamble, in included .tex files, or in ERT; files included by
|
|
|
|
// LyX work without it.
|
2003-07-26 21:37:10 +00:00
|
|
|
if (output_preamble) {
|
2003-05-22 21:10:22 +00:00
|
|
|
if (!runparams.nice) {
|
2000-03-06 02:42:40 +00:00
|
|
|
// code for usual, NOT nice-latex-file
|
2007-12-12 18:25:53 +00:00
|
|
|
os << "\\batchmode\n"; // changed
|
|
|
|
// from \nonstopmode
|
2007-11-30 17:46:49 +00:00
|
|
|
d->texrow.newline();
|
2000-03-06 02:42:40 +00:00
|
|
|
}
|
|
|
|
if (!original_path.empty()) {
|
2006-10-19 16:51:30 +00:00
|
|
|
// FIXME UNICODE
|
|
|
|
// We don't know the encoding of inputpath
|
2010-03-06 16:18:16 +00:00
|
|
|
docstring const inputpath = from_utf8(support::latex_path(original_path));
|
2010-02-19 14:47:19 +00:00
|
|
|
docstring uncodable_glyphs;
|
2010-02-19 16:31:15 +00:00
|
|
|
Encoding const * const enc = runparams.encoding;
|
|
|
|
if (enc) {
|
|
|
|
for (size_t n = 0; n < inputpath.size(); ++n) {
|
|
|
|
docstring const glyph =
|
|
|
|
docstring(1, inputpath[n]);
|
|
|
|
if (enc->latexChar(inputpath[n], true) != glyph) {
|
2010-02-19 14:47:19 +00:00
|
|
|
LYXERR0("Uncodable character '"
|
|
|
|
<< glyph
|
|
|
|
<< "' in input path!");
|
|
|
|
uncodable_glyphs += glyph;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// warn user if we found uncodable glyphs.
|
|
|
|
if (!uncodable_glyphs.empty()) {
|
2010-02-28 14:50:02 +00:00
|
|
|
frontend::Alert::warning(_("Uncodable character in file path"),
|
2010-02-19 14:47:19 +00:00
|
|
|
support::bformat(_("The path of your document\n"
|
|
|
|
"(%1$s)\n"
|
|
|
|
"contains glyphs that are unknown in the\n"
|
|
|
|
"current document encoding (namely %2$s).\n"
|
|
|
|
"This will likely result in incomplete output.\n\n"
|
2010-03-10 15:09:44 +00:00
|
|
|
"Choose an appropriate document encoding (such as utf8)\n"
|
2010-02-28 14:50:02 +00:00
|
|
|
"or change the file path name."), inputpath, uncodable_glyphs));
|
2010-02-19 14:47:19 +00:00
|
|
|
} else {
|
|
|
|
os << "\\makeatletter\n"
|
2010-02-19 16:31:15 +00:00
|
|
|
<< "\\def\\input@path{{"
|
|
|
|
<< inputpath << "/}}\n"
|
|
|
|
<< "\\makeatother\n";
|
2010-02-19 14:47:19 +00:00
|
|
|
d->texrow.newline();
|
|
|
|
d->texrow.newline();
|
|
|
|
d->texrow.newline();
|
|
|
|
}
|
2000-03-06 02:42:40 +00:00
|
|
|
}
|
2002-03-21 16:55:34 +00:00
|
|
|
|
2008-03-26 12:55:36 +00:00
|
|
|
// get parent macros (if this buffer has a parent) which will be
|
|
|
|
// written at the document begin further down.
|
|
|
|
MacroSet parentMacros;
|
|
|
|
listParentMacros(parentMacros, features);
|
|
|
|
|
2003-02-16 00:54:43 +00:00
|
|
|
// Write the preamble
|
2010-01-07 10:01:26 +00:00
|
|
|
runparams.use_babel = params().writeLaTeX(os, features,
|
|
|
|
d->texrow,
|
|
|
|
d->filename.onlyPath());
|
2002-07-05 19:21:29 +00:00
|
|
|
|
2008-08-18 17:26:09 +00:00
|
|
|
runparams.use_japanese = features.isRequired("japanese");
|
|
|
|
|
2003-07-26 21:37:10 +00:00
|
|
|
if (!output_body)
|
2002-07-05 19:21:29 +00:00
|
|
|
return;
|
2002-05-10 12:58:07 +00:00
|
|
|
|
2000-03-06 02:42:40 +00:00
|
|
|
// make the body.
|
2002-07-05 19:21:29 +00:00
|
|
|
os << "\\begin{document}\n";
|
2007-11-30 17:46:49 +00:00
|
|
|
d->texrow.newline();
|
2008-08-01 17:57:01 +00:00
|
|
|
|
2008-03-26 12:55:36 +00:00
|
|
|
// output the parent macros
|
|
|
|
MacroSet::iterator it = parentMacros.begin();
|
|
|
|
MacroSet::iterator end = parentMacros.end();
|
2010-02-08 17:39:55 +00:00
|
|
|
for (; it != end; ++it) {
|
|
|
|
int num_lines = (*it)->write(os, true);
|
|
|
|
d->texrow.newlines(num_lines);
|
|
|
|
}
|
|
|
|
|
2003-07-26 21:37:10 +00:00
|
|
|
} // output_preamble
|
2007-08-13 14:24:49 +00:00
|
|
|
|
2007-11-30 17:46:49 +00:00
|
|
|
d->texrow.start(paragraphs().begin()->id(), 0);
|
2008-08-01 17:57:01 +00:00
|
|
|
|
2007-11-15 20:04:51 +00:00
|
|
|
LYXERR(Debug::INFO, "preamble finished, now the body.");
|
2001-03-01 15:57:10 +00:00
|
|
|
|
2004-03-25 10:12:44 +00:00
|
|
|
// if we are doing a real file with body, even if this is the
|
|
|
|
// child of some other buffer, let's cut the link here.
|
|
|
|
// This happens for example if only a child document is printed.
|
2007-11-30 17:41:27 +00:00
|
|
|
Buffer const * save_parent = 0;
|
2004-03-25 10:12:44 +00:00
|
|
|
if (output_preamble) {
|
2009-04-03 00:44:33 +00:00
|
|
|
save_parent = d->parent();
|
|
|
|
d->setParent(0);
|
2004-03-25 10:12:44 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// the real stuff
|
2008-01-25 13:27:08 +00:00
|
|
|
latexParagraphs(*this, text(), os, d->texrow, runparams);
|
2000-05-04 08:14:34 +00:00
|
|
|
|
2004-03-25 10:12:44 +00:00
|
|
|
// Restore the parenthood if needed
|
2008-03-26 12:55:36 +00:00
|
|
|
if (output_preamble)
|
2009-04-03 00:44:33 +00:00
|
|
|
d->setParent(save_parent);
|
2004-03-25 10:12:44 +00:00
|
|
|
|
2000-05-04 08:14:34 +00:00
|
|
|
// add this just in case after all the paragraphs
|
2002-07-05 19:21:29 +00:00
|
|
|
os << endl;
|
2007-11-30 17:46:49 +00:00
|
|
|
d->texrow.newline();
|
2000-05-04 08:14:34 +00:00
|
|
|
|
2003-07-26 21:37:10 +00:00
|
|
|
if (output_preamble) {
|
2002-07-05 19:21:29 +00:00
|
|
|
os << "\\end{document}\n";
|
2007-11-30 17:46:49 +00:00
|
|
|
d->texrow.newline();
|
2007-11-15 20:04:51 +00:00
|
|
|
LYXERR(Debug::LATEX, "makeLaTeXFile...done");
|
2000-05-04 08:14:34 +00:00
|
|
|
} else {
|
2007-11-15 20:04:51 +00:00
|
|
|
LYXERR(Debug::LATEX, "LaTeXFile for inclusion made.");
|
2000-05-04 08:14:34 +00:00
|
|
|
}
|
2007-03-18 10:59:16 +00:00
|
|
|
runparams_in.encoding = runparams.encoding;
|
2000-05-04 08:14:34 +00:00
|
|
|
|
|
|
|
// Just to be sure. (Asger)
|
2007-11-30 17:46:49 +00:00
|
|
|
d->texrow.newline();
|
2000-05-04 08:14:34 +00:00
|
|
|
|
2010-04-16 18:30:52 +00:00
|
|
|
//for (int i = 0; i<d->texrow.rows(); i++) {
|
|
|
|
// int id,pos;
|
|
|
|
// if (d->texrow.getIdFromRow(i+1,id,pos) && id>0)
|
|
|
|
// lyxerr << i+1 << ":" << id << ":" << getParFromID(id).paragraph().asString()<<"\n";
|
|
|
|
//}
|
|
|
|
|
2007-11-15 20:04:51 +00:00
|
|
|
LYXERR(Debug::INFO, "Finished making LaTeX file.");
|
2007-11-30 17:46:49 +00:00
|
|
|
LYXERR(Debug::INFO, "Row count was " << d->texrow.rows() - 1 << '.');
|
2000-05-04 08:14:34 +00:00
|
|
|
}
|
|
|
|
|
2000-07-04 19:16:35 +00:00
|
|
|
|
1999-11-15 12:01:38 +00:00
|
|
|
bool Buffer::isLatex() const
|
1999-09-27 18:44:28 +00:00
|
|
|
{
|
2008-02-28 01:42:02 +00:00
|
|
|
return params().documentClass().outputType() == LATEX;
|
1999-09-27 18:44:28 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
1999-11-15 12:01:38 +00:00
|
|
|
bool Buffer::isLiterate() const
|
1999-09-27 18:44:28 +00:00
|
|
|
{
|
2008-02-28 01:42:02 +00:00
|
|
|
return params().documentClass().outputType() == LITERATE;
|
1999-09-27 18:44:28 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
1999-11-15 12:01:38 +00:00
|
|
|
bool Buffer::isDocBook() const
|
1999-09-27 18:44:28 +00:00
|
|
|
{
|
2008-02-28 01:42:02 +00:00
|
|
|
return params().documentClass().outputType() == DOCBOOK;
|
1999-09-27 18:44:28 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2006-12-04 15:46:57 +00:00
|
|
|
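// Export the buffer as DocBook (SGML or XML). Opens fname and delegates
// to writeDocBookSource().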
void Buffer::makeDocBookFile(FileName const & fname,
|
2006-08-04 13:59:12 +00:00
|
|
|
OutputParams const & runparams,
|
2007-11-30 17:41:27 +00:00
|
|
|
bool const body_only) const
|
2006-08-04 13:59:12 +00:00
|
|
|
{
|
2007-11-15 20:04:51 +00:00
|
|
|
LYXERR(Debug::LATEX, "makeDocBookFile...");
|
2006-08-04 13:59:12 +00:00
|
|
|
|
2008-11-16 12:21:29 +00:00
|
|
|
ofdocstream ofs;
|
2006-08-04 13:59:12 +00:00
|
|
|
if (!openFileWrite(ofs, fname))
|
|
|
|
return;
|
|
|
|
|
2010-04-21 01:19:09 +00:00
|
|
|
writeDocBookSource(ofs, fname.absFileName(), runparams, body_only);
|
1999-09-27 18:44:28 +00:00
|
|
|
|
1999-12-07 00:44:53 +00:00
|
|
|
ofs.close();
|
2004-02-11 14:45:44 +00:00
|
|
|
if (ofs.fail())
|
|
|
|
lyxerr << "File '" << fname << "' was not closed properly." << endl;
|
1999-09-27 18:44:28 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2006-10-19 21:00:33 +00:00
|
|
|
void Buffer::writeDocBookSource(odocstream & os, string const & fname,
|
2003-11-05 12:06:20 +00:00
|
|
|
OutputParams const & runparams,
|
2007-11-30 17:41:27 +00:00
|
|
|
bool const only_body) const
|
1999-09-27 18:44:28 +00:00
|
|
|
{
|
2006-03-28 18:49:46 +00:00
|
|
|
LaTeXFeatures features(*this, params(), runparams);
|
2000-07-01 12:54:45 +00:00
|
|
|
validate(features);
|
2002-03-21 16:55:34 +00:00
|
|
|
|
2007-11-30 17:46:49 +00:00
|
|
|
d->texrow.reset();
|
1999-09-27 18:44:28 +00:00
|
|
|
|
2008-02-28 01:42:02 +00:00
|
|
|
DocumentClass const & tclass = params().documentClass();
|
2006-10-19 21:00:33 +00:00
|
|
|
string const top_element = tclass.latexname();
|
2001-03-23 08:37:44 +00:00
|
|
|
|
2000-11-04 10:00:12 +00:00
|
|
|
if (!only_body) {
|
2004-05-13 11:21:58 +00:00
|
|
|
if (runparams.flavor == OutputParams::XML)
|
2006-10-19 21:00:33 +00:00
|
|
|
os << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n";
|
2004-05-13 11:21:58 +00:00
|
|
|
|
2007-05-28 22:27:45 +00:00
|
|
|
// FIXME UNICODE
|
2006-10-21 00:16:43 +00:00
|
|
|
os << "<!DOCTYPE " << from_ascii(top_element) << ' ';
|
2004-05-14 15:47:35 +00:00
|
|
|
|
2007-05-28 22:27:45 +00:00
|
|
|
// FIXME UNICODE
|
2006-10-19 21:00:33 +00:00
|
|
|
if (! tclass.class_header().empty())
|
2007-05-28 22:27:45 +00:00
|
|
|
os << from_ascii(tclass.class_header());
|
2004-05-14 15:47:35 +00:00
|
|
|
else if (runparams.flavor == OutputParams::XML)
|
2006-08-04 13:59:12 +00:00
|
|
|
os << "PUBLIC \"-//OASIS//DTD DocBook XML//EN\" "
|
2004-05-14 15:47:35 +00:00
|
|
|
<< "\"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd\"";
|
|
|
|
else
|
2006-08-04 13:59:12 +00:00
|
|
|
os << " PUBLIC \"-//OASIS//DTD DocBook V4.2//EN\"";
|
2004-07-24 10:55:30 +00:00
|
|
|
|
2006-10-21 19:40:29 +00:00
|
|
|
docstring preamble = from_utf8(params().preamble);
|
2004-10-21 22:55:04 +00:00
|
|
|
if (runparams.flavor != OutputParams::XML ) {
|
|
|
|
preamble += "<!ENTITY % output.print.png \"IGNORE\">\n";
|
|
|
|
preamble += "<!ENTITY % output.print.pdf \"IGNORE\">\n";
|
|
|
|
preamble += "<!ENTITY % output.print.eps \"IGNORE\">\n";
|
|
|
|
preamble += "<!ENTITY % output.print.bmp \"IGNORE\">\n";
|
|
|
|
}
|
2004-10-26 21:16:44 +00:00
|
|
|
|
2007-10-20 10:03:45 +00:00
|
|
|
string const name = runparams.nice
|
|
|
|
? changeExtension(absFileName(), ".sgml") : fname;
|
2002-03-05 13:38:40 +00:00
|
|
|
preamble += features.getIncludedFiles(name);
|
2001-10-23 09:42:14 +00:00
|
|
|
preamble += features.getLyXSGMLEntities();
|
1999-09-27 18:44:28 +00:00
|
|
|
|
2001-10-23 09:42:14 +00:00
|
|
|
if (!preamble.empty()) {
|
2007-05-28 22:27:45 +00:00
|
|
|
os << "\n [ " << preamble << " ]";
|
2001-10-23 09:42:14 +00:00
|
|
|
}
|
2006-08-04 13:59:12 +00:00
|
|
|
os << ">\n\n";
|
2000-11-13 15:43:36 +00:00
|
|
|
}
|
1999-09-27 18:44:28 +00:00
|
|
|
|
2002-03-21 16:55:34 +00:00
|
|
|
string top = top_element;
|
2001-01-28 21:54:15 +00:00
|
|
|
top += " lang=\"";
|
2004-05-14 15:47:35 +00:00
|
|
|
if (runparams.flavor == OutputParams::XML)
|
|
|
|
top += params().language->code();
|
|
|
|
else
|
2008-09-08 01:18:33 +00:00
|
|
|
top += params().language->code().substr(0, 2);
|
2002-11-27 10:30:28 +00:00
|
|
|
top += '"';
|
2001-01-28 21:54:15 +00:00
|
|
|
|
2003-09-09 09:47:59 +00:00
|
|
|
if (!params().options.empty()) {
|
2002-11-27 10:30:28 +00:00
|
|
|
top += ' ';
|
2003-09-09 09:47:59 +00:00
|
|
|
top += params().options;
|
1999-09-27 18:44:28 +00:00
|
|
|
}
|
|
|
|
|
2006-08-04 13:59:12 +00:00
|
|
|
os << "<!-- " << ((runparams.flavor == OutputParams::XML)? "XML" : "SGML")
|
2004-05-13 11:21:58 +00:00
|
|
|
<< " file was created by LyX " << lyx_version
|
2000-07-01 12:54:45 +00:00
|
|
|
<< "\n See http://www.lyx.org/ for more information -->\n";
|
|
|
|
|
2008-02-28 01:42:02 +00:00
|
|
|
params().documentClass().counters().reset();
|
1999-09-27 18:44:28 +00:00
|
|
|
|
2008-03-06 18:13:04 +00:00
|
|
|
updateMacros();
|
2007-08-15 08:55:36 +00:00
|
|
|
|
2006-08-04 13:59:12 +00:00
|
|
|
sgml::openTag(os, top);
|
|
|
|
os << '\n';
|
2009-08-09 18:35:39 +00:00
|
|
|
docbookParagraphs(text(), *this, os, runparams);
|
2006-08-04 13:59:12 +00:00
|
|
|
sgml::closeTag(os, top_element);
|
1999-09-27 18:44:28 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-06-05 17:44:35 +00:00
|
|
|
void Buffer::makeLyXHTMLFile(FileName const & fname,
|
|
|
|
OutputParams const & runparams,
|
|
|
|
bool const body_only) const
|
|
|
|
{
|
2009-10-25 01:48:14 +00:00
|
|
|
LYXERR(Debug::LATEX, "makeLyXHTMLFile...");
|
2009-06-05 17:44:35 +00:00
|
|
|
|
|
|
|
ofdocstream ofs;
|
|
|
|
if (!openFileWrite(ofs, fname))
|
|
|
|
return;
|
|
|
|
|
|
|
|
writeLyXHTMLSource(ofs, runparams, body_only);
|
|
|
|
|
|
|
|
ofs.close();
|
|
|
|
if (ofs.fail())
|
|
|
|
lyxerr << "File '" << fname << "' was not closed properly." << endl;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
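// Write the buffer as XHTML to os. Unless only_body is true this also
// emits the XHTML header, the text class styles and any preamble snippets.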
void Buffer::writeLyXHTMLSource(odocstream & os,
|
|
|
|
OutputParams const & runparams,
|
|
|
|
bool const only_body) const
|
|
|
|
{
|
|
|
|
LaTeXFeatures features(*this, params(), runparams);
|
|
|
|
validate(features);
|
2010-03-03 22:13:45 +00:00
|
|
|
updateBuffer(UpdateMaster, OutputUpdate);
|
2010-01-08 18:19:37 +00:00
|
|
|
d->bibinfo_.makeCitationLabels(*this);
|
2010-07-21 21:58:54 +00:00
|
|
|
updateMacros();
|
2010-01-12 15:25:04 +00:00
|
|
|
updateMacroInstances();
|
2009-06-05 17:44:35 +00:00
|
|
|
|
|
|
|
if (!only_body) {
|
2010-04-05 22:02:43 +00:00
|
|
|
os << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
|
|
|
|
<< "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.1 plus MathML 2.0//EN\" \"http://www.w3.org/TR/MathML2/dtd/xhtml-math11-f.dtd\">\n"
|
|
|
|
// FIXME Language should be set properly.
|
|
|
|
<< "<html xmlns=\"http://www.w3.org/1999/xhtml\">\n"
|
|
|
|
<< "<head>\n"
|
|
|
|
<< "<meta name=\"GENERATOR\" content=\"" << PACKAGE_STRING << "\" />\n"
|
|
|
|
// FIXME Presumably need to set this right
|
|
|
|
<< "<meta http-equiv=\"Content-type\" content=\"text/html;charset=UTF-8\" />\n";
|
|
|
|
|
|
|
|
docstring const & doctitle = features.htmlTitle();
|
|
|
|
os << "<title>"
|
|
|
|
<< (doctitle.empty() ? from_ascii("LyX Document") : doctitle)
|
|
|
|
<< "</title>\n";
|
2009-06-05 17:44:35 +00:00
|
|
|
|
2009-12-03 21:06:41 +00:00
|
|
|
os << "\n<!-- Text Class Preamble -->\n"
|
2010-04-05 22:02:43 +00:00
|
|
|
<< features.getTClassHTMLPreamble()
|
|
|
|
<< "\n<!-- Premable Snippets -->\n"
|
|
|
|
<< from_utf8(features.getPreambleSnippets());
|
2009-06-05 19:42:56 +00:00
|
|
|
|
2009-12-03 21:06:41 +00:00
|
|
|
os << "\n<!-- Layout-provided Styles -->\n";
|
2009-06-05 19:42:56 +00:00
|
|
|
docstring const styleinfo = features.getTClassHTMLStyles();
|
2009-06-05 17:44:35 +00:00
|
|
|
if (!styleinfo.empty()) {
|
2009-12-03 21:06:41 +00:00
|
|
|
os << "<style type='text/css'>\n"
|
|
|
|
<< styleinfo
|
|
|
|
<< "</style>\n";
|
2009-06-05 17:44:35 +00:00
|
|
|
}
|
|
|
|
os << "</head>\n<body>\n";
|
|
|
|
}
|
|
|
|
|
2009-11-19 17:51:06 +00:00
|
|
|
XHTMLStream xs(os);
|
2009-06-05 17:44:35 +00:00
|
|
|
params().documentClass().counters().reset();
|
2009-11-19 17:51:06 +00:00
|
|
|
xhtmlParagraphs(text(), *this, xs, runparams);
|
2009-06-05 17:44:35 +00:00
|
|
|
if (!only_body)
|
|
|
|
os << "</body>\n</html>\n";
|
|
|
|
}
|
|
|
|
|
|
|
|
|
1999-09-27 18:44:28 +00:00
|
|
|
// chktex should be run with these flags disabled: 3, 22, 25, 30, 38(?)
|
|
|
|
// Other flags: -wall -v0 -x
|
|
|
|
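// Run ChkTeX on the LaTeX export of this buffer and fill the "ChkTeX"
// error list with any warnings. Returns the result of the ChkTeX run
// (-1 if the command could not be run at all).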
int Buffer::runChktex()
|
|
|
|
{
|
2007-10-21 10:50:56 +00:00
|
|
|
setBusy(true);
|
1999-09-27 18:44:28 +00:00
|
|
|
|
|
|
|
// get LaTeX-Filename
|
2007-04-06 18:03:29 +00:00
|
|
|
FileName const path(temppath());
|
2010-04-21 01:19:09 +00:00
|
|
|
string const name = addName(path.absFileName(), latexName());
|
2004-02-25 12:00:53 +00:00
|
|
|
string const org_path = filePath();
|
1999-09-27 18:44:28 +00:00
|
|
|
|
2007-12-12 19:57:42 +00:00
|
|
|
PathChanger p(path); // path to LaTeX file
|
2006-09-11 08:54:10 +00:00
|
|
|
message(_("Running chktex..."));
|
1999-09-27 18:44:28 +00:00
|
|
|
|
|
|
|
// Generate the LaTeX file if necessary
|
2007-03-18 10:59:16 +00:00
|
|
|
OutputParams runparams(¶ms().encoding());
|
2003-11-05 12:06:20 +00:00
|
|
|
runparams.flavor = OutputParams::LATEX;
|
2003-05-22 21:10:22 +00:00
|
|
|
runparams.nice = false;
|
2010-03-04 08:13:42 +00:00
|
|
|
runparams.linelen = lyxrc.plaintext_linelen;
|
2006-12-04 15:46:57 +00:00
|
|
|
makeLaTeXFile(FileName(name), org_path, runparams);
|
1999-09-27 18:44:28 +00:00
|
|
|
|
|
|
|
TeXErrors terr;
|
2010-04-21 01:19:28 +00:00
|
|
|
Chktex chktex(lyxrc.chktex_command, onlyFileName(name), filePath());
|
2005-01-05 20:21:27 +00:00
|
|
|
int const res = chktex.run(terr); // run chktex
|
1999-09-27 18:44:28 +00:00
|
|
|
|
|
|
|
if (res == -1) {
|
2006-09-11 08:54:10 +00:00
|
|
|
Alert::error(_("chktex failure"),
|
|
|
|
_("Could not run chktex successfully."));
|
1999-09-27 18:44:28 +00:00
|
|
|
} else if (res > 0) {
|
2007-11-30 17:46:49 +00:00
|
|
|
ErrorList & errlist = d->errorLists["ChkTeX"];
|
2007-10-21 10:50:56 +00:00
|
|
|
errlist.clear();
|
|
|
|
bufferErrors(terr, errlist);
|
1999-09-27 18:44:28 +00:00
|
|
|
}
|
|
|
|
|
2007-10-21 10:50:56 +00:00
|
|
|
setBusy(false);
|
1999-09-27 18:44:28 +00:00
|
|
|
|
2006-08-13 16:16:43 +00:00
|
|
|
errors("ChkTeX");
|
|
|
|
|
1999-09-27 18:44:28 +00:00
|
|
|
return res;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2000-02-04 09:38:32 +00:00
|
|
|
void Buffer::validate(LaTeXFeatures & features) const
|
1999-09-27 18:44:28 +00:00
|
|
|
{
|
2007-12-14 14:51:47 +00:00
|
|
|
params().validate(features);
|
2002-03-21 16:55:34 +00:00
|
|
|
|
2008-03-06 18:13:04 +00:00
|
|
|
updateMacros();
|
2007-08-15 08:55:36 +00:00
|
|
|
|
2003-09-09 09:47:59 +00:00
|
|
|
for_each(paragraphs().begin(), paragraphs().end(),
|
2010-05-24 15:21:17 +00:00
|
|
|
bind(&Paragraph::validate, _1, ref(features)));
|
1999-09-27 18:44:28 +00:00
|
|
|
|
1999-10-07 18:44:17 +00:00
|
|
|
if (lyxerr.debugging(Debug::LATEX)) {
|
2000-04-10 21:40:13 +00:00
|
|
|
features.showStruct();
|
1999-09-27 18:44:28 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2006-10-12 10:50:45 +00:00
|
|
|
void Buffer::getLabelList(vector<docstring> & list) const
|
1999-09-27 18:44:28 +00:00
|
|
|
{
|
2010-09-29 12:55:39 +00:00
|
|
|
// If this is a child document, use the master's list instead.
|
|
|
|
if (parent()) {
|
|
|
|
masterBuffer()->getLabelList(list);
|
2004-03-25 10:12:44 +00:00
|
|
|
return;
|
1999-09-27 18:44:28 +00:00
|
|
|
}
|
|
|
|
|
2008-03-07 19:46:04 +00:00
|
|
|
list.clear();
|
|
|
|
Toc & toc = d->toc_backend.toc("label");
|
|
|
|
TocIterator toc_it = toc.begin();
|
|
|
|
TocIterator end = toc.end();
|
2008-03-07 19:52:44 +00:00
|
|
|
for (; toc_it != end; ++toc_it) {
|
|
|
|
if (toc_it->depth() == 0)
|
|
|
|
list.push_back(toc_it->str());
|
|
|
|
}
|
2000-05-19 16:46:01 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-03-26 00:48:32 +00:00
|
|
|
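// Rebuild the cached list of BibTeX databases by scanning all bibtex
// and include insets. The bibliography info cache is invalidated as a
// side effect and will be reloaded lazily by checkBibInfoCache().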
void Buffer::updateBibfilesCache(UpdateScope scope) const
|
2006-04-15 11:46:17 +00:00
|
|
|
{
|
2010-08-10 14:12:48 +00:00
|
|
|
// FIXME This is probably unnecessary, given where we call this.
|
2008-03-07 20:51:56 +00:00
|
|
|
// If this is a child document, use the parent's cache instead.
|
2010-09-29 12:55:39 +00:00
|
|
|
if (parent() && scope != UpdateChildOnly) {
|
|
|
|
masterBuffer()->updateBibfilesCache();
|
2006-04-15 11:46:17 +00:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2010-01-09 16:15:46 +00:00
|
|
|
d->bibfiles_cache_.clear();
|
2006-04-15 11:46:17 +00:00
|
|
|
for (InsetIterator it = inset_iterator_begin(inset()); it; ++it) {
|
2007-10-13 09:04:52 +00:00
|
|
|
if (it->lyxCode() == BIBTEX_CODE) {
|
2006-04-15 11:46:17 +00:00
|
|
|
InsetBibtex const & inset =
|
2007-03-25 00:56:01 +00:00
|
|
|
static_cast<InsetBibtex const &>(*it);
|
2008-04-19 15:52:35 +00:00
|
|
|
support::FileNameList const bibfiles = inset.getBibFiles();
|
2010-01-09 16:15:46 +00:00
|
|
|
d->bibfiles_cache_.insert(d->bibfiles_cache_.end(),
|
2006-04-15 11:46:17 +00:00
|
|
|
bibfiles.begin(),
|
|
|
|
bibfiles.end());
|
2007-10-13 09:04:52 +00:00
|
|
|
} else if (it->lyxCode() == INCLUDE_CODE) {
|
2006-04-15 11:46:17 +00:00
|
|
|
InsetInclude & inset =
|
2007-03-25 00:56:01 +00:00
|
|
|
static_cast<InsetInclude &>(*it);
|
2010-08-09 17:01:51 +00:00
|
|
|
Buffer const * const incbuf = inset.getChildBuffer();
|
|
|
|
if (!incbuf)
|
|
|
|
continue;
|
2008-04-19 15:52:35 +00:00
|
|
|
support::FileNameList const & bibfiles =
|
2010-08-09 17:01:51 +00:00
|
|
|
incbuf->getBibfilesCache(UpdateChildOnly);
|
2010-08-09 16:18:57 +00:00
|
|
|
if (!bibfiles.empty()) {
|
|
|
|
d->bibfiles_cache_.insert(d->bibfiles_cache_.end(),
|
|
|
|
bibfiles.begin(),
|
|
|
|
bibfiles.end());
|
2010-08-09 17:01:51 +00:00
|
|
|
}
|
2006-04-15 11:46:17 +00:00
|
|
|
}
|
|
|
|
}
|
2010-08-09 17:01:51 +00:00
|
|
|
d->bibfile_cache_valid_ = true;
|
2010-08-10 14:12:48 +00:00
|
|
|
d->bibinfo_cache_valid_ = false;
|
2008-06-05 06:42:53 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-08-10 14:12:48 +00:00
|
|
|
void Buffer::invalidateBibinfoCache() const
|
2008-06-05 06:42:53 +00:00
|
|
|
{
|
2010-01-09 16:15:46 +00:00
|
|
|
d->bibinfo_cache_valid_ = false;
|
2010-08-10 14:12:48 +00:00
|
|
|
// also invalidate the cache for the parent buffer
|
|
|
|
Buffer const * const pbuf = d->parent();
|
|
|
|
if (pbuf)
|
|
|
|
pbuf->invalidateBibinfoCache();
|
2006-04-15 11:46:17 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-08-10 14:12:48 +00:00
|
|
|
void Buffer::invalidateBibfileCache() const
|
2010-08-09 17:01:51 +00:00
|
|
|
{
|
|
|
|
d->bibfile_cache_valid_ = false;
|
2010-08-10 14:12:48 +00:00
|
|
|
d->bibinfo_cache_valid_ = false;
|
|
|
|
// also invalidate the cache for the parent buffer
|
|
|
|
Buffer const * const pbuf = d->parent();
|
|
|
|
if (pbuf)
|
|
|
|
pbuf->invalidateBibfileCache();
|
2010-08-09 17:01:51 +00:00
|
|
|
}
|
|
|
|
|
2010-08-10 14:12:48 +00:00
|
|
|
|
2009-03-26 00:48:32 +00:00
|
|
|
support::FileNameList const & Buffer::getBibfilesCache(UpdateScope scope) const
|
2006-04-15 11:46:17 +00:00
|
|
|
{
|
2010-08-10 14:12:48 +00:00
|
|
|
// FIXME This is probably unnecessary, given where we call this.
|
|
|
|
// If this is a child document, use the master's cache instead.
|
|
|
|
Buffer const * const pbuf = masterBuffer();
|
|
|
|
if (pbuf != this && scope != UpdateChildOnly)
|
2009-04-03 00:44:33 +00:00
|
|
|
return pbuf->getBibfilesCache();
|
2006-04-15 11:46:17 +00:00
|
|
|
|
2010-08-09 17:01:51 +00:00
|
|
|
if (!d->bibfile_cache_valid_)
|
|
|
|
this->updateBibfilesCache(scope);
|
2007-01-08 13:36:01 +00:00
|
|
|
|
2010-01-09 16:15:46 +00:00
|
|
|
return d->bibfiles_cache_;
|
2006-04-15 11:46:17 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-04-25 20:03:03 +00:00
|
|
|
BiblioInfo const & Buffer::masterBibInfo() const
|
2008-08-01 17:57:01 +00:00
|
|
|
{
|
2008-04-25 20:03:03 +00:00
|
|
|
Buffer const * const tmp = masterBuffer();
|
|
|
|
if (tmp != this)
|
|
|
|
return tmp->masterBibInfo();
|
2010-01-08 16:40:41 +00:00
|
|
|
return d->bibinfo_;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
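// Make sure the master's bibliography info cache is up to date: compare
// the modification times of the cached .bib files with those on disk and
// refill d->bibinfo_ if anything changed.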
void Buffer::checkBibInfoCache() const
|
2008-06-05 05:46:49 +00:00
|
|
|
{
|
2010-08-10 14:12:48 +00:00
|
|
|
// use the master's cache
|
|
|
|
Buffer const * const tmp = masterBuffer();
|
|
|
|
if (tmp != this) {
|
|
|
|
tmp->checkBibInfoCache();
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// this will also reload the cache if it is invalid
|
|
|
|
support::FileNameList const & bibfiles_cache = getBibfilesCache();
|
|
|
|
|
2010-01-08 18:18:05 +00:00
|
|
|
// compare the cached timestamps with the actual ones.
|
2010-08-10 14:12:48 +00:00
|
|
|
support::FileNameList::const_iterator ei = bibfiles_cache.begin();
|
|
|
|
support::FileNameList::const_iterator en = bibfiles_cache.end();
|
2010-01-08 18:18:05 +00:00
|
|
|
for (; ei != en; ++ ei) {
|
|
|
|
time_t lastw = ei->lastModified();
|
2010-01-09 16:15:46 +00:00
|
|
|
time_t prevw = d->bibfile_status_[*ei];
|
2010-01-08 18:18:05 +00:00
|
|
|
if (lastw != prevw) {
|
2010-01-09 16:15:46 +00:00
|
|
|
d->bibinfo_cache_valid_ = false;
|
|
|
|
d->bibfile_status_[*ei] = lastw;
|
2008-04-25 20:03:03 +00:00
|
|
|
}
|
|
|
|
}
|
2010-08-10 14:12:48 +00:00
|
|
|
|
|
|
|
// if not valid, then reload the info
|
2010-01-09 16:15:46 +00:00
|
|
|
if (!d->bibinfo_cache_valid_) {
|
2008-06-05 06:42:53 +00:00
|
|
|
d->bibinfo_.clear();
|
2010-08-10 14:12:48 +00:00
|
|
|
fillWithBibKeys(d->bibinfo_);
|
2010-01-09 16:15:46 +00:00
|
|
|
d->bibinfo_cache_valid_ = true;
|
2010-08-10 14:12:48 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void Buffer::fillWithBibKeys(BiblioInfo & keys) const
|
|
|
|
{
|
|
|
|
for (InsetIterator it = inset_iterator_begin(inset()); it; ++it)
|
|
|
|
it->fillWithBibKeys(keys, it);
|
2008-04-25 20:03:03 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
1999-10-02 16:21:10 +00:00
|
|
|
bool Buffer::isDepClean(string const & name) const
|
1999-09-27 18:44:28 +00:00
|
|
|
{
|
2007-11-30 17:46:49 +00:00
|
|
|
DepClean::const_iterator const it = d->dep_clean.find(name);
|
|
|
|
if (it == d->dep_clean.end())
|
2003-02-09 00:27:52 +00:00
|
|
|
return true;
|
|
|
|
return it->second;
|
1999-09-27 18:44:28 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
1999-10-02 16:21:10 +00:00
|
|
|
void Buffer::markDepClean(string const & name)
|
1999-09-27 18:44:28 +00:00
|
|
|
{
|
2007-11-30 17:46:49 +00:00
|
|
|
d->dep_clean[name] = true;
|
1999-09-27 18:44:28 +00:00
|
|
|
}
|
1999-12-10 00:07:59 +00:00
|
|
|
|
2000-01-08 21:02:58 +00:00
|
|
|
|
2009-12-18 22:51:06 +00:00
|
|
|
bool Buffer::isExportableFormat(string const & format) const
|
|
|
|
{
|
|
|
|
typedef vector<Format const *> Formats;
|
|
|
|
Formats formats;
|
|
|
|
formats = exportableFormats(true);
|
|
|
|
Formats::const_iterator fit = formats.begin();
|
|
|
|
Formats::const_iterator end = formats.end();
|
|
|
|
for (; fit != end ; ++fit) {
|
|
|
|
if ((*fit)->name() == format)
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-04-06 12:12:06 +00:00
|
|
|
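// Decide whether a buffer-level LFUN can currently be dispatched; returns
// false for requests this buffer does not handle at all. Illustrative,
// hypothetical caller (names assumed):
//   FuncStatus status;
//   if (buf.getStatus(FuncRequest(LFUN_BUFFER_CHKTEX), status)
//       && status.enabled()) {
//       DispatchResult dr;
//       buf.dispatch(FuncRequest(LFUN_BUFFER_CHKTEX), dr);
//   }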
bool Buffer::getStatus(FuncRequest const & cmd, FuncStatus & flag)
|
|
|
|
{
|
2009-09-19 14:05:52 +00:00
|
|
|
if (isInternal()) {
|
|
|
|
// FIXME? if there is a Buffer LFUN that can be dispatched even
|
|
|
|
// if internal, put a switch '(cmd.action())' here.
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
bool enable = true;
|
|
|
|
|
2010-04-09 19:00:42 +00:00
|
|
|
switch (cmd.action()) {
|
2009-09-19 14:05:52 +00:00
|
|
|
|
2009-09-19 11:59:39 +00:00
|
|
|
case LFUN_BUFFER_TOGGLE_READ_ONLY:
|
|
|
|
flag.setOnOff(isReadonly());
|
|
|
|
break;
|
|
|
|
|
2009-09-19 14:05:52 +00:00
|
|
|
// FIXME: There is a need for a command-line import.
|
|
|
|
//case LFUN_BUFFER_IMPORT:
|
|
|
|
|
|
|
|
case LFUN_BUFFER_AUTO_SAVE:
|
|
|
|
break;
|
|
|
|
|
|
|
|
case LFUN_BUFFER_EXPORT_CUSTOM:
|
|
|
|
// FIXME: Nothing to check here?
|
|
|
|
break;
|
|
|
|
|
2009-04-06 12:12:06 +00:00
|
|
|
case LFUN_BUFFER_EXPORT: {
|
|
|
|
docstring const arg = cmd.argument();
|
2009-09-19 14:05:52 +00:00
|
|
|
enable = arg == "custom" || isExportable(to_utf8(arg));
|
2009-04-06 12:12:06 +00:00
|
|
|
if (!enable)
|
|
|
|
flag.message(bformat(
|
|
|
|
_("Don't know how to export to format: %1$s"), arg));
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2009-09-19 14:05:52 +00:00
|
|
|
case LFUN_BUFFER_CHKTEX:
|
|
|
|
enable = isLatex() && !lyxrc.chktex_command.empty();
|
|
|
|
break;
|
|
|
|
|
|
|
|
case LFUN_BUILD_PROGRAM:
|
|
|
|
enable = isExportable("program");
|
|
|
|
break;
|
|
|
|
|
2009-04-06 12:12:06 +00:00
|
|
|
case LFUN_BRANCH_ACTIVATE:
|
|
|
|
case LFUN_BRANCH_DEACTIVATE: {
|
2009-07-16 18:29:16 +00:00
|
|
|
BranchList const & branchList = params().branchlist();
|
|
|
|
docstring const branchName = cmd.argument();
|
2009-09-19 14:05:52 +00:00
|
|
|
enable = !branchName.empty() && branchList.find(branchName);
|
2009-04-06 12:12:06 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2009-07-09 09:48:34 +00:00
|
|
|
case LFUN_BRANCH_ADD:
|
2009-07-10 06:49:51 +00:00
|
|
|
case LFUN_BRANCHES_RENAME:
|
2009-04-06 12:12:06 +00:00
|
|
|
case LFUN_BUFFER_PRINT:
|
|
|
|
// if no Buffer is present, then of course we won't be called!
|
|
|
|
break;
|
|
|
|
|
2009-09-19 14:20:05 +00:00
|
|
|
case LFUN_BUFFER_LANGUAGE:
|
|
|
|
enable = !isReadonly();
|
|
|
|
break;
|
|
|
|
|
2009-04-06 12:12:06 +00:00
|
|
|
default:
|
|
|
|
return false;
|
|
|
|
}
|
2009-09-19 14:05:52 +00:00
|
|
|
flag.setEnabled(enable);
|
2009-04-06 12:12:06 +00:00
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void Buffer::dispatch(string const & command, DispatchResult & result)
|
1999-12-10 00:07:59 +00:00
|
|
|
{
|
2003-09-21 23:00:47 +00:00
|
|
|
return dispatch(lyxaction.lookupFunc(command), result);
|
1999-12-10 00:07:59 +00:00
|
|
|
}
|
|
|
|
|
2000-01-08 21:02:58 +00:00
|
|
|
|
2009-04-06 12:12:06 +00:00
|
|
|
// NOTE We can end up here even if we have no GUI, because we are called
|
|
|
|
// by LyX::exec to handle command-line requests. So we may need to check
|
|
|
|
// whether we have a GUI or not. The boolean use_gui holds this information.
|
|
|
|
void Buffer::dispatch(FuncRequest const & func, DispatchResult & dr)
|
1999-12-10 00:07:59 +00:00
|
|
|
{
|
2009-09-19 14:05:52 +00:00
|
|
|
if (isInternal()) {
|
|
|
|
// FIXME? if there is a Buffer LFUN that can be dispatched even
|
2010-04-09 19:00:42 +00:00
|
|
|
// if internal, put a switch '(cmd.action())' here.
|
2009-09-19 14:05:52 +00:00
|
|
|
dr.dispatched(false);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
string const argument = to_utf8(func.argument());
|
2009-04-06 12:12:06 +00:00
|
|
|
// We'll set this back to false if need be.
|
2000-04-28 11:18:04 +00:00
|
|
|
bool dispatched = true;
|
2009-10-03 19:12:21 +00:00
|
|
|
undo().beginUndoGroup();
|
2002-06-24 20:28:12 +00:00
|
|
|
|
2010-04-09 19:00:42 +00:00
|
|
|
switch (func.action()) {
|
2009-09-19 11:59:39 +00:00
|
|
|
case LFUN_BUFFER_TOGGLE_READ_ONLY:
|
|
|
|
if (lyxvc().inUse())
|
|
|
|
lyxvc().toggleReadOnly();
|
|
|
|
else
|
|
|
|
setReadonly(!isReadonly());
|
|
|
|
break;
|
|
|
|
|
2009-04-24 12:37:56 +00:00
|
|
|
case LFUN_BUFFER_EXPORT: {
|
2010-01-10 13:25:41 +00:00
|
|
|
bool success = doExport(argument, false, false);
|
2010-02-24 17:02:52 +00:00
|
|
|
dr.setError(!success);
|
2009-04-24 12:37:56 +00:00
|
|
|
if (!success)
|
|
|
|
dr.setMessage(bformat(_("Error exporting to format: %1$s."),
|
|
|
|
func.argument()));
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2009-09-19 14:05:52 +00:00
|
|
|
case LFUN_BUILD_PROGRAM:
|
2010-01-10 13:25:41 +00:00
|
|
|
doExport("program", true, false);
|
2009-09-19 14:05:52 +00:00
|
|
|
break;
|
|
|
|
|
|
|
|
case LFUN_BUFFER_CHKTEX:
|
|
|
|
runChktex();
|
|
|
|
break;
|
|
|
|
|
|
|
|
case LFUN_BUFFER_EXPORT_CUSTOM: {
|
|
|
|
string format_name;
|
|
|
|
string command = split(argument, format_name, ' ');
|
|
|
|
Format const * format = formats.getFormat(format_name);
|
|
|
|
if (!format) {
|
|
|
|
lyxerr << "Format \"" << format_name
|
|
|
|
<< "\" not recognized!"
|
|
|
|
<< endl;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
// The name of the file created by the conversion process
|
|
|
|
string filename;
|
|
|
|
|
|
|
|
// Output to filename
|
|
|
|
if (format->name() == "lyx") {
|
|
|
|
string const latexname = latexName(false);
|
|
|
|
filename = changeExtension(latexname,
|
|
|
|
format->extension());
|
|
|
|
filename = addName(temppath(), filename);
|
|
|
|
|
|
|
|
if (!writeFile(FileName(filename)))
|
|
|
|
break;
|
|
|
|
|
|
|
|
} else {
|
2010-01-10 13:25:41 +00:00
|
|
|
doExport(format_name, true, false, filename);
|
2009-09-19 14:05:52 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Substitute $$FName for filename
|
|
|
|
if (!contains(command, "$$FName"))
|
|
|
|
command = "( " + command + " ) < $$FName";
|
|
|
|
command = subst(command, "$$FName", filename);
|
|
|
|
|
|
|
|
// Execute the command in the background
|
|
|
|
Systemcall call;
|
|
|
|
call.startscript(Systemcall::DontWait, command);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
// FIXME: There is a need for a command-line import.
|
|
|
|
/*
|
|
|
|
case LFUN_BUFFER_IMPORT:
|
|
|
|
doImport(argument);
|
|
|
|
break;
|
|
|
|
*/
|
|
|
|
|
|
|
|
case LFUN_BUFFER_AUTO_SAVE:
|
|
|
|
autoSave();
|
|
|
|
break;
|
|
|
|
|
2009-07-09 09:48:34 +00:00
|
|
|
case LFUN_BRANCH_ADD: {
|
2010-06-14 13:39:08 +00:00
|
|
|
docstring branch_name = func.argument();
|
2009-10-29 22:06:27 +00:00
|
|
|
if (branch_name.empty()) {
|
2009-07-09 09:48:34 +00:00
|
|
|
dispatched = false;
|
|
|
|
break;
|
|
|
|
}
|
2009-10-29 22:06:27 +00:00
|
|
|
BranchList & branch_list = params().branchlist();
|
2010-06-14 13:39:08 +00:00
|
|
|
vector<docstring> const branches =
|
|
|
|
getVectorFromString(branch_name, branch_list.separator());
|
|
|
|
docstring msg;
|
|
|
|
for (vector<docstring>::const_iterator it = branches.begin();
|
|
|
|
it != branches.end(); ++it) {
|
|
|
|
branch_name = *it;
|
|
|
|
Branch * branch = branch_list.find(branch_name);
|
|
|
|
if (branch) {
|
|
|
|
LYXERR0("Branch " << branch_name << " already exists.");
|
|
|
|
dr.setError(true);
|
|
|
|
if (!msg.empty())
|
|
|
|
msg += ("\n");
|
|
|
|
msg += bformat(_("Branch \"%1$s\" already exists."), branch_name);
|
|
|
|
} else {
|
|
|
|
branch_list.add(branch_name);
|
|
|
|
branch = branch_list.find(branch_name);
|
|
|
|
string const x11hexname = X11hexname(branch->color());
|
|
|
|
docstring const str = branch_name + ' ' + from_ascii(x11hexname);
|
|
|
|
lyx::dispatch(FuncRequest(LFUN_SET_COLOR, str));
|
|
|
|
dr.setError(false);
|
2010-10-13 17:28:55 +00:00
|
|
|
dr.screenUpdate(Update::Force);
|
2010-06-14 13:39:08 +00:00
|
|
|
}
|
2009-07-09 09:48:34 +00:00
|
|
|
}
|
2010-06-14 13:39:08 +00:00
|
|
|
if (!msg.empty())
|
|
|
|
dr.setMessage(msg);
|
2009-07-09 09:48:34 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2009-04-24 12:37:56 +00:00
|
|
|
case LFUN_BRANCH_ACTIVATE:
|
|
|
|
case LFUN_BRANCH_DEACTIVATE: {
|
|
|
|
BranchList & branchList = params().branchlist();
|
|
|
|
docstring const branchName = func.argument();
|
|
|
|
// the case without a branch name is handled elsewhere
|
|
|
|
if (branchName.empty()) {
|
|
|
|
dispatched = false;
|
1999-12-15 17:42:22 +00:00
|
|
|
break;
|
2002-05-30 19:49:00 +00:00
|
|
|
}
|
2009-04-24 12:37:56 +00:00
|
|
|
Branch * branch = branchList.find(branchName);
|
|
|
|
if (!branch) {
|
|
|
|
LYXERR0("Branch " << branchName << " does not exist.");
|
|
|
|
dr.setError(true);
|
|
|
|
docstring const msg =
|
|
|
|
bformat(_("Branch \"%1$s\" does not exist."), branchName);
|
|
|
|
dr.setMessage(msg);
|
|
|
|
} else {
|
2010-04-09 19:00:42 +00:00
|
|
|
branch->setSelected(func.action() == LFUN_BRANCH_ACTIVATE);
|
2009-04-24 12:37:56 +00:00
|
|
|
dr.setError(false);
|
2010-10-13 17:28:55 +00:00
|
|
|
dr.screenUpdate(Update::Force);
|
2010-07-09 14:37:00 +00:00
|
|
|
dr.forceBufferUpdate();
|
2009-04-24 12:37:56 +00:00
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
1999-12-10 00:07:59 +00:00
|
|
|
|
2009-07-10 06:49:51 +00:00
|
|
|
case LFUN_BRANCHES_RENAME: {
|
2009-07-11 08:18:26 +00:00
|
|
|
if (func.argument().empty())
|
|
|
|
break;
|
|
|
|
|
2009-07-10 06:49:51 +00:00
|
|
|
docstring const oldname = from_utf8(func.getArg(0));
|
|
|
|
docstring const newname = from_utf8(func.getArg(1));
|
2009-07-11 08:18:26 +00:00
|
|
|
InsetIterator it = inset_iterator_begin(inset());
|
|
|
|
InsetIterator const end = inset_iterator_end(inset());
|
|
|
|
bool success = false;
|
|
|
|
for (; it != end; ++it) {
|
|
|
|
if (it->lyxCode() == BRANCH_CODE) {
|
2010-10-25 09:49:24 +00:00
|
|
|
InsetBranch & ins = dynamic_cast<InsetBranch &>(*it);
|
2009-07-11 08:18:26 +00:00
|
|
|
if (ins.branch() == oldname) {
|
|
|
|
undo().recordUndo(it);
|
|
|
|
ins.rename(newname);
|
|
|
|
success = true;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if (it->lyxCode() == INCLUDE_CODE) {
|
|
|
|
// get buffer of external file
|
|
|
|
InsetInclude const & ins =
|
|
|
|
static_cast<InsetInclude const &>(*it);
|
|
|
|
Buffer * child = ins.getChildBuffer();
|
|
|
|
if (!child)
|
|
|
|
continue;
|
|
|
|
child->dispatch(func, dr);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2010-07-09 14:37:00 +00:00
|
|
|
if (success) {
|
2010-10-13 17:28:55 +00:00
|
|
|
dr.screenUpdate(Update::Force);
|
2010-07-09 14:37:00 +00:00
|
|
|
dr.forceBufferUpdate();
|
|
|
|
}
|
2009-07-10 06:49:51 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2009-04-24 12:37:56 +00:00
|
|
|
case LFUN_BUFFER_PRINT: {
|
|
|
|
// we'll assume there's a problem until we succeed
|
|
|
|
dr.setError(true);
|
|
|
|
string target = func.getArg(0);
|
|
|
|
string target_name = func.getArg(1);
|
|
|
|
string command = func.getArg(2);
|
|
|
|
|
|
|
|
if (target.empty()
|
|
|
|
|| target_name.empty()
|
|
|
|
|| command.empty()) {
|
|
|
|
LYXERR0("Unable to parse " << func.argument());
|
|
|
|
docstring const msg =
|
|
|
|
bformat(_("Unable to parse \"%1$s\""), func.argument());
|
|
|
|
dr.setMessage(msg);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
if (target != "printer" && target != "file") {
|
|
|
|
LYXERR0("Unrecognized target \"" << target << '"');
|
|
|
|
docstring const msg =
|
|
|
|
bformat(_("Unrecognized target \"%1$s\""), from_utf8(target));
|
|
|
|
dr.setMessage(msg);
|
2009-04-06 12:12:06 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2010-01-10 13:25:41 +00:00
|
|
|
bool const update_unincluded =
|
|
|
|
params().maintain_unincluded_children
|
|
|
|
&& !params().getIncludedChildren().empty();
|
|
|
|
if (!doExport("dvi", true, update_unincluded)) {
|
2009-04-24 12:37:56 +00:00
|
|
|
showPrintError(absFileName());
|
|
|
|
dr.setMessage(_("Error exporting to DVI."));
|
|
|
|
break;
|
|
|
|
}
|
2009-04-06 12:12:06 +00:00
|
|
|
|
2009-04-24 12:37:56 +00:00
|
|
|
// Push directory path.
|
|
|
|
string const path = temppath();
|
|
|
|
// Prevent the compiler from optimizing away p
|
|
|
|
FileName pp(path);
|
|
|
|
PathChanger p(pp);
|
|
|
|
|
|
|
|
// there are three cases here:
|
|
|
|
// 1. we print to a file
|
|
|
|
// 2. we print directly to a printer
|
|
|
|
// 3. we print using a spool command (print to file first)
|
|
|
|
Systemcall one;
|
|
|
|
int res = 0;
|
|
|
|
string const dviname = changeExtension(latexName(true), "dvi");
|
|
|
|
|
|
|
|
if (target == "printer") {
|
|
|
|
if (!lyxrc.print_spool_command.empty()) {
|
|
|
|
// case 3: print using a spool
|
|
|
|
string const psname = changeExtension(dviname,".ps");
|
|
|
|
command += ' ' + lyxrc.print_to_file
|
|
|
|
+ quoteName(psname)
|
|
|
|
+ ' '
|
|
|
|
+ quoteName(dviname);
|
2009-04-06 12:12:06 +00:00
|
|
|
|
2009-04-24 12:37:56 +00:00
|
|
|
string command2 = lyxrc.print_spool_command + ' ';
|
|
|
|
if (target_name != "default") {
|
|
|
|
command2 += lyxrc.print_spool_printerprefix
|
|
|
|
+ target_name
|
|
|
|
+ ' ';
|
|
|
|
}
|
|
|
|
command2 += quoteName(psname);
|
|
|
|
// First run dvips.
|
|
|
|
// If successful, then spool command
|
|
|
|
res = one.startscript(Systemcall::Wait, command);
|
|
|
|
|
|
|
|
if (res == 0) {
|
|
|
|
// If there's no GUI, we have to wait on this command. Otherwise,
|
|
|
|
// LyX deletes the temporary directory, and with it the spooled
|
|
|
|
// file, before it can be printed!!
|
2009-04-06 12:12:06 +00:00
|
|
|
Systemcall::Starttype stype = use_gui ?
|
|
|
|
Systemcall::DontWait : Systemcall::Wait;
|
2009-04-24 12:37:56 +00:00
|
|
|
res = one.startscript(stype, command2);
|
2009-04-06 12:12:06 +00:00
|
|
|
}
|
|
|
|
} else {
|
2009-04-24 12:37:56 +00:00
|
|
|
// case 2: print directly to a printer
|
|
|
|
if (target_name != "default")
|
|
|
|
command += ' ' + lyxrc.print_to_printer + target_name + ' ';
|
2009-04-06 12:12:06 +00:00
|
|
|
// as above....
|
|
|
|
Systemcall::Starttype stype = use_gui ?
|
|
|
|
Systemcall::DontWait : Systemcall::Wait;
|
2009-04-24 12:37:56 +00:00
|
|
|
res = one.startscript(stype, command + quoteName(dviname));
|
2009-04-06 12:12:06 +00:00
|
|
|
}
|
|
|
|
|
2009-04-24 12:37:56 +00:00
|
|
|
} else {
|
|
|
|
// case 1: print to a file
|
|
|
|
FileName const filename(makeAbsPath(target_name, filePath()));
|
|
|
|
FileName const dvifile(makeAbsPath(dviname, path));
|
|
|
|
if (filename.exists()) {
|
|
|
|
docstring text = bformat(
|
|
|
|
_("The file %1$s already exists.\n\n"
|
|
|
|
"Do you want to overwrite that file?"),
|
2010-04-21 01:19:09 +00:00
|
|
|
makeDisplayPath(filename.absFileName()));
|
2009-04-24 12:37:56 +00:00
|
|
|
if (Alert::prompt(_("Overwrite file?"),
|
|
|
|
text, 0, 1, _("&Overwrite"), _("&Cancel")) != 0)
|
|
|
|
break;
|
2009-04-06 12:12:06 +00:00
|
|
|
}
|
2009-04-24 12:37:56 +00:00
|
|
|
command += ' ' + lyxrc.print_to_file
|
|
|
|
+ quoteName(filename.toFilesystemEncoding())
|
|
|
|
+ ' '
|
|
|
|
+ quoteName(dvifile.toFilesystemEncoding());
|
|
|
|
// as above....
|
|
|
|
Systemcall::Starttype stype = use_gui ?
|
|
|
|
Systemcall::DontWait : Systemcall::Wait;
|
|
|
|
res = one.startscript(stype, command);
|
2008-05-28 16:02:09 +00:00
|
|
|
}
|
|
|
|
|
2009-04-24 12:37:56 +00:00
|
|
|
if (res == 0)
|
|
|
|
dr.setError(false);
|
|
|
|
else {
|
|
|
|
dr.setMessage(_("Error running external commands."));
|
|
|
|
showPrintError(absFileName());
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2009-09-19 14:20:05 +00:00
|
|
|
case LFUN_BUFFER_LANGUAGE: {
|
|
|
|
Language const * oldL = params().language;
|
|
|
|
Language const * newL = languages.getLanguage(argument);
|
|
|
|
if (!newL || oldL == newL)
|
|
|
|
break;
|
2010-07-09 14:37:00 +00:00
|
|
|
if (oldL->rightToLeft() == newL->rightToLeft() && !isMultiLingual()) {
|
2009-09-19 14:20:05 +00:00
|
|
|
changeLanguage(oldL, newL);
|
2010-07-09 14:37:00 +00:00
|
|
|
dr.forceBufferUpdate();
|
|
|
|
}
|
2009-09-19 14:20:05 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2009-04-24 12:37:56 +00:00
|
|
|
default:
|
|
|
|
dispatched = false;
|
|
|
|
break;
|
2000-04-28 11:18:04 +00:00
|
|
|
}
|
2009-04-06 12:12:06 +00:00
|
|
|
dr.dispatched(dispatched);
|
2009-10-03 19:12:21 +00:00
|
|
|
undo().endUndoGroup();
|
1999-12-10 00:07:59 +00:00
|
|
|
}
|
2000-04-10 21:40:13 +00:00
|
|
|
|
2000-04-26 13:57:28 +00:00
|
|
|
|
2001-06-28 10:25:20 +00:00
|
|
|
void Buffer::changeLanguage(Language const * from, Language const * to)
|
2000-04-10 21:40:13 +00:00
|
|
|
{
|
2008-04-10 21:49:34 +00:00
|
|
|
LASSERT(from, /**/);
|
|
|
|
LASSERT(to, /**/);
|
2005-01-05 20:21:27 +00:00
|
|
|
|
2004-11-06 15:23:12 +00:00
|
|
|
for_each(par_iterator_begin(),
|
|
|
|
par_iterator_end(),
|
|
|
|
bind(&Paragraph::changeLanguage, _1, params(), from, to));
|
2000-04-10 21:40:13 +00:00
|
|
|
}
|
|
|
|
|
2003-04-28 21:58:53 +00:00
|
|
|
|
2004-02-25 12:00:53 +00:00
|
|
|
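// True if any paragraph of the buffer uses more than one language.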
bool Buffer::isMultiLingual() const
|
2000-04-10 21:40:13 +00:00
|
|
|
{
|
2004-02-25 12:00:53 +00:00
|
|
|
ParConstIterator end = par_iterator_end();
|
|
|
|
for (ParConstIterator it = par_iterator_begin(); it != end; ++it)
|
2003-09-09 09:47:59 +00:00
|
|
|
if (it->isMultiLingual(params()))
|
2000-04-10 21:40:13 +00:00
|
|
|
return true;
|
2001-09-01 21:26:34 +00:00
|
|
|
|
2000-04-10 21:40:13 +00:00
|
|
|
return false;
|
|
|
|
}
|
2000-05-19 16:46:01 +00:00
|
|
|
|
|
|
|
|
2010-02-08 17:15:00 +00:00
|
|
|
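// Return every language used in this buffer and in its child documents,
// including the buffer language itself.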
std::set<Language const *> Buffer::getLanguages() const
|
|
|
|
{
|
|
|
|
std::set<Language const *> languages;
|
|
|
|
getLanguages(languages);
|
|
|
|
return languages;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void Buffer::getLanguages(std::set<Language const *> & languages) const
|
|
|
|
{
|
|
|
|
ParConstIterator end = par_iterator_end();
|
2010-02-25 18:00:43 +00:00
|
|
|
// add the buffer language, even if it's not actively used
|
|
|
|
languages.insert(language());
|
|
|
|
// iterate over the paragraphs
|
2010-02-08 17:15:00 +00:00
|
|
|
for (ParConstIterator it = par_iterator_begin(); it != end; ++it)
|
|
|
|
it->getLanguages(languages);
|
|
|
|
// also children
|
2010-09-29 13:05:57 +00:00
|
|
|
ListOfBuffers clist = getDescendents();
|
2010-09-29 11:55:10 +00:00
|
|
|
ListOfBuffers::const_iterator cit = clist.begin();
|
|
|
|
ListOfBuffers::const_iterator const cen = clist.end();
|
|
|
|
for (; cit != cen; ++cit)
|
2010-02-08 17:15:00 +00:00
|
|
|
(*cit)->getLanguages(languages);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-02-09 15:23:05 +00:00
|
|
|
DocIterator Buffer::getParFromID(int const id) const
|
2001-07-09 09:16:00 +00:00
|
|
|
{
|
2008-11-17 11:46:07 +00:00
|
|
|
Buffer * buf = const_cast<Buffer *>(this);
|
2008-01-12 21:38:51 +00:00
|
|
|
if (id < 0) {
|
|
|
|
// John says this is called with id == -1 from undo
|
|
|
|
lyxerr << "getParFromID(), id: " << id << endl;
|
2008-11-17 11:46:07 +00:00
|
|
|
return doc_iterator_end(buf);
|
2008-01-12 21:38:51 +00:00
|
|
|
}
|
|
|
|
|
2008-11-17 11:46:07 +00:00
|
|
|
for (DocIterator it = doc_iterator_begin(buf); !it.atEnd(); it.forwardPar())
|
2008-02-09 15:23:05 +00:00
|
|
|
if (it.paragraph().id() == id)
|
2008-01-12 21:38:51 +00:00
|
|
|
return it;
|
|
|
|
|
2008-11-17 11:46:07 +00:00
|
|
|
return doc_iterator_end(buf);
|
2008-01-12 21:38:51 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2005-01-05 20:21:27 +00:00
|
|
|
bool Buffer::hasParWithID(int const id) const
|
2003-05-05 17:28:21 +00:00
|
|
|
{
|
2008-02-09 15:23:05 +00:00
|
|
|
return !getParFromID(id).atEnd();
|
2003-05-05 17:28:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2001-09-01 21:26:34 +00:00
|
|
|
ParIterator Buffer::par_iterator_begin()
|
|
|
|
{
|
2008-11-17 11:46:07 +00:00
|
|
|
return ParIterator(doc_iterator_begin(this));
|
2001-09-01 21:26:34 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
ParIterator Buffer::par_iterator_end()
|
|
|
|
{
|
2008-11-17 11:46:07 +00:00
|
|
|
return ParIterator(doc_iterator_end(this));
|
2001-09-01 21:26:34 +00:00
|
|
|
}
|
2002-08-20 17:18:21 +00:00
|
|
|
|
2003-08-26 14:50:16 +00:00
|
|
|
|
2002-11-08 01:08:27 +00:00
|
|
|
ParConstIterator Buffer::par_iterator_begin() const
|
|
|
|
{
|
2008-11-17 11:46:07 +00:00
|
|
|
return ParConstIterator(doc_iterator_begin(this));
|
2002-11-08 01:08:27 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
ParConstIterator Buffer::par_iterator_end() const
|
|
|
|
{
|
2008-11-17 11:46:07 +00:00
|
|
|
return ParConstIterator(doc_iterator_end(this));
|
2002-11-08 01:08:27 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2007-10-20 10:03:45 +00:00
|
|
|
Language const * Buffer::language() const
|
2002-08-20 17:18:21 +00:00
|
|
|
{
|
2003-09-09 09:47:59 +00:00
|
|
|
return params().language;
|
2002-08-20 17:18:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2006-09-09 11:16:28 +00:00
|
|
|
docstring const Buffer::B_(string const & l10n) const
|
2003-04-24 23:19:41 +00:00
|
|
|
{
|
2007-05-01 08:26:40 +00:00
|
|
|
return params().B_(l10n);
|
2003-04-24 23:19:41 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2002-08-20 17:18:21 +00:00
|
|
|
bool Buffer::isClean() const
|
|
|
|
{
|
2007-11-30 17:46:49 +00:00
|
|
|
return d->lyx_clean;
|
2002-08-20 17:18:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2007-08-09 20:46:22 +00:00
|
|
|
bool Buffer::isExternallyModified(CheckMethod method) const
|
|
|
|
{
|
2008-04-10 21:49:34 +00:00
|
|
|
LASSERT(d->filename.exists(), /**/);
|
2007-08-09 20:46:22 +00:00
|
|
|
// if method == timestamp, check timestamp before checksum
|
2008-08-01 17:57:01 +00:00
|
|
|
return (method == checksum_method
|
2007-11-30 17:46:49 +00:00
|
|
|
|| d->timestamp_ != d->filename.lastModified())
|
|
|
|
&& d->checksum_ != d->filename.checksum();
|
2007-08-09 20:46:22 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2007-10-20 10:03:45 +00:00
|
|
|
void Buffer::saveCheckSum(FileName const & file) const
|
2007-09-06 15:54:17 +00:00
|
|
|
{
|
2007-10-20 10:03:45 +00:00
|
|
|
if (file.exists()) {
|
2007-11-30 17:46:49 +00:00
|
|
|
d->timestamp_ = file.lastModified();
|
|
|
|
d->checksum_ = file.checksum();
|
2007-09-06 15:54:17 +00:00
|
|
|
} else {
|
|
|
|
// in the case of save to a new file.
|
2007-11-30 17:46:49 +00:00
|
|
|
d->timestamp_ = 0;
|
|
|
|
d->checksum_ = 0;
|
2007-09-06 15:54:17 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2002-08-20 17:18:21 +00:00
|
|
|
void Buffer::markClean() const
|
|
|
|
{
|
2007-11-30 17:46:49 +00:00
|
|
|
if (!d->lyx_clean) {
|
|
|
|
d->lyx_clean = true;
|
2002-08-20 17:18:21 +00:00
|
|
|
updateTitles();
|
|
|
|
}
|
|
|
|
// if the .lyx file has been saved, we don't need an
|
|
|
|
// autosave
|
2007-11-30 17:46:49 +00:00
|
|
|
d->bak_clean = true;
|
2010-08-06 23:54:04 +00:00
|
|
|
d->undo_.markDirty();
|
2002-08-20 17:18:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void Buffer::setUnnamed(bool flag)
|
|
|
|
{
|
2007-11-30 17:46:49 +00:00
|
|
|
d->unnamed = flag;
|
2002-08-20 17:18:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2004-02-25 12:00:53 +00:00
|
|
|
bool Buffer::isUnnamed() const
|
2002-08-20 17:18:21 +00:00
|
|
|
{
|
2007-11-30 17:46:49 +00:00
|
|
|
return d->unnamed;
|
2002-08-20 17:18:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-09-08 01:29:07 +00:00
|
|
|
/// \note
|
|
|
|
/// Don't check unnamed here: isInternal() is used in
|
|
|
|
/// newBuffer(), where the unnamed flag has not been set by anyone
|
|
|
|
/// yet. Also, for an internal buffer, there should be no need for
|
|
|
|
/// retrieving fileName() nor for checking if it is unnamed or not.
|
|
|
|
bool Buffer::isInternal() const
|
|
|
|
{
|
|
|
|
return fileName().extension() == "internal";
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2002-08-20 17:18:21 +00:00
|
|
|
void Buffer::markDirty()
|
|
|
|
{
|
2007-11-30 17:46:49 +00:00
|
|
|
if (d->lyx_clean) {
|
|
|
|
d->lyx_clean = false;
|
2002-08-20 17:18:21 +00:00
|
|
|
updateTitles();
|
|
|
|
}
|
2007-11-30 17:46:49 +00:00
|
|
|
d->bak_clean = false;
|
2003-02-09 00:27:52 +00:00
|
|
|
|
2007-11-30 17:46:49 +00:00
|
|
|
DepClean::iterator it = d->dep_clean.begin();
|
|
|
|
DepClean::const_iterator const end = d->dep_clean.end();
|
2003-02-09 00:27:52 +00:00
|
|
|
|
2006-10-21 00:16:43 +00:00
|
|
|
for (; it != end; ++it)
|
2003-02-09 00:27:52 +00:00
|
|
|
it->second = false;
|
2002-08-20 17:18:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2007-11-03 17:37:37 +00:00
|
|
|
FileName Buffer::fileName() const
|
|
|
|
{
|
2007-11-30 17:46:49 +00:00
|
|
|
return d->filename;
|
2007-11-03 17:37:37 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2007-10-20 10:03:45 +00:00
|
|
|
string Buffer::absFileName() const
|
2002-08-20 17:18:21 +00:00
|
|
|
{
|
2010-04-21 01:19:09 +00:00
|
|
|
return d->filename.absFileName();
|
2002-08-20 17:18:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2007-11-28 15:25:07 +00:00
|
|
|
string Buffer::filePath() const
|
2002-08-20 17:18:21 +00:00
|
|
|
{
|
2010-04-21 01:19:09 +00:00
|
|
|
return d->filename.onlyPath().absFileName() + "/";
|
2002-08-20 17:18:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-10-25 14:00:29 +00:00
|
|
|
bool Buffer::isReadonly() const
|
2002-08-20 17:18:21 +00:00
|
|
|
{
|
2007-11-30 17:46:49 +00:00
|
|
|
return d->read_only;
|
2002-08-20 17:18:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2007-11-30 17:41:27 +00:00
|
|
|
void Buffer::setParent(Buffer const * buffer)
|
2002-08-20 17:18:21 +00:00
|
|
|
{
|
2007-11-30 17:41:27 +00:00
|
|
|
// Avoids recursive include.
|
2009-04-03 00:44:33 +00:00
|
|
|
d->setParent(buffer == this ? 0 : buffer);
|
2007-12-21 20:42:46 +00:00
|
|
|
updateMacros();
|
2002-08-20 17:18:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-11-16 21:28:06 +00:00
|
|
|
Buffer const * Buffer::parent() const
|
2004-03-25 10:12:44 +00:00
|
|
|
{
|
2009-04-03 00:44:33 +00:00
|
|
|
return d->parent();
|
2004-03-25 10:12:44 +00:00
|
|
|
}
|
2004-04-13 06:27:29 +00:00
|
|
|
|
|
|
|
|
2010-09-29 11:55:10 +00:00
|
|
|
ListOfBuffers Buffer::allRelatives() const
|
2008-11-16 21:28:06 +00:00
|
|
|
{
|
2010-10-05 15:21:14 +00:00
|
|
|
ListOfBuffers lb = masterBuffer()->getDescendents();
|
|
|
|
lb.push_front(const_cast<Buffer *>(this));
|
|
|
|
return lb;
|
2008-11-16 21:28:06 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2007-11-30 17:41:27 +00:00
|
|
|
Buffer const * Buffer::masterBuffer() const
|
2006-04-15 11:46:17 +00:00
|
|
|
{
|
2010-09-29 12:55:39 +00:00
|
|
|
// FIXME Should we make sure we are not in some kind
|
|
|
|
// of recursive include? A -> B -> A will crash this.
|
2009-04-03 00:44:33 +00:00
|
|
|
Buffer const * const pbuf = d->parent();
|
|
|
|
if (!pbuf)
|
2007-11-30 17:41:27 +00:00
|
|
|
return this;
|
2008-08-01 17:57:01 +00:00
|
|
|
|
2009-04-03 00:44:33 +00:00
|
|
|
return pbuf->masterBuffer();
|
2006-04-15 11:46:17 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-20 17:52:55 +00:00
|
|
|
bool Buffer::isChild(Buffer * child) const
|
|
|
|
{
|
|
|
|
return d->children_positions.find(child) != d->children_positions.end();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-11-16 21:28:06 +00:00
|
|
|
DocIterator Buffer::firstChildPosition(Buffer const * child)
|
|
|
|
{
|
|
|
|
Impl::BufferPositionMap::iterator it;
|
|
|
|
it = d->children_positions.find(child);
|
|
|
|
if (it == d->children_positions.end())
|
2008-11-17 11:46:07 +00:00
|
|
|
return DocIterator(this);
|
2008-11-16 21:28:06 +00:00
|
|
|
return it->second;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-09-29 12:17:24 +00:00
|
|
|
bool Buffer::hasChildren() const
|
|
|
|
{
|
|
|
|
return !d->children_positions.empty();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-09-29 13:05:57 +00:00
|
|
|
void Buffer::collectChildren(ListOfBuffers & clist, bool grand_children) const
|
2009-03-07 16:33:03 +00:00
|
|
|
{
|
|
|
|
// loop over children
|
|
|
|
Impl::BufferPositionMap::iterator it = d->children_positions.begin();
|
|
|
|
Impl::BufferPositionMap::iterator end = d->children_positions.end();
|
|
|
|
for (; it != end; ++it) {
|
|
|
|
Buffer * child = const_cast<Buffer *>(it->first);
|
2010-09-29 12:30:31 +00:00
|
|
|
// No duplicates
|
|
|
|
ListOfBuffers::const_iterator bit = find(clist.begin(), clist.end(), child);
|
|
|
|
if (bit != clist.end())
|
|
|
|
continue;
|
2009-03-07 16:33:03 +00:00
|
|
|
clist.push_back(child);
|
2010-09-29 11:59:31 +00:00
|
|
|
if (grand_children)
|
2009-10-29 23:28:08 +00:00
|
|
|
// there might be grandchildren
|
2010-09-29 13:05:57 +00:00
|
|
|
child->collectChildren(clist, true);
|
2009-03-07 16:33:03 +00:00
|
|
|
}
|
2009-12-30 18:40:18 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-09-29 13:05:57 +00:00
|
|
|
ListOfBuffers Buffer::getChildren() const
|
2009-12-30 18:40:18 +00:00
|
|
|
{
|
2010-09-29 11:55:10 +00:00
|
|
|
ListOfBuffers v;
|
2010-09-29 13:05:57 +00:00
|
|
|
collectChildren(v, false);
|
|
|
|
return v;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
ListOfBuffers Buffer::getDescendents() const
|
|
|
|
{
|
|
|
|
ListOfBuffers v;
|
|
|
|
collectChildren(v, true);
|
2009-12-30 18:40:18 +00:00
|
|
|
return v;
|
2009-03-07 16:33:03 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
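// Return an iterator to the last element of 'm' whose key is strictly
// smaller than 'x', or m.end() if there is no such element.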
template<typename M>
|
2010-01-25 18:39:08 +00:00
|
|
|
typename M::const_iterator greatest_below(M & m, typename M::key_type const & x)
|
2004-04-13 06:27:29 +00:00
|
|
|
{
|
2007-12-21 20:42:46 +00:00
|
|
|
if (m.empty())
|
|
|
|
return m.end();
|
2007-11-01 11:13:07 +00:00
|
|
|
|
2010-01-25 18:39:08 +00:00
|
|
|
typename M::const_iterator it = m.lower_bound(x);
|
2007-12-21 20:42:46 +00:00
|
|
|
if (it == m.begin())
|
|
|
|
return m.end();
|
2007-11-01 11:13:07 +00:00
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
--it;
|
2008-08-01 17:57:01 +00:00
|
|
|
return it;
|
2004-04-13 06:27:29 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-01-25 18:39:08 +00:00
|
|
|
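// Find the macro definition of 'name' that is in effect at 'pos': the
// latest definition before 'pos', either from this buffer or from a child
// included before 'pos', whose scope still covers 'pos'.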
MacroData const * Buffer::Impl::getBufferMacro(docstring const & name,
|
2007-12-21 20:42:46 +00:00
|
|
|
DocIterator const & pos) const
|
2004-04-13 06:27:29 +00:00
|
|
|
{
|
2007-12-21 20:43:21 +00:00
|
|
|
LYXERR(Debug::MACROS, "Searching for " << to_ascii(name) << " at " << pos);
|
2007-11-01 11:13:07 +00:00
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
// if paragraphs have no macro context set, pos will be empty
|
|
|
|
if (pos.empty())
|
|
|
|
return 0;
|
2007-11-01 11:13:07 +00:00
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
// we haven't found anything yet
|
2010-01-25 18:39:08 +00:00
|
|
|
DocIterator bestPos = owner_->par_iterator_begin();
|
2007-12-21 20:42:46 +00:00
|
|
|
MacroData const * bestData = 0;
|
2008-08-01 17:57:01 +00:00
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
// find macro definitions for name
|
2010-01-25 18:39:08 +00:00
|
|
|
NamePositionScopeMacroMap::const_iterator nameIt = macros.find(name);
|
|
|
|
if (nameIt != macros.end()) {
|
2007-12-21 20:42:46 +00:00
|
|
|
// find last definition in front of pos or at pos itself
|
2010-01-25 18:39:08 +00:00
|
|
|
PositionScopeMacroMap::const_iterator it
|
2008-11-17 11:46:07 +00:00
|
|
|
= greatest_below(nameIt->second, pos);
|
2007-12-21 20:42:46 +00:00
|
|
|
if (it != nameIt->second.end()) {
|
|
|
|
while (true) {
|
|
|
|
// scope ends behind pos?
|
|
|
|
if (pos < it->second.first) {
|
|
|
|
// Looks good, remember this. If there
|
|
|
|
// is no external macro behind this,
|
|
|
|
// we found the right one already.
|
|
|
|
bestPos = it->first;
|
|
|
|
bestData = &it->second.second;
|
|
|
|
break;
|
|
|
|
}
|
2008-08-01 17:57:01 +00:00
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
// try previous macro if there is one
|
|
|
|
if (it == nameIt->second.begin())
|
|
|
|
break;
|
|
|
|
--it;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// find macros in included files
|
2010-01-25 18:39:08 +00:00
|
|
|
PositionScopeBufferMap::const_iterator it
|
|
|
|
= greatest_below(position_to_children, pos);
|
|
|
|
if (it == position_to_children.end())
|
2008-02-06 21:32:31 +00:00
|
|
|
// no children before
|
|
|
|
return bestData;
|
2007-12-21 20:42:46 +00:00
|
|
|
|
2008-02-06 21:32:31 +00:00
|
|
|
while (true) {
|
|
|
|
// do we know something better (i.e. later) already?
|
|
|
|
if (it->first < bestPos)
|
|
|
|
break;
|
|
|
|
|
|
|
|
// scope ends behind pos?
|
|
|
|
if (pos < it->second.first) {
|
|
|
|
// look for macro in external file
|
2010-01-25 18:39:08 +00:00
|
|
|
macro_lock = true;
|
2008-02-06 21:32:31 +00:00
|
|
|
MacroData const * data
|
2010-01-25 18:39:08 +00:00
|
|
|
= it->second.second->getMacro(name, false);
|
|
|
|
macro_lock = false;
|
2008-02-06 21:32:31 +00:00
|
|
|
if (data) {
|
|
|
|
bestPos = it->first;
|
|
|
|
bestData = data;
|
2007-12-21 20:42:46 +00:00
|
|
|
break;
|
2008-02-06 21:32:31 +00:00
|
|
|
}
|
2007-12-21 20:42:46 +00:00
|
|
|
}
|
2008-02-06 21:32:31 +00:00
|
|
|
|
|
|
|
// try previous file if there is one
|
2010-01-25 18:39:08 +00:00
|
|
|
if (it == position_to_children.begin())
|
2008-02-06 21:32:31 +00:00
|
|
|
break;
|
|
|
|
--it;
|
2007-12-21 20:42:46 +00:00
|
|
|
}
|
2008-08-01 17:57:01 +00:00
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
// return the best macro we have found
|
|
|
|
return bestData;
|
2007-11-01 11:13:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
MacroData const * Buffer::getMacro(docstring const & name,
|
|
|
|
DocIterator const & pos, bool global) const
|
2007-11-01 11:13:07 +00:00
|
|
|
{
|
2007-12-21 20:42:46 +00:00
|
|
|
if (d->macro_lock)
|
2008-08-01 17:57:01 +00:00
|
|
|
return 0;
|
2007-12-21 20:42:46 +00:00
|
|
|
|
|
|
|
// query buffer macros
|
2010-01-25 18:39:08 +00:00
|
|
|
MacroData const * data = d->getBufferMacro(name, pos);
|
2007-12-21 20:42:46 +00:00
|
|
|
if (data != 0)
|
|
|
|
return data;
|
2007-11-01 11:13:07 +00:00
|
|
|
|
|
|
|
// If there is a master buffer, query that
|
2009-04-03 00:44:33 +00:00
|
|
|
Buffer const * const pbuf = d->parent();
|
|
|
|
if (pbuf) {
|
2007-12-21 20:42:46 +00:00
|
|
|
d->macro_lock = true;
|
2009-04-03 00:44:33 +00:00
|
|
|
MacroData const * macro = pbuf->getMacro(
|
2008-03-07 20:51:56 +00:00
|
|
|
name, *this, false);
|
2007-12-21 20:42:46 +00:00
|
|
|
d->macro_lock = false;
|
|
|
|
if (macro)
|
|
|
|
return macro;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (global) {
|
|
|
|
data = MacroTable::globalMacros().get(name);
|
|
|
|
if (data != 0)
|
|
|
|
return data;
|
|
|
|
}
|
2007-11-01 11:13:07 +00:00
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
return 0;
|
2004-04-13 06:27:29 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
MacroData const * Buffer::getMacro(docstring const & name, bool global) const
|
2004-04-13 06:27:29 +00:00
|
|
|
{
|
2007-12-21 20:42:46 +00:00
|
|
|
// set scope end behind the last paragraph
|
|
|
|
DocIterator scope = par_iterator_begin();
|
|
|
|
scope.pit() = scope.lastpit() + 1;
|
2007-11-01 11:13:07 +00:00
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
return getMacro(name, scope, global);
|
|
|
|
}
|
2007-11-01 11:13:07 +00:00
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
|
2008-03-07 20:51:56 +00:00
|
|
|
MacroData const * Buffer::getMacro(docstring const & name,
|
|
|
|
Buffer const & child, bool global) const
|
2007-12-21 20:42:46 +00:00
|
|
|
{
|
|
|
|
// look where the child buffer is included first
|
2008-03-07 20:51:56 +00:00
|
|
|
Impl::BufferPositionMap::iterator it = d->children_positions.find(&child);
|
2007-12-21 20:42:46 +00:00
|
|
|
if (it == d->children_positions.end())
|
|
|
|
return 0;
|
|
|
|
|
|
|
|
// check for macros at the inclusion position
|
|
|
|
return getMacro(name, it->second, global);
|
2004-04-13 06:27:29 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-07-21 21:58:54 +00:00
|
|
|
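// Scan the paragraphs starting at 'it', recursing into text insets,
// tabular cells and included children, and register every macro template
// together with the scope in which its definition is visible.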
void Buffer::Impl::updateMacros(DocIterator & it, DocIterator & scope)
|
2004-04-13 06:27:29 +00:00
|
|
|
{
|
2010-01-12 15:25:04 +00:00
|
|
|
pit_type const lastpit = it.lastpit();
|
2007-12-21 20:42:46 +00:00
|
|
|
|
|
|
|
// look for macros in each paragraph
|
|
|
|
while (it.pit() <= lastpit) {
|
|
|
|
Paragraph & par = it.paragraph();
|
|
|
|
|
|
|
|
// iterate over the insets of the current paragraph
|
|
|
|
InsetList const & insets = par.insetList();
|
|
|
|
InsetList::const_iterator iit = insets.begin();
|
2005-07-18 14:25:20 +00:00
|
|
|
InsetList::const_iterator end = insets.end();
|
2007-12-21 20:42:46 +00:00
|
|
|
for (; iit != end; ++iit) {
|
|
|
|
it.pos() = iit->pos;
|
2008-08-01 17:57:01 +00:00
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
// is it a nested text inset?
|
|
|
|
if (iit->inset->asInsetText()) {
|
|
|
|
// Inset needs its own scope?
|
2009-03-20 19:36:37 +00:00
|
|
|
InsetText const * itext = iit->inset->asInsetText();
|
2008-02-27 20:43:16 +00:00
|
|
|
bool newScope = itext->isMacroScope();
|
2007-12-21 20:42:46 +00:00
|
|
|
|
2008-08-01 17:57:01 +00:00
|
|
|
// scope which ends just behind the inset
|
2007-12-21 20:42:46 +00:00
|
|
|
DocIterator insetScope = it;
|
2008-02-27 20:43:16 +00:00
|
|
|
++insetScope.pos();
|
2007-12-21 20:42:46 +00:00
|
|
|
|
|
|
|
// collect macros in inset
|
|
|
|
it.push_back(CursorSlice(*iit->inset));
|
2008-03-10 12:49:02 +00:00
|
|
|
updateMacros(it, newScope ? insetScope : scope);
|
2007-12-21 20:42:46 +00:00
|
|
|
it.pop_back();
|
|
|
|
continue;
|
|
|
|
}
|
2010-10-19 15:36:11 +00:00
|
|
|
|
|
|
|
if (iit->inset->asInsetTabular()) {
|
|
|
|
CursorSlice slice(*iit->inset);
|
|
|
|
size_t const numcells = slice.nargs();
|
|
|
|
for (; slice.idx() < numcells; slice.forwardIdx()) {
|
|
|
|
it.push_back(slice);
|
|
|
|
updateMacros(it, scope);
|
|
|
|
it.pop_back();
|
|
|
|
}
|
|
|
|
continue;
|
|
|
|
}
|
2008-08-01 17:57:01 +00:00
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
// is it an external file?
|
|
|
|
if (iit->inset->lyxCode() == INCLUDE_CODE) {
|
|
|
|
// get buffer of external file
|
2009-03-20 19:36:37 +00:00
|
|
|
InsetInclude const & inset =
|
|
|
|
static_cast<InsetInclude const &>(*iit->inset);
|
2010-01-25 18:39:08 +00:00
|
|
|
macro_lock = true;
|
2009-03-26 00:48:32 +00:00
|
|
|
Buffer * child = inset.getChildBuffer();
|
2010-01-25 18:39:08 +00:00
|
|
|
macro_lock = false;
|
2007-12-21 20:42:46 +00:00
|
|
|
if (!child)
|
2008-08-01 17:57:01 +00:00
|
|
|
continue;
|
2007-12-21 20:42:46 +00:00
|
|
|
|
2008-02-08 16:19:36 +00:00
|
|
|
// register its position, but only when it is
|
2007-12-21 20:42:46 +00:00
|
|
|
// included first in the buffer
|
2010-01-25 18:39:08 +00:00
|
|
|
if (children_positions.find(child) ==
|
|
|
|
children_positions.end())
|
|
|
|
children_positions[child] = it;
|
2008-08-01 17:57:01 +00:00
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
// register child with its scope
|
2010-01-25 18:39:08 +00:00
|
|
|
position_to_children[it] = Impl::ScopeBuffer(scope, child);
|
2007-12-21 20:42:46 +00:00
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
2010-07-21 21:58:54 +00:00
|
|
|
if (doing_export && iit->inset->asInsetMath()) {
|
2010-07-21 13:19:52 +00:00
|
|
|
InsetMath * im = static_cast<InsetMath *>(iit->inset);
|
|
|
|
if (im->asHullInset()) {
|
|
|
|
InsetMathHull * hull = static_cast<InsetMathHull *>(im);
|
|
|
|
hull->recordLocation(it);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
if (iit->inset->lyxCode() != MATHMACRO_CODE)
|
2007-11-01 11:13:07 +00:00
|
|
|
continue;
|
2008-08-01 17:57:01 +00:00
|
|
|
|
2007-11-01 11:13:07 +00:00
|
|
|
// get macro data
|
2009-03-20 19:36:37 +00:00
|
|
|
MathMacroTemplate & macroTemplate =
|
|
|
|
static_cast<MathMacroTemplate &>(*iit->inset);
|
2010-01-25 18:39:08 +00:00
|
|
|
MacroContext mc(owner_, it);
|
2007-12-21 20:42:46 +00:00
|
|
|
macroTemplate.updateToContext(mc);
|
2007-11-01 11:13:07 +00:00
|
|
|
|
|
|
|
// valid?
|
2007-12-21 20:42:46 +00:00
|
|
|
bool valid = macroTemplate.validMacro();
|
|
|
|
// FIXME: Should be fixNameAndCheckIfValid() in fact,
|
|
|
|
// but then the BufferView's cursor would be invalid in
|
|
|
|
// some cases, which leads to crashes.
|
|
|
|
if (!valid)
|
|
|
|
continue;
|
2007-11-01 11:13:07 +00:00
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
// register macro
|
2009-11-08 11:45:46 +00:00
|
|
|
// FIXME (Abdel), I don't understand why we pass 'it' here
|
|
|
|
// instead of 'macroTemplate' defined above... is this correct?
|
2010-01-25 18:39:08 +00:00
|
|
|
macros[macroTemplate.name()][it] =
|
|
|
|
Impl::ScopeMacro(scope, MacroData(const_cast<Buffer *>(owner_), it));
|
2007-12-21 20:42:46 +00:00
|
|
|
}
|
2007-11-01 11:13:07 +00:00
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
// next paragraph
|
|
|
|
it.pit()++;
|
|
|
|
it.pos() = 0;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-07-21 21:58:54 +00:00
|
|
|
void Buffer::updateMacros() const
|
2007-12-21 20:42:46 +00:00
|
|
|
{
|
|
|
|
if (d->macro_lock)
|
|
|
|
return;
|
|
|
|
|
2007-12-21 20:43:21 +00:00
|
|
|
LYXERR(Debug::MACROS, "updateMacro of " << d->filename.onlyFileName());
|
2007-12-21 20:42:46 +00:00
|
|
|
|
|
|
|
// start with empty table
|
|
|
|
d->macros.clear();
|
|
|
|
d->children_positions.clear();
|
|
|
|
d->position_to_children.clear();
|
|
|
|
|
|
|
|
// Iterate over buffer, starting with first paragraph
|
|
|
|
// The scope must be bigger than any lookup DocIterator
|
|
|
|
// later. For the global lookup, lastpit+1 is used, hence
|
|
|
|
// we use lastpit+2 here.
|
|
|
|
DocIterator it = par_iterator_begin();
|
|
|
|
DocIterator outerScope = it;
|
|
|
|
outerScope.pit() = outerScope.lastpit() + 2;
|
2010-07-21 21:58:54 +00:00
|
|
|
d->updateMacros(it, outerScope);
|
2007-12-21 20:42:46 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-07-09 09:48:34 +00:00
|
|
|
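// Collect the names of branches that are used in branch insets of this
// document (or of its children) but not defined in the corresponding
// branch list.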
void Buffer::getUsedBranches(std::list<docstring> & result, bool const from_master) const
|
|
|
|
{
|
2009-07-11 08:29:30 +00:00
|
|
|
InsetIterator it = inset_iterator_begin(inset());
|
|
|
|
InsetIterator const end = inset_iterator_end(inset());
|
|
|
|
for (; it != end; ++it) {
|
|
|
|
if (it->lyxCode() == BRANCH_CODE) {
|
2010-10-25 09:49:24 +00:00
|
|
|
InsetBranch & br = dynamic_cast<InsetBranch &>(*it);
|
2009-07-11 08:29:30 +00:00
|
|
|
docstring const name = br.branch();
|
|
|
|
if (!from_master && !params().branchlist().find(name))
|
|
|
|
result.push_back(name);
|
|
|
|
else if (from_master && !masterBuffer()->params().branchlist().find(name))
|
|
|
|
result.push_back(name);
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
if (it->lyxCode() == INCLUDE_CODE) {
|
|
|
|
// get buffer of external file
|
|
|
|
InsetInclude const & ins =
|
|
|
|
static_cast<InsetInclude const &>(*it);
|
|
|
|
Buffer * child = ins.getChildBuffer();
|
|
|
|
if (!child)
|
2009-07-09 09:48:34 +00:00
|
|
|
continue;
|
2009-07-11 08:29:30 +00:00
|
|
|
child->getUsedBranches(result, true);
|
2009-07-09 09:48:34 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
// remove duplicates
|
|
|
|
result.unique();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
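// Walk over all math insets of the document and update their macro
// instances against the current macro definitions.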
void Buffer::updateMacroInstances() const
|
|
|
|
{
|
2008-01-12 21:38:51 +00:00
|
|
|
LYXERR(Debug::MACROS, "updateMacroInstances for "
|
|
|
|
<< d->filename.onlyFileName());
|
2008-11-17 11:46:07 +00:00
|
|
|
DocIterator it = doc_iterator_begin(this);
|
2009-12-08 22:41:10 +00:00
|
|
|
it.forwardInset();
|
|
|
|
DocIterator const end = doc_iterator_end(this);
|
|
|
|
for (; it != end; it.forwardInset()) {
|
|
|
|
// look for MathData cells in InsetMathNest insets
|
|
|
|
InsetMath * minset = it.nextInset()->asInsetMath();
|
2007-12-21 20:42:46 +00:00
|
|
|
if (!minset)
|
|
|
|
continue;
|
|
|
|
|
|
|
|
// update macro in all cells of the InsetMathNest
|
|
|
|
DocIterator::idx_type n = minset->nargs();
|
2009-11-08 11:45:46 +00:00
|
|
|
MacroContext mc = MacroContext(this, it);
|
2007-12-21 20:42:46 +00:00
|
|
|
for (DocIterator::idx_type i = 0; i < n; ++i) {
|
|
|
|
MathData & data = minset->cell(i);
|
|
|
|
data.updateMacros(0, mc);
|
2004-04-13 06:27:29 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2005-07-17 14:29:35 +00:00
|
|
|
|
2005-07-17 23:03:01 +00:00
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
void Buffer::listMacroNames(MacroNameSet & macros) const
|
|
|
|
{
|
|
|
|
if (d->macro_lock)
|
|
|
|
return;
|
|
|
|
|
|
|
|
d->macro_lock = true;
|
2008-08-01 17:57:01 +00:00
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
// loop over macro names
|
2008-03-07 20:51:56 +00:00
|
|
|
Impl::NamePositionScopeMacroMap::iterator nameIt = d->macros.begin();
|
|
|
|
Impl::NamePositionScopeMacroMap::iterator nameEnd = d->macros.end();
|
2007-12-21 20:42:46 +00:00
|
|
|
for (; nameIt != nameEnd; ++nameIt)
|
|
|
|
macros.insert(nameIt->first);
|
|
|
|
|
|
|
|
// loop over children
|
2008-03-07 20:51:56 +00:00
|
|
|
Impl::BufferPositionMap::iterator it = d->children_positions.begin();
|
|
|
|
Impl::BufferPositionMap::iterator end = d->children_positions.end();
|
2007-12-21 20:42:46 +00:00
|
|
|
for (; it != end; ++it)
|
|
|
|
it->first->listMacroNames(macros);
|
|
|
|
|
|
|
|
// call parent
|
2009-04-03 00:44:33 +00:00
|
|
|
Buffer const * const pbuf = d->parent();
|
|
|
|
if (pbuf)
|
|
|
|
pbuf->listMacroNames(macros);
|
2007-12-21 20:42:46 +00:00
|
|
|
|
2008-08-01 17:57:01 +00:00
|
|
|
d->macro_lock = false;
|
2007-12-21 20:42:46 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-03-26 12:55:36 +00:00
|
|
|
void Buffer::listParentMacros(MacroSet & macros, LaTeXFeatures & features) const
|
2007-12-21 20:42:46 +00:00
|
|
|
{
|
2009-04-03 00:44:33 +00:00
|
|
|
Buffer const * const pbuf = d->parent();
|
|
|
|
if (!pbuf)
|
2007-12-21 20:42:46 +00:00
|
|
|
return;
|
2008-08-01 17:57:01 +00:00
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
MacroNameSet names;
|
2009-04-03 00:44:33 +00:00
|
|
|
pbuf->listMacroNames(names);
|
2008-08-01 17:57:01 +00:00
|
|
|
|
2008-03-26 12:55:36 +00:00
|
|
|
// resolve macros
|
2007-12-21 20:42:46 +00:00
|
|
|
MacroNameSet::iterator it = names.begin();
|
|
|
|
MacroNameSet::iterator end = names.end();
|
|
|
|
for (; it != end; ++it) {
|
|
|
|
// defined?
|
2008-08-01 17:57:01 +00:00
|
|
|
MacroData const * data =
|
2009-04-03 00:44:33 +00:00
|
|
|
pbuf->getMacro(*it, *this, false);
|
2008-03-26 12:55:36 +00:00
|
|
|
if (data) {
|
|
|
|
macros.insert(data);
|
2008-08-01 17:57:01 +00:00
|
|
|
|
2008-03-26 12:55:36 +00:00
|
|
|
// we cannot access the original MathMacroTemplate anymore
|
|
|
|
// here to call its validate() method. So we do its work here manually.
|
|
|
|
// FIXME: somehow make the template accessible here.
|
|
|
|
if (data->optionals() > 0)
|
|
|
|
features.require("xargs");
|
|
|
|
}
|
2007-12-21 20:42:46 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-03-02 15:27:35 +00:00
|
|
|
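// The reference cache lives in the master buffer; child buffers delegate
// to it. An unknown label gets an empty entry inserted on first access.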
Buffer::References & Buffer::references(docstring const & label)
|
|
|
|
{
|
2009-04-03 00:44:33 +00:00
|
|
|
if (d->parent())
|
2008-03-02 15:27:35 +00:00
|
|
|
return const_cast<Buffer *>(masterBuffer())->references(label);
|
|
|
|
|
|
|
|
RefCache::iterator it = d->ref_cache_.find(label);
|
|
|
|
if (it != d->ref_cache_.end())
|
|
|
|
return it->second.second;
|
|
|
|
|
|
|
|
static InsetLabel const * dummy_il = 0;
|
|
|
|
static References const dummy_refs;
|
2008-03-07 20:51:56 +00:00
|
|
|
it = d->ref_cache_.insert(
|
|
|
|
make_pair(label, make_pair(dummy_il, dummy_refs))).first;
|
2008-03-02 15:27:35 +00:00
|
|
|
return it->second.second;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Buffer::References const & Buffer::references(docstring const & label) const
|
|
|
|
{
|
|
|
|
return const_cast<Buffer *>(this)->references(label);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void Buffer::setInsetLabel(docstring const & label, InsetLabel const * il)
|
|
|
|
{
|
|
|
|
masterBuffer()->d->ref_cache_[label].first = il;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
InsetLabel const * Buffer::insetLabel(docstring const & label) const
|
|
|
|
{
|
|
|
|
return masterBuffer()->d->ref_cache_[label].first;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void Buffer::clearReferenceCache() const
|
|
|
|
{
|
2009-04-03 00:44:33 +00:00
|
|
|
if (!d->parent())
|
2008-03-02 15:27:35 +00:00
|
|
|
d->ref_cache_.clear();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2006-10-22 11:00:04 +00:00
|
|
|
void Buffer::changeRefsIfUnique(docstring const & from, docstring const & to,
|
2007-10-13 09:11:10 +00:00
|
|
|
InsetCode code)
|
2005-07-17 23:03:01 +00:00
|
|
|
{
|
2006-10-09 14:21:11 +00:00
|
|
|
// FIXME: This does not work for child documents yet.
|
2008-04-10 21:49:34 +00:00
|
|
|
LASSERT(code == CITE_CODE, /**/);
|
2005-07-17 23:03:01 +00:00
|
|
|
// Check if the label 'from' appears more than once
|
2006-10-12 10:50:45 +00:00
|
|
|
vector<docstring> labels;
|
2007-10-23 18:51:04 +00:00
|
|
|
string paramName;
|
2010-01-11 16:29:26 +00:00
|
|
|
checkBibInfoCache();
|
2008-04-25 20:03:03 +00:00
|
|
|
BiblioInfo const & keys = masterBibInfo();
|
2008-03-07 13:45:48 +00:00
|
|
|
BiblioInfo::const_iterator bit = keys.begin();
|
|
|
|
BiblioInfo::const_iterator bend = keys.end();
|
2006-10-09 14:21:11 +00:00
|
|
|
|
2008-03-07 13:45:48 +00:00
|
|
|
for (; bit != bend; ++bit)
|
|
|
|
// FIXME UNICODE
|
|
|
|
labels.push_back(bit->first);
|
|
|
|
paramName = "key";
|
2005-07-17 23:03:01 +00:00
|
|
|
|
2007-12-12 19:28:07 +00:00
|
|
|
if (count(labels.begin(), labels.end(), from) > 1)
|
2005-07-17 23:03:01 +00:00
|
|
|
return;
|
|
|
|
|
2006-10-09 14:21:11 +00:00
|
|
|
for (InsetIterator it = inset_iterator_begin(inset()); it; ++it) {
|
|
|
|
if (it->lyxCode() == code) {
|
2007-03-25 00:56:01 +00:00
|
|
|
InsetCommand & inset = static_cast<InsetCommand &>(*it);
|
2007-10-23 18:51:04 +00:00
|
|
|
docstring const oldValue = inset.getParam(paramName);
|
|
|
|
if (oldValue == from)
|
|
|
|
inset.setParam(paramName, to);
|
2005-07-17 23:03:01 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2006-04-09 02:48:54 +00:00
|
|
|
|
|
|
|
|
2006-10-22 11:00:04 +00:00
|
|
|
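// Write the LaTeX or DocBook source of the whole document (full_source)
// or of the given paragraph range to 'os'. Runs in dry-run mode, so no
// files are copied or converted.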
void Buffer::getSourceCode(odocstream & os, pit_type par_begin,
|
2008-09-29 10:48:49 +00:00
|
|
|
pit_type par_end, bool full_source) const
|
2006-04-09 02:48:54 +00:00
|
|
|
{
|
2007-03-18 10:59:16 +00:00
|
|
|
OutputParams runparams(&params().encoding());
|
2006-04-09 02:48:54 +00:00
|
|
|
runparams.nice = true;
|
2009-04-06 06:58:30 +00:00
|
|
|
runparams.flavor = params().useXetex ?
|
|
|
|
OutputParams::XETEX : OutputParams::LATEX;
|
2007-01-15 22:49:14 +00:00
|
|
|
runparams.linelen = lyxrc.plaintext_linelen;
|
2006-04-09 02:48:54 +00:00
|
|
|
// No side effects from file copying or image conversion
|
|
|
|
runparams.dryrun = true;
|
|
|
|
|
2006-08-04 13:59:12 +00:00
|
|
|
if (full_source) {
|
2007-05-25 09:20:35 +00:00
|
|
|
os << "% " << _("Preview source code") << "\n\n";
|
2009-05-24 01:38:33 +00:00
|
|
|
d->texrow.reset();
|
2007-11-30 17:46:49 +00:00
|
|
|
d->texrow.newline();
|
|
|
|
d->texrow.newline();
|
2008-10-16 07:43:46 +00:00
|
|
|
if (isDocBook())
|
2007-10-20 10:03:45 +00:00
|
|
|
writeDocBookSource(os, absFileName(), runparams, false);
|
2008-10-29 14:04:10 +00:00
|
|
|
else
|
2008-10-16 07:43:46 +00:00
|
|
|
// latex or literate
|
2008-10-29 14:04:10 +00:00
|
|
|
writeLaTeXSource(os, string(), runparams, true, true);
|
2006-08-04 13:59:12 +00:00
|
|
|
} else {
|
|
|
|
runparams.par_begin = par_begin;
|
|
|
|
runparams.par_end = par_end;
|
2008-03-07 20:51:56 +00:00
|
|
|
if (par_begin + 1 == par_end) {
|
2007-05-25 09:20:35 +00:00
|
|
|
os << "% "
|
2007-08-12 18:58:59 +00:00
|
|
|
<< bformat(_("Preview source code for paragraph %1$d"), par_begin)
|
2007-05-25 09:20:35 +00:00
|
|
|
<< "\n\n";
|
2008-03-07 20:51:56 +00:00
|
|
|
} else {
|
2007-05-25 09:20:35 +00:00
|
|
|
os << "% "
|
|
|
|
<< bformat(_("Preview source code from paragraph %1$s to %2$s"),
|
|
|
|
convert<docstring>(par_begin),
|
|
|
|
convert<docstring>(par_end - 1))
|
|
|
|
<< "\n\n";
|
2008-03-07 20:51:56 +00:00
|
|
|
}
|
2009-05-24 01:38:33 +00:00
|
|
|
TexRow texrow;
|
|
|
|
texrow.reset();
|
|
|
|
texrow.newline();
|
|
|
|
texrow.newline();
|
2006-08-04 13:59:12 +00:00
|
|
|
// output paragraphs
|
2008-10-16 07:43:46 +00:00
|
|
|
if (isDocBook())
|
2009-08-09 18:35:39 +00:00
|
|
|
docbookParagraphs(text(), *this, os, runparams);
|
2008-10-16 07:43:46 +00:00
|
|
|
else
|
|
|
|
// latex or literate
|
2009-05-24 01:38:33 +00:00
|
|
|
latexParagraphs(*this, text(), os, texrow, runparams);
|
2006-08-04 13:59:12 +00:00
|
|
|
}
|
2006-04-09 02:48:54 +00:00
|
|
|
}
|
|
|
|
|
2006-07-15 22:43:37 +00:00
|
|
|
|
2007-11-30 17:41:27 +00:00
|
|
|
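// Return the cached error list of the given type, or a shared empty list
// if no list of that type exists yet.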
ErrorList & Buffer::errorList(string const & type) const
|
2006-07-15 22:43:37 +00:00
|
|
|
{
|
2007-11-30 17:41:27 +00:00
|
|
|
static ErrorList emptyErrorList;
|
2010-03-13 16:55:59 +00:00
|
|
|
map<string, ErrorList>::iterator it = d->errorLists.find(type);
|
|
|
|
if (it == d->errorLists.end())
|
2006-08-14 09:33:49 +00:00
|
|
|
return emptyErrorList;
|
2006-07-15 22:43:37 +00:00
|
|
|
|
2010-03-13 16:55:59 +00:00
|
|
|
return it->second;
|
2006-07-15 22:43:37 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-09-30 09:50:54 +00:00
|
|
|
void Buffer::updateTocItem(std::string const & type,
|
|
|
|
DocIterator const & dit) const
|
|
|
|
{
|
2010-01-25 14:32:39 +00:00
|
|
|
if (d->gui_)
|
|
|
|
d->gui_->updateTocItem(type, dit);
|
2008-09-30 09:50:54 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2007-10-02 18:27:20 +00:00
|
|
|
void Buffer::structureChanged() const
|
|
|
|
{
|
2010-01-25 14:32:39 +00:00
|
|
|
if (d->gui_)
|
|
|
|
d->gui_->structureChanged();
|
2007-10-02 18:27:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-06-21 12:26:41 +00:00
|
|
|
void Buffer::errors(string const & err, bool from_master) const
|
2007-10-02 18:27:20 +00:00
|
|
|
{
|
2010-01-25 14:32:39 +00:00
|
|
|
if (d->gui_)
|
|
|
|
d->gui_->errors(err, from_master);
|
2007-10-02 18:27:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void Buffer::message(docstring const & msg) const
|
|
|
|
{
|
2010-01-25 14:32:39 +00:00
|
|
|
if (d->gui_)
|
|
|
|
d->gui_->message(msg);
|
2007-10-02 18:27:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2007-10-21 10:50:56 +00:00
|
|
|
void Buffer::setBusy(bool on) const
|
2007-10-02 18:27:20 +00:00
|
|
|
{
|
2010-01-25 14:32:39 +00:00
|
|
|
if (d->gui_)
|
|
|
|
d->gui_->setBusy(on);
|
2007-10-02 18:27:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void Buffer::updateTitles() const
|
|
|
|
{
|
2007-11-30 17:46:49 +00:00
|
|
|
if (d->wa_)
|
|
|
|
d->wa_->updateTitles();
|
2007-10-02 18:27:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void Buffer::resetAutosaveTimers() const
|
|
|
|
{
|
2010-01-25 14:32:39 +00:00
|
|
|
if (d->gui_)
|
|
|
|
d->gui_->resetAutosaveTimers();
|
2007-10-02 18:27:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-02-19 00:29:04 +00:00
|
|
|
bool Buffer::hasGuiDelegate() const
|
|
|
|
{
|
2010-01-25 14:32:39 +00:00
|
|
|
return d->gui_;
|
2009-02-19 00:29:04 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2007-10-02 18:27:20 +00:00
|
|
|
void Buffer::setGuiDelegate(frontend::GuiBufferDelegate * gui)
|
|
|
|
{
|
2010-01-25 14:32:39 +00:00
|
|
|
d->gui_ = gui;
|
2007-10-02 18:27:20 +00:00
|
|
|
}
|
|
|
|
|
2007-10-03 11:00:18 +00:00
|
|
|
|
|
|
|
|
|
|
|
namespace {
|
|
|
|
|
2008-11-16 11:51:42 +00:00
|
|
|
class AutoSaveBuffer : public ForkedProcess {
|
2007-10-03 11:00:18 +00:00
|
|
|
public:
|
|
|
|
///
|
|
|
|
AutoSaveBuffer(Buffer const & buffer, FileName const & fname)
|
|
|
|
: buffer_(buffer), fname_(fname) {}
|
|
|
|
///
|
2010-04-22 11:37:32 +00:00
|
|
|
virtual shared_ptr<ForkedProcess> clone() const
|
2007-10-03 11:00:18 +00:00
|
|
|
{
|
2010-04-22 11:37:32 +00:00
|
|
|
return shared_ptr<ForkedProcess>(new AutoSaveBuffer(*this));
|
2007-10-03 11:00:18 +00:00
|
|
|
}
|
|
|
|
///
|
|
|
|
int start()
|
|
|
|
{
|
2008-08-01 17:57:01 +00:00
|
|
|
command_ = to_utf8(bformat(_("Auto-saving %1$s"),
|
2010-04-21 01:19:09 +00:00
|
|
|
from_utf8(fname_.absFileName())));
|
2007-10-03 11:00:18 +00:00
|
|
|
return run(DontWait);
|
|
|
|
}
|
|
|
|
private:
|
|
|
|
///
|
|
|
|
virtual int generateChild();
|
|
|
|
///
|
|
|
|
Buffer const & buffer_;
|
|
|
|
FileName fname_;
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
int AutoSaveBuffer::generateChild()
|
|
|
|
{
|
2009-11-17 17:04:47 +00:00
|
|
|
#if defined(__APPLE__)
|
|
|
|
/* FIXME fork() is not usable for autosave on Mac OS X 10.6 (snow leopard)
|
|
|
|
* We should use something else like threads.
|
|
|
|
*
|
|
|
|
* Since I do not know how to determine the OS X version at run
|
|
|
|
* time, I just disable forking altogether for now (JMarc)
|
|
|
|
*/
|
|
|
|
pid_t const pid = -1;
|
|
|
|
#else
|
2007-10-03 11:00:18 +00:00
|
|
|
// tmp_ret will be located (usually) in /tmp
|
|
|
|
// will that be a problem?
|
2008-01-15 18:26:53 +00:00
|
|
|
// Note that this calls ForkedCalls::fork(), so it's
|
|
|
|
// ok cross-platform.
|
2007-10-03 22:27:31 +00:00
|
|
|
pid_t const pid = fork();
|
|
|
|
// If you want to debug the autosave
|
|
|
|
// you should set pid to -1, and comment out the fork.
|
2008-01-14 09:04:14 +00:00
|
|
|
if (pid != 0 && pid != -1)
|
|
|
|
return pid;
|
2009-11-17 17:04:47 +00:00
|
|
|
#endif
|
2008-01-14 09:04:14 +00:00
|
|
|
|
|
|
|
// pid = -1 signifies that lyx was unable
|
|
|
|
// to fork. But we will do the save
|
|
|
|
// anyway.
|
|
|
|
bool failed = false;
|
|
|
|
FileName const tmp_ret = FileName::tempName("lyxauto");
|
|
|
|
if (!tmp_ret.empty()) {
|
|
|
|
buffer_.writeFile(tmp_ret);
|
|
|
|
// assume successful write of tmp_ret
|
|
|
|
if (!tmp_ret.moveTo(fname_))
|
2007-10-03 11:00:18 +00:00
|
|
|
failed = true;
|
2008-01-14 09:04:14 +00:00
|
|
|
} else
|
|
|
|
failed = true;
|
|
|
|
|
|
|
|
if (failed) {
|
|
|
|
// failed to write/rename tmp_ret so try writing direct
|
|
|
|
if (!buffer_.writeFile(fname_)) {
|
|
|
|
// It is dangerous to do this in the child,
|
|
|
|
// but safe in the parent, so...
|
|
|
|
if (pid == -1) // emit message signal.
|
|
|
|
buffer_.message(_("Autosave failed!"));
|
2007-10-03 11:00:18 +00:00
|
|
|
}
|
|
|
|
}
|
2008-01-14 09:04:14 +00:00
|
|
|
|
|
|
|
if (pid == 0) // we are the child so...
|
|
|
|
_exit(0);
|
|
|
|
|
2007-10-03 11:00:18 +00:00
|
|
|
return pid;
|
|
|
|
}
|
|
|
|
|
|
|
|
} // namespace anon
|
|
|
|
|
|
|
|
|
2010-10-25 12:31:22 +00:00
|
|
|
FileName Buffer::getEmergencyFileName() const
|
|
|
|
{
|
|
|
|
return getEmergencyFileNameFor(d->filename);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
FileName Buffer::getEmergencyFileNameFor(FileName const & fn) const
|
|
|
|
{
|
|
|
|
return FileName(fn.absFileName() + ".emergency");
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-04-21 01:19:33 +00:00
|
|
|
FileName Buffer::getAutosaveFileName() const
|
2009-04-04 03:13:46 +00:00
|
|
|
{
|
2009-05-03 10:24:12 +00:00
|
|
|
// if the document is unnamed try to save in the backup dir, else
|
|
|
|
// in the default document path, and as a last try in the filePath,
|
|
|
|
// which will most often be the temporary directory
|
|
|
|
string fpath;
|
|
|
|
if (isUnnamed())
|
|
|
|
fpath = lyxrc.backupdir_path.empty() ? lyxrc.document_path
|
|
|
|
: lyxrc.backupdir_path;
|
|
|
|
if (!isUnnamed() || fpath.empty() || !FileName(fpath).exists())
|
|
|
|
fpath = filePath();
|
|
|
|
|
2010-10-25 12:31:22 +00:00
|
|
|
string const fname = d->filename.onlyFileName();
|
|
|
|
return getAutosaveFileNameFor(makeAbsPath(fname, fpath));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
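// Autosave files are named #name# and live in the same directory as 'fn'.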
FileName Buffer::getAutosaveFileNameFor(FileName const & fn) const
|
|
|
|
{
|
|
|
|
string const fname = "#" + onlyFileName(fn.absFileName()) + "#";
|
|
|
|
return FileName(onlyPath(fn.absFileName()) + fname);
|
2009-04-04 03:13:46 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void Buffer::removeAutosaveFile() const
|
|
|
|
{
|
2010-04-21 01:19:33 +00:00
|
|
|
FileName const f = getAutosaveFileName();
|
2009-04-04 03:13:46 +00:00
|
|
|
if (f.exists())
|
|
|
|
f.removeFile();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-05-03 10:21:21 +00:00
|
|
|
void Buffer::moveAutosaveFile(support::FileName const & oldauto) const
|
|
|
|
{
|
2010-04-21 01:19:33 +00:00
|
|
|
FileName const newauto = getAutosaveFileName();
|
2010-02-12 23:01:36 +00:00
|
|
|
oldauto.refresh();
|
|
|
|
if (newauto != oldauto && oldauto.exists())
|
2010-02-12 23:03:43 +00:00
|
|
|
if (!oldauto.moveTo(newauto))
|
2010-02-12 23:01:36 +00:00
|
|
|
LYXERR0("Unable to move autosave file `" << oldauto << "'!");
|
2009-05-03 10:21:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2007-10-03 11:00:18 +00:00
|
|
|
// Perfect target for a thread...
|
|
|
|
void Buffer::autoSave() const
|
|
|
|
{
|
2010-01-25 14:13:04 +00:00
|
|
|
if (d->bak_clean || isReadonly()) {
|
2007-10-03 11:00:18 +00:00
|
|
|
// We don't save now, but we'll try again later
|
|
|
|
resetAutosaveTimers();
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// emit message signal.
|
|
|
|
message(_("Autosaving current document..."));
|
2010-04-21 01:19:33 +00:00
|
|
|
AutoSaveBuffer autosave(*this, getAutosaveFileName());
|
2007-10-03 11:00:18 +00:00
|
|
|
autosave.start();
|
|
|
|
|
2010-01-25 14:13:04 +00:00
|
|
|
d->bak_clean = true;
|
|
|
|
|
2007-10-03 11:00:18 +00:00
|
|
|
resetAutosaveTimers();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2007-10-20 10:03:45 +00:00
|
|
|
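// The native backend format of this document: the output format of the
// document class, adjusted to xetex or platex where the document settings
// require it.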
string Buffer::bufferFormat() const
|
|
|
|
{
|
2009-05-28 08:59:25 +00:00
|
|
|
string format = params().documentClass().outputFormat();
|
|
|
|
if (format == "latex") {
|
|
|
|
if (params().useXetex)
|
|
|
|
return "xetex";
|
|
|
|
if (params().encoding().package() == Encoding::japanese)
|
|
|
|
return "platex";
|
|
|
|
}
|
|
|
|
return format;
|
2007-10-20 10:03:45 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-04-07 05:01:08 +00:00
|
|
|
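// The format used for viewing and exporting by default: the format chosen
// in the document settings if any, otherwise the first viewable format for
// DocBook, literate, XeTeX or Japanese documents, otherwise
// lyxrc.default_view_format.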
string Buffer::getDefaultOutputFormat() const
|
|
|
|
{
|
2009-04-10 11:06:53 +00:00
|
|
|
if (!params().defaultOutputFormat.empty()
|
|
|
|
&& params().defaultOutputFormat != "default")
|
|
|
|
return params().defaultOutputFormat;
|
2009-04-07 05:01:08 +00:00
|
|
|
typedef vector<Format const *> Formats;
|
|
|
|
Formats formats = exportableFormats(true);
|
|
|
|
if (isDocBook()
|
|
|
|
|| isLiterate()
|
|
|
|
|| params().useXetex
|
2009-04-07 11:54:02 +00:00
|
|
|
|| params().encoding().package() == Encoding::japanese) {
|
|
|
|
if (formats.empty())
|
|
|
|
return string();
|
2009-04-07 05:01:08 +00:00
|
|
|
// return the first we find
|
|
|
|
return formats.front()->name();
|
2009-04-07 11:54:02 +00:00
|
|
|
}
|
2009-04-07 05:01:08 +00:00
|
|
|
return lyxrc.default_view_format;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-07-21 21:51:33 +00:00
|
|
|
namespace {
|
|
|
|
// helper class, to guarantee this gets reset properly
|
|
|
|
class MarkAsExporting {
|
|
|
|
public:
|
|
|
|
MarkAsExporting(Buffer const * buf) : buf_(buf)
|
|
|
|
{
|
|
|
|
LASSERT(buf_, /* */);
|
|
|
|
buf_->setExportStatus(true);
|
|
|
|
}
|
|
|
|
~MarkAsExporting()
|
|
|
|
{
|
|
|
|
buf_->setExportStatus(false);
|
|
|
|
}
|
|
|
|
private:
|
|
|
|
Buffer const * const buf_;
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void Buffer::setExportStatus(bool e) const
|
|
|
|
{
|
|
|
|
d->doing_export = e;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool Buffer::isExporting() const
|
|
|
|
{
|
|
|
|
return d->doing_export;
|
|
|
|
}
|
|
|
|
|
2009-04-07 05:01:08 +00:00
|
|
|
|
2007-10-23 21:41:17 +00:00
|
|
|
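// Export the buffer to 'format': write the backend file (LaTeX, DocBook,
// plain text, XHTML or LyX), run the converter chain, and, unless
// put_in_tempdir is true, copy the result and any referenced files next to
// the document. With 'includeall' the \includeonly directive is ignored.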
bool Buffer::doExport(string const & format, bool put_in_tempdir,
|
2010-01-10 13:25:41 +00:00
|
|
|
bool includeall, string & result_file) const
|
2007-10-20 10:51:13 +00:00
|
|
|
{
|
2010-07-21 21:51:33 +00:00
|
|
|
MarkAsExporting exporting(this);
|
2007-10-20 10:51:13 +00:00
|
|
|
string backend_format;
|
|
|
|
OutputParams runparams(&params().encoding());
|
|
|
|
runparams.flavor = OutputParams::LATEX;
|
|
|
|
runparams.linelen = lyxrc.plaintext_linelen;
|
2010-01-10 13:25:41 +00:00
|
|
|
runparams.includeall = includeall;
|
2007-10-20 10:51:13 +00:00
|
|
|
vector<string> backs = backends();
|
|
|
|
if (find(backs.begin(), backs.end(), format) == backs.end()) {
|
|
|
|
// Get shortest path to format
|
|
|
|
Graph::EdgePath path;
|
|
|
|
for (vector<string>::const_iterator it = backs.begin();
|
|
|
|
it != backs.end(); ++it) {
|
|
|
|
Graph::EdgePath p = theConverters().getPath(*it, format);
|
|
|
|
if (!p.empty() && (path.empty() || p.size() < path.size())) {
|
|
|
|
backend_format = *it;
|
|
|
|
path = p;
|
|
|
|
}
|
|
|
|
}
|
2009-12-18 22:51:06 +00:00
|
|
|
if (path.empty()) {
|
|
|
|
if (!put_in_tempdir) {
|
|
|
|
// Only show this alert if this is an export to a non-temporary
|
|
|
|
// file (not for previewing).
|
|
|
|
Alert::error(_("Couldn't export file"), bformat(
|
|
|
|
_("No information for exporting the format %1$s."),
|
|
|
|
formats.prettyName(format)));
|
|
|
|
}
|
2007-10-20 10:51:13 +00:00
|
|
|
return false;
|
|
|
|
}
|
2009-12-18 22:51:06 +00:00
|
|
|
runparams.flavor = theConverters().getFlavor(path);
|
|
|
|
|
2007-10-20 10:51:13 +00:00
|
|
|
} else {
|
|
|
|
backend_format = format;
|
|
|
|
// FIXME: Don't hardcode format names here, but use a flag
|
|
|
|
if (backend_format == "pdflatex")
|
|
|
|
runparams.flavor = OutputParams::PDFLATEX;
|
|
|
|
}
|
|
|
|
|
|
|
|
string filename = latexName(false);
|
|
|
|
filename = addName(temppath(), filename);
|
|
|
|
filename = changeExtension(filename,
|
|
|
|
formats.extension(backend_format));
|
|
|
|
|
2007-12-21 20:42:46 +00:00
|
|
|
// fix macros
|
|
|
|
updateMacroInstances();
|
|
|
|
|
2007-10-20 10:51:13 +00:00
|
|
|
// Plain text backend
|
2009-11-14 12:54:12 +00:00
|
|
|
if (backend_format == "text") {
|
|
|
|
runparams.flavor = OutputParams::TEXT;
|
2007-10-20 10:51:13 +00:00
|
|
|
writePlaintextFile(*this, FileName(filename), runparams);
|
2009-11-14 12:54:12 +00:00
|
|
|
}
|
2009-10-25 01:48:14 +00:00
|
|
|
// HTML backend
|
|
|
|
else if (backend_format == "xhtml") {
|
|
|
|
runparams.flavor = OutputParams::HTML;
|
2010-03-31 18:24:52 +00:00
|
|
|
switch (params().html_math_output) {
|
|
|
|
case BufferParams::MathML:
|
|
|
|
runparams.math_flavor = OutputParams::MathAsMathML;
|
|
|
|
break;
|
|
|
|
case BufferParams::HTML:
|
|
|
|
runparams.math_flavor = OutputParams::MathAsHTML;
|
|
|
|
break;
|
|
|
|
case BufferParams::Images:
|
|
|
|
runparams.math_flavor = OutputParams::MathAsImages;
|
|
|
|
break;
|
|
|
|
case BufferParams::LaTeX:
|
|
|
|
runparams.math_flavor = OutputParams::MathAsLaTeX;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2009-06-05 17:44:35 +00:00
|
|
|
makeLyXHTMLFile(FileName(filename), runparams);
|
2009-10-25 01:48:14 +00:00
|
|
|
} else if (backend_format == "lyx")
|
2007-10-20 10:51:13 +00:00
|
|
|
writeFile(FileName(filename));
|
|
|
|
// Docbook backend
|
|
|
|
else if (isDocBook()) {
|
|
|
|
runparams.nice = !put_in_tempdir;
|
|
|
|
makeDocBookFile(FileName(filename), runparams);
|
|
|
|
}
|
|
|
|
// LaTeX backend
|
2008-11-03 09:08:01 +00:00
|
|
|
else if (backend_format == format) {
|
2007-10-20 10:51:13 +00:00
|
|
|
runparams.nice = true;
|
|
|
|
if (!makeLaTeXFile(FileName(filename), string(), runparams))
|
|
|
|
return false;
|
|
|
|
} else if (!lyxrc.tex_allows_spaces
|
2007-12-12 19:57:42 +00:00
|
|
|
&& contains(filePath(), ' ')) {
|
2007-10-20 10:51:13 +00:00
|
|
|
Alert::error(_("File name error"),
|
|
|
|
_("The directory path to the document cannot contain spaces."));
|
|
|
|
return false;
|
|
|
|
} else {
|
|
|
|
runparams.nice = false;
|
|
|
|
if (!makeLaTeXFile(FileName(filename), filePath(), runparams))
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
string const error_type = (format == "program")
|
|
|
|
? "Build" : bufferFormat();
|
2008-07-14 07:16:00 +00:00
|
|
|
ErrorList & error_list = d->errorLists[error_type];
|
2007-10-20 10:51:13 +00:00
|
|
|
string const ext = formats.extension(format);
|
|
|
|
FileName const tmp_result_file(changeExtension(filename, ext));
|
|
|
|
bool const success = theConverters().convert(this, FileName(filename),
|
|
|
|
tmp_result_file, FileName(absFileName()), backend_format, format,
|
2008-07-14 07:16:00 +00:00
|
|
|
error_list);
|
2010-03-13 11:39:50 +00:00
|
|
|
|
|
|
|
// Emit the signal to show the error list or copy it back to the
|
|
|
|
// cloned Buffer so that it can be emitted afterwards.
|
2009-06-21 12:26:41 +00:00
|
|
|
if (format != backend_format) {
|
2010-03-13 11:39:50 +00:00
|
|
|
if (d->cloned_buffer_) {
|
|
|
|
d->cloned_buffer_->d->errorLists[error_type] =
|
|
|
|
d->errorLists[error_type];
|
|
|
|
} else
|
|
|
|
errors(error_type);
|
2009-06-21 12:26:41 +00:00
|
|
|
// also to the children, in case of master-buffer-view
|
2010-09-29 13:05:57 +00:00
|
|
|
ListOfBuffers clist = getDescendents();
|
2010-09-29 11:55:10 +00:00
|
|
|
ListOfBuffers::const_iterator cit = clist.begin();
|
|
|
|
ListOfBuffers::const_iterator const cen = clist.end();
|
|
|
|
for (; cit != cen; ++cit) {
|
2010-03-13 11:39:50 +00:00
|
|
|
if (d->cloned_buffer_) {
|
|
|
|
(*cit)->d->cloned_buffer_->d->errorLists[error_type] =
|
|
|
|
(*cit)->d->errorLists[error_type];
|
|
|
|
} else
|
|
|
|
(*cit)->errors(error_type, true);
|
|
|
|
}
|
2009-06-21 12:26:41 +00:00
|
|
|
}
|
2007-10-20 10:51:13 +00:00
|
|
|
|
2010-01-08 09:00:28 +00:00
|
|
|
if (d->cloned_buffer_) {
|
|
|
|
// Enable reverse dvi or pdf to work by copying back the texrow
|
|
|
|
// object to the cloned buffer.
|
|
|
|
// FIXME: There is a possibility of concurrent access to texrow
|
|
|
|
// here from the main GUI thread that should be secured.
|
|
|
|
d->cloned_buffer_->d->texrow = d->texrow;
|
2010-03-13 11:39:50 +00:00
|
|
|
string const error_type = bufferFormat();
|
|
|
|
d->cloned_buffer_->d->errorLists[error_type] = d->errorLists[error_type];
|
2010-01-08 09:00:28 +00:00
|
|
|
}
|
|
|
|
|
2010-03-13 11:39:50 +00:00
|
|
|
if (!success)
|
|
|
|
return false;
|
|
|
|
|
2008-01-14 14:09:20 +00:00
|
|
|
if (put_in_tempdir) {
|
2010-04-21 01:19:09 +00:00
|
|
|
result_file = tmp_result_file.absFileName();
|
2008-01-14 14:09:20 +00:00
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2010-04-21 01:19:09 +00:00
|
|
|
result_file = changeExtension(d->exportFileName().absFileName(), ext);
|
2008-01-14 14:09:20 +00:00
|
|
|
// We need to copy referenced files (e. g. included graphics
|
|
|
|
// if format == "dvi") to the result dir.
|
|
|
|
vector<ExportedFile> const files =
|
|
|
|
runparams.exportdata->externalFiles(format);
|
|
|
|
string const dest = onlyPath(result_file);
|
2010-04-20 16:49:49 +00:00
|
|
|
bool use_force = use_gui ? lyxrc.export_overwrite == ALL_FILES
|
|
|
|
: force_overwrite == ALL_FILES;
|
|
|
|
CopyStatus status = use_force ? FORCE : SUCCESS;
|
2010-01-21 18:12:23 +00:00
|
|
|
|
|
|
|
vector<ExportedFile>::const_iterator it = files.begin();
|
|
|
|
vector<ExportedFile>::const_iterator const en = files.end();
|
|
|
|
for (; it != en && status != CANCEL; ++it) {
|
2008-01-14 14:09:20 +00:00
|
|
|
string const fmt = formats.getFormatFromFile(it->sourceName);
|
|
|
|
status = copyFile(fmt, it->sourceName,
|
|
|
|
makeAbsPath(it->exportName, dest),
|
|
|
|
it->exportName, status == FORCE);
|
|
|
|
}
|
2010-01-21 18:12:23 +00:00
|
|
|
|
2008-01-14 14:09:20 +00:00
|
|
|
if (status == CANCEL) {
|
|
|
|
message(_("Document export cancelled."));
|
|
|
|
} else if (tmp_result_file.exists()) {
|
|
|
|
// Finally copy the main file
|
2010-04-20 16:49:49 +00:00
|
|
|
use_force = use_gui ? lyxrc.export_overwrite != NO_FILES
|
|
|
|
: force_overwrite != NO_FILES;
|
|
|
|
if (status == SUCCESS && use_force)
|
2010-04-19 23:53:23 +00:00
|
|
|
status = FORCE;
|
2008-01-14 14:09:20 +00:00
|
|
|
status = copyFile(format, tmp_result_file,
|
|
|
|
FileName(result_file), result_file,
|
|
|
|
status == FORCE);
|
|
|
|
message(bformat(_("Document exported as %1$s "
|
|
|
|
"to file `%2$s'"),
|
|
|
|
formats.prettyName(format),
|
|
|
|
makeDisplayPath(result_file)));
|
|
|
|
} else {
|
|
|
|
// This must be a dummy converter like fax (bug 1888)
|
|
|
|
message(bformat(_("Document exported as %1$s"),
|
|
|
|
formats.prettyName(format)));
|
2007-10-20 10:51:13 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return true;
|
|
|
|
}
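

// Convenience wrapper for the four-argument doExport(). When includeall
// is true, the document is first exported with all included children
// (omitting \includeonly) before the regular export is run.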
bool Buffer::doExport(string const & format, bool put_in_tempdir,
	bool includeall) const
{
	string result_file;
	// (1) export with all included children (omit \includeonly)
	if (includeall && !doExport(format, put_in_tempdir, true, result_file))
		return false;
	// (2) export with included children only
	return doExport(format, put_in_tempdir, false, result_file);
}
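

// Export the document to the given format in the temporary directory
// and open the result with the viewer registered for that format.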
bool Buffer::preview(string const & format, bool includeall) const
{
	MarkAsExporting exporting(this);
	string result_file;
	// (1) export with all included children (omit \includeonly)
	if (includeall && !doExport(format, true, true))
		return false;
	// (2) export with included children only
	if (!doExport(format, true, false, result_file))
		return false;
	return formats.view(*this, FileName(result_file), format);
}
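

// True if the given format is reachable by conversion from at least one
// of this buffer's backends.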
bool Buffer::isExportable(string const & format) const
{
	vector<string> backs = backends();
	for (vector<string>::const_iterator it = backs.begin();
	     it != backs.end(); ++it)
		if (theConverters().isReachable(*it, format))
			return true;
	return false;
}
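

// Collect all formats reachable by conversion from any of this buffer's
// backends (see backends() below), optionally restricted to viewable formats.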
vector<Format const *> Buffer::exportableFormats(bool only_viewable) const
{
	vector<string> const backs = backends();
	vector<Format const *> result =
		theConverters().getReachable(backs[0], only_viewable, true);
	for (vector<string>::const_iterator it = backs.begin() + 1;
	     it != backs.end(); ++it) {
		vector<Format const *> r =
			theConverters().getReachable(*it, only_viewable, false);
		result.insert(result.end(), r.begin(), r.end());
	}
	return result;
}
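

// The native formats this document can act as a conversion source for.
// For a LaTeX-based class this typically yields
//   latex, pdflatex, xhtml, text, lyx
// (see the hardcoded list below; the FIXME asks for a flag instead).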
vector<string> Buffer::backends() const
{
	vector<string> v;
	v.push_back(bufferFormat());
	// FIXME: Don't hardcode format names here, but use a flag
	if (v.back() == "latex")
		v.push_back("pdflatex");
	v.push_back("xhtml");
	v.push_back("text");
	v.push_back("lyx");
	return v;
}
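

// Try to retrieve a missing file from version control. Returns ReadSuccess
// only if the file could be checked out and is now readable.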
Buffer::ReadStatus Buffer::readFromVC(FileName const & fn)
{
	bool const found = LyXVC::file_not_found_hook(fn);
	if (!found)
		return ReadFileNotFound;
	if (!fn.isReadableFile())
		return ReadVCError;
	return ReadSuccess;
}
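

// Check for an emergency save that is newer than the document itself and,
// if one exists, ask the user whether to recover it, load the original,
// or cancel. Returns ReadFileNotFound when no newer emergency file exists.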
Buffer::ReadStatus Buffer::readEmergency(FileName const & fn)
{
	FileName const emergencyFile = getEmergencyFileNameFor(fn);
	if (!emergencyFile.exists()
		  || emergencyFile.lastModified() <= fn.lastModified())
		return ReadFileNotFound;

	docstring const file = makeDisplayPath(fn.absFileName(), 20);
	docstring const text = bformat(_("An emergency save of the document "
		"%1$s exists.\n\nRecover emergency save?"), file);

	int const load_emerg = Alert::prompt(_("Load emergency save?"), text,
		0, 2, _("&Recover"), _("&Load Original"), _("&Cancel"));

	switch (load_emerg)
	{
	case 0: {
		docstring str;
		ReadStatus const ret_rf = readFile(emergencyFile);
		bool const success = (ret_rf == ReadSuccess);
		if (success) {
			saveCheckSum(fn);
			markDirty();
			str = _("Document was successfully recovered.");
		} else
			str = _("Document was NOT successfully recovered.");
		str += "\n\n" + bformat(_("Remove emergency file now?\n(%1$s)"),
			makeDisplayPath(emergencyFile.absFileName()));

		int const del_emerg =
			Alert::prompt(_("Delete emergency file?"), str, 1, 1,
				_("&Remove"), _("&Keep"));
		if (del_emerg == 0) {
			emergencyFile.removeFile();
			if (success)
				Alert::warning(_("Emergency file deleted"),
					_("Do not forget to save your file now!"), true);
		}
		return success ? ReadSuccess : ReadEmergencyFailure;
	}

	case 1: {
		int const del_emerg =
			Alert::prompt(_("Delete emergency file?"),
				_("Remove emergency file now?"), 1, 1,
				_("&Remove"), _("&Keep"));
		if (del_emerg == 0)
			emergencyFile.removeFile();
		return ReadOriginal;
	}

	default:
		break;
	}
	return ReadCancel;
}
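

// Same procedure for the autosave file: if a newer autosave exists, offer
// to load it instead of the original. Loading the autosave marks the
// buffer dirty, since its contents have not been saved to the real file.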
Buffer::ReadStatus Buffer::readAutosave(FileName const & fn)
{
	// Now check if autosave file is newer.
	FileName const autosaveFile = getAutosaveFileNameFor(fn);
	if (!autosaveFile.exists()
		  || autosaveFile.lastModified() <= fn.lastModified())
		return ReadFileNotFound;

	docstring const file = makeDisplayPath(fn.absFileName(), 20);
	docstring const text = bformat(_("The backup of the document %1$s "
		"is newer.\n\nLoad the backup instead?"), file);
	int const ret = Alert::prompt(_("Load backup?"), text, 0, 2,
		_("&Load backup"), _("Load &original"), _("&Cancel"));

	switch (ret)
	{
	case 0: {
		ReadStatus const ret_rf = readFile(autosaveFile);
		// the file is not saved if we load the autosave file.
		if (ret_rf == ReadSuccess) {
			markDirty();
			saveCheckSum(fn);
			return ReadSuccess;
		}
		return ReadAutosaveFailure;
	}
	case 1:
		// Here we delete the autosave
		autosaveFile.removeFile();
		return ReadOriginal;
	default:
		break;
	}
	return ReadCancel;
}
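

// Top-level entry point for opening a document: try version control if the
// file is unreadable, then the emergency save, then the autosave, and
// finally the file itself. Roughly:
//
//   loadLyXFile(fn)
//     -> readFromVC(fn)      (only if fn is not readable)
//     -> readEmergency(fn)   (ReadSuccess/ReadCancel stop here)
//     -> readAutosave(fn)    (ReadSuccess/ReadCancel stop here)
//     -> readFile(fn)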
Buffer::ReadStatus Buffer::loadLyXFile(FileName const & fn)
{
	if (!fn.isReadableFile()) {
		ReadStatus const ret_rvc = readFromVC(fn);
		if (ret_rvc != ReadSuccess)
			return ret_rvc;
	}

	ReadStatus const ret_re = readEmergency(fn);
	if (ret_re == ReadSuccess || ret_re == ReadCancel)
		return ret_re;

	ReadStatus const ret_ra = readAutosave(fn);
	if (ret_ra == ReadSuccess || ret_ra == ReadCancel)
		return ret_ra;

	return readFile(fn);
}
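

// Translate TeX error rows into ErrorItems carrying paragraph ids and
// positions, using the texrow information collected during export.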
void Buffer::bufferErrors(TeXErrors const & terr, ErrorList & errorList) const
{
	TeXErrors::Errors::const_iterator cit = terr.begin();
	TeXErrors::Errors::const_iterator end = terr.end();

	for (; cit != end; ++cit) {
		int id_start = -1;
		int pos_start = -1;
		int errorRow = cit->error_in_line;
		bool found = d->texrow.getIdFromRow(errorRow, id_start,
			pos_start);
		int id_end = -1;
		int pos_end = -1;
		do {
			++errorRow;
			found = d->texrow.getIdFromRow(errorRow, id_end, pos_end);
		} while (found && id_start == id_end && pos_start == pos_end);

		errorList.push_back(ErrorItem(cit->error_desc,
			cit->error_text, id_start, pos_start, pos_end));
	}
}


void Buffer::setBuffersForInsets() const
{
	inset().setBuffer(const_cast<Buffer &>(*this));
}
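

// Recompute buffer-wide information (labels, counters, macros, TOC).
// When called with UpdateMaster on a child document, the update is first
// delegated to the master; the static bufToUpdate set below keeps track of
// children the master did not reach via an InsetInclude.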
void Buffer::updateBuffer(UpdateScope scope, UpdateType utype) const
{
	// Use the master text class also for child documents
	Buffer const * const master = masterBuffer();
	DocumentClass const & textclass = master->params().documentClass();

	// do this only if we are the top-level Buffer
	if (master == this)
		checkBibInfoCache();

	// Keep the prospective child buffers in this set. If the call from the
	// master comes back we can see which of them were actually seen (i.e.
	// via an InsetInclude). The remaining ones in the set still need to be
	// updated.
	static std::set<Buffer const *> bufToUpdate;
	if (scope == UpdateMaster) {
		// If this is a child document start with the master
		if (master != this) {
			bufToUpdate.insert(this);
			master->updateBuffer(UpdateMaster, utype);
			// Do this here in case the master has no GUI associated with it.
			// Then the TocModel is not updated and TocModel::toc_ is invalid
			// (bug 5699).
			if (!master->d->gui_)
				structureChanged();

			// was this buffer referenced from the master (i.e. not in
			// bufToUpdate anymore)?
			if (bufToUpdate.find(this) == bufToUpdate.end())
				return;
		}

		// start over the counters in the master
		textclass.counters().reset();
	}

	// update will be done below for this buffer
	bufToUpdate.erase(this);

	// update all caches
	clearReferenceCache();
	updateMacros();

	Buffer & cbuf = const_cast<Buffer &>(*this);

	LASSERT(!text().paragraphs().empty(), /**/);

	// do the real work
	ParIterator parit = cbuf.par_iterator_begin();
	updateBuffer(parit, utype);

	if (master != this)
		// TocBackend update will be done later.
		return;

	cbuf.tocBackend().update();
	if (scope == UpdateMaster)
		cbuf.structureChanged();
}
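

// Cumulative paragraph depth of the given position, counting only text
// (non-math) slices; the outermost inset itself does not count.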
static depth_type getDepth(DocIterator const & it)
{
	depth_type depth = 0;
	for (size_t i = 0 ; i < it.depth() ; ++i)
		if (!it[i].inset().inMathed())
			depth += it[i].paragraph().getDepth() + 1;
	// remove 1 since the outer inset does not count
	return depth - 1;
}
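

// Determine the item depth of an itemize/enumerate paragraph by walking
// backwards to the closest previous paragraph of the same label type that
// is not nested more deeply.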
static depth_type getItemDepth(ParIterator const & it)
{
	Paragraph const & par = *it;
	LabelType const labeltype = par.layout().labeltype;

	if (labeltype != LABEL_ENUMERATE && labeltype != LABEL_ITEMIZE)
		return 0;

	// this will hold the lowest depth encountered up to now.
	depth_type min_depth = getDepth(it);
	ParIterator prev_it = it;
	while (true) {
		if (prev_it.pit())
			--prev_it.top().pit();
		else {
			// start of nested inset: go to outer par
			prev_it.pop_back();
			if (prev_it.empty()) {
				// start of document: nothing to do
				return 0;
			}
		}

		// We search for the first paragraph with the same label
		// that is not more deeply nested.
		Paragraph & prev_par = *prev_it;
		depth_type const prev_depth = getDepth(prev_it);
		if (labeltype == prev_par.layout().labeltype) {
			if (prev_depth < min_depth)
				return prev_par.itemdepth + 1;
			if (prev_depth == min_depth)
				return prev_par.itemdepth;
		}
		min_depth = min(min_depth, prev_depth);
		// small optimization: if we are at depth 0, we won't
		// find anything else
		if (prev_depth == 0)
			return 0;
	}
}
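

// An enumeration counter needs to be reset when the closest previous
// paragraph at the same or a lower depth is not itself an enumeration.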
static bool needEnumCounterReset(ParIterator const & it)
{
	Paragraph const & par = *it;
	LASSERT(par.layout().labeltype == LABEL_ENUMERATE, /**/);
	depth_type const cur_depth = par.getDepth();
	ParIterator prev_it = it;
	while (prev_it.pit()) {
		--prev_it.top().pit();
		Paragraph const & prev_par = *prev_it;
		if (prev_par.getDepth() <= cur_depth)
			return prev_par.layout().labeltype != LABEL_ENUMERATE;
	}
	// start of nested inset: reset
	return true;
}


// Set the label of a paragraph. This includes the counters.
void Buffer::Impl::setLabel(ParIterator & it, UpdateType utype) const
{
	BufferParams const & bp = owner_->masterBuffer()->params();
	DocumentClass const & textclass = bp.documentClass();
	Paragraph & par = it.paragraph();
	Layout const & layout = par.layout();
	Counters & counters = textclass.counters();

	if (par.params().startOfAppendix()) {
		// FIXME: only the counter corresponding to toplevel
		// sectioning should be reset
		counters.reset();
		counters.appendix(true);
	}
	par.params().appendix(counters.appendix());

	// Compute the item depth of the paragraph
	par.itemdepth = getItemDepth(it);

	if (layout.margintype == MARGIN_MANUAL
	    || layout.latextype == LATEX_BIB_ENVIRONMENT) {
		if (par.params().labelWidthString().empty())
			par.params().labelWidthString(par.expandLabel(layout, bp));
	} else {
		par.params().labelWidthString(docstring());
	}

	switch (layout.labeltype) {
	case LABEL_COUNTER:
		if (layout.toclevel <= bp.secnumdepth
		    && (layout.latextype != LATEX_ENVIRONMENT
			|| it.text()->isFirstInSequence(it.pit()))) {
			counters.step(layout.counter, utype);
			par.params().labelString(
				par.expandLabel(layout, bp));
		} else
			par.params().labelString(docstring());
		break;

	case LABEL_ITEMIZE: {
		// At some point we should do something more clever here, like:
		//   par.params().labelString(
		//       bp.user_defined_bullet(par.itemdepth).getText());
		// for now, use a simple hardcoded label
		docstring itemlabel;
		switch (par.itemdepth) {
		case 0:
			itemlabel = char_type(0x2022);
			break;
		case 1:
			itemlabel = char_type(0x2013);
			break;
		case 2:
			itemlabel = char_type(0x2217);
			break;
		case 3:
			itemlabel = char_type(0x2219); // or 0x00b7
			break;
		}
		par.params().labelString(itemlabel);
		break;
	}

	case LABEL_ENUMERATE: {
		docstring enumcounter = layout.counter.empty() ?
			from_ascii("enum") : layout.counter;

		switch (par.itemdepth) {
		case 2:
			enumcounter += 'i';
		case 1:
			enumcounter += 'i';
		case 0:
			enumcounter += 'i';
			break;
		case 3:
			enumcounter += "iv";
			break;
		default:
			// not a valid enumdepth...
			break;
		}

		// Maybe we have to reset the enumeration counter.
		if (needEnumCounterReset(it))
			counters.reset(enumcounter);
		counters.step(enumcounter, utype);

		string const & lang = par.getParLanguage(bp)->code();
		par.params().labelString(counters.theCounter(enumcounter, lang));

		break;
	}

	case LABEL_SENSITIVE: {
		string const & type = counters.current_float();
		docstring full_label;
		if (type.empty())
			full_label = owner_->B_("Senseless!!! ");
		else {
			docstring name = owner_->B_(textclass.floats().getType(type).name());
			if (counters.hasCounter(from_utf8(type))) {
				string const & lang = par.getParLanguage(bp)->code();
				counters.step(from_utf8(type), utype);
				full_label = bformat(from_ascii("%1$s %2$s:"),
					name,
					counters.theCounter(from_utf8(type), lang));
			} else
				full_label = bformat(from_ascii("%1$s #:"), name);
		}
		par.params().labelString(full_label);
		break;
	}

	case LABEL_NO_LABEL:
		par.params().labelString(docstring());
		break;

	case LABEL_MANUAL:
	case LABEL_TOP_ENVIRONMENT:
	case LABEL_CENTERED_TOP_ENVIRONMENT:
	case LABEL_STATIC:
	case LABEL_BIBLIO:
		par.params().labelString(par.expandLabel(layout, bp));
		break;
	}
}
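

// Walk all paragraphs of this buffer: clamp nesting depths, set labels and
// counters, and recurse into each inset so that nested content is updated
// as well.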
void Buffer::updateBuffer(ParIterator & parit, UpdateType utype) const
{
	LASSERT(parit.pit() == 0, /**/);

	// Set the position of the text in the buffer to be able
	// to resolve macros in it.
	parit.text()->setMacrocontextPosition(parit);

	depth_type maxdepth = 0;
	pit_type const lastpit = parit.lastpit();
	for ( ; parit.pit() <= lastpit ; ++parit.pit()) {
		// reduce depth if necessary
		parit->params().depth(min(parit->params().depth(), maxdepth));
		maxdepth = parit->getMaxDepthAfter();

		if (utype == OutputUpdate) {
			// track the active counters
			// we have to do this for the master buffer, since the local
			// buffer isn't tracking anything.
			masterBuffer()->params().documentClass().counters().
					setActiveLayout(parit->layout());
		}

		// set the counter for this paragraph
		d->setLabel(parit, utype);

		// now the insets
		InsetList::const_iterator iit = parit->insetList().begin();
		InsetList::const_iterator end = parit->insetList().end();
		for (; iit != end; ++iit) {
			parit.pos() = iit->pos;
			iit->inset->updateBuffer(parit, utype);
		}
	}
}
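

// Continue spell checking from 'from'. On the first misspelled word the
// iterators delimit that word, word_lang and suggestions are filled in, and
// the number of words passed over so far is returned as progress.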
int Buffer::spellCheck(DocIterator & from, DocIterator & to,
	WordLangTuple & word_lang, docstring_list & suggestions) const
{
	int progress = 0;
	WordLangTuple wl;
	suggestions.clear();
	word_lang = WordLangTuple();
	// OK, we start from here.
	DocIterator const end = doc_iterator_end(this);
	for (; from != end; from.forwardPos()) {
		// We are only interested in text so remove the math CursorSlice.
		while (from.inMathed()) {
			from.pop_back();
			from.pos()++;
		}
		// If from is at the end of the document (which is possible
		// when leaving the mathed) LyX will crash later.
		if (from == end)
			break;
		to = from;
		from.paragraph().spellCheck();
		SpellChecker::Result res = from.paragraph().spellCheck(from.pos(), to.pos(), wl, suggestions);
		if (SpellChecker::misspelled(res)) {
			word_lang = wl;
			break;
		}

		// Do not increase progress when from == to, otherwise the word
		// count will be wrong.
		if (from != to) {
			from = to;
			++progress;
		}
	}
	return progress;
}
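

// Re-read the document from disk, discarding the current buffer contents.
// The autosave file is removed first (bug 6587), and the checksum, window
// titles and previews are refreshed after a successful reload.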
bool Buffer::reload()
{
	setBusy(true);
	// cf. bug 6587
	removeAutosaveFile();
	// e.g., read-only status could have changed due to version control
	d->filename.refresh();
	docstring const disp_fn = makeDisplayPath(d->filename.absFileName());

	bool const success = (loadLyXFile(d->filename) == ReadSuccess);
	if (success) {
		updateBuffer();
		changed(true);
		updateTitles();
		markClean();
		saveCheckSum(d->filename);
		message(bformat(_("Document %1$s reloaded."), disp_fn));
	} else {
		message(bformat(_("Could not reload document %1$s."), disp_fn));
	}
	setBusy(false);
	thePreviews().removeLoader(*this);
	if (graphics::Previews::status() != LyXRC::PREVIEW_OFF)
		thePreviews().generateBufferPreviews(*this);
	errors("Parse");
	return success;
}


// FIXME We could do better here, but it is complicated. What would be
// nice is to offer either (a) to save the child buffer to an appropriate
// location, so that it would "move with the master", or else (b) to update
// the InsetInclude so that it points to the same file. But (a) is a bit
// complicated, because the code for this lives in GuiView.
void Buffer::checkChildBuffers()
{
	Impl::BufferPositionMap::iterator it = d->children_positions.begin();
	Impl::BufferPositionMap::iterator const en = d->children_positions.end();
	for (; it != en; ++it) {
		DocIterator dit = it->second;
		Buffer * cbuf = const_cast<Buffer *>(it->first);
		if (!cbuf || !theBufferList().isLoaded(cbuf))
			continue;
		Inset * inset = dit.nextInset();
		LASSERT(inset && inset->lyxCode() == INCLUDE_CODE, continue);
		InsetInclude * inset_inc = static_cast<InsetInclude *>(inset);
		docstring const & incfile = inset_inc->getParam("filename");
		string oldloc = cbuf->absFileName();
		string newloc = makeAbsPath(to_utf8(incfile),
				onlyPath(absFileName())).absFileName();
		if (oldloc == newloc)
			continue;
		// the location of the child file is incorrect.
		Alert::warning(_("Included File Invalid"),
				bformat(_("Saving this document to a new location has made the file:\n"
				"  %1$s\n"
				"inaccessible. You will need to update the included filename."),
				from_utf8(oldloc)));
		cbuf->setParent(0);
		inset_inc->setChildBuffer(0);
	}
	// invalidate cache of children
	d->children_positions.clear();
	d->position_to_children.clear();
}


} // namespace lyx