/**
 * \file InsetBibtex.cpp
 * This file is part of LyX, the document processor.
 * Licence details can be found in the file COPYING.
 *
 * \author Alejandro Aguilar Sierra
 * \author Richard Heck (BibTeX parser improvements)
 *
 * Full author contact details are available in file CREDITS.
 */
|
1999-09-27 18:44:28 +00:00
|
|
|
|
2003-06-02 10:03:27 +00:00
|
|
|
#include <config.h>
|
1999-09-27 18:44:28 +00:00
|
|
|
|
2007-04-25 01:24:38 +00:00
|
|
|
#include "InsetBibtex.h"
|
2003-09-05 09:01:27 +00:00
|
|
|
|
2007-04-26 04:41:58 +00:00
|
|
|
#include "Buffer.h"
|
|
|
|
#include "BufferParams.h"
|
|
|
|
#include "DispatchResult.h"
|
2008-01-08 18:55:34 +00:00
|
|
|
#include "EmbeddedFiles.h"
|
2007-04-26 04:41:58 +00:00
|
|
|
#include "Encoding.h"
|
|
|
|
#include "FuncRequest.h"
|
2005-07-14 12:53:12 +00:00
|
|
|
#include "LaTeXFeatures.h"
|
2007-04-26 04:41:58 +00:00
|
|
|
#include "MetricsInfo.h"
|
|
|
|
#include "OutputParams.h"
|
2007-11-07 23:25:08 +00:00
|
|
|
#include "TextClass.h"
|
2001-12-28 13:26:54 +00:00
|
|
|
|
2007-04-28 20:44:46 +00:00
|
|
|
#include "frontends/alert.h"
|
2005-07-14 12:53:12 +00:00
|
|
|
|
2008-02-18 07:14:42 +00:00
|
|
|
#include "support/debug.h"
|
2007-10-16 18:53:10 +00:00
|
|
|
#include "support/ExceptionMessage.h"
|
2007-11-01 22:17:22 +00:00
|
|
|
#include "support/docstream.h"
|
1999-10-02 16:21:10 +00:00
|
|
|
#include "support/filetools.h"
|
2008-02-18 07:14:42 +00:00
|
|
|
#include "support/gettext.h"
|
2001-07-29 17:39:01 +00:00
|
|
|
#include "support/lstrings.h"
|
2003-09-05 09:01:27 +00:00
|
|
|
#include "support/os.h"
|
Rename files in src/support, step one.
src/support/package.h src/support/Package.h Package
src/support/package.C.in src/support/Package.C.in Package
src/support/path.h src/support/Path.h Path
src/support/fs_extras.h src/support/fs_extras.h NOCLASSES
src/support/RandomAccessList.h src/support/RandomAccessList.h RandomAccessList
src/support/lyxmanip.h src/support/lyxmanip.h NOCLASSES
src/support/rename.C src/support/rename.cpp NOCLASSES
src/support/abort.C src/support/abort.cpp NOCLASSES
src/support/lyxlib.h src/support/lyxlib.h NOCLASSES
src/support/ExceptionMessage.h src/support/ExceptionMessage.h ExceptionMessage
src/support/copy.C src/support/copy.cpp NOCLASSES
src/support/limited_stack.h src/support/limited_stack.h limited_stack
src/support/filefilterlist.C src/support/FileFilterList.cpp ['FileFilterList', 'Filter']
src/support/cow_ptr.h src/support/cow_ptr.h cow_ptr
src/support/os_unix.C src/support/os_unix.cpp NOCLASSES
src/support/socktools.h src/support/socktools.h NOCLASSES
src/support/forkedcontr.h src/support/ForkedcallsController.h ForkedcallsController
src/support/os.h src/support/os.h NOCLASSES
src/support/FileMonitor.h src/support/FileMonitor.h FileMonitor
src/support/copied_ptr.h src/support/copied_ptr.h copied_ptr
src/support/translator.h src/support/Translator.h Translator
src/support/filetools.C src/support/filetools.cpp NOCLASSES
src/support/unlink.C src/support/unlink.cpp NOCLASSES
src/support/os_win32.C src/support/os_win32.cpp GetFolderPath
src/support/lstrings.C src/support/lstrings.cpp NOCLASSES
src/support/qstring_helpers.C src/support/qstring_helpers.cpp NOCLASSES
src/support/getcwd.C src/support/getcwd.cpp NOCLASSES
src/support/systemcall.C src/support/Systemcall.cpp Systemcall
src/support/lyxalgo.h src/support/lyxalgo.h NOCLASSES
src/support/filefilterlist.h src/support/FileFilterList.h ['FileFilterList', 'Filter']
src/support/unicode.C src/support/unicode.cpp IconvProcessor
src/support/userinfo.C src/support/userinfo.cpp NOCLASSES
src/support/lyxtime.C src/support/lyxtime.cpp NOCLASSES
src/support/kill.C src/support/kill.cpp NOCLASSES
src/support/docstring.C src/support/docstring.cpp to_local8bit_failure
src/support/os_cygwin.C src/support/os_cygwin.cpp NOCLASSES
src/support/lyxsum.C src/support/lyxsum.cpp NOCLASSES
src/support/environment.C src/support/environment.cpp NOCLASSES
src/support/filetools.h src/support/filetools.h NOCLASSES
src/support/textutils.C src/support/textutils.cpp NOCLASSES
src/support/mkdir.C src/support/mkdir.cpp NOCLASSES
src/support/forkedcall.C src/support/Forkedcall.cpp ['ForkedProcess', 'Forkedcall']
src/support/tempname.C src/support/tempname.cpp NOCLASSES
src/support/os_win32.h src/support/os_win32.h GetFolderPath
src/support/types.h src/support/types.h NOCLASSES
src/support/lstrings.h src/support/lstrings.h NOCLASSES
src/support/forkedcallqueue.C src/support/ForkedCallQueue.cpp ForkedCallQueue
src/support/qstring_helpers.h src/support/qstring_helpers.h NOCLASSES
src/support/convert.C src/support/convert.cpp NOCLASSES
src/support/filename.C src/support/FileName.cpp ['FileName', 'DocFileName']
src/support/tests/convert.C src/support/tests/convert.cpp NOCLASSES
src/support/tests/filetools.C src/support/tests/filetools.cpp NOCLASSES
src/support/tests/lstrings.C src/support/tests/lstrings.cpp NOCLASSES
src/support/tests/boost.C src/support/tests/boost.cpp NOCLASSES
src/support/docstream.C src/support/docstream.cpp ['iconv_codecvt_facet_exception', 'idocfstream', 'odocfstream']
src/support/std_istream.h src/support/std_istream.h NOCLASSES
src/support/systemcall.h src/support/Systemcall.h Systemcall
src/support/chdir.C src/support/chdir.cpp NOCLASSES
src/support/std_ostream.h src/support/std_ostream.h NOCLASSES
src/support/unicode.h src/support/unicode.h IconvProcessor
src/support/path.C src/support/Path.cpp Path
src/support/fs_extras.C src/support/fs_extras.cpp NOCLASSES
src/support/userinfo.h src/support/userinfo.h NOCLASSES
src/support/lyxtime.h src/support/lyxtime.h NOCLASSES
src/support/docstring.h src/support/docstring.h to_local8bit_failure
src/support/debugstream.h src/support/debugstream.h basic_debugstream
src/support/environment.h src/support/environment.h NOCLASSES
src/support/textutils.h src/support/textutils.h NOCLASSES
src/support/forkedcall.h src/support/Forkedcall.h ['ForkedProcess', 'Forkedcall']
src/support/socktools.C src/support/socktools.cpp NOCLASSES
src/support/forkedcallqueue.h src/support/ForkedCallQueue.h ForkedCallQueue
src/support/forkedcontr.C src/support/ForkedcallsController.cpp ForkedcallsController
src/support/os.C src/support/os.cpp NOCLASSES
src/support/convert.h src/support/convert.h NOCLASSES
src/support/filename.h src/support/FileName.h ['FileName', 'DocFileName']
src/support/docstream.h src/support/docstream.h ['iconv_codecvt_facet_exception', 'idocfstream', 'odocfstream']
src/support/FileMonitor.C src/support/FileMonitor.cpp FileMonitor
git-svn-id: svn://svn.lyx.org/lyx/lyx-devel/trunk@18024 a592a061-630c-0410-9148-cb99ea01b6c8
2007-04-26 05:12:52 +00:00
|
|
|
#include "support/Path.h"
|
2007-04-04 20:06:34 +00:00
|
|
|
#include "support/textutils.h"
|
2001-12-28 13:26:54 +00:00
|
|
|
|
2005-07-14 12:53:12 +00:00
|
|
|
#include <boost/tokenizer.hpp>
|
2008-02-07 17:04:06 +00:00
|
|
|
#include <limits>
|
2005-07-14 12:53:12 +00:00
|
|
|
|
2007-12-12 10:16:00 +00:00
|
|
|
using namespace std;
|
2007-12-12 18:57:56 +00:00
|
|
|
using namespace lyx::support;
|
2006-10-21 00:16:43 +00:00
|
|
|
|
|
|
|
namespace lyx {
|
|
|
|
|
|
|
|
namespace Alert = frontend::Alert;
|
|
|
|
namespace os = support::os;
|
2003-09-16 11:03:20 +00:00
|
|
|
|
2003-02-18 12:36:02 +00:00
|
|
|
|
2003-09-03 17:40:58 +00:00
|
|
|
// Construct a BibTeX inset from the given command parameters.
// "bibtex" is the mailer name used for dialog communication.
InsetBibtex::InsetBibtex(InsetCommandParams const & p)
	: InsetCommand(p, "bibtex")
{}
|
|
|
|
|
1999-09-27 18:44:28 +00:00
|
|
|
|
2008-02-23 22:01:02 +00:00
|
|
|
// Describe the command parameters accepted by a bibtex inset.
// The list is the same for every inset of this kind, so it is built
// once and cached in a function-local static.
ParamInfo const & InsetBibtex::findInfo(string const & /* cmdName */)
{
	static ParamInfo param_info_;
	if (param_info_.empty()) {
		// optional parameters
		param_info_.add("options", true);
		param_info_.add("btprint", true);
		// required parameters
		param_info_.add("bibfiles", false);
		param_info_.add("embed", false);
	}
	return param_info_;
}
|
|
|
|
|
|
|
|
|
2007-08-30 18:03:17 +00:00
|
|
|
// Covariant copy used by the inset cloning machinery.
Inset * InsetBibtex::clone() const
{
	return new InsetBibtex(*this);
}
|
|
|
|
|
|
|
|
|
2007-04-26 14:56:30 +00:00
|
|
|
// Handle LFUNs directed at this inset. Only LFUN_INSET_MODIFY is
// treated specially (applying new parameters coming from the dialog);
// everything else is forwarded to InsetCommand.
void InsetBibtex::doDispatch(Cursor & cur, FuncRequest & cmd)
{
	switch (cmd.action) {

	case LFUN_INSET_MODIFY: {
		InsetCommandParams p(BIBTEX_CODE);
		try {
			// Parse the dialog contents; bail out quietly if they do
			// not describe a bibtex inset.
			if (!InsetCommandMailer::string2params("bibtex",
					to_utf8(cmd.argument()), p)) {
				cur.noUpdate();
				break;
			}
		} catch (ExceptionMessage const & message) {
			if (message.type_ == WarningException) {
				Alert::warning(message.title_, message.details_);
				cur.noUpdate();
			} else
				throw message;
			break;
		}

		// Remember the current parameters so they can be restored if
		// the new ones turn out to be unusable.
		InsetCommandParams orig = params();

		// The dialog returns "embed" as a comma-separated list of
		// "true"/"false" flags, one per database. Translate each
		// "true" into the in-zip name of the corresponding file.
		string curBib;
		string curEmbed;

		string newBibfiles;
		string newEmbedStatus;

		string bibfiles = to_utf8(p["bibfiles"]);
		string embedStatus = to_utf8(p["embed"]);

		bibfiles = split(bibfiles, curBib, ',');
		embedStatus = split(embedStatus, curEmbed, ',');
		while (!curBib.empty()) {
			EmbeddedFile file(changeExtension(curBib, "bib"), cur.buffer().filePath());
			if (!newBibfiles.empty())
				newBibfiles += ",";
			newBibfiles += curBib;
			if (!newEmbedStatus.empty())
				newEmbedStatus += ",";
			if (curEmbed == "true")
				newEmbedStatus += file.inzipName();
			// advance to the next database/status pair
			bibfiles = split(bibfiles, curBib, ',');
			embedStatus = split(embedStatus, curEmbed, ',');
		}
		LYXERR(Debug::FILES, "Update parameters from " << p["bibfiles"]
			<< " " << p["embed"] << " to " << newBibfiles << " "
			<< newEmbedStatus);
		p["bibfiles"] = from_utf8(newBibfiles);
		p["embed"] = from_utf8(newEmbedStatus);

		setParams(p);
		try {
			// test parameters and copy files
			getFiles(cur.buffer());
		} catch (ExceptionMessage const & message) {
			Alert::error(message.title_, message.details_);
			// do not keep the new parameters if an error happened
			setParams(orig);
			break;
		}
		cur.buffer().updateBibfilesCache();
		break;
	}

	default:
		InsetCommand::doDispatch(cur, cmd);
		break;
	}
}
|
|
|
|
|
2003-11-03 19:52:47 +00:00
|
|
|
|
2006-10-11 19:40:50 +00:00
|
|
|
// Fixed label shown in the work area; the buffer is not consulted.
docstring const InsetBibtex::getScreenLabel(Buffer const &) const
{
	return _("BibTeX Generated Bibliography");
}
|
|
|
|
|
|
|
|
|
2004-04-20 12:46:15 +00:00
|
|
|
namespace {
|
|
|
|
|
2007-10-20 10:03:45 +00:00
|
|
|
string normalizeName(Buffer const & buffer, OutputParams const & runparams,
|
2004-04-20 12:46:15 +00:00
|
|
|
string const & name, string const & ext)
|
|
|
|
{
|
2006-12-27 10:56:11 +00:00
|
|
|
string const fname = makeAbsPath(name, buffer.filePath()).absFilename();
|
2007-12-17 16:04:46 +00:00
|
|
|
if (FileName(name).isAbsolute() || !FileName(fname + ext).isReadableFile())
|
2004-04-20 12:46:15 +00:00
|
|
|
return name;
|
2007-10-18 23:03:51 +00:00
|
|
|
if (!runparams.nice)
|
2004-04-20 12:46:15 +00:00
|
|
|
return fname;
|
2007-10-18 23:03:51 +00:00
|
|
|
|
|
|
|
// FIXME UNICODE
|
|
|
|
return to_utf8(makeRelPath(from_utf8(fname),
|
2007-10-20 10:03:45 +00:00
|
|
|
from_utf8(buffer.masterBuffer()->filePath())));
|
2004-04-20 12:46:15 +00:00
|
|
|
}
|
2004-04-29 07:58:52 +00:00
|
|
|
|
2004-04-20 12:46:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2006-10-19 16:51:30 +00:00
|
|
|
int InsetBibtex::latex(Buffer const & buffer, odocstream & os,
|
2003-11-05 12:06:20 +00:00
|
|
|
OutputParams const & runparams) const
|
1999-09-27 18:44:28 +00:00
|
|
|
{
|
2004-03-29 13:17:25 +00:00
|
|
|
// the sequence of the commands:
|
2001-08-20 13:42:29 +00:00
|
|
|
// 1. \bibliographystyle{style}
|
2003-09-03 17:40:58 +00:00
|
|
|
// 2. \addcontentsline{...} - if option bibtotoc set
|
|
|
|
// 3. \bibliography{database}
|
2004-03-29 13:17:25 +00:00
|
|
|
// and with bibtopic:
|
|
|
|
// 1. \bibliographystyle{style}
|
|
|
|
// 2. \begin{btSect}{database}
|
|
|
|
// 3. \btPrint{Cited|NotCited|All}
|
|
|
|
// 4. \end{btSect}
|
2004-04-03 08:37:12 +00:00
|
|
|
|
2005-07-14 12:53:12 +00:00
|
|
|
// Database(s)
|
|
|
|
// If we are processing the LaTeX file in a temp directory then
|
|
|
|
// copy the .bib databases to this temp directory, mangling their
|
|
|
|
// names in the process. Store this mangled name in the list of
|
|
|
|
// all databases.
|
|
|
|
// (We need to do all this because BibTeX *really*, *really*
|
|
|
|
// can't handle "files with spaces" and Windows users tend to
|
|
|
|
// use such filenames.)
|
|
|
|
// Otherwise, store the (maybe absolute) path to the original,
|
|
|
|
// unmangled database name.
|
2008-01-11 03:57:22 +00:00
|
|
|
EmbeddedFileList const bibs = getFiles(buffer);
|
|
|
|
EmbeddedFileList::const_iterator it = bibs.begin();
|
|
|
|
EmbeddedFileList::const_iterator it_end = bibs.end();
|
2006-11-14 19:54:56 +00:00
|
|
|
odocstringstream dbs;
|
2008-01-11 03:57:22 +00:00
|
|
|
for (; it != it_end; ++it) {
|
|
|
|
string utf8input = removeExtension(it->availableFile().absFilename());
|
2005-07-14 12:53:12 +00:00
|
|
|
string database =
|
2007-10-20 10:03:45 +00:00
|
|
|
normalizeName(buffer, runparams, utf8input, ".bib");
|
2006-12-27 10:56:11 +00:00
|
|
|
FileName const try_in_file(makeAbsPath(database + ".bib", buffer.filePath()));
|
2007-11-25 11:18:16 +00:00
|
|
|
bool const not_from_texmf = try_in_file.isReadableFile();
|
2005-07-14 12:53:12 +00:00
|
|
|
|
2006-04-10 16:27:59 +00:00
|
|
|
if (!runparams.inComment && !runparams.dryrun && !runparams.nice &&
|
2006-11-26 21:30:39 +00:00
|
|
|
not_from_texmf) {
|
2005-07-14 12:53:12 +00:00
|
|
|
|
2006-04-01 10:49:58 +00:00
|
|
|
// mangledFilename() needs the extension
|
2006-11-26 21:30:39 +00:00
|
|
|
DocFileName const in_file = DocFileName(try_in_file);
|
|
|
|
database = removeExtension(in_file.mangledFilename());
|
2007-10-20 10:03:45 +00:00
|
|
|
FileName const out_file = makeAbsPath(database + ".bib",
|
|
|
|
buffer.masterBuffer()->temppath());
|
2005-07-14 12:53:12 +00:00
|
|
|
|
2007-12-15 08:25:53 +00:00
|
|
|
bool const success = in_file.copyTo(out_file);
|
2005-07-14 12:53:12 +00:00
|
|
|
if (!success) {
|
|
|
|
lyxerr << "Failed to copy '" << in_file
|
|
|
|
<< "' to '" << out_file << "'"
|
|
|
|
<< endl;
|
|
|
|
}
|
2007-06-26 16:55:42 +00:00
|
|
|
} else if (!runparams.inComment && runparams.nice && not_from_texmf &&
|
|
|
|
!isValidLaTeXFilename(database)) {
|
|
|
|
frontend::Alert::warning(_("Invalid filename"),
|
|
|
|
_("The following filename is likely to cause trouble "
|
|
|
|
"when running the exported file through LaTeX: ") +
|
|
|
|
from_utf8(database));
|
2005-07-14 12:53:12 +00:00
|
|
|
}
|
|
|
|
|
2008-01-11 03:57:22 +00:00
|
|
|
if (it != bibs.begin())
|
2005-07-14 12:53:12 +00:00
|
|
|
dbs << ',';
|
2006-11-14 19:54:56 +00:00
|
|
|
// FIXME UNICODE
|
|
|
|
dbs << from_utf8(latex_path(database));
|
2005-07-14 12:53:12 +00:00
|
|
|
}
|
2006-11-14 19:54:56 +00:00
|
|
|
docstring const db_out = dbs.str();
|
2005-07-14 12:53:12 +00:00
|
|
|
|
|
|
|
// Post this warning only once.
|
|
|
|
static bool warned_about_spaces = false;
|
|
|
|
if (!warned_about_spaces &&
|
2006-10-20 16:12:49 +00:00
|
|
|
runparams.nice && db_out.find(' ') != docstring::npos) {
|
2005-07-14 12:53:12 +00:00
|
|
|
warned_about_spaces = true;
|
|
|
|
|
2006-09-11 08:54:10 +00:00
|
|
|
Alert::warning(_("Export Warning!"),
|
|
|
|
_("There are spaces in the paths to your BibTeX databases.\n"
|
|
|
|
"BibTeX will be unable to find them."));
|
2004-03-29 13:17:25 +00:00
|
|
|
}
|
2003-09-03 17:40:58 +00:00
|
|
|
|
|
|
|
// Style-Options
|
2006-10-21 00:16:43 +00:00
|
|
|
string style = to_utf8(getParam("options")); // maybe empty! and with bibtotoc
|
2003-09-03 17:40:58 +00:00
|
|
|
string bibtotoc;
|
|
|
|
if (prefixIs(style, "bibtotoc")) {
|
|
|
|
bibtotoc = "bibtotoc";
|
2007-10-20 10:03:45 +00:00
|
|
|
if (contains(style, ','))
|
2003-09-03 17:40:58 +00:00
|
|
|
style = split(style, bibtotoc, ',');
|
|
|
|
}
|
2004-04-03 08:37:12 +00:00
|
|
|
|
2004-03-29 13:17:25 +00:00
|
|
|
// line count
|
2005-07-14 12:53:12 +00:00
|
|
|
int nlines = 0;
|
2001-08-20 13:42:29 +00:00
|
|
|
|
2004-03-29 13:17:25 +00:00
|
|
|
if (!style.empty()) {
|
2007-10-20 10:03:45 +00:00
|
|
|
string base = normalizeName(buffer, runparams, style, ".bst");
|
2006-12-27 10:56:11 +00:00
|
|
|
FileName const try_in_file(makeAbsPath(base + ".bst", buffer.filePath()));
|
2007-11-25 11:18:16 +00:00
|
|
|
bool const not_from_texmf = try_in_file.isReadableFile();
|
2006-04-01 10:49:58 +00:00
|
|
|
// If this style does not come from texmf and we are not
|
|
|
|
// exporting to .tex copy it to the tmp directory.
|
|
|
|
// This prevents problems with spaces and 8bit charcaters
|
|
|
|
// in the file name.
|
2006-04-10 16:27:59 +00:00
|
|
|
if (!runparams.inComment && !runparams.dryrun && !runparams.nice &&
|
2006-11-26 21:30:39 +00:00
|
|
|
not_from_texmf) {
|
2006-04-01 10:49:58 +00:00
|
|
|
// use new style name
|
2006-11-26 21:30:39 +00:00
|
|
|
DocFileName const in_file = DocFileName(try_in_file);
|
|
|
|
base = removeExtension(in_file.mangledFilename());
|
2006-12-27 10:56:11 +00:00
|
|
|
FileName const out_file(makeAbsPath(base + ".bst",
|
2007-10-20 10:03:45 +00:00
|
|
|
buffer.masterBuffer()->temppath()));
|
2007-12-15 08:25:53 +00:00
|
|
|
bool const success = in_file.copyTo(out_file);
|
2006-04-01 10:49:58 +00:00
|
|
|
if (!success) {
|
|
|
|
lyxerr << "Failed to copy '" << in_file
|
|
|
|
<< "' to '" << out_file << "'"
|
|
|
|
<< endl;
|
|
|
|
}
|
|
|
|
}
|
2006-10-19 16:51:30 +00:00
|
|
|
// FIXME UNICODE
|
2004-04-20 12:46:15 +00:00
|
|
|
os << "\\bibliographystyle{"
|
2007-10-20 10:03:45 +00:00
|
|
|
<< from_utf8(latex_path(normalizeName(buffer, runparams, base, ".bst")))
|
2004-04-20 12:46:15 +00:00
|
|
|
<< "}\n";
|
2005-07-14 12:53:12 +00:00
|
|
|
nlines += 1;
|
2004-03-29 13:17:25 +00:00
|
|
|
}
|
2004-04-03 08:37:12 +00:00
|
|
|
|
2006-04-01 10:49:58 +00:00
|
|
|
// Post this warning only once.
|
|
|
|
static bool warned_about_bst_spaces = false;
|
|
|
|
if (!warned_about_bst_spaces && runparams.nice && contains(style, ' ')) {
|
|
|
|
warned_about_bst_spaces = true;
|
2006-09-11 08:54:10 +00:00
|
|
|
Alert::warning(_("Export Warning!"),
|
|
|
|
_("There are spaces in the path to your BibTeX style file.\n"
|
|
|
|
"BibTeX will be unable to find it."));
|
2006-04-01 10:49:58 +00:00
|
|
|
}
|
|
|
|
|
2005-07-14 12:53:12 +00:00
|
|
|
if (!db_out.empty() && buffer.params().use_bibtopic){
|
2006-10-20 16:12:49 +00:00
|
|
|
os << "\\begin{btSect}{" << db_out << "}\n";
|
2006-10-19 16:51:30 +00:00
|
|
|
docstring btprint = getParam("btprint");
|
2004-03-29 13:17:25 +00:00
|
|
|
if (btprint.empty())
|
|
|
|
// default
|
2006-10-21 00:16:43 +00:00
|
|
|
btprint = from_ascii("btPrintCited");
|
2004-03-29 13:17:25 +00:00
|
|
|
os << "\\" << btprint << "\n"
|
|
|
|
<< "\\end{btSect}\n";
|
2005-07-14 12:53:12 +00:00
|
|
|
nlines += 3;
|
2001-08-20 13:42:29 +00:00
|
|
|
}
|
|
|
|
|
2003-09-03 17:40:58 +00:00
|
|
|
// bibtotoc-Option
|
2004-03-29 13:17:25 +00:00
|
|
|
if (!bibtotoc.empty() && !buffer.params().use_bibtopic) {
|
2003-09-03 17:40:58 +00:00
|
|
|
// maybe a problem when a textclass has no "art" as
|
|
|
|
// part of its name, because it's than book.
|
|
|
|
// For the "official" lyx-layouts it's no problem to support
|
|
|
|
// all well
|
2008-02-24 14:59:23 +00:00
|
|
|
if (!contains(buffer.params().getTextClass().name(),
|
2003-09-03 17:40:58 +00:00
|
|
|
"art")) {
|
2007-11-07 23:25:08 +00:00
|
|
|
if (buffer.params().sides == OneSide) {
|
2003-09-03 17:40:58 +00:00
|
|
|
// oneside
|
|
|
|
os << "\\clearpage";
|
2001-08-20 13:42:29 +00:00
|
|
|
} else {
|
2003-09-03 17:40:58 +00:00
|
|
|
// twoside
|
|
|
|
os << "\\cleardoublepage";
|
2001-08-20 13:42:29 +00:00
|
|
|
}
|
|
|
|
|
2003-09-03 17:40:58 +00:00
|
|
|
// bookclass
|
|
|
|
os << "\\addcontentsline{toc}{chapter}{\\bibname}";
|
|
|
|
|
2001-08-20 13:42:29 +00:00
|
|
|
} else {
|
|
|
|
// article class
|
2003-09-03 17:40:58 +00:00
|
|
|
os << "\\addcontentsline{toc}{section}{\\refname}";
|
2001-08-20 13:42:29 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2005-07-14 12:53:12 +00:00
|
|
|
if (!db_out.empty() && !buffer.params().use_bibtopic){
|
2007-12-28 16:56:57 +00:00
|
|
|
docstring btprint = getParam("btprint");
|
|
|
|
if (btprint == "btPrintAll") {
|
|
|
|
os << "\\nocite{*}\n";
|
|
|
|
nlines += 1;
|
|
|
|
}
|
2006-10-20 16:12:49 +00:00
|
|
|
os << "\\bibliography{" << db_out << "}\n";
|
2005-07-14 12:53:12 +00:00
|
|
|
nlines += 1;
|
2000-03-02 02:19:43 +00:00
|
|
|
}
|
2004-04-03 08:37:12 +00:00
|
|
|
|
2005-07-14 12:53:12 +00:00
|
|
|
return nlines;
|
2003-09-03 17:28:39 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-01-08 17:02:09 +00:00
|
|
|
// Resolve the "bibfiles" parameter to a list of embedded-file objects,
// one per database. The parallel "embed" parameter decides whether a
// name refers to an embedded file or is looked up via findtexfile().
// Names that cannot be resolved are dropped silently.
EmbeddedFileList const InsetBibtex::getFiles(Buffer const & buffer) const
{
	// resolve relative names against the buffer directory
	FileName path(buffer.filePath());
	PathChanger pathGuard(path);

	EmbeddedFileList files;

	string curBib;
	string curEmbed;
	// FIXME UNICODE
	string bibfiles = to_utf8(getParam("bibfiles"));
	string embedStatus = to_utf8(getParam("embed"));
	bibfiles = split(bibfiles, curBib, ',');
	embedStatus = split(embedStatus, curEmbed, ',');
	while (!curBib.empty()) {
		if (!curEmbed.empty()) {
			EmbeddedFile file(changeExtension(curBib, "bib"), buffer.filePath());
			// If the file structure is correct, this should not fail.
			file.setEmbed(true);
			file.enable(buffer.embedded(), &buffer);
			files.push_back(file);
		} else {
			// this includes the cases when the embed parameter is empty
			FileName const found = findtexfile(changeExtension(curBib, "bib"), "bib");

			// If we didn't find a matching file name just fail silently
			if (!found.empty()) {
				EmbeddedFile efile = EmbeddedFile(found.absFilename(), buffer.filePath());
				efile.setEmbed(false);
				efile.enable(buffer.embedded(), &buffer);
				files.push_back(efile);
			}
		}

		// Get next file name
		bibfiles = split(bibfiles, curBib, ',');
		embedStatus = split(embedStatus, curEmbed, ',');
	}

	return files;
}
|
|
|
|
|
2007-04-04 20:06:34 +00:00
|
|
|
namespace {
|
|
|
|
|
|
|
|
// methods for parsing bibtex files
|
|
|
|
|
|
|
|
typedef map<docstring, docstring> VarMap;
|
|
|
|
|
2007-05-28 22:27:45 +00:00
|
|
|
/// remove whitespace characters, optionally a single comma,
|
2007-04-04 20:06:34 +00:00
|
|
|
/// and further whitespace characters from the stream.
|
|
|
|
/// @return true if a comma was found, false otherwise
|
|
|
|
///
|
|
|
|
bool removeWSAndComma(idocfstream & ifs) {
	char_type ch;

	if (!ifs)
		return false;

	// eat leading whitespace
	do {
		ifs.get(ch);
	} while (ifs && isSpace(ch));

	if (!ifs)
		return false;

	if (ch != ',') {
		// no comma: push the character back for the caller
		ifs.putback(ch);
		return false;
	}

	// eat whitespace following the comma
	do {
		ifs.get(ch);
	} while (ifs && isSpace(ch));

	// leave the first significant character in the stream
	if (ifs)
		ifs.putback(ch);

	return true;
}
|
|
|
|
|
2007-04-23 15:18:01 +00:00
|
|
|
|
|
|
|
/// Case handling used when reading an entry type or key.
enum charCase {
	makeLowerCase, ///< fold each character to lower case
	keepCase       ///< keep characters exactly as read
};
|
|
|
|
|
2007-04-04 20:06:34 +00:00
|
|
|
/// remove whitespace characters, read characer sequence
|
|
|
|
/// not containing whitespace characters or characters in
|
|
|
|
/// delimChars, and remove further whitespace characters.
|
|
|
|
///
|
|
|
|
/// @return true if a string of length > 0 could be read.
|
2007-05-28 22:27:45 +00:00
|
|
|
///
|
|
|
|
bool readTypeOrKey(docstring & val, idocfstream & ifs,
	docstring const & delimChars, docstring const & illegalChars,
	charCase chCase) {

	char_type ch;

	val.clear();

	if (!ifs)
		return false;

	// skip leading whitespace
	do {
		ifs.get(ch);
	} while (ifs && isSpace(ch));

	if (!ifs)
		return false;

	// Read the value. Note that the assignment to legalChar inside the
	// condition only runs when the earlier tests pass, so it records
	// whether the character that stopped the loop was an illegal one.
	bool legalChar = true;
	while (ifs && !isSpace(ch) &&
	       delimChars.find(ch) == docstring::npos &&
	       (legalChar = (illegalChars.find(ch) == docstring::npos))
	      )
	{
		if (chCase == makeLowerCase)
			val += lowercase(ch);
		else
			val += ch;
		ifs.get(ch);
	}

	if (!legalChar) {
		// stopped on an illegal character: leave it in the stream
		ifs.putback(ch);
		return false;
	}

	// skip trailing whitespace
	while (ifs && isSpace(ch)) {
		ifs.get(ch);
	}

	// leave the terminating character for the caller
	if (ifs)
		ifs.putback(ch);

	return val.length() > 0;
}
|
|
|
|
|
|
|
|
/// read subsequent bibtex values that are delimited with a #-character.
|
2007-05-28 22:27:45 +00:00
|
|
|
/// Concatenate all parts and replace names with the associated string in
|
2007-04-04 20:06:34 +00:00
|
|
|
/// the variable strings.
|
|
|
|
/// @return true if reading was successfull (all single parts were delimited
|
|
|
|
/// correctly)
|
|
|
|
bool readValue(docstring & val, idocfstream & ifs, const VarMap & strings) {

	char_type ch;

	val.clear();

	if (!ifs)
		return false;

	do {
		// skip whitespace before this part of the value
		do {
			ifs.get(ch);
		} while (ifs && isSpace(ch));

		if (!ifs)
			return false;

		// decide what kind of field part follows
		if (isDigit(ch)) {

			// a bare integer value
			do {
				val += ch;
				ifs.get(ch);
			} while (ifs && isDigit(ch));

			if (!ifs)
				return false;

		} else if (ch == '"' || ch == '{') {
			// a quoted or braced string; pick the matching end delimiter
			char_type const delim = ch == '"' ? '"' : '}';

			// skip whitespace right after the opening delimiter
			do {
				ifs.get(ch);
			} while (ifs && isSpace(ch));

			if (!ifs)
				return false;

			// We now have the first non-whitespace character.
			// Runs of whitespace are collapsed to a single space,
			// emitted lazily so no trailing space ends the value.
			bool pendingSpace = false;

			// Braces must match inside the delimited text, so the
			// closing delimiter only counts at nesting depth 0.
			int braceDepth = 0;

			while (ifs && (braceDepth > 0 || ch != delim)) {
				if (isSpace(ch)) {
					pendingSpace = true;
					ifs.get(ch);
					continue;
				}
				if (pendingSpace) {
					pendingSpace = false;
					val += ' ';
				}

				val += ch;

				// track brace nesting
				switch (ch) {
				case '{':
					++braceDepth;
					break;
				case '}':
					--braceDepth;
					if (braceDepth < 0) return false;
					break;
				}

				ifs.get(ch);
			}

			if (!ifs)
				return false;

			// consume the character after the closing delimiter
			ifs.get(ch);

			if (!ifs)
				return false;

		} else {

			// a @string name
			docstring strName;

			while (ifs && !isSpace(ch) && ch != '#' && ch != ',' && ch != '}' && ch != ')') {
				strName += lowercase(ch);
				ifs.get(ch);
			}

			if (!ifs)
				return false;

			// replace the name with its assigned value, or
			// discard it if it has no assignment
			if (strName.length()) {
				VarMap::const_iterator pos = strings.find(strName);
				if (pos != strings.end()) {
					val += pos->second;
				}
			}
		}

		// skip whitespace after this part
		while (ifs && isSpace(ch)) {
			ifs.get(ch);
		}

		if (!ifs)
			return false;

		// a '#' concatenates another part onto the value
	} while (ch == '#');

	ifs.putback(ch);

	return true;
}
|
|
|
|
}
|
|
|
|
|
2003-02-18 12:36:02 +00:00
|
|
|
|
2001-08-20 13:42:29 +00:00
|
|
|
// This method parses the databases attached to this inset and fills
// keylist with one BibTeXInfo entry per citation entry found.
void InsetBibtex::fillWithBibKeys(Buffer const & buffer,
	BiblioInfo & keylist, InsetIterator const & /*di*/) const
{
	EmbeddedFileList const files = getFiles(buffer);
	for (vector<EmbeddedFile>::const_iterator it = files.begin();
		it != files.end(); ++ it) {
		// This bibtex parser is a first step to parse bibtex files
		// more precisely.
		//
		// - it reads the whole bibtex entry and does a syntax check
		//   (matching delimiters, missing commas,...
		// - it recovers from errors starting with the next @-character
		// - it reads @string definitions and replaces them in the
		//   field values.
		// - it accepts more characters in keys or value names than
		//   bibtex does.
		//
		// Officially bibtex does only support ASCII, but in practice
		// you can use the encoding of the main document as long as
		// some elements like keys and names are pure ASCII. Therefore
		// we convert the file from the buffer encoding.
		// We don't restrict keys to ASCII in LyX, since our own
		// InsetBibitem can generate non-ASCII keys, and nonstandard
		// 8bit clean bibtex forks exist.
		idocfstream ifs(it->availableFile().toFilesystemEncoding().c_str(),
			ios_base::in,
			buffer.params().encoding().iconvName());

		char_type ch;
		// maps @string names to their values; filled as we go so that
		// later field values can expand earlier definitions
		VarMap strings;

		// scan for '@' which starts every BibTeX construct;
		// anything between entries is ignored (error recovery point)
		while (ifs) {
			ifs.get(ch);
			if (!ifs)
				break;

			if (ch != '@')
				continue;

			docstring entryType;

			// entry type ends at '{' or '('; lowercased for the
			// case-insensitive comparisons below
			if (!readTypeOrKey(entryType, ifs, from_ascii("{("),
					   docstring(), makeLowerCase) || !ifs)
				continue;

			if (entryType == from_ascii("comment")) {
				// @comment: skip the rest of the line
				ifs.ignore(numeric_limits<int>::max(), '\n');
				continue;
			}

			ifs.get(ch);
			if (!ifs)
				break;

			if ((ch != '(') && (ch != '{')) {
				// invalid entry delimiter
				ifs.putback(ch);
				continue;
			}

			// process the entry
			if (entryType == from_ascii("string")) {

				// read string and add it to the strings map
				// (or replace it's old value)
				docstring name;
				docstring value;

				if (!readTypeOrKey(name, ifs, from_ascii("="),
						   from_ascii("#{}(),"), makeLowerCase) || !ifs)
					continue;

				// next char must be an equal sign
				ifs.get(ch);
				if (!ifs || ch != '=')
					continue;

				if (!readValue(value, ifs, strings))
					continue;

				strings[name] = value;

			} else if (entryType == from_ascii("preamble")) {

				// preamble definitions are discarded.
				// can they be of any use in lyx?
				docstring value;

				if (!readValue(value, ifs, strings))
					continue;

			} else {

				// Citation entry. Try to read the key.
				docstring key;

				// keys keep their case: citations are
				// case-sensitive references
				if (!readTypeOrKey(key, ifs, from_ascii(","),
						   from_ascii("}"), keepCase) || !ifs)
					continue;

				/////////////////////////////////////////////
				// now we have a key, so we will add an entry
				// (even if it's empty, as bibtex does)
				//
				// we now read the field = value pairs.
				// all items must be separated by a comma. If
				// it is missing the scanning of this entry is
				// stopped and the next is searched.
				docstring fields;
				docstring name;
				docstring value;
				docstring commaNewline;
				// concatenation of all field values; used for
				// full-text searching over the entry
				docstring data;
				BibTeXInfo keyvalmap(key, entryType);

				bool readNext = removeWSAndComma(ifs);

				while (ifs && readNext) {

					// read field name
					if (!readTypeOrKey(name, ifs, from_ascii("="),
							   from_ascii("{}(),"), makeLowerCase) || !ifs)
						break;

					// next char must be an equal sign
					ifs.get(ch);
					if (!ifs)
						break;
					if (ch != '=') {
						ifs.putback(ch);
						break;
					}

					// read field value
					if (!readValue(value, ifs, strings))
						break;

					keyvalmap[name] = value;
					data += "\n\n" + value;
					keylist.addFieldName(name);
					readNext = removeWSAndComma(ifs);
				}

				// add the new entry
				keylist.addEntryType(entryType);
				keyvalmap.setAllData(data);
				keylist[key] = keyvalmap;
			}
		} //< searching '@'
	} //< for loop over files
}
|
|
|
|
|
1999-11-24 22:14:46 +00:00
|
|
|
|
2007-04-04 20:06:34 +00:00
|
|
|
|
2003-09-03 17:40:58 +00:00
|
|
|
// Append db to the comma-separated "bibfiles" parameter.
// Returns true if the database was added, false if it was already listed.
bool InsetBibtex::addDatabase(string const & db)
{
	// FIXME UNICODE
	string dblist(to_utf8(getParam("bibfiles")));
	// already present as a token? then there is nothing to do
	if (tokenPos(dblist, ',', db) != -1)
		return false;
	if (!dblist.empty())
		dblist += ',';
	dblist += db;
	setParam("bibfiles", from_utf8(dblist));
	return true;
}
|
|
|
|
|
|
|
|
|
2003-09-03 17:40:58 +00:00
|
|
|
// Remove db from the comma-separated "bibfiles" parameter.
// Returns true on removal (or when db was absent, matching historic
// behavior); false only when the substring match and the token search
// disagree (db occurs merely as part of another name).
bool InsetBibtex::delDatabase(string const & db)
{
	// FIXME UNICODE
	string const dblist(to_utf8(getParam("bibfiles")));
	if (!contains(dblist, db))
		return true;

	int const pos = tokenPos(dblist, ',', db);
	// mutable copy: split() uses it as an out-parameter below
	string name = db;
	if (pos > 0) {
		// not the first database: remove the leading comma as well
		string const needle = ',' + name;
		setParam("bibfiles", from_utf8(subst(dblist, needle, string())));
	} else if (pos == 0) {
		// the first (or only) database: keep what follows the comma
		setParam("bibfiles", from_utf8(split(dblist, name, ',')));
	} else {
		return false;
	}
	return true;
}
|
2004-03-29 13:17:25 +00:00
|
|
|
|
|
|
|
|
|
|
|
// Register LaTeX package requirements for this inset.
void InsetBibtex::validate(LaTeXFeatures & features) const
{
	// bibtopic is only needed when the document asked for
	// sectioned bibliographies
	bool const wantsBibtopic = features.bufferParams().use_bibtopic;
	if (wantsBibtopic)
		features.require("bibtopic");
}
|
2006-10-21 00:16:43 +00:00
|
|
|
|
|
|
|
|
2008-01-08 18:55:34 +00:00
|
|
|
// Announce every database attached to this inset to the buffer-wide
// embedded file list.
void InsetBibtex::registerEmbeddedFiles(Buffer const & buffer, EmbeddedFileList & files) const
{
	EmbeddedFileList const databases = getFiles(buffer);
	EmbeddedFileList::const_iterator cit = databases.begin();
	EmbeddedFileList::const_iterator const cend = databases.end();
	for (; cit != cend; ++cit)
		files.registerFile(*cit, this, buffer);
}
|
|
|
|
|
|
|
|
|
|
|
|
// Take over the embedding status of 'file' for the matching database and
// rewrite the "bibfiles" and "embed" parameters from scratch.
void InsetBibtex::updateEmbeddedFile(Buffer const & buf, EmbeddedFile const & file)
{
	// Both parameters are comma-separated lists kept in step by
	// position: a database that is not embedded leaves an empty
	// slot in "embed".
	docstring newBibfiles;
	docstring newEmbed;

	EmbeddedFileList databases = getFiles(buf);
	EmbeddedFileList::iterator it = databases.begin();
	EmbeddedFileList::iterator const end = databases.end();
	for (; it != end; ++it) {
		// update the matching item from the given file
		if (it->absFilename() == file.absFilename())
			it->setEmbed(file.embedded());
		// separator before every item but the first
		if (it != databases.begin()) {
			newBibfiles += ',';
			newEmbed += ',';
		}
		newBibfiles += from_utf8(it->outputFilename(buf.filePath()));
		if (it->embedded())
			newEmbed += from_utf8(it->inzipName());
	}
	setParam("bibfiles", newBibfiles);
	setParam("embed", newEmbed);
}
|
|
|
|
|
|
|
|
|
2006-10-21 00:16:43 +00:00
|
|
|
} // namespace lyx
|