/**
 * \file InsetBibtex.cpp
 * This file is part of LyX, the document processor.
 * Licence details can be found in the file COPYING.
 *
 * \author Alejandro Aguilar Sierra
 * \author Richard Heck (BibTeX parser improvements)
 *
 * Full author contact details are available in file CREDITS.
 */

#include <config.h>

#include "InsetBibtex.h"

#include "Buffer.h"
#include "BufferParams.h"
#include "DispatchResult.h"
#include "Encoding.h"
#include "Format.h"
#include "FuncRequest.h"
#include "FuncStatus.h"
#include "LaTeXFeatures.h"
#include "MetricsInfo.h"
#include "output_xhtml.h"
#include "OutputParams.h"
#include "TextClass.h"
#include "TocBackend.h"

#include "frontends/alert.h"

#include "support/convert.h"
#include "support/debug.h"
#include "support/docstream.h"
#include "support/ExceptionMessage.h"
#include "support/filetools.h"
#include "support/gettext.h"
#include "support/lstrings.h"
#include "support/os.h"
#include "support/Path.h"
#include "support/textutils.h"

#include <limits>

using namespace std;
using namespace lyx::support;

namespace lyx {

namespace Alert = frontend::Alert;
namespace os = support::os;


InsetBibtex::InsetBibtex(Buffer const & buf, InsetCommandParams const & p)
	: InsetCommand(p, "bibtex")
{
	Inset::setBuffer(const_cast<Buffer &>(buf));
	buffer_->invalidateBibinfoCache();
}


InsetBibtex::~InsetBibtex()
{
	if (isBufferValid())
		buffer_->invalidateBibinfoCache();
}


ParamInfo const & InsetBibtex::findInfo(string const & /* cmdName */)
{
	static ParamInfo param_info_;
	if (param_info_.empty()) {
		param_info_.add("btprint", ParamInfo::LATEX_OPTIONAL);
		param_info_.add("bibfiles", ParamInfo::LATEX_REQUIRED);
		param_info_.add("options", ParamInfo::LYX_INTERNAL);
	}
	return param_info_;
}
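

// LFUN_INSET_EDIT opens the listed databases in an external editor;
// LFUN_INSET_MODIFY applies new parameters coming from the dialog.
// Everything else is handed on to InsetCommand.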
void InsetBibtex::doDispatch(Cursor & cur, FuncRequest & cmd)
{
	switch (cmd.action) {

	case LFUN_INSET_EDIT:
		editDatabases();
		break;

	case LFUN_INSET_MODIFY: {
		InsetCommandParams p(BIBTEX_CODE);
		try {
			if (!InsetCommand::string2params("bibtex",
					to_utf8(cmd.argument()), p)) {
				cur.noUpdate();
				break;
			}
		} catch (ExceptionMessage const & message) {
			if (message.type_ == WarningException) {
				Alert::warning(message.title_, message.details_);
				cur.noUpdate();
			} else
				throw message;
			break;
		}

		setParams(p);
		buffer().updateBibfilesCache();
		break;
	}

	default:
		InsetCommand::doDispatch(cur, cmd);
		break;
	}
}


bool InsetBibtex::getStatus(Cursor & cur, FuncRequest const & cmd,
		FuncStatus & flag) const
{
	switch (cmd.action) {

	case LFUN_INSET_EDIT:
		flag.setEnabled(true);
		return true;

	default:
		return InsetCommand::getStatus(cur, cmd, flag);
	}
}
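

// Open every listed database in the external editor registered for its
// format, asking for confirmation first when more than one file would
// be opened.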
void InsetBibtex::editDatabases() const
{
	vector<docstring> bibfilelist = getVectorFromString(getParam("bibfiles"));

	if (bibfilelist.empty())
		return;

	int nr_databases = bibfilelist.size();
	if (nr_databases > 1) {
		docstring message = bformat(_("The BibTeX inset includes %1$s databases.\n"
			"If you proceed, all of them will be opened."),
			convert<docstring>(nr_databases));
		int const ret = Alert::prompt(_("Open Databases?"),
			message, 0, 1, _("&Cancel"), _("&Proceed"));

		if (ret == 0)
			return;
	}

	vector<docstring>::const_iterator it = bibfilelist.begin();
	vector<docstring>::const_iterator en = bibfilelist.end();
	for (; it != en; ++it) {
		FileName bibfile = getBibTeXPath(*it, buffer());
		formats.edit(buffer(), bibfile,
			formats.getFormatFromFile(bibfile));
	}
}


docstring InsetBibtex::screenLabel() const
{
	return _("BibTeX Generated Bibliography");
}


docstring InsetBibtex::toolTip(BufferView const & /*bv*/, int /*x*/, int /*y*/) const
{
	docstring item = from_ascii("* ");
	docstring tip = _("Databases:") + "\n";
	vector<docstring> bibfilelist = getVectorFromString(getParam("bibfiles"));

	if (bibfilelist.empty()) {
		tip += item;
		tip += _("none");
	} else {
		vector<docstring>::const_iterator it = bibfilelist.begin();
		vector<docstring>::const_iterator en = bibfilelist.end();
		for (; it != en; ++it) {
			tip += item;
			tip += *it + "\n";
		}
	}

	// Style-Options
	bool toc = false;
	docstring style = getParam("options"); // maybe empty! and with bibtotoc
	docstring bibtotoc = from_ascii("bibtotoc");
	if (prefixIs(style, bibtotoc)) {
		toc = true;
		if (contains(style, char_type(',')))
			style = split(style, bibtotoc, char_type(','));
	}

	tip += _("Style File:") + "\n";
	tip += item;
	if (!style.empty())
		tip += style;
	else
		tip += _("none");

	tip += "\n" + _("Lists:") + " ";
	docstring btprint = getParam("btprint");
	if (btprint == "btPrintAll")
		tip += _("all references");
	else if (btprint == "btPrintNotCited")
		tip += _("all uncited references");
	else
		tip += _("all cited references");

	if (toc) {
		tip += ", ";
		tip += _("included in TOC");
	}

	return tip;
}
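

// Return the name of a .bib or .bst file as it should appear in the
// LaTeX output: unchanged if it is absolute or not readable next to the
// buffer, as an absolute path when exporting to the temporary directory
// (!runparams.nice), and relative to the master buffer otherwise.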
static string normalizeName(Buffer const & buffer,
	OutputParams const & runparams, string const & name, string const & ext)
{
	string const fname = makeAbsPath(name, buffer.filePath()).absFilename();
	if (FileName::isAbsolute(name) || !FileName(fname + ext).isReadableFile())
		return name;
	if (!runparams.nice)
		return fname;

	// FIXME UNICODE
	return to_utf8(makeRelPath(from_utf8(fname),
		from_utf8(buffer.masterBuffer()->filePath())));
}


int InsetBibtex::latex(odocstream & os, OutputParams const & runparams) const
{
	// the sequence of the commands:
	// 1. \bibliographystyle{style}
	// 2. \addcontentsline{...} - if option bibtotoc set
	// 3. \bibliography{database}
	// and with bibtopic:
	// 1. \bibliographystyle{style}
	// 2. \begin{btSect}{database}
	// 3. \btPrint{Cited|NotCited|All}
	// 4. \end{btSect}

	// Database(s)
	// If we are processing the LaTeX file in a temp directory then
	// copy the .bib databases to this temp directory, mangling their
	// names in the process. Store this mangled name in the list of
	// all databases.
	// (We need to do all this because BibTeX *really*, *really*
	// can't handle "files with spaces" and Windows users tend to
	// use such filenames.)
	// Otherwise, store the (maybe absolute) path to the original,
	// unmangled database name.
	vector<docstring> bibfilelist = getVectorFromString(getParam("bibfiles"));
	vector<docstring>::const_iterator it = bibfilelist.begin();
	vector<docstring>::const_iterator en = bibfilelist.end();
	odocstringstream dbs;
	bool didone = false;

	for (; it != en; ++it) {
		string utf8input = to_utf8(*it);
		string database =
			normalizeName(buffer(), runparams, utf8input, ".bib");
		FileName const try_in_file =
			makeAbsPath(database + ".bib", buffer().filePath());
		bool const not_from_texmf = try_in_file.isReadableFile();

		if (!runparams.inComment && !runparams.dryrun && !runparams.nice &&
		    not_from_texmf) {
			// mangledFilename() needs the extension
			DocFileName const in_file = DocFileName(try_in_file);
			database = removeExtension(in_file.mangledFilename());
			FileName const out_file = makeAbsPath(database + ".bib",
					buffer().masterBuffer()->temppath());

			bool const success = in_file.copyTo(out_file);
			if (!success) {
				lyxerr << "Failed to copy '" << in_file
				       << "' to '" << out_file << "'"
				       << endl;
			}
		} else if (!runparams.inComment && runparams.nice && not_from_texmf &&
			   !isValidLaTeXFilename(database)) {
			frontend::Alert::warning(_("Invalid filename"),
				_("The following filename is likely to cause trouble "
				  "when running the exported file through LaTeX: ") +
				from_utf8(database));
		}

		if (didone)
			dbs << ',';
		else
			didone = true;
		// FIXME UNICODE
		dbs << from_utf8(latex_path(database));
	}
	docstring const db_out = dbs.str();

	// Post this warning only once.
	static bool warned_about_spaces = false;
	if (!warned_about_spaces &&
	    runparams.nice && db_out.find(' ') != docstring::npos) {
		warned_about_spaces = true;
		Alert::warning(_("Export Warning!"),
			_("There are spaces in the paths to your BibTeX databases.\n"
			  "BibTeX will be unable to find them."));
	}

	// Style-Options
	string style = to_utf8(getParam("options")); // maybe empty! and with bibtotoc
	string bibtotoc;
	if (prefixIs(style, "bibtotoc")) {
		bibtotoc = "bibtotoc";
		if (contains(style, ','))
			style = split(style, bibtotoc, ',');
	}

	// line count
	int nlines = 0;

	if (!style.empty()) {
		string base = normalizeName(buffer(), runparams, style, ".bst");
		FileName const try_in_file =
			makeAbsPath(base + ".bst", buffer().filePath());
		bool const not_from_texmf = try_in_file.isReadableFile();
		// If this style does not come from texmf and we are not
		// exporting to .tex copy it to the tmp directory.
		// This prevents problems with spaces and 8bit characters
		// in the file name.
		if (!runparams.inComment && !runparams.dryrun && !runparams.nice &&
		    not_from_texmf) {
			// use new style name
			DocFileName const in_file = DocFileName(try_in_file);
			base = removeExtension(in_file.mangledFilename());
			FileName const out_file = makeAbsPath(base + ".bst",
					buffer().masterBuffer()->temppath());
			bool const success = in_file.copyTo(out_file);
			if (!success) {
				lyxerr << "Failed to copy '" << in_file
				       << "' to '" << out_file << "'"
				       << endl;
			}
		}
		// FIXME UNICODE
		os << "\\bibliographystyle{"
		   << from_utf8(latex_path(normalizeName(buffer(), runparams, base, ".bst")))
		   << "}\n";
		nlines += 1;
	}

	// Post this warning only once.
	static bool warned_about_bst_spaces = false;
	if (!warned_about_bst_spaces && runparams.nice && contains(style, ' ')) {
		warned_about_bst_spaces = true;
		Alert::warning(_("Export Warning!"),
			_("There are spaces in the path to your BibTeX style file.\n"
			  "BibTeX will be unable to find it."));
	}

	if (!db_out.empty() && buffer().params().use_bibtopic) {
		os << "\\begin{btSect}{" << db_out << "}\n";
		docstring btprint = getParam("btprint");
		if (btprint.empty())
			// default
			btprint = from_ascii("btPrintCited");
		os << "\\" << btprint << "\n"
		   << "\\end{btSect}\n";
		nlines += 3;
	}

	// bibtotoc-Option
	if (!bibtotoc.empty() && !buffer().params().use_bibtopic) {
		if (buffer().params().documentClass().hasLaTeXLayout("chapter")) {
			if (buffer().params().sides == OneSide) {
				// oneside
				os << "\\clearpage";
			} else {
				// twoside
				os << "\\cleardoublepage";
			}
			os << "\\addcontentsline{toc}{chapter}{\\bibname}";
		} else if (buffer().params().documentClass().hasLaTeXLayout("section"))
			os << "\\addcontentsline{toc}{section}{\\refname}";
	}

	if (!db_out.empty() && !buffer().params().use_bibtopic) {
		docstring btprint = getParam("btprint");
		if (btprint == "btPrintAll") {
			os << "\\nocite{*}\n";
			nlines += 1;
		}
		os << "\\bibliography{" << db_out << "}\n";
		nlines += 1;
	}

	return nlines;
}
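

// Resolve every database listed in the "bibfiles" parameter to a full
// path, either through the TeX search path (findtexfile) or relative
// to the buffer.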
support::FileNameList InsetBibtex::getBibFiles() const
{
	FileName path(buffer().filePath());
	support::PathChanger p(path);

	support::FileNameList vec;

	vector<docstring> bibfilelist = getVectorFromString(getParam("bibfiles"));
	vector<docstring>::const_iterator it = bibfilelist.begin();
	vector<docstring>::const_iterator en = bibfilelist.end();
	for (; it != en; ++it) {
		FileName const file = getBibTeXPath(*it, buffer());

		if (!file.empty())
			vec.push_back(file);
		else
			LYXERR0("Couldn't find " + to_utf8(*it) + " in InsetBibtex::getBibFiles()!");
	}

	return vec;
}


namespace {

// methods for parsing bibtex files

typedef map<docstring, docstring> VarMap;

/// remove whitespace characters, optionally a single comma,
/// and further whitespace characters from the stream.
/// @return true if a comma was found, false otherwise
///
bool removeWSAndComma(ifdocstream & ifs) {
	char_type ch;

	if (!ifs)
		return false;

	// skip whitespace
	do {
		ifs.get(ch);
	} while (ifs && isSpace(ch));

	if (!ifs)
		return false;

	if (ch != ',') {
		ifs.putback(ch);
		return false;
	}

	// skip whitespace
	do {
		ifs.get(ch);
	} while (ifs && isSpace(ch));

	if (ifs) {
		ifs.putback(ch);
	}

	return true;
}


enum charCase {
	makeLowerCase,
	keepCase
};

/// remove whitespace characters, read a character sequence
/// not containing whitespace characters or characters in
/// delimChars, and remove further whitespace characters.
///
/// @return true if a string of length > 0 could be read.
///
bool readTypeOrKey(docstring & val, ifdocstream & ifs,
	docstring const & delimChars, docstring const & illegalChars,
	charCase chCase) {

	char_type ch;

	val.clear();

	if (!ifs)
		return false;

	// skip whitespace
	do {
		ifs.get(ch);
	} while (ifs && isSpace(ch));

	if (!ifs)
		return false;

	// read value
	bool legalChar = true;
	while (ifs && !isSpace(ch) &&
	       delimChars.find(ch) == docstring::npos &&
	       (legalChar = (illegalChars.find(ch) == docstring::npos)))
	{
		if (chCase == makeLowerCase)
			val += lowercase(ch);
		else
			val += ch;
		ifs.get(ch);
	}

	if (!legalChar) {
		ifs.putback(ch);
		return false;
	}

	// skip whitespace
	while (ifs && isSpace(ch)) {
		ifs.get(ch);
	}

	if (ifs) {
		ifs.putback(ch);
	}

	return val.length() > 0;
}


/// read subsequent bibtex values that are delimited with a #-character.
/// Concatenate all parts and replace names with the associated string in
/// the variable strings.
/// @return true if reading was successful (all single parts were delimited
/// correctly)
bool readValue(docstring & val, ifdocstream & ifs, const VarMap & strings) {

	char_type ch;

	val.clear();

	if (!ifs)
		return false;

	do {
		// skip whitespace
		do {
			ifs.get(ch);
		} while (ifs && isSpace(ch));

		if (!ifs)
			return false;

		// check for field type
		if (isDigit(ch)) {

			// read integer value
			do {
				val += ch;
				ifs.get(ch);
			} while (ifs && isDigit(ch));

			if (!ifs)
				return false;

		} else if (ch == '"' || ch == '{') {
			// set end delimiter
			char_type delim = ch == '"' ? '"' : '}';

			// Skip whitespace
			do {
				ifs.get(ch);
			} while (ifs && isSpace(ch));

			if (!ifs)
				return false;

			// We now have the first non-whitespace character.
			// We'll collapse adjacent whitespace.
			bool lastWasWhiteSpace = false;

			// inside this delimited text braces must match.
			// Thus we can have a closing delimiter only
			// when nestLevel == 0
			int nestLevel = 0;

			while (ifs && (nestLevel > 0 || ch != delim)) {
				if (isSpace(ch)) {
					lastWasWhiteSpace = true;
					ifs.get(ch);
					continue;
				}
				// We output the space only after we stop getting
				// whitespace so as not to output any whitespace
				// at the end of the value.
				if (lastWasWhiteSpace) {
					lastWasWhiteSpace = false;
					val += ' ';
				}

				val += ch;

				// update nesting level
				switch (ch) {
				case '{':
					++nestLevel;
					break;
				case '}':
					--nestLevel;
					if (nestLevel < 0)
						return false;
					break;
				}

				ifs.get(ch);
			}

			if (!ifs)
				return false;

			ifs.get(ch);

			if (!ifs)
				return false;

		} else {

			// reading a string name
			docstring strName;

			while (ifs && !isSpace(ch) && ch != '#' && ch != ',' && ch != '}' && ch != ')') {
				strName += lowercase(ch);
				ifs.get(ch);
			}

			if (!ifs)
				return false;

			// replace the string with its assigned value or
			// discard it if it's not assigned
			if (strName.length()) {
				VarMap::const_iterator pos = strings.find(strName);
				if (pos != strings.end()) {
					val += pos->second;
				}
			}
		}

		// skip WS
		while (ifs && isSpace(ch)) {
			ifs.get(ch);
		}

		if (!ifs)
			return false;

		// continue reading the next value if it is concatenated with '#'
	} while (ch == '#');

	ifs.putback(ch);

	return true;
}

} // anonymous namespace


// This method fills keylist with the BibTeX entries found in our databases.
void InsetBibtex::fillWithBibKeys(BiblioInfo & keylist,
	InsetIterator const & /*di*/) const
{
	// This bibtex parser is a first step to parse bibtex files
	// more precisely.
	//
	// - it reads the whole bibtex entry and does a syntax check
	//   (matching delimiters, missing commas, ...)
	// - it recovers from errors starting with the next @-character
	// - it reads @string definitions and replaces them in the
	//   field values.
	// - it accepts more characters in keys or value names than
	//   bibtex does.
	//
	// Officially bibtex only supports ASCII, but in practice
	// you can use the encoding of the main document as long as
	// some elements like keys and names are pure ASCII. Therefore
	// we convert the file from the buffer encoding.
	// We don't restrict keys to ASCII in LyX, since our own
	// InsetBibitem can generate non-ASCII keys, and nonstandard
	// 8bit clean bibtex forks exist.
	support::FileNameList const files = getBibFiles();
	support::FileNameList::const_iterator it = files.begin();
	support::FileNameList::const_iterator en = files.end();
	for (; it != en; ++it) {
		ifdocstream ifs(it->toFilesystemEncoding().c_str(),
			ios_base::in, buffer().params().encoding().iconvName());

		char_type ch;
		VarMap strings;

		while (ifs) {

			ifs.get(ch);
			if (!ifs)
				break;

			if (ch != '@')
				continue;

			docstring entryType;

			if (!readTypeOrKey(entryType, ifs, from_ascii("{("), docstring(), makeLowerCase)) {
				lyxerr << "InsetBibtex::fillWithBibKeys: Error reading entry type." << std::endl;
				continue;
			}

			if (!ifs) {
				lyxerr << "InsetBibtex::fillWithBibKeys: Unexpected end of file." << std::endl;
				continue;
			}

			if (entryType == from_ascii("comment")) {
				ifs.ignore(numeric_limits<int>::max(), '\n');
				continue;
			}

			ifs.get(ch);
			if (!ifs) {
				lyxerr << "InsetBibtex::fillWithBibKeys: Unexpected end of file." << std::endl;
				break;
			}

			if ((ch != '(') && (ch != '{')) {
				lyxerr << "InsetBibtex::fillWithBibKeys: Invalid entry delimiter." << std::endl;
				ifs.putback(ch);
				continue;
			}

			// process the entry
			if (entryType == from_ascii("string")) {

				// read string and add it to the strings map
				// (or replace its old value)
				docstring name;
				docstring value;

				if (!readTypeOrKey(name, ifs, from_ascii("="), from_ascii("#{}(),"), makeLowerCase)) {
					lyxerr << "InsetBibtex::fillWithBibKeys: Error reading string name." << std::endl;
					continue;
				}

				if (!ifs) {
					lyxerr << "InsetBibtex::fillWithBibKeys: Unexpected end of file." << std::endl;
					continue;
				}

				// next char must be an equal sign
				ifs.get(ch);
				if (!ifs || ch != '=') {
					lyxerr << "InsetBibtex::fillWithBibKeys: No `=' after string name: " <<
							name << "." << std::endl;
					continue;
				}

				if (!readValue(value, ifs, strings)) {
					lyxerr << "InsetBibtex::fillWithBibKeys: Unable to read value for string: " <<
							name << "." << std::endl;
					continue;
				}

				strings[name] = value;

			} else if (entryType == from_ascii("preamble")) {

				// preamble definitions are discarded.
				// can they be of any use in lyx?
				docstring value;

				if (!readValue(value, ifs, strings)) {
					lyxerr << "InsetBibtex::fillWithBibKeys: Unable to read preamble value." << std::endl;
					continue;
				}

			} else {

				// Citation entry. Try to read the key.
				docstring key;

				if (!readTypeOrKey(key, ifs, from_ascii(","), from_ascii("}"), keepCase)) {
					lyxerr << "InsetBibtex::fillWithBibKeys: Unable to read key for entry type: " <<
							entryType << "." << std::endl;
					continue;
				}

				if (!ifs) {
					lyxerr << "InsetBibtex::fillWithBibKeys: Unexpected end of file." << std::endl;
					continue;
				}

				/////////////////////////////////////////////
				// now we have a key, so we will add an entry
				// (even if it's empty, as bibtex does)
				//
				// we now read the field = value pairs.
				// all items must be separated by a comma. If
				// it is missing the scanning of this entry is
				// stopped and the next is searched.
				docstring fields;
				docstring name;
				docstring value;
				docstring commaNewline;
				docstring data;
				BibTeXInfo keyvalmap(key, entryType);

				bool readNext = removeWSAndComma(ifs);

				while (ifs && readNext) {

					// read field name
					if (!readTypeOrKey(name, ifs, from_ascii("="),
							from_ascii("{}(),"), makeLowerCase) || !ifs)
						break;

					// next char must be an equal sign
					ifs.get(ch);
					if (!ifs) {
						lyxerr << "InsetBibtex::fillWithBibKeys: Unexpected end of file." << std::endl;
						break;
					}
					if (ch != '=') {
						lyxerr << "InsetBibtex::fillWithBibKeys: Missing `=' after field name: " <<
								name << ", for key: " << key << "." << std::endl;
						ifs.putback(ch);
						break;
					}

					// read field value
					if (!readValue(value, ifs, strings)) {
						lyxerr << "InsetBibtex::fillWithBibKeys: Unable to read value for field: " <<
								name << ", for key: " << key << "." << std::endl;
						break;
					}

					keyvalmap[name] = value;
					data += "\n\n" + value;
					keylist.addFieldName(name);
					readNext = removeWSAndComma(ifs);
				}

				// add the new entry
				keylist.addEntryType(entryType);
				keyvalmap.setAllData(data);
				keylist[key] = keyvalmap;
			} //< else (citation entry)
		} //< searching '@'
	} //< for loop over files
}


FileName InsetBibtex::getBibTeXPath(docstring const & filename, Buffer const & buf)
{
	string texfile = changeExtension(to_utf8(filename), "bib");
	// note that, if the filename can be found directly from the path,
	// findtexfile will just return a FileName object for that path.
	FileName file(findtexfile(texfile, "bib"));
	if (file.empty())
		file = FileName(makeAbsPath(texfile, buf.filePath()));
	return file;
}
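

// Append db to the comma-separated "bibfiles" parameter; returns false
// if the database is already listed.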
bool InsetBibtex::addDatabase(docstring const & db)
{
	docstring bibfiles = getParam("bibfiles");
	if (tokenPos(bibfiles, ',', db) != -1)
		return false;
	if (!bibfiles.empty())
		bibfiles += ',';
	setParam("bibfiles", bibfiles + db);
	return true;
}
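

// Remove db from the comma-separated "bibfiles" parameter, dropping the
// adjacent separator whether db is the first entry or a later one.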
bool InsetBibtex::delDatabase(docstring const & db)
{
	docstring bibfiles = getParam("bibfiles");
	if (contains(bibfiles, db)) {
		int const n = tokenPos(bibfiles, ',', db);
		docstring bd = db;
		if (n > 0) {
			// this is not the first database
			docstring tmp = ',' + bd;
			setParam("bibfiles", subst(bibfiles, tmp, docstring()));
		} else if (n == 0)
			// this is the first (or only) database
			setParam("bibfiles", split(bibfiles, bd, ','));
		else
			return false;
	}
	return true;
}


void InsetBibtex::validate(LaTeXFeatures & features) const
{
	if (features.bufferParams().use_bibtopic)
		features.require("bibtopic");
}


namespace {

// used in xhtml to sort a list of BibTeXInfo objects
bool lSorter(BibTeXInfo const * lhs, BibTeXInfo const * rhs)
{
	return lhs->getAbbreviatedAuthor() < rhs->getAbbreviatedAuthor();
}

} // anonymous namespace
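

// XHTML output: gather the keys cited in the document from the TOC,
// look them up in the master BiblioInfo, sort the entries by author,
// and emit them as a simple <div class='bibliography'>.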
docstring InsetBibtex::xhtml(odocstream & os, OutputParams const &) const
{
	// We are going to collect all the citation keys used in the document,
	// getting them from the TOC.
	Toc const & toc = buffer().tocBackend().toc("citation");
	Toc::const_iterator it = toc.begin();
	Toc::const_iterator en = toc.end();
	vector<docstring> citekeys;
	for (; it != en; ++it) {
		if (it->str().empty())
			continue;
		vector<docstring> keys = getVectorFromString(it->str());
		vector<docstring>::const_iterator dit = keys.begin();
		vector<docstring>::const_iterator den = keys.end();
		for (; dit != den; ++dit)
			citekeys.push_back(*dit);
	}
	if (citekeys.empty())
		return docstring();
	sort(citekeys.begin(), citekeys.end());
	unique(citekeys.begin(), citekeys.end());
	// We now have a sorted, unique list of the keys used in this document.
	// We will now convert it to a list of the BibTeXInfo objects used in
	// this document...
	// FIXME We need to do something here about cross-references, if we
	// want to be able to display them AS cross-references. Probably the
	// easiest thing to do is to loop over the list again and add whatever
	// cross-references we find, then sort and unique it, planning just to
	// add the cross-references to the bibliography.
	vector<BibTeXInfo const *> binfo;
	vector<docstring>::const_iterator cit = citekeys.begin();
	vector<docstring>::const_iterator cen = citekeys.end();
	BiblioInfo const & bi = buffer().masterBibInfo();
	for (; cit != cen; ++cit) {
		BiblioInfo::const_iterator bt = bi.find(*cit);
		if (bt == bi.end())
			continue;
		binfo.push_back(&(bt->second));
	}
	// ...and sort it.
	sort(binfo.begin(), binfo.end(), lSorter);
	// Finally, then, we are ready for output.
	os << "<h2 class='bibliography'>" << _("References") << "</h2>\n";
	os << "<div class='bibliography'>\n";
	vector<BibTeXInfo const *>::const_iterator vit = binfo.begin();
	vector<BibTeXInfo const *>::const_iterator ven = binfo.end();
	// Now we loop over the entries
	for (; vit != ven; ++vit) {
		BibTeXInfo const * bip = *vit;
		os << "<p class='bibliography'>";
		os << "<a name='" << html::htmlize(bip->key()) << "'></a>";
		docstring label = bip->label();
		if (label.empty())
			label = bip->key();
		os << "<span class='biblabel'>[" << label << "]</span> ";
		// FIXME Right now, we are calling BibInfo::getInfo on the key,
		// which will give us all the cross-referenced info. But for every
		// entry.
		os << "<span class='bibinfo'>" << bi.getInfo(bip->key()) << "</span>";
		os << "</p>\n";
	}

	os << "</div>\n";
	return docstring();
}


docstring InsetBibtex::contextMenu(BufferView const &, int, int) const
{
	return from_ascii("context-bibtex");
}


} // namespace lyx