# -*- coding: utf-8 -*-
# This file is part of lyx2lyx
# Copyright (C) 2011 The LyX team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

""" Convert files to the file format generated by lyx 2.1"""

import re, string
import unicodedata
import sys, os

# Uncomment only what you need to import, please.

from parser_tools import del_token, find_token, find_end_of_inset, get_value, \
    get_quoted_value
#from parser_tools import find_token, find_end_of, find_tokens, \
#    find_token_exact, find_end_of_inset, find_end_of_layout, \
#    find_token_backwards, is_in_inset, get_value, get_quoted_value, \
#    del_token, check_token, get_option_value

from lyx2lyx_tools import add_to_preamble, put_cmd_in_ert
#from lyx2lyx_tools import add_to_preamble, insert_to_preamble, \
#    put_cmd_in_ert, lyx2latex, latex_length, revert_flex_inset, \
#    revert_font_attrs, hex2ratio, str2bool
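
# Note: each routine below receives a `document` object from the lyx2lyx
# driver. As used throughout this module, `document.header` and `document.body`
# are lists of lines, `document.language` holds the document language, and
# `document.warning()` reports problems to the user.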


####################################################################
# Private helper functions

#def remove_option(lines, m, option):
#    ''' removes option from line m. returns whether we did anything '''
#    l = lines[m].find(option)
#    if l == -1:
#        return False
#    val = lines[m][l:].split('"')[1]
#    lines[m] = lines[m][:l - 1] + lines[m][l+len(option + '="' + val + '"'):]
#    return True


###############################################################################
###
### Conversion and reversion routines
###
###############################################################################
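
# Most revert routines below follow the same pattern: scan document.body with
# find_token() for the inset being reverted, locate its end with
# find_end_of_inset(), replace the slice with the equivalent raw LaTeX wrapped
# in ERT via put_cmd_in_ert(), and load any required package through
# add_to_preamble().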


def revert_visible_space(document):
    "Revert InsetSpace visible into its ERT counterpart"
    i = 0
    while True:
        i = find_token(document.body, "\\begin_inset space \\textvisiblespace{}", i)
        if i == -1:
            return
        end = find_end_of_inset(document.body, i)
        subst = put_cmd_in_ert("\\textvisiblespace{}")
        document.body[i:end + 1] = subst


def convert_undertilde(document):
    " Load undertilde automatically "
    i = find_token(document.header, "\\use_mathdots", 0)
    if i != -1:
        document.header.insert(i + 1, "\\use_undertilde 1")


def revert_undertilde(document):
    " Load undertilde if used in the document "
    undertilde = find_token(document.header, "\\use_undertilde", 0)
    if undertilde == -1:
        document.warning("No \\use_undertilde line. Assuming auto.")
    else:
        val = get_value(document.header, "\\use_undertilde", undertilde)
        del document.header[undertilde]
        try:
            usetilde = int(val)
        except:
            document.warning("Invalid \\use_undertilde value: " + val + ". Assuming auto.")
            # probably usetilde has not been changed, but be safe.
            usetilde = 1

        if usetilde == 0:
            # do not load case
            return
        if usetilde == 2:
            # force load case
            add_to_preamble(document, ["\\usepackage{undertilde}"])
            return

    # so we are in the auto case. we want to load undertilde if \utilde is used.
    i = 0
    while True:
        i = find_token(document.body, '\\begin_inset Formula', i)
        if i == -1:
            return
        j = find_end_of_inset(document.body, i)
        if j == -1:
            document.warning("Malformed LyX document: Can't find end of Formula inset at line " + str(i))
            i += 1
            continue
        code = "\n".join(document.body[i:j])
        if code.find("\\utilde") != -1:
            add_to_preamble(document, ["\\@ifundefined{utilde}{\\usepackage{undertilde}}"])
            return
        i = j


def revert_negative_space(document):
    "Revert InsetSpace negmedspace and negthickspace into its TeX-code counterpart"
    i = 0
    j = 0
    reverted = False
    while True:
        i = find_token(document.body, "\\begin_inset space \\negmedspace{}", i)
        if i == -1:
            j = find_token(document.body, "\\begin_inset space \\negthickspace{}", j)
            if j == -1:
                # we are done checking; load amsmath in the preamble if
                # something was reverted and it is not already loaded
                if reverted:
                    i = find_token(document.header, "\\use_amsmath 2", 0)
                    if i == -1:
                        add_to_preamble(document, ["\\@ifundefined{negthickspace}{\\usepackage{amsmath}}"])
                return
        if i == -1:
            return
        end = find_end_of_inset(document.body, i)
        subst = put_cmd_in_ert("\\negmedspace{}")
        document.body[i:end + 1] = subst
        j = find_token(document.body, "\\begin_inset space \\negthickspace{}", j)
        if j == -1:
            return
        end = find_end_of_inset(document.body, j)
        subst = put_cmd_in_ert("\\negthickspace{}")
        document.body[j:end + 1] = subst
        reverted = True


def revert_math_spaces(document):
    "Revert formulas with protected custom space and protected hfills to TeX-code"
    i = 0
    while True:
        i = find_token(document.body, "\\begin_inset Formula", i)
        if i == -1:
            return
        j = document.body[i].find("\\hspace*")
        if j != -1:
            end = find_end_of_inset(document.body, i)
            # strip the leading "\begin_inset Formula " (21 characters) and
            # put the remaining formula code in ERT
            subst = put_cmd_in_ert(document.body[i][21:])
            document.body[i:end + 1] = subst
        i = i + 1


def convert_japanese_encodings(document):
    " Rename the japanese encodings to names understood by platex "
    jap_enc_dict = {
        "EUC-JP-pLaTeX": "euc",
        "JIS-pLaTeX": "jis",
        "SJIS-pLaTeX": "sjis"
    }
    i = find_token(document.header, "\\inputencoding", 0)
    if i == -1:
        return
    val = get_value(document.header, "\\inputencoding", i)
    if val in jap_enc_dict.keys():
        document.header[i] = "\\inputencoding %s" % jap_enc_dict[val]


def revert_japanese_encodings(document):
    " Revert the japanese encodings name changes "
    jap_enc_dict = {
        "euc": "EUC-JP-pLaTeX",
        "jis": "JIS-pLaTeX",
        "sjis": "SJIS-pLaTeX"
    }
    i = find_token(document.header, "\\inputencoding", 0)
    if i == -1:
        return
    val = get_value(document.header, "\\inputencoding", i)
    if val in jap_enc_dict.keys():
        document.header[i] = "\\inputencoding %s" % jap_enc_dict[val]


def revert_justification(document):
    " Revert the \\justification buffer param"
    if not del_token(document.header, '\\justification', 0):
        document.warning("Malformed LyX document: Missing \\justification.")


def revert_australian(document):
    "Set the Australian and New Zealand English language variants back to English"

    if document.language == "australian" or document.language == "newzealand":
        document.language = "english"
        i = find_token(document.header, "\\language", 0)
        if i != -1:
            document.header[i] = "\\language english"
    j = 0
    while True:
        j = find_token(document.body, "\\lang australian", j)
        if j == -1:
            j = find_token(document.body, "\\lang newzealand", 0)
            if j == -1:
                return
            else:
                document.body[j] = document.body[j].replace("\\lang newzealand", "\\lang english")
        else:
            document.body[j] = document.body[j].replace("\\lang australian", "\\lang english")
        j += 1


def convert_biblio_style(document):
    "Add a sensible default for \\biblio_style based on the citation engine."
    i = find_token(document.header, "\\cite_engine", 0)
    if i != -1:
        engine = get_value(document.header, "\\cite_engine", i).split("_")[0]
        style = {"basic": "plain", "natbib": "plainnat", "jurabib": "jurabib"}
        document.header.insert(i + 1, "\\biblio_style " + style[engine])


def revert_biblio_style(document):
    "BibTeX insets with default option use the style defined by \\biblio_style."
    i = find_token(document.header, "\\biblio_style", 0)
    if i == -1:
        document.warning("No \\biblio_style line. Nothing to do.")
        return

    default_style = get_value(document.header, "\\biblio_style", i)
    del document.header[i]

    # We are looking for bibtex insets having the default option
    i = 0
    while True:
        i = find_token(document.body, "\\begin_inset CommandInset bibtex", i)
        if i == -1:
            return
        j = find_end_of_inset(document.body, i)
        if j == -1:
            document.warning("Malformed LyX document: Can't find end of bibtex inset at line " + str(i))
            i += 1
            continue
        k = find_token(document.body, "options", i, j)
        if k != -1:
            options = get_quoted_value(document.body, "options", k)
            if "default" in options.split(","):
                document.body[k] = 'options "%s"' \
                    % options.replace("default", default_style)
        i = j


##
# Conversion hub
#
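
# Each convert entry below pairs a target file format number with the list of
# functions applied to bring a document up to that format (an empty list means
# the format bump needs no document rewriting); the revert entries do the same
# in the opposite direction, back towards the previous format.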

supported_versions = ["2.1.0", "2.1"]

convert = [
           [414, []],
           [415, [convert_undertilde]],
           [416, []],
           [417, [convert_japanese_encodings]],
           [418, []],
           [419, []],
           [420, [convert_biblio_style]],
          ]

revert =  [
           [419, [revert_biblio_style]],
           [418, [revert_australian]],
           [417, [revert_justification]],
           [416, [revert_japanese_encodings]],
           [415, [revert_negative_space, revert_math_spaces]],
           [414, [revert_undertilde]],
           [413, [revert_visible_space]]
          ]


if __name__ == "__main__":
    pass