mirror of https://git.lyx.org/repos/lyx.git
Refactor lyx2lyx for speed and simplicity.
commit 0d49918c0c
parent a483fff07d
@@ -32,7 +32,7 @@ from parser_tools import (del_token, del_value, del_complete_lines,
 # find_tokens, find_token_exact, check_token, get_option_value
 from lyx2lyx_tools import (add_to_preamble, put_cmd_in_ert, revert_font_attrs,
     insert_to_preamble, latex_length, is_document_option,
     insert_document_option, remove_document_option, revert_language)

 ####################################################################
@@ -1308,9 +1308,9 @@ def convert_literalparam(document):
                 i += 1
             # href is already fully latexified. Here we can switch off literal.
             if inset == "href":
-                document.body.insert(i, "literal \"false\"")
+                document.body.insert(i, 'literal "false"')
             else:
-                document.body.insert(i, "literal \"true\"")
+                document.body.insert(i, 'literal "true"')
             i = j + 1

@@ -1320,19 +1320,14 @@ def revert_literalparam(document):
     for inset in command_insets:
         i = 0
         while True:
-            i = find_token(document.body, '\\begin_inset CommandInset %s' % inset, i)
+            i = find_token(document.body, '\\begin_inset CommandInset %s' % inset, i+1)
             if i == -1:
                 break
             j = find_end_of_inset(document.body, i)
             if j == -1:
                 document.warning("Malformed LyX document: Can't find end of %s inset at line %d" % (inset, i))
-                i += 1
                 continue
-            k = find_token(document.body, 'literal', i, j)
-            if k == -1:
-                i += 1
-                continue
-            del document.body[k]
+            del_token(document.body, 'literal', i, j)


 def revert_multibib(document):
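This pattern repeats throughout the commit: the hand-rolled find/check/delete of the 'literal' line becomes a single del_token() call (effectively a no-op when the token is missing), and the search resumes at i+1, so the manual `i += 1` bumps before each `continue` are no longer needed. A minimal, self-contained sketch of the idea, using simplified stand-ins for the parser_tools helpers (not the real implementations):

    def find_token(lines, token, start=0, end=0):
        "Stand-in: index of the first line starting with token, or -1."
        end = end or len(lines)
        for k in range(start, min(end, len(lines))):
            if lines[k].startswith(token):
                return k
        return -1

    def del_token(lines, token, start=0, end=0):
        "Stand-in: delete the first matching line; harmless if it is missing."
        k = find_token(lines, token, start, end)
        if k == -1:
            return False
        del lines[k]
        return True

    body = ['\\begin_body',
            '\\begin_inset CommandInset href',
            'LatexCommand href',
            'literal "true"',
            '\\end_inset',
            '\\end_body']
    i = 0
    while True:
        i = find_token(body, '\\begin_inset CommandInset', i + 1)  # resume past the last hit
        if i == -1:
            break
        del_token(body, 'literal', i)  # one call replaces find / if / del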
@@ -27,14 +27,15 @@ from datetime import (datetime, date, time)
 # Uncomment only what you need to import, please.

 from parser_tools import (count_pars_in_inset, del_token, find_end_of_inset,
-    find_end_of_layout, find_token, find_token_backwards, find_re, get_bool_value,
+    find_end_of_layout, find_token, find_token_backwards, find_token_exact,
+    find_re, get_bool_value,
     get_containing_layout, get_option_value, get_value, get_quoted_value)
 # del_value, del_complete_lines,
 # find_complete_lines, find_end_of,
 # find_re, find_substring,
 # get_containing_inset,
 # is_in_inset, set_bool_value
-# find_tokens, find_token_exact, check_token
+# find_tokens, check_token

 from lyx2lyx_tools import (put_cmd_in_ert, add_to_preamble, lyx2latex, revert_language, revert_flex_inset)
 # revert_font_attrs, insert_to_preamble, latex_length
@@ -52,7 +53,7 @@ def add_preamble_fonts(document, fontmap):
             xoption = "[" + ",".join(fontmap[pkg]) + "]"
         else:
             xoption = ""
-        preamble = "\\usepackage" + xoption + "{%s}" % pkg
+        preamble = "\\usepackage%s{%s}" % (xoption, pkg)
         add_to_preamble(document, [preamble])

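Both versions of the preamble line build the same string; the new one just folds the optional argument into one format expression instead of mixing concatenation with %-formatting. A quick check with hypothetical values (the package name and option below are illustrative, not from the commit):

    pkg = "noto"
    xoption = "[osf]"
    old = "\\usepackage" + xoption + "{%s}" % pkg   # '%' binds tighter than '+', so this was already correct
    new = "\\usepackage%s{%s}" % (xoption, pkg)
    assert old == new == "\\usepackage[osf]{noto}"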
@@ -351,26 +352,31 @@ def removeFrontMatterStyles(document):
     " Remove styles Begin/EndFrontmatter"

     layouts = ['BeginFrontmatter', 'EndFrontmatter']
-    for layout in layouts:
-        i = 0
-        while True:
-            i = find_token(document.body, '\\begin_layout ' + layout, i)
-            if i == -1:
-                break
-            j = find_end_of_layout(document.body, i)
-            if j == -1:
-                document.warning("Malformed LyX document: Can't find end of layout at line %d" % i)
-                i += 1
-                continue
-            while i > 0 and document.body[i-1].strip() == '':
-                i -= 1
-            while document.body[j+1].strip() == '':
-                j = j + 1
-            document.body[i:j+1] = ['']
+    tokenend = len('\\begin_layout ')
+    i = 0
+    while True:
+        i = find_token_exact(document.body, '\\begin_layout ', i)
+        if i == -1:
+            return
+        layout = document.body[i][tokenend:].strip()
+        if layout not in layouts:
+            i += 1
+            continue
+        j = find_end_of_layout(document.body, i)
+        if j == -1:
+            document.warning("Malformed LyX document: Can't find end of layout at line %d" % i)
+            i += 1
+            continue
+        while document.body[j+1].strip() == '':
+            j += 1
+        document.body[i:j+1] = []

 def addFrontMatterStyles(document):
     " Use styles Begin/EndFrontmatter for elsarticle"

+    if document.textclass != "elsarticle":
+        return
+
     def insertFrontmatter(prefix, line):
         above = line
         while above > 0 and document.body[above-1].strip() == '':
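Here and in addFrontMatterStyles just below, one scan of the whole body per layout name is replaced by a single pass over every `\begin_layout` line followed by a membership test, so the work drops from roughly O(layouts x body lines) to O(body lines). A rough, self-contained sketch of that pattern on a toy body (find_token below is a simplified stand-in for the parser_tools helper, not the real implementation):

    def find_token(lines, token, start=0):
        "Stand-in: index of the first line at or after start that begins with token."
        for k in range(start, len(lines)):
            if lines[k].startswith(token):
                return k
        return -1

    body = ['\\begin_layout Standard', 'text', '\\end_layout',
            '\\begin_layout BeginFrontmatter', '\\end_layout',
            '\\begin_layout Title', 'A title', '\\end_layout']

    wanted = {'BeginFrontmatter', 'EndFrontmatter'}
    tokenend = len('\\begin_layout ')
    hits = []
    i = 0
    while True:
        i = find_token(body, '\\begin_layout ', i)
        if i == -1:
            break
        layout = body[i][tokenend:].strip()   # layout name follows the token
        if layout in wanted:                  # single membership test per hit
            hits.append(i)
        i += 1

    print(hits)   # [3] -- only the frontmatter layout matched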
@@ -387,31 +393,32 @@ def addFrontMatterStyles(document):
             '\\end_inset', '', '',
             '\\end_layout', '']

-    if document.textclass == "elsarticle":
-        layouts = ['Title', 'Title footnote', 'Author', 'Author footnote',
-                   'Corresponding author', 'Address', 'Email', 'Abstract', 'Keywords']
-        first = -1
-        last = -1
-        for layout in layouts:
-            i = 0
-            while True:
-                i = find_token(document.body, '\\begin_layout ' + layout, i)
-                if i == -1:
-                    break
-                k = find_end_of_layout(document.body, i)
-                if k == -1:
-                    document.warning("Malformed LyX document: Can't find end of layout at line %d" % i)
-                    i += 1;
-                    continue
-                if first == -1 or i < first:
-                    first = i
-                if last == -1 or last <= k:
-                    last = k+1
-                i = k+1
-        if first == -1:
-            return
-        insertFrontmatter('End', last)
-        insertFrontmatter('Begin', first)
+    layouts = ['Title', 'Title footnote', 'Author', 'Author footnote',
+               'Corresponding author', 'Address', 'Email', 'Abstract', 'Keywords']
+    tokenend = len('\\begin_layout ')
+    first = -1
+    i = 0
+    while True:
+        i = find_token_exact(document.body, '\\begin_layout ', i)
+        if i == -1:
+            break
+        layout = document.body[i][tokenend:].strip()
+        if layout not in layouts:
+            i += 1
+            continue
+        k = find_end_of_layout(document.body, i)
+        if k == -1:
+            document.warning("Malformed LyX document: Can't find end of layout at line %d" % i)
+            i += 1;
+            continue
+        if first == -1:
+            first = i
+        i = k+1
+    if first == -1:
+        return
+    insertFrontmatter('End', k+1)
+    insertFrontmatter('Begin', first)

+
 def convert_lst_literalparam(document):
     " Add param literal to include inset "
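Because the rewritten loop visits the layouts in document order, the bookkeeping gets simpler: `first` no longer needs the `or i < first` guard, the separate `last` variable disappears, and after the loop `k` already holds the end of the last matching layout, which is what insertFrontmatter('End', k+1) uses. Roughly, with toy indices rather than LyX data:

    matches = [(10, 14), (20, 25), (30, 33)]   # (start, end) of matching layouts, met in order
    first = -1
    for i, k in matches:
        if first == -1:
            first = i
    print(first, k)   # 10 33 -- first start, and the end of the last match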
@@ -428,7 +435,7 @@ def convert_lst_literalparam(document):
             continue
         while i < j and document.body[i].strip() != '':
             i += 1
-        document.body.insert(i, "literal \"true\"")
+        document.body.insert(i, 'literal "true"')


 def revert_lst_literalparam(document):
@@ -436,19 +443,14 @@ def revert_lst_literalparam(document):

     i = 0
     while True:
-        i = find_token(document.body, '\\begin_inset CommandInset include', i)
+        i = find_token(document.body, '\\begin_inset CommandInset include', i+1)
         if i == -1:
             break
         j = find_end_of_inset(document.body, i)
         if j == -1:
             document.warning("Malformed LyX document: Can't find end of include inset at line %d" % i)
-            i += 1
            continue
-        k = find_token(document.body, 'literal', i, j)
-        if k == -1:
-            i += 1
-            continue
-        del document.body[k]
+        del_token(document.body, 'literal', i, j)


 def revert_paratype(document):
@@ -1731,24 +1733,34 @@ def convert_lineno(document):
 def revert_new_languages(document):
     """Emulate support for Azerbaijani, Bengali, Church Slavonic, Korean,
     and Russian (Petrine orthography)."""

-    #revert_language(document, lyxname, babelname="", polyglossianame="")
-    revert_language(document, "azerbaijani", "azerbaijani", "")
-    revert_language(document, "bengali", "", "bengali")
-    revert_language(document, "churchslavonic", "", "churchslavonic")
-    revert_language(document, "oldrussian", "", "russian")
+    # lyxname: (babelname, polyglossianame)
+    new_languages = {"azerbaijani": ("azerbaijani", ""),
+                     "bengali": ("", "bengali"),
+                     "churchslavonic": ("", "churchslavonic"),
+                     "oldrussian": ("", "russian"),
+                     "korean": ("", "korean"),
+                    }
+    used_languages = set()
+    if document.language in new_languages:
+        used_languages.add(document.language)
+    i = 0
+    while True:
+        i = find_token(document.body, "\\lang", i+1)
+        if i == -1:
+            break
+        if document.body[i][6:].strip() in new_languages:
+            used_languages.add(document.language)

     # Korean is already supported via CJK, so leave as-is for Babel
-    if not get_bool_value(document.header, "\\use_non_tex_fonts"):
-        return
-    langpack = get_value(document.header, "\\language_package")
-    if langpack not in ("default", "auto"):
-        return
-    if document.language == "korean":
-        add_to_preamble(document, ["\\usepackage{polyglossia}",
-                                   "\\setdefaultlanguage{korean}"])
-    elif find_token(document.body, "\\lang korean") != -1:
+    if ("korean" in used_languages
+        and get_bool_value(document.header, "\\use_non_tex_fonts")
+        and get_value(document.header, "\\language_package") in ("default", "auto")):
         revert_language(document, "korean", "", "korean")
+        used_languages.discard("korean")
+
+    for lang in used_languages:
+        revert(lang, *new_languages[lang])


 gloss_inset_def = [
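The table-driven rewrite scans the document once, collects the affected languages in a set, special-cases Korean, and then reverts whatever remains from the mapping, instead of calling revert_language() unconditionally for each new language. A self-contained sketch of that collect-then-dispatch pattern on invented data (revert_one is a hypothetical stand-in for lyx2lyx_tools.revert_language):

    new_languages = {"azerbaijani": ("azerbaijani", ""),   # lyxname: (babelname, polyglossianame)
                     "bengali": ("", "bengali"),
                     "oldrussian": ("", "russian")}

    body = ["\\lang bengali", "some text", "\\lang english", "\\lang oldrussian"]
    doc_language = "azerbaijani"

    used = set()
    if doc_language in new_languages:
        used.add(doc_language)
    for line in body:                              # one pass over the body
        if line.startswith("\\lang "):
            lang = line[len("\\lang "):].strip()   # matches the body[i][6:] slice in the diff
            if lang in new_languages:
                used.add(lang)

    def revert_one(lang, babelname, polyglossianame):
        print("revert", lang, "->", babelname or polyglossianame)

    for lang in sorted(used):                      # dispatch only the languages actually used
        revert_one(lang, *new_languages[lang])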