Commits (2)

@@ -2,6 +2,11 @@
 Changelog
 =========
 
+Next
+----
+
+- Add support for localization replacement.
+
 0.6.5 (2017-03-26)
 ------------------

@@ -4,7 +4,6 @@ wowui-builder
 The primary purpose of the wowui-builder is to imitate the functionality of
 the curseforge packager, with the following known limitations:
 
-* The localization replacement functionality is not supported.
 * The tools used and dependency pkgmeta options are not supported.
 * The markup type for manual-changelog pkgmeta option is not yet used.
 * The license output pkgmeta option is not used.

@@ -4,7 +4,8 @@ import os
 import re
 import shutil
 
-from . localizer import LocalizationError
+from .localizer import LocalizationError
+from .utils import join_bytes
 
 
 KEYWORD_REGEX = re.compile(br'@([-A-Za-z]+)@')

@@ -170,8 +171,7 @@ class PreProcessorError(ValueError):
 class SpecialPattern(object):
     def __init__(self, name, regex, func):
         self.name = name
-        print(regex)
-        self.regex = regex.replace(b"(?P<inner>", b"(?P<%s_inner>"%(name.encode('utf-8'),))
+        self.regex = regex.replace(b"(?P<inner>", join_bytes(b"(?P<", name.encode('utf-8'), b"_inner>"))
         self.func = func
 
     def __call__(self, match):
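
Side note on the hunk above, not part of the change itself: the rename of the inner group lets several special patterns be merged into one compiled regex without clashing group names. A minimal sketch of the effect, using a made-up @debug@ directive rather than one of the project's real patterns:

    # Hypothetical special-pattern regex using the generic "inner" group.
    regex = br"@debug@(?P<inner>.*?)@end-debug@"
    name = "s0"
    # SpecialPattern renames the group so it stays unique once all patterns
    # are OR-ed together into BaseProcessor's combined token regex.
    renamed = regex.replace(b"(?P<inner>", b"(?P<" + name.encode('utf-8') + b"_inner>")
    assert renamed == br"@debug@(?P<s0_inner>.*?)@end-debug@"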

@@ -194,15 +194,15 @@ class BaseProcessor(object):
         self.localizer = localizer
 
         for i, (from_s, to_s) in enumerate(self.basic.items()):
-            tokens.append(b"(?P<b%d>%s)"%(i, re.escape(from_s)))
+            tokens.append(join_bytes(b"(?P<b", str(i).encode('utf-8'), b">", re.escape(from_s), b")"))
             self.rules["b%d"%(i,)] = BasicPattern(to_s)
 
         for i, (regex, func) in enumerate(self.special.items()):
-            pattern = SpecialPattern("s%d"%(i,), regex, func)
-            tokens.append(b'(?P<s%d>%s)'%(i, pattern.regex))
+            pattern = SpecialPattern("s%d"%i, regex, func)
+            tokens.append(join_bytes(b'(?P<s', str(i).encode('utf-8'), b'>', pattern.regex, b")"))
             self.rules["s%d"%(i,)] = pattern
 
-        self.token_re = re.compile(b"(%s)"%(b"|".join(tokens)), re.S)
+        self.token_re = re.compile(join_bytes(b"(", b"|".join(tokens), b")"), re.S)
 
     def process(self, text):
         result = []
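
For context on this hunk: the constructor ORs every basic and special token into one alternation and then dispatches on whichever named group matched. The self-contained sketch below mirrors that idea; the rules, directives, and the dispatch loop are illustrative stand-ins, not the project's actual code:

    import re

    # Illustrative rules keyed by group name, standing in for the
    # BasicPattern / SpecialPattern callables.
    rules = {
        "b0": lambda m: b"1",                         # plain replacement
        "s0": lambda m: m.group("s0_inner").upper(),  # uses its renamed inner group
    }
    token_re = re.compile(
        b"((?P<b0>@debug@)|(?P<s0>@upper@(?P<s0_inner>.*?)@end-upper@))", re.S)

    def process(text):
        # Replace each matched token using the rule registered for the
        # named group that actually participated in the match.
        def repl(match):
            for name, rule in rules.items():
                if match.group(name) is not None:
                    return rule(match)
            return match.group(0)
        return token_re.sub(repl, text)

    print(process(b"flag=@debug@ name=@upper@foo@end-upper@"))
    # b'flag=1 name=FOO'

This is also why the unique b0/b1/.../s0/... prefixes matter: without the rename handled by SpecialPattern, two special patterns would both try to define a group called inner.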

@@ -291,7 +291,7 @@ class TOCPreProcessor(BaseProcessor):
         return self.text_toc_comment(inner, b"@no-lib-strip@", b"@end-no-lib-strip@")
 
     def text_toc_comment(self, match, start, end):
-        return b"#%s\n%s\n#%s"%(start, b"\n".join(b"#"+s for s in match[:-1].split(b"\n")), end)
+        return join_bytes(b"#", start, b"\n", b"\n".join(b"#"+s for s in match[:-1].split(b"\n")), b"\n#", end)
 
 class XMLPreProcessor(BaseProcessor):
     def __init__(self, alpha=True, lib_strip=False, **kwargs):
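
For readers unfamiliar with .toc files: text_toc_comment comments out the matched block line by line while keeping the surrounding markers visible. A rough illustration of the output, with a made-up file path (join_bytes is the helper added in utils below):

    def join_bytes(*args):
        return b''.join(args)

    match = b"Libs\\LibStub\\LibStub.lua\n"   # hypothetical block matched between the markers
    start, end = b"@no-lib-strip@", b"@end-no-lib-strip@"
    out = join_bytes(b"#", start, b"\n",
                     b"\n".join(b"#" + s for s in match[:-1].split(b"\n")),
                     b"\n#", end)
    print(out.decode())
    # #@no-lib-strip@
    # #Libs\LibStub\LibStub.lua
    # #@end-no-lib-strip@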

@@ -345,7 +345,7 @@ class PreProcessor(object):
         val = self.keyword_func(repofile, keyword.decode('utf-8'))
         if val is not None:
             return str(val).encode('utf-8')
-        return b"@%s@"%(keyword,)
+        return join_bytes(b"@", keyword, b"@")
 
     def copy(self, src, dest, repofile):
         proc = self.procs.get(file_ext(src.lower()))
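
One behavioral note on the fallback above: when keyword_func has no value for a keyword, the @keyword@ marker is written back unchanged rather than dropped. Roughly, with a made-up keyword value:

    keyword = b"project-version"   # hypothetical keyword captured by KEYWORD_REGEX
    val = None                     # keyword_func returned nothing for this file
    if val is not None:
        replacement = str(val).encode('utf-8')
    else:
        # Equivalent to join_bytes(b"@", keyword, b"@")
        replacement = b''.join((b"@", keyword, b"@"))
    assert replacement == b"@project-version@"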

@@ -37,3 +37,6 @@ def str_or_none(s):
     if s is None:
         return s
     return str(s)
+
+def join_bytes(*args):
+    return b''.join(args)
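
The new helper is small, but it is what the substitutions above rely on: %-formatting of bytes only exists from Python 3.5 onward (PEP 461), so this change presumably keeps the byte-string building working on older Python 3 versions (note that str formatting such as "b%d"%(i,) is left untouched). A short usage sketch with an illustrative token, not one taken from the project:

    import re

    def join_bytes(*args):
        # b''.join works on every Python 3 version, unlike b"..." % (...),
        # which was only added in Python 3.5.
        return b''.join(args)

    i, from_s = 0, b"@debug@"   # illustrative values
    # Old form (requires Python >= 3.5):
    #   b"(?P<b%d>%s)" % (i, re.escape(from_s))
    token = join_bytes(b"(?P<b", str(i).encode('utf-8'), b">", re.escape(from_s), b")")
    assert re.fullmatch(token, from_s)   # the built group matches the original token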