Various fixes, cleanup and improvements to the i18n module.

* Remove multi-processing in the messages class update; it was not giving much
  speedup, if any at all.
* Remove some debug prints.
* Make messages class pickleable.

* Better filter which values in the Settings class are actual settings.
* Make settings pickleable.

All this will allow using multi-processing in the i18n add-on itself.
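A minimal, self-contained sketch (not code from this commit) of why pickleability is the prerequisite here: ProcessPoolExecutor ships arguments and results between processes via pickle, so any settings or messages object handed to a worker must survive a pickle round-trip. Class and attribute names below are purely illustrative.

# Illustrative only; mirrors the __getstate__/__setstate__ approach used in the diff below.
import concurrent.futures
import pickle


class Settings:
    """Toy stand-in for a settings singleton built from module globals."""

    def __init__(self):
        self.POT_FILE = "blender.pot"        # plain data: travels fine through pickle
        self._log = lambda msg: None         # a function: pickle would reject it

    def __getstate__(self):
        # Only export plain data, much like the real class now does via to_dict().
        return {k: v for k, v in self.__dict__.items() if not k.startswith("_")}

    def __setstate__(self, mapping):
        self.__init__()
        self.__dict__.update(mapping)


def work(settings):
    return settings.POT_FILE                 # runs in a worker process


if __name__ == "__main__":
    s = Settings()
    pickle.dumps(s)                          # would raise without __getstate__()
    with concurrent.futures.ProcessPoolExecutor() as pool:
        print(pool.submit(work, s).result())  # prints "blender.pot"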
2020-05-05 18:04:22 +02:00
parent 527c81c6f8
commit c9ff8b5cd6
2 changed files with 55 additions and 28 deletions


@@ -28,6 +28,7 @@
 import json
 import os
 import sys
+import types
 import bpy
@@ -556,6 +557,10 @@ def _gen_get_set_path(ref, name):
     return _get, _set
 
 
+def _check_valid_data(uid, val):
+    return not uid.startswith("_") and type(val) not in tuple(types.__dict__.values()) + (type,)
+
+
 class I18nSettings:
     """
     Class allowing persistence of our settings!
@@ -567,20 +572,32 @@ class I18nSettings:
         # Addon preferences are singleton by definition, so is this class!
         if not I18nSettings._settings:
             cls._settings = super(I18nSettings, cls).__new__(cls)
-            cls._settings.__dict__ = {uid: data for uid, data in globals().items() if not uid.startswith("_")}
+            cls._settings.__dict__ = {uid: val for uid, val in globals().items() if _check_valid_data(uid, val)}
         return I18nSettings._settings
 
-    def from_json(self, string):
-        data = dict(json.loads(string))
+    def __getstate__(self):
+        return self.to_dict()
+
+    def __setstate__(self, mapping):
+        return self.from_dict(mapping)
+
+    def from_dict(self, mapping):
         # Special case... :/
-        if "INTERN_PY_SYS_PATHS" in data:
-            self.PY_SYS_PATHS = data["INTERN_PY_SYS_PATHS"]
-        self.__dict__.update(data)
+        if "INTERN_PY_SYS_PATHS" in mapping:
+            self.PY_SYS_PATHS = mapping["INTERN_PY_SYS_PATHS"]
+        self.__dict__.update(mapping)
+
+    def to_dict(self):
+        glob = globals()
+        return {uid: val for uid, val in self.__dict__.items() if _check_valid_data(uid, val) and uid in glob}
+
+    def from_json(self, string):
+        self.from_dict(dict(json.loads(string)))
 
     def to_json(self):
         # Only save the diff from default i18n_settings!
         glob = globals()
-        export_dict = {uid: val for uid, val in self.__dict__.items() if glob.get(uid) != val}
+        export_dict = {uid: val for uid, val in self.__dict__.items() if _check_valid_data(uid, val) and glob.get(uid) != val}
         return json.dumps(export_dict)
 
     def load(self, fname, reset=False):
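For illustration, the new _check_valid_data() predicate added above can be exercised on its own: given a hypothetical globals-like namespace, only plain setting values survive, while private names, functions, modules and classes are filtered out. The names in the namespace below are made up for the example.

import os
import types


def _check_valid_data(uid, val):
    # Same predicate as in the hunk above: skip private names, anything whose type
    # lives in the `types` module (functions, modules, methods, ...), and classes.
    return not uid.startswith("_") and type(val) not in tuple(types.__dict__.values()) + (type,)


# Hypothetical module namespace mixing setting values with helpers.
namespace = {
    "PROJECT_NAME": "Blender",                 # plain value: kept
    "_PRIVATE_CACHE": {},                      # private name: dropped
    "os": os,                                  # module: dropped (types.ModuleType)
    "helper": lambda: None,                    # function: dropped (types.FunctionType)
    "SomeClass": type("SomeClass", (), {}),    # class: dropped (matches the (type,) tail)
}

kept = {uid: val for uid, val in namespace.items() if _check_valid_data(uid, val)}
assert kept == {"PROJECT_NAME": "Blender"}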


@@ -21,7 +21,6 @@
 # Some misc utilities...
 import collections
-import concurrent.futures
 import copy
 import hashlib
 import os
@@ -238,6 +237,12 @@ class I18nMessage:
         self.is_fuzzy = is_fuzzy
         self.is_commented = is_commented
 
+    # ~ def __getstate__(self):
+        # ~ return {key: getattr(self, key) for key in self.__slots__}
+
+    # ~ def __getstate__(self):
+        # ~ return {key: getattr(self, key) for key in self.__slots__}
+
     def _get_msgctxt(self):
         return "".join(self.msgctxt_lines)
@@ -426,6 +431,14 @@ class I18nMessages:
         self._reverse_cache = None
 
+    def __getstate__(self):
+        return (self.settings, self.uid, self.msgs, self.parsing_errors)
+
+    def __setstate__(self, data):
+        self.__init__()
+        self.settings, self.uid, self.msgs, self.parsing_errors = data
+        self.update_info()
+
     @staticmethod
     def _new_messages():
         return getattr(collections, 'OrderedDict', dict)()
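A hedged usage sketch of the new pickle support for I18nMessages: it assumes Blender's Python (where bl_i18n_utils is importable) and that the constructor accepts a PO file through the kind/src parameters; the locale and path are placeholders.

import pickle

from bl_i18n_utils import utils as utils_i18n

msgs = utils_i18n.I18nMessages(uid="fr_FR", kind='PO', src="/path/to/fr.po")

data = pickle.dumps(msgs)     # __getstate__: (settings, uid, msgs, parsing_errors)
clone = pickle.loads(data)    # __setstate__: __init__(), restore the tuple, update_info()

assert clone.uid == msgs.uid
assert clone.msgs.keys() == msgs.msgs.keys()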
@@ -566,24 +579,23 @@ class I18nMessages:
         # Next process new keys.
         if use_similar > 0.0:
-            with concurrent.futures.ProcessPoolExecutor() as exctr:
-                for key, msgid in exctr.map(get_best_similar,
-                                            tuple((nk, use_similar, tuple(similar_pool.keys())) for nk in new_keys)):
-                    if msgid:
-                        # Try to get the same context, else just get one...
-                        skey = (key[0], msgid)
-                        if skey not in similar_pool[msgid]:
-                            skey = tuple(similar_pool[msgid])[0]
-                        # We keep org translation and comments, and mark message as fuzzy.
-                        msg, refmsg = self.msgs[skey].copy(), ref.msgs[key]
-                        msg.msgctxt = refmsg.msgctxt
-                        msg.msgid = refmsg.msgid
-                        msg.sources = refmsg.sources
-                        msg.is_fuzzy = True
-                        msg.is_commented = refmsg.is_commented
-                        msgs[key] = msg
-                    else:
-                        msgs[key] = ref.msgs[key]
+            for key, msgid in map(get_best_similar,
+                                  tuple((nk, use_similar, tuple(similar_pool.keys())) for nk in new_keys)):
+                if msgid:
+                    # Try to get the same context, else just get one...
+                    skey = (key[0], msgid)
+                    if skey not in similar_pool[msgid]:
+                        skey = tuple(similar_pool[msgid])[0]
+                    # We keep org translation and comments, and mark message as fuzzy.
+                    msg, refmsg = self.msgs[skey].copy(), ref.msgs[key]
+                    msg.msgctxt = refmsg.msgctxt
+                    msg.msgid = refmsg.msgid
+                    msg.sources = refmsg.sources
+                    msg.is_fuzzy = True
+                    msg.is_commented = refmsg.is_commented
+                    msgs[key] = msg
+                else:
+                    msgs[key] = ref.msgs[key]
         else:
             for key in new_keys:
                 msgs[key] = ref.msgs[key]
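With the message containers now pickleable, the per-update parallelism removed above could instead live one level up, in the i18n add-on itself. Below is a rough sketch under that assumption (not code from this commit); the I18nMessages constructor and write() signatures used here are assumptions and would need to match the module's actual entry points.

import concurrent.futures

from bl_i18n_utils import utils as utils_i18n


def _update_po(args):
    # Runs in a worker process: ref is an I18nMessages that was pickled into the worker.
    po_path, ref = args
    msgs = utils_i18n.I18nMessages(kind='PO', src=po_path)   # assumed constructor usage
    msgs.update(ref)                                         # same merge as above, one file per process
    msgs.write(kind='PO', dest=po_path)                      # assumed writer usage
    return po_path


def update_all(po_paths, ref):
    with concurrent.futures.ProcessPoolExecutor() as pool:
        for done in pool.map(_update_po, ((p, ref) for p in po_paths)):
            print("Updated:", done)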
@@ -1075,9 +1087,7 @@ class I18nMessages:
             "-o",
             fname,
         )
-        print("Running ", " ".join(cmd))
         ret = subprocess.call(cmd)
-        print("Finished.")
         return
     # XXX Code below is currently broken (generates corrupted mo files it seems :( )!
     # Using http://www.gnu.org/software/gettext/manual/html_node/MO-Files.html notation.