Re-design of submodules used in blender.git
This commit implements the changes described in #104573. The goal is to fix the confusion caused by submodule hash changes, which are not ideal in any of the supported git-submodule configurations (they are either always visible, causing confusion, or silently staged and committed, also causing confusion). This commit replaces submodules with a checkout of addons and addons_contrib, covered by the .gitignore, while the locale files and developer tools are moved into the main repository. This also changes the paths: - /release/scripts is moved to /scripts - /source/tools is moved to /tools - /release/datafiles/locale is moved to /locale This is done to avoid conflicts when using bisect, and also allows the buildbot to automatically "recover" when building older or newer branches/patches. Running `make update` will initialize the local checkout to the changed repository configuration. Another aspect of the change is that `make update` now supports the GitHub style of remote organization (the origin remote pointing to the fork, the upstream remote pointing to the upstream blender/blender.git). Pull Request #104755
This commit is contained in:
3
scripts/modules/bl_i18n_utils/__init__.py
Normal file
3
scripts/modules/bl_i18n_utils/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
"""Package for translation (i18n) tools."""
|
||||
1137
scripts/modules/bl_i18n_utils/bl_extract_messages.py
Normal file
1137
scripts/modules/bl_i18n_utils/bl_extract_messages.py
Normal file
File diff suppressed because it is too large
Load Diff
130
scripts/modules/bl_i18n_utils/merge_po.py
Executable file
130
scripts/modules/bl_i18n_utils/merge_po.py
Executable file
@@ -0,0 +1,130 @@
|
||||
#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0-or-later

# Merge one or more .po files into the first dest one.
# If a msgkey is present in more than one merged po, the one in the first file wins, unless
# it’s marked as fuzzy and one later is not.
# The fuzzy flag is removed if necessary.
# All other comments are never modified.
# However, commented messages in dst will always remain commented, and commented messages are
# never merged from sources.

import sys

# Support running both as a standalone script (no package context) and as a
# module inside the bl_i18n_utils package.
if __package__ is None:
    import settings
    import utils
else:
    from . import (
        settings,
        utils,
    )
# NOTE(review): only `utils` is referenced by main() below; `settings` looks
# unused here — confirm before removing, other entry points may rely on it.


# XXX This is a quick hack to make it work with new I18n... objects! To be reworked!
|
||||
def main():
    """Command-line entry point: merge one or more source .po files into a destination .po.

    Returns 0 on success, 1 when the destination po is broken (abort) or when
    at least one source po failed to parse (those sources are skipped).
    """
    import argparse
    parser = argparse.ArgumentParser(
        description=(
            "Merge one or more .po files into the first dest one.\n"
            "If a msgkey (msgctxt, msgid) is present in more than one merged po, the one in the first file "
            "wins, unless it’s marked as fuzzy and one later is not.\n"
            "The fuzzy flag is removed if necessary.\n"
            "All other comments are never modified.\n"
            "Commented messages in dst will always remain commented, and commented messages are never merged "
            "from sources."
        ),
    )
    parser.add_argument('-s', '--stats', action="store_true", help="Show statistics info.")
    parser.add_argument('-r', '--replace', action="store_true",
                        help="Replace existing messages of same \"level\" already in dest po.")
    parser.add_argument('dst', metavar='dst.po', help="The dest po into which merge the others.")
    parser.add_argument('src', metavar='src.po', nargs='+', help="The po's to merge into the dst.po one.")
    args = parser.parse_args()

    ret = 0
    # Msgkeys already settled by a translated (resp. fuzzy) message; later
    # sources may not override entries recorded here (fuzzy ones may still be
    # upgraded to translated).
    done_msgkeys = set()
    done_fuzzy_msgkeys = set()
    nbr_merged = 0
    nbr_replaced = 0
    nbr_added = 0
    nbr_unfuzzied = 0

    dst_msgs = utils.I18nMessages(kind='PO', src=args.dst)
    if dst_msgs.parsing_errors:
        print("Dest po is BROKEN, aborting.")
        return 1
    if args.stats:
        print("Dest po, before merging:")
        dst_msgs.print_stats(prefix="\t")
    # If we don’t want to replace existing valid translations, pre-populate done_msgkeys and done_fuzzy_msgkeys.
    if not args.replace:
        done_msgkeys = dst_msgs.trans_msgs.copy()
        done_fuzzy_msgkeys = dst_msgs.fuzzy_msgs.copy()
    for po in args.src:
        msgs = utils.I18nMessages(kind='PO', src=po)
        if msgs.parsing_errors:
            print("\tSrc po {} is BROKEN, skipping.".format(po))
            ret = 1
            continue
        print("\tMerging {}...".format(po))
        if args.stats:
            print("\t\tMerged po stats:")
            msgs.print_stats(prefix="\t\t\t")
        for msgkey, msg in msgs.msgs.items():
            # NOTE(review): msgctxt/msgid and is_ttip below are unpacked but
            # never used in this function — likely leftovers to clean up.
            msgctxt, msgid = msgkey
            # This msgkey has already been completely merged, or is a commented one,
            # or the new message is commented, skip it.
            if msgkey in (done_msgkeys | dst_msgs.comm_msgs | msgs.comm_msgs):
                continue
            is_ttip = msg.is_tooltip
            # New messages does not yet exists in dest.
            if msgkey not in dst_msgs.msgs:
                # NOTE(review): item assignment here vs `dst_msgs.msgs[msgkey]`
                # elsewhere — presumably I18nMessages supports both; confirm.
                dst_msgs[msgkey] = msgs.msgs[msgkey]
                if msgkey in msgs.fuzzy_msgs:
                    done_fuzzy_msgkeys.add(msgkey)
                    dst_msgs.fuzzy_msgs.add(msgkey)
                elif msgkey in msgs.trans_msgs:
                    done_msgkeys.add(msgkey)
                    dst_msgs.trans_msgs.add(msgkey)
                nbr_added += 1
            # From now on, the new messages is already in dst.
            # New message is neither translated nor fuzzy, skip it.
            elif msgkey not in (msgs.trans_msgs | msgs.fuzzy_msgs):
                continue
            # From now on, the new message is either translated or fuzzy!
            # The new message is translated.
            elif msgkey in msgs.trans_msgs:
                dst_msgs.msgs[msgkey].msgstr = msg.msgstr
                done_msgkeys.add(msgkey)
                done_fuzzy_msgkeys.discard(msgkey)
                if msgkey in dst_msgs.fuzzy_msgs:
                    # Dest entry was fuzzy but we now have a real translation.
                    dst_msgs.fuzzy_msgs.remove(msgkey)
                    nbr_unfuzzied += 1
                if msgkey not in dst_msgs.trans_msgs:
                    dst_msgs.trans_msgs.add(msgkey)
                else:
                    nbr_replaced += 1
                nbr_merged += 1
            # The new message is fuzzy, org one is fuzzy too, and this msgkey has not yet been merged.
            elif msgkey not in (dst_msgs.trans_msgs | done_fuzzy_msgkeys):
                dst_msgs[msgkey].msgstr = msg.msgstr
                done_fuzzy_msgkeys.add(msgkey)
                dst_msgs.fuzzy_msgs.add(msgkey)
                nbr_merged += 1
                nbr_replaced += 1

    dst_msgs.write(kind='PO', dest=args.dst)

    print("Merged completed. {} messages were merged (among which {} were replaced), {} were added, "
          "{} were \"un-fuzzied\".".format(nbr_merged, nbr_replaced, nbr_added, nbr_unfuzzied))
    if args.stats:
        dst_msgs.update_info()
        print("Final merged po stats:")
        dst_msgs.print_stats(prefix="\t")
    return ret
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point: announce which tool is running, then propagate
    # main()'s return value as the process exit status.
    print(f"\n\n *** Running {__file__} *** \n")
    sys.exit(main())
|
||||
710
scripts/modules/bl_i18n_utils/settings.py
Normal file
710
scripts/modules/bl_i18n_utils/settings.py
Normal file
@@ -0,0 +1,710 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later

"""Global settings used by all scripts in this dir (Blender i18n tooling)."""

# Global settings used by all scripts in this dir.
# XXX Before any use of the tools in this dir, please make a copy of this file
# named "setting.py"
# XXX This is a template, most values should be OK, but some you’ll have to
# edit (most probably, BLENDER_EXEC and SOURCE_DIR).


import json
import os
import sys
import types

# bpy is only available when running inside Blender; fall back gracefully.
try:
    import bpy
except ModuleNotFoundError:
    print("Could not import bpy, some features are not available when not run from Blender.")
    bpy = None

###############################################################################
# MISC
###############################################################################

# The languages defined in Blender.
LANGUAGES_CATEGORIES = (
    # Min completeness level, UI english label.
    (0.95, "Complete"),
    (0.33, "In Progress"),
    (-1.0, "Starting"),
)
LANGUAGES = (
    # ID, UI english label, ISO code.
    (0, "Automatic (Automatic)", "DEFAULT"),
    (1, "English (English)", "en_US"),
    (2, "Japanese (日本語)", "ja_JP"),
    (3, "Dutch (Nederlandse taal)", "nl_NL"),
    (4, "Italian (Italiano)", "it_IT"),
    (5, "German (Deutsch)", "de_DE"),
    (6, "Finnish (Suomi)", "fi_FI"),
    (7, "Swedish (Svenska)", "sv_SE"),
    (8, "French (Français)", "fr_FR"),
    (9, "Spanish (Español)", "es"),
    (10, "Catalan (Català)", "ca_AD"),
    (11, "Czech (Český)", "cs_CZ"),
    (12, "Portuguese (Português)", "pt_PT"),
    (13, "Simplified Chinese (简体中文)", "zh_CN"),
    (14, "Traditional Chinese (繁體中文)", "zh_TW"),
    (15, "Russian (Русский)", "ru_RU"),
    (16, "Croatian (Hrvatski)", "hr_HR"),
    (17, "Serbian (Српски)", "sr_RS"),
    (18, "Ukrainian (Українська)", "uk_UA"),
    (19, "Polish (Polski)", "pl_PL"),
    (20, "Romanian (Român)", "ro_RO"),
    # Using the utf8 flipped form of Arabic (العربية).
    (21, "Arabic (ﺔﻴﺑﺮﻌﻟﺍ)", "ar_EG"),
    (22, "Bulgarian (Български)", "bg_BG"),
    (23, "Greek (Ελληνικά)", "el_GR"),
    (24, "Korean (한국어)", "ko_KR"),
    (25, "Nepali (नेपाली)", "ne_NP"),
    # Using the utf8 flipped form of Persian (فارسی).
    (26, "Persian (ﯽﺳﺭﺎﻓ)", "fa_IR"),
    (27, "Indonesian (Bahasa indonesia)", "id_ID"),
    (28, "Serbian Latin (Srpski latinica)", "sr_RS@latin"),
    (29, "Kyrgyz (Кыргыз тили)", "ky_KG"),
    (30, "Turkish (Türkçe)", "tr_TR"),
    (31, "Hungarian (Magyar)", "hu_HU"),
    (32, "Brazilian Portuguese (Português do Brasil)", "pt_BR"),
    # Using the utf8 flipped form of Hebrew (עִבְרִית)).
    (33, "Hebrew (תירִבְעִ)", "he_IL"),
    (34, "Estonian (Eestlane)", "et_EE"),
    (35, "Esperanto (Esperanto)", "eo"),
    (36, "Spanish from Spain (Español de España)", "es_ES"),
    (37, "Amharic (አማርኛ)", "am_ET"),
    (38, "Uzbek (Oʻzbek)", "uz_UZ"),
    (39, "Uzbek Cyrillic (Ўзбек)", "uz_UZ@cyrillic"),
    (40, "Hindi (मानक हिन्दी)", "hi_IN"),
    (41, "Vietnamese (tiếng Việt)", "vi_VN"),
    (42, "Basque (Euskara)", "eu_EU"),
    (43, "Hausa (Hausa)", "ha"),
    (44, "Kazakh (қазақша)", "kk_KZ"),
    (45, "Abkhaz (Аԥсуа бызшәа)", "ab"),
    (46, "Thai (ภาษาไทย)", "th_TH"),
    (47, "Slovak (Slovenčina)", "sk_SK"),
    (48, "Georgian (ქართული)", "ka"),
)

# Default context, in py (keep in sync with `BLT_translation.h`)!
if bpy is not None:
    assert bpy.app.translations.contexts.default == "*"
DEFAULT_CONTEXT = "*"

# Name of language file used by Blender to generate translations' menu.
LANGUAGES_FILE = "languages"

# The min level of completeness for a po file to be imported from /branches into /trunk, as a percentage.
IMPORT_MIN_LEVEL = 0.0

# Languages in /branches we do not want to import in /trunk currently...
IMPORT_LANGUAGES_SKIP = {
    'am_ET', 'bg_BG', 'el_GR', 'et_EE', 'ne_NP', 'ro_RO', 'uz_UZ', 'uz_UZ@cyrillic', 'kk_KZ', 'es_ES',
}

# Languages that need RTL pre-processing.
IMPORT_LANGUAGES_RTL = {
    'ar_EG', 'fa_IR', 'he_IL',
}

# The comment prefix used in generated messages.txt file.
MSG_COMMENT_PREFIX = "#~ "

# The comment prefix used in generated messages.txt file.
MSG_CONTEXT_PREFIX = "MSGCTXT:"

# The default comment prefix used in po's.
PO_COMMENT_PREFIX = "# "

# The comment prefix used to mark sources of msgids, in po's.
PO_COMMENT_PREFIX_SOURCE = "#: "

# The comment prefix used to mark sources of msgids, in po's.
PO_COMMENT_PREFIX_SOURCE_CUSTOM = "#. :src: "

# The general "generated" comment prefix, in po's.
PO_COMMENT_PREFIX_GENERATED = "#. "

# The comment prefix used to comment entries in po's.
PO_COMMENT_PREFIX_MSG = "#~ "

# The comment prefix used to mark fuzzy msgids, in po's.
PO_COMMENT_FUZZY = "#, fuzzy"

# The prefix used to define context, in po's.
PO_MSGCTXT = "msgctxt "

# The prefix used to define msgid, in po's.
PO_MSGID = "msgid "

# The prefix used to define msgstr, in po's.
PO_MSGSTR = "msgstr "

# The 'header' key of po files.
PO_HEADER_KEY = (DEFAULT_CONTEXT, "")

# Skeleton of a generated po header; `{...}` placeholders are filled in by the
# writer, the literal `\\n` stays as a two-char escape inside the po file.
PO_HEADER_MSGSTR = (
    "Project-Id-Version: {blender_ver} ({blender_hash})\\n\n"
    "Report-Msgid-Bugs-To: \\n\n"
    "POT-Creation-Date: {time}\\n\n"
    "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\\n\n"
    "Last-Translator: FULL NAME <EMAIL@ADDRESS>\\n\n"
    "Language-Team: LANGUAGE <LL@li.org>\\n\n"
    "Language: {uid}\\n\n"
    "MIME-Version: 1.0\\n\n"
    "Content-Type: text/plain; charset=UTF-8\\n\n"
    "Content-Transfer-Encoding: 8bit\n"
)
PO_HEADER_COMMENT_COPYRIGHT = (
    "# Blender's translation file (po format).\n"
    "# Copyright (C) {year} The Blender Foundation.\n"
    "# This file is distributed under the same license as the Blender package.\n"
    "#\n"
)
PO_HEADER_COMMENT = (
    "# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.\n"
    "#"
)

TEMPLATE_ISO_ID = "__TEMPLATE__"

# Num buttons report their label with a trailing ': '...
NUM_BUTTON_SUFFIX = ": "

# Undocumented operator placeholder string.
UNDOC_OPS_STR = "(undocumented operator)"

# The gettext domain.
DOMAIN = "blender"

# Our own "gettext" stuff.
# File type (ext) to parse.
PYGETTEXT_ALLOWED_EXTS = {".c", ".cc", ".cpp", ".cxx", ".hh", ".hpp", ".hxx", ".h"}

# Max number of contexts into a BLT_I18N_MSGID_MULTI_CTXT macro...
PYGETTEXT_MAX_MULTI_CTXT = 16

# Where to search contexts definitions, relative to SOURCE_DIR (defined below).
PYGETTEXT_CONTEXTS_DEFSRC = os.path.join("source", "blender", "blentranslation", "BLT_translation.h")

# Regex to extract contexts defined in BLT_translation.h
# XXX Not full-proof, but should be enough here!
PYGETTEXT_CONTEXTS = "#define\\s+(BLT_I18NCONTEXT_[A-Z_0-9]+)\\s+\"([^\"]*)\""

# autopep8: off

# Keywords' regex.
# XXX Most unfortunately, we can't use named backreferences inside character sets,
# which makes the regexes even more twisty... :/
_str_base = (
    # Match void string
    "(?P<{_}1>[\"'])(?P={_}1)"  # Get opening quote (' or "), and closing immediately.
    "|"
    # Or match non-void string
    "(?P<{_}2>[\"'])"  # Get opening quote (' or ").
    "(?{capt}(?:"
    # This one is for crazy things like "hi \\\\\" folks!"...
    r"(?:(?!<\\)(?:\\\\)*\\(?=(?P={_}2)))|"
    # The most common case.
    ".(?!(?P={_}2))"
    ")+.)"  # Don't forget the last char!
    "(?P={_}2)"  # And closing quote.
)
str_clean_re = _str_base.format(_="g", capt="P<clean>")
_inbetween_str_re = (
    # XXX Strings may have comments between their pieces too, not only spaces!
    r"(?:\s*(?:"
    # A C comment
    r"/\*.*(?!\*/).\*/|"
    # Or a C++ one!
    r"//[^\n]*\n"
    # And we are done!
    r")?)*"
)
# Here we have to consider two different cases (empty string and other).
_str_whole_re = (
    _str_base.format(_="{_}1_", capt=":") +
    # Optional loop start, this handles "split" strings...
    "(?:(?<=[\"'])" + _inbetween_str_re + "(?=[\"'])(?:"
    + _str_base.format(_="{_}2_", capt=":") +
    # End of loop.
    "))*"
)
_ctxt_re_gen = lambda uid : r"(?P<ctxt_raw{uid}>(?:".format(uid=uid) + \
                            _str_whole_re.format(_="_ctxt{uid}".format(uid=uid)) + \
                            r")|(?:[A-Z_0-9]+))"
_ctxt_re = _ctxt_re_gen("")
_msg_re = r"(?P<msg_raw>" + _str_whole_re.format(_="_msg") + r")"
# One regex per i18n macro/function family; each match captures the raw msgid
# (and optionally its context) out of C/C++ sources.
PYGETTEXT_KEYWORDS = (() +
    tuple((r"{}\(\s*" + _msg_re + r"\s*\)").format(it)
          for it in ("IFACE_", "TIP_", "DATA_", "N_")) +

    tuple((r"{}\(\s*" + _ctxt_re + r"\s*,\s*" + _msg_re + r"\s*\)").format(it)
          for it in ("CTX_IFACE_", "CTX_TIP_", "CTX_DATA_", "CTX_N_")) +

    tuple(("{}\\((?:[^\"',]+,){{1,2}}\\s*" + _msg_re + r"\s*(?:\)|,)").format(it)
          for it in ("BKE_report", "BKE_reportf", "BKE_reports_prepend", "BKE_reports_prependf",
                     "CTX_wm_operator_poll_msg_set")) +

    tuple(("{}\\((?:[^\"',]+,){{3}}\\s*" + _msg_re + r"\s*\)").format(it)
          for it in ("BMO_error_raise",)) +

    tuple(("{}\\((?:[^\"',]+,){{2}}\\s*" + _msg_re + r"\s*(?:\)|,)").format(it)
          for it in ("BKE_modifier_set_error",)) +

    # This one is a tad more risky, but in practice would not expect a name/uid string parameter
    # (the second one in those functions) to ever have a comma in it, so think this is fine.
    tuple(("{}\\((?:[^,]+,){{2}}\\s*" + _msg_re + r"\s*(?:\)|,)").format(it)
          for it in ("modifier_subpanel_register", "gpencil_modifier_subpanel_register")) +

    # bUnitDef unit names.
    # NOTE: regex is a bit more complex than it would need too. Since the actual
    # identifier (`B_UNIT_DEF_`) is at the end, if it's simpler/too general it
    # becomes extremely slow to process some (unrelated) source files.
    ((r"\{(?:(?:\s*\"[^\",]+\"\s*,)|(?:\s*\"\\\"\",)|(?:\s*NULL\s*,)){4}\s*" +
      _msg_re + r"\s*,(?:(?:\s*\"[^\"',]+\"\s*,)|(?:\s*NULL\s*,))(?:[^,]+,){2}"
      + "(?:\|?\s*B_UNIT_DEF_[_A-Z]+\s*)+\}"),) +

    tuple((r"{}\(\s*" + _msg_re + r"\s*,\s*(?:" +
           r"\s*,\s*)?(?:".join(_ctxt_re_gen(i) for i in range(PYGETTEXT_MAX_MULTI_CTXT)) + r")?\s*\)").format(it)
          for it in ("BLT_I18N_MSGID_MULTI_CTXT",))
)

# autopep8: on


# Check printf mismatches between msgid and msgstr.
CHECK_PRINTF_FORMAT = (
    r"(?!<%)(?:%%)*%"  # Beginning, with handling for crazy things like '%%%%%s'
    r"[-+#0]?"  # Flags (note: do not add the ' ' (space) flag here, generates too much false positives!)
    r"(?:\*|[0-9]+)?"  # Width
    r"(?:\.(?:\*|[0-9]+))?"  # Precision
    r"(?:[hljztL]|hh|ll)?"  # Length
    r"[tldiuoxXfFeEgGaAcspn]"  # Specifiers (note we have Blender-specific %t and %l ones too)
)

# Should po parser warn when finding a first letter not capitalized?
WARN_MSGID_NOT_CAPITALIZED = True

# Strings that should not raise above warning!
WARN_MSGID_NOT_CAPITALIZED_ALLOWED = {
    "",  # Simplifies things... :p
    "ac3",
    "along X",
    "along Y",
    "along Z",
    "along %s X",
    "along %s Y",
    "along %s Z",
    "along local Z",
    "arccos(A)",
    "arcsin(A)",
    "arctan(A)",
    "ascii",
    "author",  # Addons' field. :/
    "bItasc",
    "blender.org",
    "color_index is invalid",
    "cos(A)",
    "cosh(A)",
    "dbl-",  # Compacted for 'double', for keymap items.
    "description",  # Addons' field. :/
    "dx",
    "fBM",
    "flac",
    "fps: %.2f",
    "fps: %i",
    "gimbal",
    "global",
    "glTF 2.0 (.glb/.gltf)",
    "glTF Binary (.glb)",
    "glTF Embedded (.gltf)",
    "glTF Material Output",
    "glTF Original PBR data",
    "glTF Separate (.gltf + .bin + textures)",
    "invoke() needs to be called before execute()",
    "iScale",
    "iso-8859-15",
    "iTaSC",
    "iTaSC parameters",
    "kb",
    "local",
    "location",  # Addons' field. :/
    "locking %s X",
    "locking %s Y",
    "locking %s Z",
    "mkv",
    "mm",
    "mp2",
    "mp3",
    "normal",
    "ogg",
    "oneAPI",
    "p0",
    "px",
    "re",
    "res",
    "rv",
    "sin(A)",
    "sin(x) / x",
    "sinh(A)",
    "sqrt(x*x+y*y+z*z)",
    "sRGB",
    "sRGB display space",
    "sRGB display space with Filmic view transform",
    "tan(A)",
    "tanh(A)",
    "utf-8",
    "uv_on_emitter() requires a modifier from an evaluated object",
    "var",
    "vBVH",
    "view",
    "wav",
    "wmOwnerID '%s' not in workspace '%s'",
    "y",
    "y = (Ax + B)",
    # Sub-strings.
    "all",
    "all and invert unselected",
    "and AMD driver version 22.10 or newer",
    "and AMD Radeon Pro 21.Q4 driver or newer",
    "and Linux driver version xx.xx.23904 or newer",
    "and NVIDIA driver version 470 or newer",
    "and Windows driver version 101.3430 or newer",
    "available with",
    "brown fox",
    "can't save image while rendering",
    "category",
    "constructive modifier",
    "cursor",
    "custom",
    "custom matrix",
    "custom orientation",
    "edge data",
    "exp(A)",
    "expected a timeline/animation area to be active",
    "expected a view3d region",
    "expected a view3d region & editcurve",
    "expected a view3d region & editmesh",
    "face data",
    "gimbal",
    "global",
    "glTF Settings",
    "image file not found",
    "image format is read-only",
    "image path can't be written to",
    "in memory to enable editing!",
    "insufficient content",
    "into",
    "jumps over",
    "left",
    "local",
    "matrices", "no matrices",
    "multi-res modifier",
    "name",
    "non-triangle face",
    "normal",
    "or AMD with macOS 12.3 or newer",
    "performance impact!",
    "positions", "no positions",
    "read",
    "remove",
    "right",
    "selected",
    "selected and lock unselected",
    "selected and unlock unselected",
    "screen",
    "the lazy dog",
    "this legacy pose library to pose assets",
    "to the top level of the tree",
    "unable to load movie clip",
    "unable to load text",
    "unable to open the file",
    "unknown error reading file",
    "unknown error stating file",
    "unknown error writing file",
    "unselected",
    "unsupported font format",
    "unsupported format",
    "unsupported image format",
    "unsupported movie clip format",
    "untitled",
    "vertex data",
    "verts only",
    "view",
    "virtual parents",
    "which was replaced by the Asset Browser",
    "write",
}
# Language ISO codes (third element of each LANGUAGES tuple) are lowercase by
# design, so allow them all as well.
WARN_MSGID_NOT_CAPITALIZED_ALLOWED |= set(lng[2] for lng in LANGUAGES)

# Messages allowed to end with a '.' (period) without triggering a warning.
WARN_MSGID_END_POINT_ALLOWED = {
    "Circle|Alt .",
    "Float Neg. Exp.",
    "Max Ext.",
    "Newer graphics drivers may be available to improve Blender support.",
    "Numpad .",
    "Pad.",
    " RNA Path: bpy.types.",
    "Temp. Diff.",
    "Temperature Diff.",
    "The program will now close.",
    "Your graphics card or driver has limited support. It may work, but with issues.",
    "Your graphics card or driver is not supported.",
    "Invalid surface UVs on %d curves.",
}

# Hash algorithm name (as accepted by hashlib) used for the parser cache.
PARSER_CACHE_HASH = 'sha1'

PARSER_TEMPLATE_ID = "__POT__"
PARSER_PY_ID = "__PY__"

PARSER_PY_MARKER_BEGIN = "\n# ##### BEGIN AUTOGENERATED I18N SECTION #####\n"
PARSER_PY_MARKER_END = "\n# ##### END AUTOGENERATED I18N SECTION #####\n"

PARSER_MAX_FILE_SIZE = 2 ** 24  # in bytes, i.e. 16 Mb.

###############################################################################
# PATHS
###############################################################################

# The Python3 executable.You’ll likely have to edit it in your user_settings.py
# if you’re under Windows.
PYTHON3_EXEC = "python3"

# The Blender executable!
# This is just an example, you’ll have to edit it in your user_settings.py!
BLENDER_EXEC = os.path.abspath(os.path.join("foo", "bar", "blender"))
# check for blender.bin
if not os.path.exists(BLENDER_EXEC):
    if os.path.exists(BLENDER_EXEC + ".bin"):
        BLENDER_EXEC = BLENDER_EXEC + ".bin"

# The gettext msgfmt "compiler". You’ll likely have to edit it in your user_settings.py if you’re under Windows.
GETTEXT_MSGFMT_EXECUTABLE = "msgfmt"

# The FriBidi C compiled library (.so under Linux, .dll under windows...).
# You’ll likely have to edit it in your user_settings.py if you’re under Windows., e.g. using the included one:
# FRIBIDI_LIB = os.path.join(TOOLS_DIR, "libfribidi.dll")
FRIBIDI_LIB = "libfribidi.so.0"

# The name of the (currently empty) file that must be present in a po's directory to enable rtl-preprocess.
RTL_PREPROCESS_FILE = "is_rtl"

# The Blender source root path.
# This is just an example, you’ll have to override it in your user_settings.py!
SOURCE_DIR = os.path.abspath(os.path.join("blender"))

# The bf-translation repository (you'll have to override this in your user_settings.py).
I18N_DIR = os.path.abspath(os.path.join("i18n"))

# The /branches path (relative to I18N_DIR).
REL_BRANCHES_DIR = os.path.join("branches")

# The /trunk path (relative to I18N_DIR).
REL_TRUNK_DIR = os.path.join("trunk")

# The /trunk/po path (relative to I18N_DIR).
REL_TRUNK_PO_DIR = os.path.join(REL_TRUNK_DIR, "po")

# The /trunk/mo path (relative to I18N_DIR).
REL_TRUNK_MO_DIR = os.path.join(REL_TRUNK_DIR, "locale")


# The path to the *git* translation repository (relative to SOURCE_DIR).
REL_GIT_I18N_DIR = os.path.join("locale")


# The /po path of the *git* translation repository (relative to REL_GIT_I18N_DIR).
REL_GIT_I18N_PO_DIR = os.path.join("po")


# The Blender source path to check for i18n macros (relative to SOURCE_DIR).
REL_POTFILES_SOURCE_DIR = os.path.join("source")

# Where to search for preset names (relative to SOURCE_DIR).
REL_PRESETS_DIR = os.path.join("release", "scripts", "presets")

# Where to search for templates (relative to SOURCE_DIR).
REL_TEMPLATES_DIR = os.path.join("release", "scripts", "startup",
                                 "bl_app_templates_system")

# The template messages file (relative to I18N_DIR).
REL_FILE_NAME_POT = os.path.join(REL_BRANCHES_DIR, DOMAIN + ".pot")

# Mo root datapath.
REL_MO_PATH_ROOT = os.path.join(REL_TRUNK_DIR, "locale")

# Mo path generator for a given language.
REL_MO_PATH_TEMPLATE = os.path.join(REL_MO_PATH_ROOT, "{}", "LC_MESSAGES")

# Mo path generator for a given language (relative to any "locale" dir).
MO_PATH_ROOT_RELATIVE = os.path.join("locale")
MO_PATH_TEMPLATE_RELATIVE = os.path.join(MO_PATH_ROOT_RELATIVE, "{}", "LC_MESSAGES")

# Mo file name.
MO_FILE_NAME = DOMAIN + ".mo"

# Where to search for py files that may contain ui strings (relative to one of the 'resource_path' of Blender).
CUSTOM_PY_UI_FILES = [
    os.path.join("scripts", "startup", "bl_ui"),
    os.path.join("scripts", "startup", "bl_operators"),
    os.path.join("scripts", "modules", "rna_prop_ui.py"),
    os.path.join("scripts", "modules", "rna_keymap_ui.py"),
    os.path.join("scripts", "modules", "bpy_types.py"),
    os.path.join("scripts", "presets", "keyconfig"),
]

# An optional text file listing files to force include/exclude from py_xgettext process.
SRC_POTFILES = ""

# A cache storing validated msgids, to avoid re-spellchecking them.
SPELL_CACHE = os.path.join("/tmp", ".spell_cache")

# Threshold defining whether a new msgid is similar enough with an old one to reuse its translation...
SIMILAR_MSGID_THRESHOLD = 0.75

# Additional import paths to add to sys.path (';' separated)...
INTERN_PY_SYS_PATHS = ""

# Custom override settings must be one dir above i18n tools itself!
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
try:
    from bl_i18n_settings_override import *
except ImportError:  # If no i18n_override_settings available, it’s no error!
    pass

# Override with custom user settings, if available.
try:
    from settings_user import *
except ImportError:  # If no user_settings available, it’s no error!
    pass


# Make any extra import paths configured above actually importable.
for p in set(INTERN_PY_SYS_PATHS.split(";")):
    if p:
        sys.path.append(p)
|
||||
|
||||
|
||||
# The settings class itself!
|
||||
def _do_get(ref, path):
|
||||
return os.path.normpath(os.path.join(ref, path))
|
||||
|
||||
|
||||
def _do_set(ref, path):
|
||||
path = os.path.normpath(path)
|
||||
# If given path is absolute, make it relative to current ref one (else we consider it is already the case!)
|
||||
if os.path.isabs(path):
|
||||
# can't always find the relative path (between drive letters on windows)
|
||||
try:
|
||||
return os.path.relpath(path, ref)
|
||||
except ValueError:
|
||||
pass
|
||||
return path
|
||||
|
||||
|
||||
def _gen_get_set_path(ref, name):
    """Return a (getter, setter) pair for a path attribute *name* stored relative to attribute *ref*."""
    def getter(self):
        # Expose the stored relative path resolved against the reference dir.
        return _do_get(getattr(self, ref), getattr(self, name))

    def setter(self, value):
        # Store the assigned path re-expressed relative to the reference dir.
        setattr(self, name, _do_set(getattr(self, ref), value))

    return getter, setter
|
||||
|
||||
|
||||
def _check_valid_data(uid, val):
|
||||
return not uid.startswith("_") and type(val) not in tuple(types.__dict__.values()) + (type,)
|
||||
|
||||
|
||||
class I18nSettings:
    """
    Class allowing persistence of our settings!
    Saved in JSon format, so settings should be JSon'able objects!
    """
    # Singleton instance holder (class-level).
    _settings = None

    def __new__(cls, *args, **kwargs):
        # Addon preferences are singleton by definition, so is this class!
        if not I18nSettings._settings:
            cls._settings = super(I18nSettings, cls).__new__(cls)
            # Seed the instance with every public, plain-data module-level
            # setting defined in this module.
            cls._settings.__dict__ = {uid: val for uid, val in globals().items() if _check_valid_data(uid, val)}
        return I18nSettings._settings

    def __getstate__(self):
        # Pickle support: serialize via the plain-dict representation.
        return self.to_dict()

    def __setstate__(self, mapping):
        return self.from_dict(mapping)

    def from_dict(self, mapping):
        """Update settings from a plain dict (as produced by to_dict/to_json)."""
        # Special case... :/
        # Route INTERN_PY_SYS_PATHS through the PY_SYS_PATHS property so that
        # sys.path is updated accordingly.
        if "INTERN_PY_SYS_PATHS" in mapping:
            self.PY_SYS_PATHS = mapping["INTERN_PY_SYS_PATHS"]
        self.__dict__.update(mapping)

    def to_dict(self):
        """Return the settings as a plain dict, restricted to known module-level setting names."""
        glob = globals()
        return {uid: val for uid, val in self.__dict__.items() if _check_valid_data(uid, val) and uid in glob}

    def from_json(self, string):
        """Update settings from a JSON string."""
        self.from_dict(dict(json.loads(string)))

    def to_json(self):
        """Return a JSON string of the settings differing from this module's defaults."""
        # Only save the diff from default i18n_settings!
        glob = globals()
        export_dict = {
            uid: val for uid, val in self.__dict__.items()
            if _check_valid_data(uid, val) and glob.get(uid) != val
        }
        return json.dumps(export_dict)

    def load(self, fname, reset=False):
        """Load settings from *fname* (a path, a JSON string, or a file-like object).

        When *reset* is True (or *fname* is None), first restore this module's defaults.
        """
        reset = reset or fname is None
        if reset:
            self.__dict__ = {uid: data for uid, data in globals().items() if not uid.startswith("_")}
        if fname is None:
            return
        if isinstance(fname, str):
            if not os.path.isfile(fname):
                # Assume it is already real JSon string...
                self.from_json(fname)
                return
            with open(fname, encoding="utf8") as f:
                self.from_json(f.read())
        # Else assume fname is already a file(like) object!
        else:
            self.from_json(fname.read())

    def save(self, fname):
        """Save settings (as JSON) to *fname*, a path or a file-like object."""
        if isinstance(fname, str):
            with open(fname, 'w', encoding="utf8") as f:
                f.write(self.to_json())
        # Else assume fname is already a file(like) object!
        else:
            fname.write(self.to_json())

    # Derived absolute-path properties: each resolves/stores its REL_* value
    # relative to the named reference directory attribute.
    BRANCHES_DIR = property(*(_gen_get_set_path("I18N_DIR", "REL_BRANCHES_DIR")))
    TRUNK_DIR = property(*(_gen_get_set_path("I18N_DIR", "REL_TRUNK_DIR")))
    TRUNK_PO_DIR = property(*(_gen_get_set_path("I18N_DIR", "REL_TRUNK_PO_DIR")))
    TRUNK_MO_DIR = property(*(_gen_get_set_path("I18N_DIR", "REL_TRUNK_MO_DIR")))
    GIT_I18N_ROOT = property(*(_gen_get_set_path("SOURCE_DIR", "REL_GIT_I18N_DIR")))
    GIT_I18N_PO_DIR = property(*(_gen_get_set_path("GIT_I18N_ROOT", "REL_GIT_I18N_PO_DIR")))
    POTFILES_SOURCE_DIR = property(*(_gen_get_set_path("SOURCE_DIR", "REL_POTFILES_SOURCE_DIR")))
    PRESETS_DIR = property(*(_gen_get_set_path("SOURCE_DIR", "REL_PRESETS_DIR")))
    TEMPLATES_DIR = property(*(_gen_get_set_path("SOURCE_DIR", "REL_TEMPLATES_DIR")))
    FILE_NAME_POT = property(*(_gen_get_set_path("I18N_DIR", "REL_FILE_NAME_POT")))
    MO_PATH_ROOT = property(*(_gen_get_set_path("I18N_DIR", "REL_MO_PATH_ROOT")))
    MO_PATH_TEMPLATE = property(*(_gen_get_set_path("I18N_DIR", "REL_MO_PATH_TEMPLATE")))

    def _get_py_sys_paths(self):
        return self.INTERN_PY_SYS_PATHS

    def _set_py_sys_paths(self, val):
        # Keep sys.path in sync: drop paths no longer listed, append new ones.
        old_paths = set(self.INTERN_PY_SYS_PATHS.split(";")) - {""}
        new_paths = set(val.split(";")) - {""}
        for p in old_paths - new_paths:
            if p in sys.path:
                sys.path.remove(p)
        for p in new_paths - old_paths:
            sys.path.append(p)
        self.INTERN_PY_SYS_PATHS = val
    # ';'-separated list of extra import paths, mirrored into sys.path on assignment.
    PY_SYS_PATHS = property(_get_py_sys_paths, _set_py_sys_paths)
|
||||
5
scripts/modules/bl_i18n_utils/settings_user.py
Normal file
5
scripts/modules/bl_i18n_utils/settings_user.py
Normal file
@@ -0,0 +1,5 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
import os
|
||||
|
||||
import settings
|
||||
1592
scripts/modules/bl_i18n_utils/utils.py
Normal file
1592
scripts/modules/bl_i18n_utils/utils.py
Normal file
File diff suppressed because it is too large
Load Diff
142
scripts/modules/bl_i18n_utils/utils_cli.py
Normal file
142
scripts/modules/bl_i18n_utils/utils_cli.py
Normal file
@@ -0,0 +1,142 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
# Some useful operations from utils' I18nMessages class exposed as a CLI.
|
||||
|
||||
import os
|
||||
|
||||
if __package__ is None:
|
||||
import settings as settings_i18n
|
||||
import utils as utils_i18n
|
||||
import utils_languages_menu
|
||||
else:
|
||||
from . import settings as settings_i18n
|
||||
from . import utils as utils_i18n
|
||||
from . import utils_languages_menu
|
||||
|
||||
|
||||
def update_po(args, settings):
|
||||
pot = utils_i18n.I18nMessages(uid=None, kind='PO', src=args.template, settings=settings)
|
||||
if os.path.isfile(args.dst):
|
||||
uid = os.path.splitext(os.path.basename(args.dst))[0]
|
||||
po = utils_i18n.I18nMessages(uid=uid, kind='PO', src=args.dst, settings=settings)
|
||||
po.update(pot)
|
||||
else:
|
||||
po = pot
|
||||
po.write(kind="PO", dest=args.dst)
|
||||
|
||||
|
||||
def cleanup_po(args, settings):
|
||||
uid = os.path.splitext(os.path.basename(args.src))[0]
|
||||
if not args.dst:
|
||||
args.dst = args.src
|
||||
po = utils_i18n.I18nMessages(uid=uid, kind='PO', src=args.src, settings=settings)
|
||||
po.check(fix=True)
|
||||
po.clean_commented()
|
||||
po.write(kind="PO", dest=args.dst)
|
||||
|
||||
|
||||
def strip_po(args, settings):
|
||||
uid = os.path.splitext(os.path.basename(args.src))[0]
|
||||
if not args.dst:
|
||||
args.dst = args.src
|
||||
po = utils_i18n.I18nMessages(uid=uid, kind='PO', src=args.src, settings=settings)
|
||||
po.clean_commented()
|
||||
po.write(kind="PO_COMPACT", dest=args.dst)
|
||||
|
||||
|
||||
def rtl_process_po(args, settings):
|
||||
uid = os.path.splitext(os.path.basename(args.src))[0]
|
||||
if not args.dst:
|
||||
args.dst = args.src
|
||||
po = utils_i18n.I18nMessages(uid=uid, kind='PO', src=args.src, settings=settings)
|
||||
po.rtl_process()
|
||||
po.write(kind="PO", dest=args.dst)
|
||||
|
||||
|
||||
def language_menu(args, settings):
|
||||
# 'DEFAULT' and en_US are always valid, fully-translated "languages"!
|
||||
stats = {"DEFAULT": 1.0, "en_US": 1.0}
|
||||
|
||||
po_to_uid = {
|
||||
os.path.basename(po_path_branch): uid
|
||||
for can_use, uid, _num_id, _name, _isocode, po_path_branch
|
||||
in utils_i18n.list_po_dir(settings.BRANCHES_DIR, settings)
|
||||
if can_use
|
||||
}
|
||||
for po_dir in os.listdir(settings.BRANCHES_DIR):
|
||||
po_dir = os.path.join(settings.BRANCHES_DIR, po_dir)
|
||||
if not os.path.isdir(po_dir):
|
||||
continue
|
||||
for po_path in os.listdir(po_dir):
|
||||
uid = po_to_uid.get(po_path, None)
|
||||
#print("Checking %s, found uid %s" % (po_path, uid))
|
||||
po_path = os.path.join(settings.TRUNK_PO_DIR, po_path)
|
||||
if uid is not None:
|
||||
po = utils_i18n.I18nMessages(uid=uid, kind='PO', src=po_path, settings=settings)
|
||||
stats[uid] = po.nbr_trans_msgs / po.nbr_msgs if po.nbr_msgs > 0 else 0
|
||||
utils_languages_menu.gen_menu_file(stats, settings)
|
||||
|
||||
|
||||
def main():
|
||||
import sys
|
||||
import argparse
|
||||
|
||||
parser = argparse.ArgumentParser(description="Tool to perform common actions over PO/MO files.")
|
||||
parser.add_argument(
|
||||
'-s', '--settings', default=None,
|
||||
help="Override (some) default settings. Either a JSon file name, or a JSon string.",
|
||||
)
|
||||
sub_parsers = parser.add_subparsers()
|
||||
|
||||
sub_parser = sub_parsers.add_parser('update_po', help="Update a PO file from a given POT template file")
|
||||
sub_parser.add_argument(
|
||||
'--template', metavar='template.pot', required=True,
|
||||
help="The source pot file to use as template for the update.",
|
||||
)
|
||||
sub_parser.add_argument('--dst', metavar='dst.po', required=True, help="The destination po to update.")
|
||||
sub_parser.set_defaults(func=update_po)
|
||||
|
||||
sub_parser = sub_parsers.add_parser(
|
||||
'cleanup_po',
|
||||
help="Cleanup a PO file (check for and fix some common errors, remove commented messages).",
|
||||
)
|
||||
sub_parser.add_argument('--src', metavar='src.po', required=True, help="The source po file to clean up.")
|
||||
sub_parser.add_argument('--dst', metavar='dst.po', help="The destination po to write to.")
|
||||
sub_parser.set_defaults(func=cleanup_po)
|
||||
|
||||
sub_parser = sub_parsers.add_parser(
|
||||
'strip_po',
|
||||
help="Reduce all non-essential data from given PO file (reduce its size).",
|
||||
)
|
||||
sub_parser.add_argument('--src', metavar='src.po', required=True, help="The source po file to strip.")
|
||||
sub_parser.add_argument('--dst', metavar='dst.po', help="The destination po to write to.")
|
||||
sub_parser.set_defaults(func=strip_po)
|
||||
|
||||
sub_parser = sub_parsers.add_parser(
|
||||
'rtl_process_po',
|
||||
help="Pre-process PO files for RTL languages.",
|
||||
)
|
||||
sub_parser.add_argument('--src', metavar='src.po', required=True, help="The source po file to process.")
|
||||
sub_parser.add_argument('--dst', metavar='dst.po', help="The destination po to write to.")
|
||||
sub_parser.set_defaults(func=rtl_process_po)
|
||||
|
||||
sub_parser = sub_parsers.add_parser(
|
||||
'language_menu',
|
||||
help="Generate the text file used by Blender to create its language menu.",
|
||||
)
|
||||
sub_parser.set_defaults(func=language_menu)
|
||||
|
||||
args = parser.parse_args(sys.argv[1:])
|
||||
|
||||
settings = settings_i18n.I18nSettings()
|
||||
settings.load(args.settings)
|
||||
|
||||
if getattr(args, 'template', None) is not None:
|
||||
settings.FILE_NAME_POT = args.template
|
||||
|
||||
args.func(args=args, settings=settings)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
print("\n\n *** Running {} *** \n".format(__file__))
|
||||
main()
|
||||
81
scripts/modules/bl_i18n_utils/utils_languages_menu.py
Executable file
81
scripts/modules/bl_i18n_utils/utils_languages_menu.py
Executable file
@@ -0,0 +1,81 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
# Update "languages" text file used by Blender at runtime to build translations menu.
|
||||
|
||||
|
||||
import os
|
||||
|
||||
|
||||
OK = 0
|
||||
MISSING = 1
|
||||
TOOLOW = 2
|
||||
SKIPPED = 3
|
||||
FLAG_MESSAGES = {
|
||||
OK: "",
|
||||
MISSING: "No translation yet.",
|
||||
TOOLOW: "Not complete enough to be included.",
|
||||
SKIPPED: "Skipped (see IMPORT_LANGUAGES_SKIP in settings.py).",
|
||||
}
|
||||
|
||||
|
||||
def gen_menu_file(stats, settings):
|
||||
# Generate languages file used by Blender's i18n system.
|
||||
# First, match all entries in LANGUAGES to a lang in stats, if possible!
|
||||
tmp = []
|
||||
for uid_num, label, uid in settings.LANGUAGES:
|
||||
if uid in stats:
|
||||
if uid in settings.IMPORT_LANGUAGES_SKIP:
|
||||
tmp.append((stats[uid], uid_num, label, uid, SKIPPED))
|
||||
else:
|
||||
tmp.append((stats[uid], uid_num, label, uid, OK))
|
||||
else:
|
||||
tmp.append((0.0, uid_num, label, uid, MISSING))
|
||||
stats = tmp
|
||||
limits = sorted(settings.LANGUAGES_CATEGORIES, key=lambda it: it[0], reverse=True)
|
||||
idx = 0
|
||||
stats = sorted(stats, key=lambda it: it[0], reverse=True)
|
||||
langs_cats = [[] for i in range(len(limits))]
|
||||
highest_uid = 0
|
||||
for lvl, uid_num, label, uid, flag in stats:
|
||||
if lvl < limits[idx][0]:
|
||||
# Sub-sort languages by iso-codes.
|
||||
langs_cats[idx].sort(key=lambda it: it[2])
|
||||
idx += 1
|
||||
if lvl < settings.IMPORT_MIN_LEVEL and flag == OK:
|
||||
flag = TOOLOW
|
||||
langs_cats[idx].append((uid_num, label, uid, flag))
|
||||
if abs(uid_num) > highest_uid:
|
||||
highest_uid = abs(uid_num)
|
||||
# Sub-sort last group of languages by iso-codes!
|
||||
langs_cats[idx].sort(key=lambda it: it[2])
|
||||
data_lines = [
|
||||
"# File used by Blender to know which languages (translations) are available, ",
|
||||
"# and to generate translation menu.",
|
||||
"#",
|
||||
"# File format:",
|
||||
"# ID:MENULABEL:ISOCODE",
|
||||
"# ID must be unique, except for 0 value (marks categories for menu).",
|
||||
"# Line starting with a # are comments!",
|
||||
"#",
|
||||
"# Automatically generated by bl_i18n_utils/update_languages_menu.py script.",
|
||||
"# Highest ID currently in use: {}".format(highest_uid),
|
||||
]
|
||||
for cat, langs_cat in zip(limits, langs_cats):
|
||||
data_lines.append("#")
|
||||
# Write "category menu label"...
|
||||
if langs_cat:
|
||||
data_lines.append("0:{}:".format(cat[1]))
|
||||
else:
|
||||
# Do not write the category if it has no language!
|
||||
data_lines.append("# Void category! #0:{}:".format(cat[1]))
|
||||
# ...and all matching language entries!
|
||||
for uid_num, label, uid, flag in langs_cat:
|
||||
if flag == OK:
|
||||
data_lines.append("{}:{}:{}".format(uid_num, label, uid))
|
||||
else:
|
||||
# Non-existing, commented entry!
|
||||
data_lines.append("# {} #{}:{}:{}".format(FLAG_MESSAGES[flag], uid_num, label, uid))
|
||||
with open(os.path.join(settings.TRUNK_MO_DIR, settings.LANGUAGES_FILE), 'w', encoding="utf8") as f:
|
||||
f.write("\n".join(data_lines))
|
||||
with open(os.path.join(settings.GIT_I18N_ROOT, settings.LANGUAGES_FILE), 'w', encoding="utf8") as f:
|
||||
f.write("\n".join(data_lines))
|
||||
175
scripts/modules/bl_i18n_utils/utils_rtl.py
Executable file
175
scripts/modules/bl_i18n_utils/utils_rtl.py
Executable file
@@ -0,0 +1,175 @@
|
||||
#!/usr/bin/env python3
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
# Pre-process right-to-left languages.
|
||||
# You can use it either standalone, or through import_po_from_branches or
|
||||
# update_trunk.
|
||||
#
|
||||
# Notes: This has been tested on Linux, not 100% it will work nicely on
|
||||
# Windows or OsX.
|
||||
# This uses ctypes, as there is no py3 binding for fribidi currently.
|
||||
# This implies you only need the compiled C library to run it.
|
||||
# Finally, note that it handles some formatting/escape codes (like
|
||||
# \", %s, %x12, %.4f, etc.), protecting them from ugly (evil) fribidi,
|
||||
# which seems completely unaware of such things (as unicode is...).
|
||||
|
||||
import ctypes
|
||||
import re
|
||||
|
||||
|
||||
# define FRIBIDI_MASK_NEUTRAL 0x00000040L /* Is neutral */
|
||||
FRIBIDI_PAR_ON = 0x00000040
|
||||
|
||||
|
||||
# define FRIBIDI_FLAG_SHAPE_MIRRORING 0x00000001
|
||||
# define FRIBIDI_FLAG_REORDER_NSM 0x00000002
|
||||
|
||||
# define FRIBIDI_FLAG_SHAPE_ARAB_PRES 0x00000100
|
||||
# define FRIBIDI_FLAG_SHAPE_ARAB_LIGA 0x00000200
|
||||
# define FRIBIDI_FLAG_SHAPE_ARAB_CONSOLE 0x00000400
|
||||
|
||||
# define FRIBIDI_FLAG_REMOVE_BIDI 0x00010000
|
||||
# define FRIBIDI_FLAG_REMOVE_JOINING 0x00020000
|
||||
# define FRIBIDI_FLAG_REMOVE_SPECIALS 0x00040000
|
||||
|
||||
# define FRIBIDI_FLAGS_DEFAULT ( \
|
||||
# FRIBIDI_FLAG_SHAPE_MIRRORING | \
|
||||
# FRIBIDI_FLAG_REORDER_NSM | \
|
||||
# FRIBIDI_FLAG_REMOVE_SPECIALS )
|
||||
|
||||
# define FRIBIDI_FLAGS_ARABIC ( \
|
||||
# FRIBIDI_FLAG_SHAPE_ARAB_PRES | \
|
||||
# FRIBIDI_FLAG_SHAPE_ARAB_LIGA )
|
||||
|
||||
FRIBIDI_FLAG_SHAPE_MIRRORING = 0x00000001
|
||||
FRIBIDI_FLAG_REORDER_NSM = 0x00000002
|
||||
FRIBIDI_FLAG_REMOVE_SPECIALS = 0x00040000
|
||||
|
||||
FRIBIDI_FLAG_SHAPE_ARAB_PRES = 0x00000100
|
||||
FRIBIDI_FLAG_SHAPE_ARAB_LIGA = 0x00000200
|
||||
|
||||
FRIBIDI_FLAGS_DEFAULT = FRIBIDI_FLAG_SHAPE_MIRRORING | FRIBIDI_FLAG_REORDER_NSM | FRIBIDI_FLAG_REMOVE_SPECIALS
|
||||
|
||||
FRIBIDI_FLAGS_ARABIC = FRIBIDI_FLAG_SHAPE_ARAB_PRES | FRIBIDI_FLAG_SHAPE_ARAB_LIGA
|
||||
|
||||
|
||||
MENU_DETECT_REGEX = re.compile("%x\\d+\\|")
|
||||
|
||||
|
||||
##### Kernel processing funcs. #####
|
||||
def protect_format_seq(msg):
|
||||
"""
|
||||
Find some specific escaping/formatting sequences (like \", %s, etc.,
|
||||
and protect them from any modification!
|
||||
"""
|
||||
# LRM = "\u200E"
|
||||
# RLM = "\u200F"
|
||||
LRE = "\u202A"
|
||||
# RLE = "\u202B"
|
||||
PDF = "\u202C"
|
||||
LRO = "\u202D"
|
||||
# RLO = "\u202E"
|
||||
# uctrl = {LRE, RLE, PDF, LRO, RLO}
|
||||
# Most likely incomplete, but seems to cover current needs.
|
||||
format_codes = set("tslfd")
|
||||
digits = set(".0123456789")
|
||||
|
||||
if not msg:
|
||||
return msg
|
||||
elif MENU_DETECT_REGEX.search(msg):
|
||||
# An ugly "menu" message, just force it whole LRE if not yet done.
|
||||
if msg[0] not in {LRE, LRO}:
|
||||
msg = LRE + msg
|
||||
|
||||
idx = 0
|
||||
ret = []
|
||||
ln = len(msg)
|
||||
while idx < ln:
|
||||
dlt = 1
|
||||
# # If we find a control char, skip any additional protection!
|
||||
# if msg[idx] in uctrl:
|
||||
# ret.append(msg[idx:])
|
||||
# break
|
||||
# \" or \'
|
||||
if idx < (ln - 1) and msg[idx] == '\\' and msg[idx + 1] in "\"\'":
|
||||
dlt = 2
|
||||
# %x12|
|
||||
elif idx < (ln - 2) and msg[idx] == '%' and msg[idx + 1] in "x" and msg[idx + 2] in digits:
|
||||
dlt = 2
|
||||
while (idx + dlt) < ln and msg[idx + dlt] in digits:
|
||||
dlt += 1
|
||||
if (idx + dlt) < ln and msg[idx + dlt] == '|':
|
||||
dlt += 1
|
||||
# %.4f
|
||||
elif idx < (ln - 3) and msg[idx] == '%' and msg[idx + 1] in digits:
|
||||
dlt = 2
|
||||
while (idx + dlt) < ln and msg[idx + dlt] in digits:
|
||||
dlt += 1
|
||||
if (idx + dlt) < ln and msg[idx + dlt] in format_codes:
|
||||
dlt += 1
|
||||
else:
|
||||
dlt = 1
|
||||
# %s
|
||||
elif idx < (ln - 1) and msg[idx] == '%' and msg[idx + 1] in format_codes:
|
||||
dlt = 2
|
||||
|
||||
if dlt > 1:
|
||||
ret.append(LRE)
|
||||
ret += msg[idx:idx + dlt]
|
||||
idx += dlt
|
||||
if dlt > 1:
|
||||
ret.append(PDF)
|
||||
|
||||
return "".join(ret)
|
||||
|
||||
|
||||
def log2vis(msgs, settings):
|
||||
"""
|
||||
Globally mimics deprecated fribidi_log2vis.
|
||||
msgs should be an iterable of messages to rtl-process.
|
||||
"""
|
||||
fbd = ctypes.CDLL(settings.FRIBIDI_LIB)
|
||||
|
||||
for msg in msgs:
|
||||
msg = protect_format_seq(msg)
|
||||
|
||||
fbc_str = ctypes.create_unicode_buffer(msg)
|
||||
ln = len(fbc_str) - 1
|
||||
# print(fbc_str.value, ln)
|
||||
btypes = (ctypes.c_int * ln)()
|
||||
embed_lvl = (ctypes.c_uint8 * ln)()
|
||||
pbase_dir = ctypes.c_int(FRIBIDI_PAR_ON)
|
||||
jtypes = (ctypes.c_uint8 * ln)()
|
||||
flags = FRIBIDI_FLAGS_DEFAULT | FRIBIDI_FLAGS_ARABIC
|
||||
|
||||
# Find out direction of each char.
|
||||
fbd.fribidi_get_bidi_types(fbc_str, ln, ctypes.byref(btypes))
|
||||
|
||||
# print(*btypes)
|
||||
|
||||
fbd.fribidi_get_par_embedding_levels(btypes, ln,
|
||||
ctypes.byref(pbase_dir),
|
||||
embed_lvl)
|
||||
|
||||
# print(*embed_lvl)
|
||||
|
||||
# Joinings for arabic chars.
|
||||
fbd.fribidi_get_joining_types(fbc_str, ln, jtypes)
|
||||
# print(*jtypes)
|
||||
fbd.fribidi_join_arabic(btypes, ln, embed_lvl, jtypes)
|
||||
# print(*jtypes)
|
||||
|
||||
# Final Shaping!
|
||||
fbd.fribidi_shape(flags, embed_lvl, ln, jtypes, fbc_str)
|
||||
|
||||
# print(fbc_str.value)
|
||||
# print(*(ord(c) for c in fbc_str))
|
||||
# And now, the reordering.
|
||||
# Note that here, we expect a single line, so no need to do
|
||||
# fancy things...
|
||||
fbd.fribidi_reorder_line(flags, btypes, ln, 0, pbase_dir, embed_lvl,
|
||||
fbc_str, None)
|
||||
# print(fbc_str.value)
|
||||
# print(*(ord(c) for c in fbc_str))
|
||||
|
||||
yield fbc_str.value
|
||||
885
scripts/modules/bl_i18n_utils/utils_spell_check.py
Normal file
885
scripts/modules/bl_i18n_utils/utils_spell_check.py
Normal file
@@ -0,0 +1,885 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
import enchant
|
||||
import os
|
||||
import pickle
|
||||
import re
|
||||
|
||||
|
||||
class SpellChecker:
|
||||
"""
|
||||
A basic spell checker.
|
||||
"""
|
||||
|
||||
# These must be all lower case for comparisons
|
||||
uimsgs = {
|
||||
# OK words
|
||||
"adaptively", "adaptivity",
|
||||
"aren", # aren't
|
||||
"betweens", # yuck! in-betweens!
|
||||
"boolean", "booleans",
|
||||
"chamfer",
|
||||
"couldn", # couldn't
|
||||
"customizable",
|
||||
"decrement",
|
||||
"derivate",
|
||||
"deterministically",
|
||||
"doesn", # doesn't
|
||||
"duplications",
|
||||
"effector",
|
||||
"equi", # equi-angular, etc.
|
||||
"fader",
|
||||
"globbing",
|
||||
"gridded",
|
||||
"haptics",
|
||||
"hasn", # hasn't
|
||||
"hetero",
|
||||
"hoc", # ad-hoc
|
||||
"incompressible",
|
||||
"indices",
|
||||
"instantiation",
|
||||
"iridas",
|
||||
"isn", # isn't
|
||||
"iterable",
|
||||
"kyrgyz",
|
||||
"latin",
|
||||
"merchantability",
|
||||
"mplayer",
|
||||
"ons", # add-ons
|
||||
"pong", # ping pong
|
||||
"resumable",
|
||||
"runtimes",
|
||||
"scalable",
|
||||
"shadeless",
|
||||
"shouldn", # shouldn't
|
||||
"smoothen",
|
||||
"spacings",
|
||||
"teleport", "teleporting",
|
||||
"tangency",
|
||||
"vertices",
|
||||
"wasn", # wasn't
|
||||
"zig", "zag",
|
||||
|
||||
# Brands etc.
|
||||
"htc",
|
||||
"huawei",
|
||||
"radeon",
|
||||
"vive",
|
||||
"xbox",
|
||||
|
||||
# Merged words
|
||||
"antialiasing", "antialias",
|
||||
"arcsine", "arccosine", "arctangent",
|
||||
"autoclip",
|
||||
"autocomplete",
|
||||
"autoexec",
|
||||
"autoexecution",
|
||||
"autogenerated",
|
||||
"autolock",
|
||||
"automask", "automasking",
|
||||
"automerge",
|
||||
"autoname",
|
||||
"autopack",
|
||||
"autosave",
|
||||
"autoscale",
|
||||
"autosmooth",
|
||||
"autosplit",
|
||||
"backface", "backfacing",
|
||||
"backimage",
|
||||
"backscattered",
|
||||
"bandnoise",
|
||||
"bindcode",
|
||||
"bitdepth",
|
||||
"bitflag", "bitflags",
|
||||
"bitrate",
|
||||
"blackbody",
|
||||
"blendfile",
|
||||
"blendin",
|
||||
"bonesize",
|
||||
"boundbox",
|
||||
"boxpack",
|
||||
"buffersize",
|
||||
"builtin", "builtins",
|
||||
"bytecode",
|
||||
"chunksize",
|
||||
"codebase",
|
||||
"customdata",
|
||||
"dataset", "datasets",
|
||||
"de",
|
||||
"deadzone",
|
||||
"deconstruct",
|
||||
"defocus",
|
||||
"denoise", "denoised", "denoising", "denoiser",
|
||||
"deselect", "deselecting", "deselection",
|
||||
"despill", "despilling",
|
||||
"dirtree",
|
||||
"editcurve",
|
||||
"editmesh",
|
||||
"faceforward",
|
||||
"filebrowser",
|
||||
"filelist",
|
||||
"filename", "filenames",
|
||||
"filepath", "filepaths",
|
||||
"forcefield", "forcefields",
|
||||
"framerange",
|
||||
"frontmost",
|
||||
"fulldome", "fulldomes",
|
||||
"fullscreen",
|
||||
"gamepad",
|
||||
"gridline", "gridlines",
|
||||
"hardlight",
|
||||
"hemi",
|
||||
"hostname",
|
||||
"inbetween",
|
||||
"inscatter", "inscattering",
|
||||
"libdata",
|
||||
"lightcache",
|
||||
"lightgroup", "lightgroups",
|
||||
"lightprobe", "lightprobes",
|
||||
"lightless",
|
||||
"lineset",
|
||||
"linestyle", "linestyles",
|
||||
"localview",
|
||||
"lookup", "lookups",
|
||||
"mathutils",
|
||||
"micropolygon",
|
||||
"midlevel",
|
||||
"midground",
|
||||
"mixdown",
|
||||
"monospaced",
|
||||
"multi",
|
||||
"multifractal",
|
||||
"multiframe",
|
||||
"multilayer",
|
||||
"multipaint",
|
||||
"multires", "multiresolution",
|
||||
"multisampling",
|
||||
"multiscatter",
|
||||
"multitexture",
|
||||
"multithreaded",
|
||||
"multiuser",
|
||||
"multiview",
|
||||
"namespace",
|
||||
"nodetree", "nodetrees",
|
||||
"keyconfig",
|
||||
"offscreen",
|
||||
"online",
|
||||
"playhead",
|
||||
"popup", "popups",
|
||||
"pointcloud",
|
||||
"pre",
|
||||
"precache", "precaching",
|
||||
"precalculate",
|
||||
"precomputing",
|
||||
"prefetch",
|
||||
"prefilter", "prefiltering",
|
||||
"preload",
|
||||
"premultiply", "premultiplied",
|
||||
"prepass",
|
||||
"prepend",
|
||||
"preprocess", "preprocessing", "preprocessor", "preprocessed",
|
||||
"preseek",
|
||||
"preselect", "preselected",
|
||||
"promillage",
|
||||
"pushdown",
|
||||
"raytree",
|
||||
"readonly",
|
||||
"realtime",
|
||||
"reinject", "reinjected",
|
||||
"rekey",
|
||||
"relink",
|
||||
"remesh",
|
||||
"reprojection", "reproject", "reprojecting",
|
||||
"resample",
|
||||
"resize",
|
||||
"restpose",
|
||||
"resync", "resynced",
|
||||
"retarget", "retargets", "retargeting", "retargeted",
|
||||
"retiming",
|
||||
"rigidbody",
|
||||
"ringnoise",
|
||||
"rolloff",
|
||||
"runtime",
|
||||
"scanline",
|
||||
"screenshot", "screenshots",
|
||||
"seekability",
|
||||
"selfcollision",
|
||||
"shadowbuffer", "shadowbuffers",
|
||||
"singletexture",
|
||||
"softbox",
|
||||
"spellcheck", "spellchecking",
|
||||
"startup",
|
||||
"stateful",
|
||||
"starfield",
|
||||
"studiolight",
|
||||
"subflare", "subflares",
|
||||
"subframe", "subframes",
|
||||
"subclass", "subclasses", "subclassing",
|
||||
"subdirectory", "subdirectories", "subdir", "subdirs",
|
||||
"subitem",
|
||||
"submode",
|
||||
"submodule", "submodules",
|
||||
"subpath",
|
||||
"subsize",
|
||||
"substep", "substeps",
|
||||
"substring",
|
||||
"targetless",
|
||||
"textbox", "textboxes",
|
||||
"tilemode",
|
||||
"timestamp", "timestamps",
|
||||
"timestep", "timesteps",
|
||||
"todo",
|
||||
"tradeoff",
|
||||
"un",
|
||||
"unadjust", "unadjusted",
|
||||
"unassociate", "unassociated",
|
||||
"unbake",
|
||||
"uncheck",
|
||||
"unclosed",
|
||||
"uncomment",
|
||||
"unculled",
|
||||
"undeformed",
|
||||
"undistort", "undistorted", "undistortion",
|
||||
"ungroup", "ungrouped",
|
||||
"unhide",
|
||||
"unindent",
|
||||
"unitless",
|
||||
"unkeyed",
|
||||
"unlink", "unlinked",
|
||||
"unmute",
|
||||
"unphysical",
|
||||
"unpremultiply",
|
||||
"unprojected",
|
||||
"unprotect",
|
||||
"unreacted",
|
||||
"unreferenced",
|
||||
"unregister",
|
||||
"unselect", "unselected", "unselectable",
|
||||
"unsets",
|
||||
"unshadowed",
|
||||
"unspill",
|
||||
"unstitchable", "unstitch",
|
||||
"unsubdivided", "unsubdivide",
|
||||
"untrusted",
|
||||
"vectorscope",
|
||||
"whitespace", "whitespaces",
|
||||
"worldspace",
|
||||
"workflow",
|
||||
"workspace", "workspaces",
|
||||
|
||||
# Neologisms, slangs
|
||||
"affectable",
|
||||
"animatable",
|
||||
"automagic", "automagically",
|
||||
"blobby",
|
||||
"blockiness", "blocky",
|
||||
"collider", "colliders",
|
||||
"deformer", "deformers",
|
||||
"determinator",
|
||||
"editability",
|
||||
"effectors",
|
||||
"expander",
|
||||
"instancer",
|
||||
"keyer",
|
||||
"lacunarity",
|
||||
"linkable",
|
||||
"numerics",
|
||||
"occluder", "occluders",
|
||||
"overridable",
|
||||
"passepartout",
|
||||
"perspectively",
|
||||
"pixelate",
|
||||
"pointiness",
|
||||
"polycount",
|
||||
"polygonization", "polygonalization", # yuck!
|
||||
"scalings",
|
||||
"selectable", "selectability",
|
||||
"shaper",
|
||||
"smoothen", "smoothening",
|
||||
"spherize", "spherized",
|
||||
"stitchable",
|
||||
"symmetrize",
|
||||
"trackability",
|
||||
"transmissivity",
|
||||
"rasterized", "rasterization", "rasterizer",
|
||||
"renderer", "renderers", "renderable", "renderability",
|
||||
|
||||
# Really bad!!!
|
||||
"convertor",
|
||||
"fullscr",
|
||||
|
||||
# Abbreviations
|
||||
"aero",
|
||||
"amb",
|
||||
"anim",
|
||||
"aov",
|
||||
"app",
|
||||
"bbox", "bboxes",
|
||||
"bksp", # Backspace
|
||||
"bool",
|
||||
"calc",
|
||||
"cfl",
|
||||
"config", "configs",
|
||||
"const",
|
||||
"coord", "coords",
|
||||
"degr",
|
||||
"diff",
|
||||
"dof",
|
||||
"dupli", "duplis",
|
||||
"eg",
|
||||
"esc",
|
||||
"expr",
|
||||
"fac",
|
||||
"fra",
|
||||
"fract",
|
||||
"frs",
|
||||
"grless",
|
||||
"http",
|
||||
"init",
|
||||
"irr", # Irradiance
|
||||
"kbit", "kb",
|
||||
"lang", "langs",
|
||||
"lclick", "rclick",
|
||||
"lensdist",
|
||||
"loc", "rot", "pos",
|
||||
"lorem",
|
||||
"luma",
|
||||
"mbs", # mouse button 'select'.
|
||||
"mem",
|
||||
"multicam",
|
||||
"num",
|
||||
"ok",
|
||||
"orco",
|
||||
"ortho",
|
||||
"pano",
|
||||
"persp",
|
||||
"pref", "prefs",
|
||||
"prev",
|
||||
"param",
|
||||
"premul",
|
||||
"quad", "quads",
|
||||
"quat", "quats",
|
||||
"recalc", "recalcs",
|
||||
"refl",
|
||||
"sce",
|
||||
"sel",
|
||||
"spec",
|
||||
"struct", "structs",
|
||||
"subdiv",
|
||||
"sys",
|
||||
"tex",
|
||||
"texcoord",
|
||||
"tmr", # timer
|
||||
"tri", "tris",
|
||||
"udim", "udims",
|
||||
"upres", # Upresolution
|
||||
"usd",
|
||||
"uv", "uvs", "uvw", "uw", "uvmap",
|
||||
"ve",
|
||||
"vec",
|
||||
"vel", # velocity!
|
||||
"vert", "verts",
|
||||
"vis",
|
||||
"vram",
|
||||
"xor",
|
||||
"xyz", "xzy", "yxz", "yzx", "zxy", "zyx",
|
||||
"xy", "xz", "yx", "yz", "zx", "zy",
|
||||
|
||||
# General computer/science terms
|
||||
"affine",
|
||||
"albedo",
|
||||
"anamorphic",
|
||||
"anisotropic", "anisotropy",
|
||||
"arcminute", "arcminutes",
|
||||
"arcsecond", "arcseconds",
|
||||
"bimanual", # OpenXR?
|
||||
"bitangent",
|
||||
"boid", "boids",
|
||||
"ceil",
|
||||
"centum", # From 'centum weight'
|
||||
"compressibility",
|
||||
"coplanar",
|
||||
"curvilinear",
|
||||
"dekameter", "dekameters",
|
||||
"equiangular",
|
||||
"equisolid",
|
||||
"euler", "eulers",
|
||||
"fribidi",
|
||||
"gettext",
|
||||
"hashable",
|
||||
"hotspot",
|
||||
"hydrostatic",
|
||||
"interocular",
|
||||
"intrinsics",
|
||||
"irradiance",
|
||||
"isosurface",
|
||||
"jitter", "jittering", "jittered",
|
||||
"keymap", "keymaps",
|
||||
"lambertian",
|
||||
"laplacian",
|
||||
"metadata",
|
||||
"microwatt", "microwatts",
|
||||
"milliwatt", "milliwatts",
|
||||
"msgfmt",
|
||||
"nand", "xnor",
|
||||
"nanowatt", "nanowatts",
|
||||
"normals",
|
||||
"numpad",
|
||||
"octahedral",
|
||||
"octree",
|
||||
"omnidirectional",
|
||||
"opengl",
|
||||
"openmp",
|
||||
"parametrization",
|
||||
"photoreceptor",
|
||||
"poly",
|
||||
"polyline", "polylines",
|
||||
"probabilistically",
|
||||
"pulldown", "pulldowns",
|
||||
"quadratically",
|
||||
"quantized",
|
||||
"quartic",
|
||||
"quaternion", "quaternions",
|
||||
"quintic",
|
||||
"samplerate",
|
||||
"sawtooth",
|
||||
"scrollback",
|
||||
"scrollbar",
|
||||
"scroller",
|
||||
"searchable",
|
||||
"spacebar",
|
||||
"subtractive",
|
||||
"superellipse",
|
||||
"thumbstick",
|
||||
"tooltip", "tooltips",
|
||||
"touchpad", "trackpad",
|
||||
"tuple",
|
||||
"unicode",
|
||||
"viewport", "viewports",
|
||||
"viscoelastic",
|
||||
"vorticity",
|
||||
"waveform", "waveforms",
|
||||
"wildcard", "wildcards",
|
||||
"wintab", # Some Windows tablet API
|
||||
|
||||
# General computer graphics terms
|
||||
"anaglyph",
|
||||
"bezier", "beziers",
|
||||
"bicubic",
|
||||
"bilinear",
|
||||
"bindpose",
|
||||
"binormal",
|
||||
"blackpoint", "whitepoint",
|
||||
"blinn",
|
||||
"bokeh",
|
||||
"catadioptric",
|
||||
"centroid",
|
||||
"chroma",
|
||||
"chrominance",
|
||||
"clearcoat",
|
||||
"codec", "codecs",
|
||||
"collada",
|
||||
"compositing",
|
||||
"crossfade",
|
||||
"cubemap", "cubemaps",
|
||||
"cuda",
|
||||
"deinterlace",
|
||||
"dropoff",
|
||||
"duotone",
|
||||
"dv",
|
||||
"eigenvectors",
|
||||
"emissive",
|
||||
"equirectangular",
|
||||
"filmlike",
|
||||
"fisheye",
|
||||
"framerate",
|
||||
"gimbal",
|
||||
"grayscale",
|
||||
"icosahedron",
|
||||
"icosphere",
|
||||
"inpaint",
|
||||
"kerning",
|
||||
"lightmap",
|
||||
"linearlight",
|
||||
"lossless", "lossy",
|
||||
"luminance",
|
||||
"mantaflow",
|
||||
"matcap",
|
||||
"microfacet",
|
||||
"midtones",
|
||||
"mipmap", "mipmaps", "mip",
|
||||
"ngon", "ngons",
|
||||
"ntsc",
|
||||
"nurb", "nurbs",
|
||||
"perlin",
|
||||
"phong",
|
||||
"photorealistic",
|
||||
"pinlight",
|
||||
"posterize",
|
||||
"qi",
|
||||
"radiosity",
|
||||
"raycast", "raycasting",
|
||||
"raytrace", "raytracing", "raytraced",
|
||||
"refractions",
|
||||
"remesher", "remeshing", "remesh",
|
||||
"renderfarm",
|
||||
"scanfill",
|
||||
"shader", "shaders",
|
||||
"shadowmap", "shadowmaps",
|
||||
"softlight",
|
||||
"specular", "specularity",
|
||||
"spillmap",
|
||||
"sobel",
|
||||
"stereoscopy",
|
||||
"texel",
|
||||
"timecode",
|
||||
"tonemap",
|
||||
"toon",
|
||||
"transmissive",
|
||||
"uvproject",
|
||||
"vividlight",
|
||||
"volumetrics",
|
||||
"voronoi",
|
||||
"voxel", "voxels",
|
||||
"vsync",
|
||||
"vulkan",
|
||||
"wireframe",
|
||||
"zmask",
|
||||
"ztransp",
|
||||
|
||||
# Blender terms
|
||||
"audaspace",
|
||||
"azone", # action zone
|
||||
"backwire",
|
||||
"bbone",
|
||||
"bendy", # bones
|
||||
"bmesh",
|
||||
"breakdowner",
|
||||
"bspline",
|
||||
"bweight",
|
||||
"colorband",
|
||||
"crazyspace",
|
||||
"datablock", "datablocks",
|
||||
"despeckle",
|
||||
"depsgraph",
|
||||
"dopesheet",
|
||||
"dupliface", "duplifaces",
|
||||
"dupliframe", "dupliframes",
|
||||
"dupliobject", "dupliob",
|
||||
"dupligroup",
|
||||
"duplivert",
|
||||
"dyntopo",
|
||||
"editbone",
|
||||
"editmode",
|
||||
"eevee",
|
||||
"fcurve", "fcurves",
|
||||
"fedge", "fedges",
|
||||
"filmic",
|
||||
"fluidsim",
|
||||
"freestyle",
|
||||
"enum", "enums",
|
||||
"gizmogroup",
|
||||
"gon", "gons", # N-Gon(s)
|
||||
"gpencil",
|
||||
"idcol",
|
||||
"keyframe", "keyframes", "keyframing", "keyframed",
|
||||
"lookdev",
|
||||
"luminocity",
|
||||
"mathvis",
|
||||
"metaball", "metaballs", "mball",
|
||||
"metaelement", "metaelements",
|
||||
"metastrip", "metastrips",
|
||||
"movieclip",
|
||||
"mpoly",
|
||||
"mtex",
|
||||
"nabla",
|
||||
"navmesh",
|
||||
"outliner",
|
||||
"overscan",
|
||||
"paintmap", "paintmaps",
|
||||
"polygroup", "polygroups",
|
||||
"poselib",
|
||||
"pushpull",
|
||||
"pyconstraint", "pyconstraints",
|
||||
"qe", # keys...
|
||||
"shaderfx", "shaderfxs",
|
||||
"shapekey", "shapekeys",
|
||||
"shrinkfatten",
|
||||
"shrinkwrap",
|
||||
"softbody",
|
||||
"stucci",
|
||||
"subdiv",
|
||||
"subtype",
|
||||
"sunsky",
|
||||
"tessface", "tessfaces",
|
||||
"texface",
|
||||
"timeline", "timelines",
|
||||
"tosphere",
|
||||
"uilist",
|
||||
"userpref",
|
||||
"vcol", "vcols",
|
||||
"vgroup", "vgroups",
|
||||
"vinterlace",
|
||||
"vse",
|
||||
"wasd", "wasdqe", # keys...
|
||||
"wetmap", "wetmaps",
|
||||
"wpaint",
|
||||
"uvwarp",
|
||||
|
||||
# UOC (Ugly Operator Categories)
|
||||
"cachefile",
|
||||
"paintcurve",
|
||||
"ptcache",
|
||||
"dpaint",
|
||||
|
||||
# Algorithm/library names
|
||||
"ashikhmin", # Ashikhmin-Shirley
|
||||
"arsloe", # Texel-Marsen-Arsloe
|
||||
"beckmann",
|
||||
"blackman", # Blackman-Harris
|
||||
"blosc",
|
||||
"burley", # Christensen-Burley
|
||||
"catmull",
|
||||
"catrom",
|
||||
"chebychev",
|
||||
"conrady", # Brown-Conrady
|
||||
"courant",
|
||||
"cryptomatte", "crypto",
|
||||
"embree",
|
||||
"gmp",
|
||||
"hosek",
|
||||
"kutta",
|
||||
"lennard",
|
||||
"marsen", # Texel-Marsen-Arsloe
|
||||
"mikktspace",
|
||||
"minkowski",
|
||||
"minnaert",
|
||||
"mises", # von Mises-Fisher
|
||||
"moskowitz", # Pierson-Moskowitz
|
||||
"musgrave",
|
||||
"nayar",
|
||||
"netravali",
|
||||
"nishita",
|
||||
"ogawa",
|
||||
"oren",
|
||||
"peucker", # Ramer-Douglas-Peucker
|
||||
"pierson", # Pierson-Moskowitz
|
||||
"preetham",
|
||||
"prewitt",
|
||||
"ramer", # Ramer-Douglas-Peucker
|
||||
"runge",
|
||||
"sobol",
|
||||
"verlet",
|
||||
"von", # von Mises-Fisher
|
||||
"wilkie",
|
||||
"worley",
|
||||
|
||||
# Acronyms
|
||||
"aa", "msaa",
|
||||
"acescg", # ACEScg color space.
|
||||
"ao",
|
||||
"aov", "aovs",
|
||||
"api",
|
||||
"apic", # Affine Particle-In-Cell
|
||||
"asc", "cdl",
|
||||
"ascii",
|
||||
"atrac",
|
||||
"avx",
|
||||
"bsdf", "bsdfs",
|
||||
"bssrdf",
|
||||
"bw",
|
||||
"ccd",
|
||||
"cmd",
|
||||
"cmos",
|
||||
"cpus",
|
||||
"ctrl",
|
||||
"cw", "ccw",
|
||||
"dev",
|
||||
"dls",
|
||||
"djv",
|
||||
"dpi",
|
||||
"dvar",
|
||||
"dx",
|
||||
"eo",
|
||||
"ewa",
|
||||
"fh",
|
||||
"fk",
|
||||
"fov",
|
||||
"fft",
|
||||
"futura",
|
||||
"fx",
|
||||
"gfx",
|
||||
"ggx",
|
||||
"gl",
|
||||
"glsl",
|
||||
"gpl",
|
||||
"gpu", "gpus",
|
||||
"hc",
|
||||
"hdc",
|
||||
"hdr", "hdri", "hdris",
|
||||
"hh", "mm", "ss", "ff", # hh:mm:ss:ff timecode
|
||||
"hpg", # Intel Xe-HPG architecture
|
||||
"hsv", "hsva", "hsl",
|
||||
"id",
|
||||
"ies",
|
||||
"ior",
|
||||
"itu",
|
||||
"jonswap",
|
||||
"lfe",
|
||||
"lhs",
|
||||
"lmb", "mmb", "rmb",
|
||||
"lscm",
|
||||
"lx", # Lux light unit
|
||||
"kb",
|
||||
"mis",
|
||||
"mocap",
|
||||
"msgid", "msgids",
|
||||
"mux",
|
||||
"ndof",
|
||||
"pbr", # Physically Based Rendering
|
||||
"ppc",
|
||||
"precisa",
|
||||
"px",
|
||||
"qmc",
|
||||
"rdna",
|
||||
"rdp",
|
||||
"rgb", "rgba",
|
||||
"rhs",
|
||||
"rv",
|
||||
"sdl",
|
||||
"sdls",
|
||||
"sl",
|
||||
"smpte",
|
||||
"ssao",
|
||||
"ssr",
|
||||
"svn",
|
||||
"tma",
|
||||
"ui",
|
||||
"unix",
|
||||
"uuid",
|
||||
"vbo", "vbos",
|
||||
"vfx",
|
||||
"vmm",
|
||||
"vr",
|
||||
"wxyz",
|
||||
"xr",
|
||||
"ycc", "ycca",
|
||||
"yrgb",
|
||||
"yuv", "yuva",
|
||||
|
||||
# Blender acronyms
|
||||
"bli",
|
||||
"bpy",
|
||||
"bvh",
|
||||
"dbvt",
|
||||
"dop", # BLI K-Dop BVH
|
||||
"ik",
|
||||
"nla",
|
||||
"py",
|
||||
"qbvh",
|
||||
"rna",
|
||||
"rvo",
|
||||
"simd",
|
||||
"sph",
|
||||
"svbvh",
|
||||
|
||||
# Files types/formats
|
||||
"aac",
|
||||
"avi",
|
||||
"attrac",
|
||||
"autocad",
|
||||
"autodesk",
|
||||
"bmp",
|
||||
"btx",
|
||||
"cineon",
|
||||
"dpx",
|
||||
"dwaa",
|
||||
"dwab",
|
||||
"dxf",
|
||||
"eps",
|
||||
"exr",
|
||||
"fbx",
|
||||
"fbxnode",
|
||||
"ffmpeg",
|
||||
"flac",
|
||||
"gltf",
|
||||
"gzip",
|
||||
"ico",
|
||||
"jpg", "jpeg", "jpegs",
|
||||
"json",
|
||||
"lzw",
|
||||
"matroska",
|
||||
"mdd",
|
||||
"mkv",
|
||||
"mpeg", "mjpeg",
|
||||
"mtl",
|
||||
"ogg",
|
||||
"openjpeg",
|
||||
"osl",
|
||||
"oso",
|
||||
"pcm",
|
||||
"piz",
|
||||
"png", "pngs",
|
||||
"po",
|
||||
"quicktime",
|
||||
"rle",
|
||||
"sgi",
|
||||
"stl",
|
||||
"svg",
|
||||
"targa", "tga",
|
||||
"tiff",
|
||||
"theora",
|
||||
"vorbis",
|
||||
"vp9",
|
||||
"wav",
|
||||
"webm",
|
||||
"xiph",
|
||||
"xml",
|
||||
"xna",
|
||||
"xvid",
|
||||
}
|
||||
|
||||
# Regex fragments used to tokenize UI messages into candidate words.
# A word may start after whitespace, quotes, a backtick, a '*', a letter
# followed by '/' or '-', or at the beginning of the string...
_valid_before = "(?<=[\\s*'\"`])|(?<=[a-zA-Z][/-])|(?<=^)"
# ...and may end before whitespace/quote/punctuation, before '/' or '-'
# followed by a letter, or at the end of the string.
_valid_after = "(?=[\\s'\"`.!?,;:])|(?=[/-]\\s*[a-zA-Z])|(?=$)"
# A word is an optional capitalized run followed by lowercase letters, or an
# all-caps run, or an all-lowercase run, bounded by the contexts above.
# NOTE: the pattern can yield empty matches; split_words() filters them out.
_valid_words = "(?:{})(?:(?:[A-Z]+[a-z]*)|[A-Z]*|[a-z]*)(?:{})".format(_valid_before, _valid_after)
# Pre-compiled matcher; bound method so callers get the match list directly.
_split_words = re.compile(_valid_words).findall
|
||||
|
||||
@classmethod
def split_words(cls, text):
    """Split *text* into candidate words using the word-boundary regex,
    discarding the empty matches the pattern can produce."""
    return list(filter(None, cls._split_words(text)))
|
||||
|
||||
def __init__(self, settings, lang="en_US"):
    """Create a spell checker for *lang*.

    The known-words cache is seeded from the class-level ``uimsgs`` word set,
    then extended with any previously pickled cache file configured by
    ``settings.SPELL_CACHE``.
    """
    self.settings = settings
    self.dict_spelling = enchant.Dict(lang)
    # Seed the cache with the built-in list of accepted UI words/terms.
    self.cache = set(self.uimsgs)

    cache_path = settings.SPELL_CACHE
    if not cache_path or not os.path.exists(cache_path):
        return
    # Merge in words validated during previous runs.
    with open(cache_path, 'rb') as f:
        self.cache |= set(pickle.load(f))
|
||||
|
||||
def __del__(self):
    """Persist the accumulated word cache on destruction.

    The cache is only written when the configured cache file already exists;
    this code never creates it.
    """
    cache_path = self.settings.SPELL_CACHE
    if not cache_path or not os.path.exists(cache_path):
        return
    with open(cache_path, 'wb') as f:
        pickle.dump(self.cache, f)
|
||||
|
||||
def check(self, txt):
    """Spell-check *txt*.

    Returns a list of ``(word, suggestions)`` pairs for every word the
    dictionary does not know; an empty list means *txt* is clean.  Known-good
    words (and fully clean messages) are remembered in ``self.cache`` so
    repeated checks are cheap.
    """
    # Whole message already validated earlier?
    if txt in self.cache:
        return []

    misspelled = []
    for word in self.split_words(txt):
        key = word.lower()
        if key in self.cache:
            continue
        if self.dict_spelling.check(word):
            # Valid word: remember its lowercase form for next time.
            self.cache.add(key)
        else:
            misspelled.append((word, self.dict_spelling.suggest(word)))

    # Only fully clean messages are cached as a whole.
    if not misspelled:
        self.cache.add(txt)

    return misspelled
|
||||
Reference in New Issue
Block a user