Re-design of submodules used in blender.git
This commit implements the changes described in #104573. The goal is to fix the confusion caused by submodule hash changes, which are not ideal in any of the supported git-module configurations (they are either always visible, causing confusion, or silently staged and committed, also causing confusion). This commit replaces the submodules with a checkout of addons and addons_contrib, covered by the .gitignore, while the locale and developer tools are moved into the main repository. It also changes the paths:

- /release/scripts is moved to /scripts
- /source/tools is moved to /tools
- /release/datafiles/locale is moved to /locale

This is done to avoid conflicts when using bisect, and also allows the buildbot to automatically "recover" when building older or newer branches/patches. Running `make update` will initialize the local checkout to the changed repository configuration. Another aspect of the change is that `make update` now supports the GitHub style of remote organization (an origin remote pointing to the fork, an upstream remote pointing to the upstream blender/blender.git). Pull Request #104755
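As an illustration of the remote-organization logic described above (this is a minimal sketch, not the actual make_update.py code; the helper names and the fallback to origin are assumptions):

import subprocess


def _remote_url(remote):
    # `git remote get-url` exits non-zero when the remote is not configured.
    try:
        return subprocess.check_output(
            ["git", "remote", "get-url", remote], text=True).strip()
    except subprocess.CalledProcessError:
        return None


def fetch_remote_name():
    # GitHub-style layout: 'upstream' points at blender/blender.git and
    # 'origin' at the contributor's fork, so prefer 'upstream' when it exists.
    if _remote_url("upstream") is not None:
        return "upstream"
    return "origin"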
scripts/modules/addon_utils.py (new file, 534 lines)
@@ -0,0 +1,534 @@
# SPDX-License-Identifier: GPL-2.0-or-later

__all__ = (
    "paths",
    "modules",
    "check",
    "enable",
    "disable",
    "disable_all",
    "reset_all",
    "module_bl_info",
)

import bpy as _bpy
_preferences = _bpy.context.preferences

error_encoding = False
# (name, file, path)
error_duplicates = []
addons_fake_modules = {}


# called only once at startup, avoids calling 'reset_all', correct but slower.
def _initialize():
    path_list = paths()
    for path in path_list:
        _bpy.utils._sys_path_ensure_append(path)
    for addon in _preferences.addons:
        enable(addon.module)


def paths():
    # RELEASE SCRIPTS: official scripts distributed in Blender releases
    addon_paths = _bpy.utils.script_paths(subdir="addons")

    # CONTRIB SCRIPTS: good for testing but not official scripts yet
    # if folder addons_contrib/ exists, scripts in there will be loaded too
    addon_paths += _bpy.utils.script_paths(subdir="addons_contrib")

    return addon_paths


def modules_refresh(*, module_cache=addons_fake_modules):
    global error_encoding
    import os

    error_encoding = False
    error_duplicates.clear()

    path_list = paths()

    # fake module importing
    def fake_module(mod_name, mod_path, speedy=True, force_support=None):
        global error_encoding

        if _bpy.app.debug_python:
            print("fake_module", mod_path, mod_name)
        import ast
        ModuleType = type(ast)
        try:
            file_mod = open(mod_path, "r", encoding='UTF-8')
        except OSError as ex:
            print("Error opening file:", mod_path, ex)
            return None

        with file_mod:
            if speedy:
                lines = []
                line_iter = iter(file_mod)
                l = ""
                while not l.startswith("bl_info"):
                    try:
                        l = line_iter.readline()
                    except UnicodeDecodeError as ex:
                        if not error_encoding:
                            error_encoding = True
                            print("Error reading file as UTF-8:", mod_path, ex)
                        return None

                    if len(l) == 0:
                        break
                while l.rstrip():
                    lines.append(l)
                    try:
                        l = line_iter.readline()
                    except UnicodeDecodeError as ex:
                        if not error_encoding:
                            error_encoding = True
                            print("Error reading file as UTF-8:", mod_path, ex)
                        return None

                data = "".join(lines)

            else:
                data = file_mod.read()
        del file_mod

        try:
            ast_data = ast.parse(data, filename=mod_path)
        except:
            print("Syntax error 'ast.parse' can't read:", repr(mod_path))
            import traceback
            traceback.print_exc()
            ast_data = None

        body_info = None

        if ast_data:
            for body in ast_data.body:
                if body.__class__ == ast.Assign:
                    if len(body.targets) == 1:
                        if getattr(body.targets[0], "id", "") == "bl_info":
                            body_info = body
                            break

        if body_info:
            try:
                mod = ModuleType(mod_name)
                mod.bl_info = ast.literal_eval(body.value)
                mod.__file__ = mod_path
                mod.__time__ = os.path.getmtime(mod_path)
            except:
                print("AST error parsing bl_info for:", repr(mod_path))
                import traceback
                traceback.print_exc()
                return None

            if force_support is not None:
                mod.bl_info["support"] = force_support

            return mod
        else:
            print(
                "fake_module: addon missing 'bl_info' "
                "gives bad performance!:",
                repr(mod_path),
            )
            return None

    modules_stale = set(module_cache.keys())

    for path in path_list:

        # force all contrib addons to be 'TESTING'
        if path.endswith(("addons_contrib", )):
            force_support = 'TESTING'
        else:
            force_support = None

        for mod_name, mod_path in _bpy.path.module_names(path):
            modules_stale.discard(mod_name)
            mod = module_cache.get(mod_name)
            if mod:
                if mod.__file__ != mod_path:
                    print(
                        "multiple addons with the same name:\n"
                        "  %r\n"
                        "  %r" % (mod.__file__, mod_path)
                    )
                    error_duplicates.append((mod.bl_info["name"], mod.__file__, mod_path))

                elif mod.__time__ != os.path.getmtime(mod_path):
                    print(
                        "reloading addon:",
                        mod_name,
                        mod.__time__,
                        os.path.getmtime(mod_path),
                        repr(mod_path),
                    )
                    del module_cache[mod_name]
                    mod = None

            if mod is None:
                mod = fake_module(
                    mod_name,
                    mod_path,
                    force_support=force_support,
                )
                if mod:
                    module_cache[mod_name] = mod

    # just in case we get stale modules, not likely
    for mod_stale in modules_stale:
        del module_cache[mod_stale]
    del modules_stale


def modules(*, module_cache=addons_fake_modules, refresh=True):
    if refresh or ((module_cache is addons_fake_modules) and modules._is_first):
        modules_refresh(module_cache=module_cache)
        modules._is_first = False

    mod_list = list(module_cache.values())
    mod_list.sort(
        key=lambda mod: (
            mod.bl_info.get("category", ""),
            mod.bl_info.get("name", ""),
        )
    )
    return mod_list


modules._is_first = True


def check(module_name):
    """
    Returns the loaded state of the addon.

    :arg module_name: The name of the addon and module.
    :type module_name: string
    :return: (loaded_default, loaded_state)
    :rtype: tuple of booleans
    """
    import sys
    loaded_default = module_name in _preferences.addons

    mod = sys.modules.get(module_name)
    loaded_state = (
        (mod is not None) and
        getattr(mod, "__addon_enabled__", Ellipsis)
    )

    if loaded_state is Ellipsis:
        print(
            "Warning: addon-module", module_name, "found module "
            "but without '__addon_enabled__' field, "
            "possible name collision from file:",
            repr(getattr(mod, "__file__", "<unknown>")),
        )

        loaded_state = False

    if mod and getattr(mod, "__addon_persistent__", False):
        loaded_default = True

    return loaded_default, loaded_state


# utility functions


def _addon_ensure(module_name):
    addons = _preferences.addons
    addon = addons.get(module_name)
    if not addon:
        addon = addons.new()
        addon.module = module_name


def _addon_remove(module_name):
    addons = _preferences.addons

    while module_name in addons:
        addon = addons.get(module_name)
        if addon:
            addons.remove(addon)


def enable(module_name, *, default_set=False, persistent=False, handle_error=None):
    """
    Enables an addon by name.

    :arg module_name: the name of the addon and module.
    :type module_name: string
    :arg default_set: Set the user-preference.
    :type default_set: bool
    :arg persistent: Ensure the addon is enabled for the entire session (after loading new files).
    :type persistent: bool
    :arg handle_error: Called in the case of an error, taking an exception argument.
    :type handle_error: function
    :return: the loaded module or None on failure.
    :rtype: module
    """

    import os
    import sys
    from bpy_restrict_state import RestrictBlend

    if handle_error is None:
        def handle_error(_ex):
            import traceback
            traceback.print_exc()

    # reload if the mtime changes
    mod = sys.modules.get(module_name)
    # chances of the file _not_ existing are low, but it could be removed
    if mod and os.path.exists(mod.__file__):

        if getattr(mod, "__addon_enabled__", False):
            # This is an unlikely situation,
            # re-register if the module is enabled.
            # Note: the UI doesn't allow this to happen,
            # in most cases the caller should 'check()' first.
            try:
                mod.unregister()
            except Exception as ex:
                print(
                    "Exception in module unregister():",
                    repr(getattr(mod, "__file__", module_name)),
                )
                handle_error(ex)
                return None

        mod.__addon_enabled__ = False
        mtime_orig = getattr(mod, "__time__", 0)
        mtime_new = os.path.getmtime(mod.__file__)
        if mtime_orig != mtime_new:
            import importlib
            print("module changed on disk:", repr(mod.__file__), "reloading...")

            try:
                importlib.reload(mod)
            except Exception as ex:
                handle_error(ex)
                del sys.modules[module_name]
                return None
            mod.__addon_enabled__ = False

    # add the addon first, it may want to initialize its own preferences.
    # must remove on fail through.
    if default_set:
        _addon_ensure(module_name)

    # Split registering up into 3 steps so we can undo
    # if it fails part way through.

    # Disable the context: using the context at all
    # while loading an addon is really bad, don't do it!
    with RestrictBlend():

        # 1) try import
        try:
            mod = __import__(module_name)
            if mod.__file__ is None:
                # This can happen when the addon has been removed but there are
                # residual `.pyc` files left behind.
                raise ImportError(name=module_name)
            mod.__time__ = os.path.getmtime(mod.__file__)
            mod.__addon_enabled__ = False
        except Exception as ex:
            # if the addon doesn't exist, don't print full traceback
            if type(ex) is ImportError and ex.name == module_name:
                print("addon not loaded:", repr(module_name))
                print("cause:", str(ex))
            else:
                handle_error(ex)

            if default_set:
                _addon_remove(module_name)
            return None

        # 1.1) Fail when add-on is too old.
        # This is a temporary 2.8x migration check, so we can manage addons that are supported.

        if mod.bl_info.get("blender", (0, 0, 0)) < (2, 80, 0):
            if _bpy.app.debug:
                print("Warning: Add-on '%s' was not upgraded for 2.80, ignoring" % module_name)
            return None

        # 2) Try register collected modules.
        # Removed register_module, addons need to handle their own registration now.

        from _bpy import _bl_owner_id_get, _bl_owner_id_set
        owner_id_prev = _bl_owner_id_get()
        _bl_owner_id_set(module_name)

        # 3) Try run the modules register function.
        try:
            mod.register()
        except Exception as ex:
            print(
                "Exception in module register():",
                getattr(mod, "__file__", module_name),
            )
            handle_error(ex)
            del sys.modules[module_name]
            if default_set:
                _addon_remove(module_name)
            return None
        finally:
            _bl_owner_id_set(owner_id_prev)

    # * OK loaded successfully! *
    mod.__addon_enabled__ = True
    mod.__addon_persistent__ = persistent

    if _bpy.app.debug_python:
        print("\taddon_utils.enable", mod.__name__)

    return mod


def disable(module_name, *, default_set=False, handle_error=None):
    """
    Disables an addon by name.

    :arg module_name: The name of the addon and module.
    :type module_name: string
    :arg default_set: Set the user-preference.
    :type default_set: bool
    :arg handle_error: Called in the case of an error, taking an exception argument.
    :type handle_error: function
    """
    import sys

    if handle_error is None:
        def handle_error(_ex):
            import traceback
            traceback.print_exc()

    mod = sys.modules.get(module_name)

    # possible this addon is from a previous session and didn't load a
    # module this time. So even if the module is not found, still disable
    # the addon in the user prefs.
    if mod and getattr(mod, "__addon_enabled__", False) is not False:
        mod.__addon_enabled__ = False
        mod.__addon_persistent__ = False

        try:
            mod.unregister()
        except Exception as ex:
            mod_path = getattr(mod, "__file__", module_name)
            print("Exception in module unregister():", repr(mod_path))
            del mod_path
            handle_error(ex)
    else:
        print(
            "addon_utils.disable: %s not %s" % (
                module_name,
                "disabled" if mod is None else "loaded")
        )

    # could be in more than once, unlikely but better do this just in case.
    if default_set:
        _addon_remove(module_name)

    if _bpy.app.debug_python:
        print("\taddon_utils.disable", module_name)


def reset_all(*, reload_scripts=False):
    """
    Sets the addon state based on the user preferences.
    """
    import sys

    # initializes addons_fake_modules
    modules_refresh()

    # RELEASE SCRIPTS: official scripts distributed in Blender releases
    paths_list = paths()

    for path in paths_list:
        _bpy.utils._sys_path_ensure_append(path)
        for mod_name, _mod_path in _bpy.path.module_names(path):
            is_enabled, is_loaded = check(mod_name)

            # first check if reload is needed before changing state.
            if reload_scripts:
                import importlib
                mod = sys.modules.get(mod_name)
                if mod:
                    importlib.reload(mod)

            if is_enabled == is_loaded:
                pass
            elif is_enabled:
                enable(mod_name)
            elif is_loaded:
                print("\taddon_utils.reset_all unloading", mod_name)
                disable(mod_name)


def disable_all():
    import sys
    # Collect modules to disable first because dict can be modified as we disable.
    addon_modules = [
        item for item in sys.modules.items()
        if getattr(item[1], "__addon_enabled__", False)
    ]
    # Check the enabled state again since it's possible the disable call
    # of one add-on disables others.
    for mod_name, mod in addon_modules:
        if getattr(mod, "__addon_enabled__", False):
            disable(mod_name)


def _blender_manual_url_prefix():
    return "https://docs.blender.org/manual/%s/%d.%d" % (_bpy.utils.manual_language_code(), *_bpy.app.version[:2])


def module_bl_info(mod, *, info_basis=None):
    if info_basis is None:
        info_basis = {
            "name": "",
            "author": "",
            "version": (),
            "blender": (),
            "location": "",
            "description": "",
            "doc_url": "",
            "support": 'COMMUNITY',
            "category": "",
            "warning": "",
            "show_expanded": False,
        }

    addon_info = getattr(mod, "bl_info", {})

    # avoid re-initializing
    if "_init" in addon_info:
        return addon_info

    if not addon_info:
        mod.bl_info = addon_info

    for key, value in info_basis.items():
        addon_info.setdefault(key, value)

    if not addon_info["name"]:
        addon_info["name"] = mod.__name__

    doc_url = addon_info["doc_url"]
    if doc_url:
        doc_url_prefix = "{BLENDER_MANUAL_URL}"
        if doc_url_prefix in doc_url:
            addon_info["doc_url"] = doc_url.replace(
                doc_url_prefix,
                _blender_manual_url_prefix(),
            )

    addon_info["_init"] = None
    return addon_info

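A brief usage sketch for the module above, as it might be run from Blender's Python console (the add-on name is a placeholder):

import addon_utils

# check() returns (loaded_default, loaded_state) as documented above.
is_default, is_loaded = addon_utils.check("object_example_addon")  # hypothetical name
if not is_loaded:
    # default_set=True also records the add-on in the user preferences.
    addon_utils.enable("object_example_addon", default_set=True)
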
scripts/modules/animsys_refactor.py (new file, 210 lines)
@@ -0,0 +1,210 @@
# SPDX-License-Identifier: GPL-2.0-or-later

"""
This module has utility functions for renaming
rna values in fcurves and drivers.

Currently unused, but might become useful later again.
"""

import sys
import bpy


IS_TESTING = False


def classes_recursive(base_type, clss=None):
    if clss is None:
        clss = [base_type]
    else:
        clss.append(base_type)

    for base_type_iter in base_type.__bases__:
        if base_type_iter is not object:
            classes_recursive(base_type_iter, clss)

    return clss


class DataPathBuilder:
    """Dummy class used to parse fcurve and driver data paths."""
    __slots__ = ("data_path", )

    def __init__(self, attrs):
        self.data_path = attrs

    def __getattr__(self, attr):
        str_value = ".%s" % attr
        return DataPathBuilder(self.data_path + (str_value, ))

    def __getitem__(self, key):
        if type(key) is int:
            str_value = '[%d]' % key
        elif type(key) is str:
            str_value = '["%s"]' % bpy.utils.escape_identifier(key)
        else:
            raise Exception("unsupported accessor %r of type %r (internal error)" % (key, type(key)))
        return DataPathBuilder(self.data_path + (str_value, ))

    def resolve(self, real_base, rna_update_from_map, fcurve, log):
        """Return (attribute, value) pairs."""
        pairs = []
        base = real_base
        for item in self.data_path:
            if base is not Ellipsis:
                base_new = Ellipsis
                # find the new name
                if item.startswith("."):
                    for class_name, item_new, options in (
                            rna_update_from_map.get(item[1:], []) +
                            [(None, item[1:], None)]
                    ):
                        if callable(item_new):
                            # No type check here, callback is assumed to know what it's doing.
                            base_new, item_new = item_new(base, class_name, item[1:], fcurve, options)
                            if base_new is not Ellipsis:
                                break  # found, don't keep looking
                        else:
                            # Type check!
                            type_ok = True
                            if class_name is not None:
                                type_ok = False
                                for base_type in classes_recursive(type(base)):
                                    if base_type.__name__ == class_name:
                                        type_ok = True
                                        break
                            if type_ok:
                                try:
                                    # print("base." + item_new)
                                    base_new = eval("base." + item_new)
                                    break  # found, don't keep looking
                                except:
                                    pass
                    item_new = "." + item_new
                else:
                    item_new = item
                    try:
                        base_new = eval("base" + item_new)
                    except:
                        pass

                if base_new is Ellipsis:
                    print("Failed to resolve data path:", self.data_path, file=log)
                base = base_new
            else:
                item_new = item

            pairs.append((item_new, base))
        return pairs


def id_iter():
    type_iter = type(bpy.data.objects)

    for attr in dir(bpy.data):
        data_iter = getattr(bpy.data, attr, None)
        if type(data_iter) == type_iter:
            for id_data in data_iter:
                if id_data.library is None:
                    yield id_data


def anim_data_actions(anim_data):
    actions = []
    actions.append(anim_data.action)
    for track in anim_data.nla_tracks:
        for strip in track.strips:
            actions.append(strip.action)

    # filter out None
    return [act for act in actions if act]


def find_path_new(id_data, data_path, rna_update_from_map, fcurve, log):
    # note!, id_data can be ID type or a node tree
    # ignore ID props for now
    if data_path.startswith("["):
        return data_path

    # recursive path fixing, likely will be one in most cases.
    data_path_builder = eval("DataPathBuilder(tuple())." + data_path)
    data_resolve = data_path_builder.resolve(id_data, rna_update_from_map, fcurve, log)

    path_new = [pair[0] for pair in data_resolve]

    return "".join(path_new)[1:]  # skip the first "."


def update_data_paths(rna_update, log=sys.stdout):
    """
    rna_update triple [(class_name, from, to or to_callback, callback options), ...]
    to_callback is a function with this signature: update_cb(base, class_name, old_path, fcurve, options)
    where base is the current object, class_name is the expected type name of base (callback has to handle
    this), old_path is the original name of base's property, fcurve is the affected fcurve (!),
    and options is an opaque data.
    class_name, fcurve and options may be None!
    """

    rna_update_from_map = {}
    for ren_class, ren_from, ren_to, options in rna_update:
        rna_update_from_map.setdefault(ren_from, []).append((ren_class, ren_to, options))

    for id_data in id_iter():
        # check node-trees too
        anim_data_ls = [(id_data, getattr(id_data, "animation_data", None))]
        node_tree = getattr(id_data, "node_tree", None)
        if node_tree:
            anim_data_ls.append((node_tree, node_tree.animation_data))

        for anim_data_base, anim_data in anim_data_ls:
            if anim_data is None:
                continue

            for fcurve in anim_data.drivers:
                data_path = fcurve.data_path
                data_path_new = find_path_new(anim_data_base, data_path, rna_update_from_map, fcurve, log)
                # print(data_path_new)
                if data_path_new != data_path:
                    if not IS_TESTING:
                        fcurve.data_path = data_path_new
                        fcurve.driver.is_valid = True  # reset to allow this to work again
                    print("driver-fcurve (%s): %s -> %s" % (id_data.name, data_path, data_path_new), file=log)

                for var in fcurve.driver.variables:
                    if var.type == 'SINGLE_PROP':
                        for tar in var.targets:
                            id_data_other = tar.id
                            data_path = tar.data_path

                            if id_data_other and data_path:
                                data_path_new = find_path_new(id_data_other, data_path, rna_update_from_map, None, log)
                                # print(data_path_new)
                                if data_path_new != data_path:
                                    if not IS_TESTING:
                                        tar.data_path = data_path_new
                                    print("driver (%s): %s -> %s" % (id_data_other.name, data_path, data_path_new),
                                          file=log)

            for action in anim_data_actions(anim_data):
                for fcu in action.fcurves:
                    data_path = fcu.data_path
                    data_path_new = find_path_new(anim_data_base, data_path, rna_update_from_map, fcu, log)
                    # print(data_path_new)
                    if data_path_new != data_path:
                        if not IS_TESTING:
                            fcu.data_path = data_path_new
                        print("fcurve (%s): %s -> %s" % (id_data.name, data_path, data_path_new), file=log)


if __name__ == "__main__":

    # Example, should be called externally
    # (class, from, to or to_callback, callback_options)
    replace_ls = [
        ("AnimVizMotionPaths", "frame_after", "frame_after", None),
        ("AnimVizMotionPaths", "frame_before", "frame_before", None),
        ("AnimVizOnionSkinning", "frame_after", "frame_after", None),
    ]

    update_data_paths(replace_ls)

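Beyond the plain renames in the __main__ example, update_data_paths() also accepts callback entries. A hypothetical sketch following the update_cb(base, class_name, old_path, fcurve, options) signature documented above (the property names are made up):

def _rename_cb(base, class_name, old_path, fcurve, options):
    # Return (resolved_value, new_name); (Ellipsis, old_path) signals
    # "not handled" so resolve() keeps trying the remaining candidates.
    if hasattr(base, "use_motion_paths"):  # hypothetical new property name
        return base.use_motion_paths, "use_motion_paths"
    return Ellipsis, old_path


replace_ls = [
    ("Object", "show_motion_paths", _rename_cb, None),  # hypothetical rename
]
update_data_paths(replace_ls)
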
scripts/modules/bl_app_override/__init__.py (new file, 203 lines)
@@ -0,0 +1,203 @@
# SPDX-License-Identifier: GPL-2.0-or-later

"""
Module to manage overriding various parts of Blender.

Intended for use with 'app_templates', though it can be used from anywhere.
"""


# TODO, how to check these aren't from add-ons.
# templates might need to un-register while filtering.
def class_filter(cls_parent, **kw):
    whitelist = kw.pop("whitelist", None)
    blacklist = kw.pop("blacklist", None)
    kw_items = tuple(kw.items())
    for cls in cls_parent.__subclasses__():
        # same as is_registered()
        if "bl_rna" in cls.__dict__:
            if blacklist is not None and cls.__name__ in blacklist:
                continue
            if ((whitelist is not None and cls.__name__ in whitelist) or
                    all((getattr(cls, attr) in expect) for attr, expect in kw_items)):
                yield cls


def ui_draw_filter_register(
        *,
        ui_ignore_classes=None,
        ui_ignore_operator=None,
        ui_ignore_property=None,
        ui_ignore_menu=None,
        ui_ignore_label=None
):
    import bpy

    UILayout = bpy.types.UILayout

    if ui_ignore_classes is None:
        ui_ignore_classes = (
            bpy.types.Panel,
            bpy.types.Menu,
            bpy.types.Header,
        )

    class OperatorProperties_Fake:
        pass

    class UILayout_Fake(bpy.types.UILayout):
        __slots__ = ()

        def __getattribute__(self, attr):
            # ensure we always pass down UILayout_Fake instances
            if attr in {"row", "split", "column", "box", "column_flow"}:
                real_func = UILayout.__getattribute__(self, attr)

                def dummy_func(*args, **kw):
                    # print("wrapped", attr)
                    ret = real_func(*args, **kw)
                    return UILayout_Fake(ret)
                return dummy_func

            elif attr in {"operator", "operator_menu_enum", "operator_enum", "operator_menu_hold"}:
                if ui_ignore_operator is None:
                    return UILayout.__getattribute__(self, attr)

                real_func = UILayout.__getattribute__(self, attr)

                def dummy_func(*args, **kw):
                    # print("wrapped", attr)
                    ui_test = ui_ignore_operator(args[0])
                    if ui_test is False:
                        ret = real_func(*args, **kw)
                    else:
                        if ui_test is None:
                            UILayout.__getattribute__(self, "label")(text="")
                        else:
                            assert ui_test is True
                        # may need to be set
                        ret = OperatorProperties_Fake()
                    return ret
                return dummy_func

            elif attr in {"prop", "prop_enum"}:
                if ui_ignore_property is None:
                    return UILayout.__getattribute__(self, attr)

                real_func = UILayout.__getattribute__(self, attr)

                def dummy_func(*args, **kw):
                    # print("wrapped", attr)
                    ui_test = ui_ignore_property(args[0].__class__.__name__, args[1])
                    if ui_test is False:
                        ret = real_func(*args, **kw)
                    else:
                        if ui_test is None:
                            UILayout.__getattribute__(self, "label")(text="")
                        else:
                            assert ui_test is True
                        ret = None
                    return ret
                return dummy_func

            elif attr == "menu":
                if ui_ignore_menu is None:
                    return UILayout.__getattribute__(self, attr)

                real_func = UILayout.__getattribute__(self, attr)

                def dummy_func(*args, **kw):
                    # print("wrapped", attr)
                    ui_test = ui_ignore_menu(args[0])
                    if ui_test is False:
                        ret = real_func(*args, **kw)
                    else:
                        if ui_test is None:
                            UILayout.__getattribute__(self, "label")(text="")
                        else:
                            assert ui_test is True
                        ret = None
                    return ret
                return dummy_func

            elif attr == "label":
                if ui_ignore_label is None:
                    return UILayout.__getattribute__(self, attr)

                real_func = UILayout.__getattribute__(self, attr)

                def dummy_func(*args, **kw):
                    # print("wrapped", attr)
                    ui_test = ui_ignore_label(args[0] if args else kw.get("text", ""))
                    if ui_test is False:
                        ret = real_func(*args, **kw)
                    else:
                        if ui_test is None:
                            real_func(text="")
                        else:
                            assert ui_test is True
                        ret = None
                    return ret
                return dummy_func
            else:
                return UILayout.__getattribute__(self, attr)
            # print(self, attr)

        def operator(self, *args, **kw):
            return super().operator(*args, **kw)

    def draw_override(func_orig, self_real, context):
        cls_real = self_real.__class__
        if cls_real is super:
            # simple, no wrapping
            return func_orig(self_real, context)

        class Wrapper(cls_real):
            __slots__ = ()

            def __getattribute__(self, attr):
                if attr == "layout":
                    return UILayout_Fake(self_real.layout)
                else:
                    cls = super()
                    try:
                        return cls.__getattr__(self, attr)
                    except AttributeError:
                        # class variable
                        try:
                            return getattr(cls, attr)
                        except AttributeError:
                            # for preset bl_idname access
                            return getattr(UILayout(self), attr)

            @property
            def layout(self):
                # print("wrapped")
                return self_real.layout

        return func_orig(Wrapper(self_real), context)

    ui_ignore_store = []

    for cls in ui_ignore_classes:
        for subcls in list(cls.__subclasses__()):
            if "draw" in subcls.__dict__:  # don't want to get parents draw()

                def replace_draw():
                    # function also serves to hold draw_old in a local name-space
                    draw_orig = subcls.draw

                    def draw(self, context):
                        return draw_override(draw_orig, self, context)
                    subcls.draw = draw

                ui_ignore_store.append((subcls, "draw", subcls.draw))

                replace_draw()

    return ui_ignore_store


def ui_draw_filter_unregister(ui_ignore_store):
    for (obj, attr, value) in ui_ignore_store:
        setattr(obj, attr, value)

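A minimal usage sketch for the filter above; the suppression rule is illustrative. Note the convention implemented by the wrappers: a predicate returning False draws the widget normally, None draws a blank label in its place, and True suppresses it entirely.

import bl_app_override

# Hide every operator button whose idname starts with "wm." (illustrative rule).
store = bl_app_override.ui_draw_filter_register(
    ui_ignore_operator=lambda op_id: op_id.startswith("wm."),
)
# ... run with the filtered UI ...
bl_app_override.ui_draw_filter_unregister(store)
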
scripts/modules/bl_app_override/helpers.py (new file, 147 lines)
@@ -0,0 +1,147 @@
# SPDX-License-Identifier: GPL-2.0-or-later

# -----------------------------------------------------------------------------
# AppOverrideState


class AppOverrideState:
    """
    Utility class to encapsulate overriding the application state
    so that settings can be restored afterwards.
    """
    __slots__ = (
        # setup_classes
        "_class_store",
        # setup_ui_ignore
        "_ui_ignore_store",
        # setup_addons
        "_addon_store",
    )

    # ---------
    # Callbacks
    #
    # Set as None, to make it simple to check if they're being overridden.

    # setup/teardown classes
    class_ignore = None

    # setup/teardown ui_ignore
    ui_ignore_classes = None
    ui_ignore_operator = None
    ui_ignore_property = None
    ui_ignore_menu = None
    ui_ignore_label = None

    addon_paths = None
    addons = None

    # End callbacks

    def __init__(self):
        self._class_store = None
        self._addon_store = None
        self._ui_ignore_store = None

    def _setup_classes(self):
        assert self._class_store is None
        self._class_store = self.class_ignore()
        from bpy.utils import unregister_class
        for cls in self._class_store:
            unregister_class(cls)

    def _teardown_classes(self):
        assert self._class_store is not None

        from bpy.utils import register_class
        for cls in self._class_store:
            register_class(cls)
        self._class_store = None

    def _setup_ui_ignore(self):
        import bl_app_override

        self._ui_ignore_store = bl_app_override.ui_draw_filter_register(
            ui_ignore_classes=(
                None if self.ui_ignore_classes is None
                else self.ui_ignore_classes()
            ),
            ui_ignore_operator=self.ui_ignore_operator,
            ui_ignore_property=self.ui_ignore_property,
            ui_ignore_menu=self.ui_ignore_menu,
            ui_ignore_label=self.ui_ignore_label,
        )

    def _teardown_ui_ignore(self):
        import bl_app_override
        bl_app_override.ui_draw_filter_unregister(
            self._ui_ignore_store
        )
        self._ui_ignore_store = None

    def _setup_addons(self):
        import sys

        sys_path = []
        if self.addon_paths is not None:
            for path in self.addon_paths():
                if path not in sys.path:
                    sys.path.append(path)

        import addon_utils
        addons = []
        if self.addons is not None:
            addons.extend(self.addons())
        for addon in addons:
            addon_utils.enable(addon)

        self._addon_store = {
            "sys_path": sys_path,
            "addons": addons,
        }

    def _teardown_addons(self):
        import sys

        sys_path = self._addon_store["sys_path"]
        for path in sys_path:
            # should always succeed, but if not it doesn't matter
            # (someone else was changing the sys.path), ignore!
            try:
                sys.path.remove(path)
            except:
                pass

        addons = self._addon_store["addons"]
        import addon_utils
        for addon in addons:
            addon_utils.disable(addon)

        self._addon_store.clear()
        self._addon_store = None

    def setup(self):
        if self.class_ignore is not None:
            self._setup_classes()

        if any((self.addon_paths,
                self.addons,
                )):
            self._setup_addons()

        if any((self.ui_ignore_operator,
                self.ui_ignore_property,
                self.ui_ignore_menu,
                self.ui_ignore_label,
                )):
            self._setup_ui_ignore()

    def teardown(self):
        if self._class_store is not None:
            self._teardown_classes()

        if self._addon_store is not None:
            self._teardown_addons()

        if self._ui_ignore_store is not None:
            self._teardown_ui_ignore()

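A sketch of how an app template might subclass the helper above; the add-on name and the panel filter are assumptions:

import bpy
import bl_app_override
from bl_app_override.helpers import AppOverrideState


class MyTemplateState(AppOverrideState):
    # Enable one bundled add-on while the template is active.
    @staticmethod
    def addons():
        return ["my_template_addon"]  # placeholder add-on name

    # Unregister all 3D-viewport panels for the duration of the override.
    @staticmethod
    def class_ignore():
        return list(bl_app_override.class_filter(
            bpy.types.Panel, bl_space_type={'VIEW_3D'},
        ))


state = MyTemplateState()
state.setup()
# ... template session ...
state.teardown()
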
scripts/modules/bl_app_template_utils.py (new file, 173 lines)
@@ -0,0 +1,173 @@
# SPDX-License-Identifier: GPL-2.0-or-later

"""
Similar to ``addon_utils``, except we can only have one active at a time.

In most cases users of this module will simply call 'activate'.
"""

__all__ = (
    "activate",
    "import_from_path",
    "import_from_id",
    "reset",
)

import bpy as _bpy

# Normally matches 'preferences.app_template_id',
# but loading new preferences will get us out of sync.
_app_template = {
    "id": "",
}

# instead of sys.modules
# note that we only ever have one template enabled at a time
# so it may not seem necessary to use this.
#
# However, templates may want to share between each-other,
# so any loaded modules are stored here?
#
# Note that the ID here is the app_template_id, not the modules __name__.
_modules = {}


def _enable(template_id, *, handle_error=None, ignore_not_found=False):
    from bpy_restrict_state import RestrictBlend

    if handle_error is None:
        def handle_error(_ex):
            import traceback
            traceback.print_exc()

    # Split registering up into 2 steps so we can undo
    # if it fails part way through.

    # disable the context, using the context at all is
    # really bad while loading a template, don't do it!
    with RestrictBlend():

        # 1) try import
        try:
            mod = import_from_id(template_id, ignore_not_found=ignore_not_found)
        except Exception as ex:
            handle_error(ex)
            return None

        _modules[template_id] = mod
        if mod is None:
            return None
        mod.__template_enabled__ = False

        # 2) try run the modules register function
        try:
            mod.register()
        except Exception as ex:
            print("Exception in module register(): %r" %
                  getattr(mod, "__file__", template_id))
            handle_error(ex)
            del _modules[template_id]
            return None

    # * OK loaded successfully! *
    mod.__template_enabled__ = True

    if _bpy.app.debug_python:
        print("\tapp_template_utils.enable", mod.__name__)

    return mod


def _disable(template_id, *, handle_error=None):
    """
    Disables a template by name.

    :arg template_id: The name of the template and module.
    :type template_id: string
    :arg handle_error: Called in the case of an error,
       taking an exception argument.
    :type handle_error: function
    """

    if handle_error is None:
        def handle_error(_ex):
            import traceback
            traceback.print_exc()

    mod = _modules.get(template_id, False)

    if mod is None:
        # Loaded but has no module, remove since there is no use in keeping it.
        del _modules[template_id]
    elif getattr(mod, "__template_enabled__", False) is not False:
        mod.__template_enabled__ = False

        try:
            mod.unregister()
        except Exception as ex:
            print("Exception in module unregister(): %r" %
                  getattr(mod, "__file__", template_id))
            handle_error(ex)
    else:
        print("\tapp_template_utils.disable: %s not %s." %
              (template_id, "disabled" if mod is False else "loaded"))

    if _bpy.app.debug_python:
        print("\tapp_template_utils.disable", template_id)


def import_from_path(path, *, ignore_not_found=False):
    import os
    from importlib import import_module
    base_module, template_id = path.rsplit(os.sep, 2)[-2:]
    module_name = base_module + "." + template_id

    try:
        return import_module(module_name)
    except ModuleNotFoundError as ex:
        if ignore_not_found and ex.name == module_name:
            return None
        raise ex


def import_from_id(template_id, *, ignore_not_found=False):
    import os
    path = next(iter(_bpy.utils.app_template_paths(path=template_id)), None)
    if path is None:
        if ignore_not_found:
            return None
        else:
            raise Exception("%r template not found!" % template_id)
    else:
        if ignore_not_found:
            if not os.path.exists(os.path.join(path, "__init__.py")):
                return None
        return import_from_path(path, ignore_not_found=ignore_not_found)


def activate(*, template_id=None, reload_scripts=False):
    template_id_prev = _app_template["id"]

    # not needed but may as well avoid redundant
    # disable/enable for all add-ons on 'File -> New'
    if not reload_scripts and template_id_prev == template_id:
        return

    if template_id_prev:
        _disable(template_id_prev)

    # ignore_not_found so modules that don't contain scripts don't raise errors
    _mod = _enable(template_id, ignore_not_found=True) if template_id else None

    _app_template["id"] = template_id


def reset(*, reload_scripts=False):
    """
    Sets default state.
    """
    template_id = _bpy.context.preferences.app_template
    if _bpy.app.debug_python:
        print("bl_app_template_utils.reset('%s')" % template_id)

    activate(template_id=template_id, reload_scripts=reload_scripts)

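A short usage sketch for the module above (the template id is a placeholder):

import bl_app_template_utils

# Switch to an installed template, then deactivate it again.
bl_app_template_utils.activate(template_id="my_template")
bl_app_template_utils.activate()  # template_id=None disables the current template
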
scripts/modules/bl_console_utils/__init__.py (new file, 4 lines)
@@ -0,0 +1,4 @@
# SPDX-License-Identifier: GPL-2.0-or-later
"""
Utilities relating to text mode console interactions.
"""

scripts/modules/bl_console_utils/autocomplete/__init__.py (new file, 5 lines)
@@ -0,0 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later

# Copyright (c) 2009 www.stani.be

"""Package for console specific modules."""

scripts/modules/bl_console_utils/autocomplete/complete_calltip.py (new file, 172 lines)
@@ -0,0 +1,172 @@
# SPDX-License-Identifier: GPL-2.0-or-later

# Copyright (c) 2009 www.stani.be

import inspect
import re


# regular expression constants
DEF_DOC = r'%s\s*(\(.*?\))'
DEF_SOURCE = r'def\s+%s\s*(\(.*?\)):'
RE_EMPTY_LINE = re.compile(r'^\s*\n')
RE_FLAG = re.MULTILINE | re.DOTALL
RE_NEWLINE = re.compile('\n+')
RE_SPACE = re.compile(r'\s+')
RE_DEF_COMPLETE = re.compile(
    # don't start with a quote
    '''(?:^|[^"'a-zA-Z0-9_])'''
    # start with a \w = [a-zA-Z0-9_]
    r'''((\w+'''
    # allow also dots and closed bracket pairs []
    r'''(?:\w|[.]|\[.+?\])*'''
    # allow empty string
    '''|)'''
    # allow opening bracket(s)
    r'''(?:\(|\s)*)$''')


def reduce_newlines(text):
    """Reduces multiple newlines to a single newline.

    :arg text: text with multiple newlines
    :type text: str
    :returns: text with single newlines
    :rtype: str

    >>> reduce_newlines('hello\\n\\nworld')
    'hello\\nworld'
    """
    return RE_NEWLINE.sub('\n', text)


def reduce_spaces(text):
    """Reduces multiple whitespaces to a single space.

    :arg text: text with multiple spaces
    :type text: str
    :returns: text with single spaces
    :rtype: str

    >>> reduce_spaces('hello \\nworld')
    'hello world'
    """
    return RE_SPACE.sub(' ', text)


def get_doc(obj):
    """Get the doc string or comments for an object.

    :arg object: object
    :returns: doc string
    :rtype: str

    >>> get_doc(abs)
    'abs(number) -> number\\n\\nReturn the absolute value of the argument.'
    """
    result = inspect.getdoc(obj) or inspect.getcomments(obj)
    return result and RE_EMPTY_LINE.sub('', result.rstrip()) or ''


def get_argspec(func, *, strip_self=True, doc=None, source=None):
    """Get argument specifications.

    :arg strip_self: strip `self` from argspec
    :type strip_self: bool
    :arg doc: doc string of func (optional)
    :type doc: str
    :arg source: source code of func (optional)
    :type source: str
    :returns: argument specification
    :rtype: str

    >>> get_argspec(inspect.getclasstree)
    '(classes, unique=0)'
    >>> get_argspec(abs)
    '(number)'
    """
    # get the function object of the class
    try:
        func = func.__func__
    except AttributeError:
        pass
    # is callable?
    if not hasattr(func, '__call__'):
        return ''
    # func should have a name
    try:
        func_name = func.__name__
    except AttributeError:
        return ''
    # from docstring
    if doc is None:
        doc = get_doc(func)
    match = re.search(DEF_DOC % func_name, doc, RE_FLAG)
    # from source code
    if not match:
        if source is None:
            try:
                source = inspect.getsource(func)
            except (TypeError, IOError):
                source = ''
        if source:
            match = re.search(DEF_SOURCE % func_name, source, RE_FLAG)
    if match:
        argspec = reduce_spaces(match.group(1))
    else:
        # try with the inspect.getarg* functions
        try:
            argspec = inspect.formatargspec(*inspect.getfullargspec(func))
        except:
            try:
                argspec = inspect.formatargvalues(
                    *inspect.getargvalues(func))
            except:
                argspec = ''
    if strip_self:
        argspec = argspec.replace('self, ', '')
    return argspec


def complete(line, cursor, namespace):
    """Complete callable with calltip.

    :arg line: incomplete text line
    :type line: str
    :arg cursor: current character position
    :type cursor: int
    :arg namespace: namespace
    :type namespace: dict
    :returns: (matches, word, scrollback)
    :rtype: (list of str, str, str)

    >>> import os
    >>> complete('os.path.isdir(', 14, {'os': os})[-1]
    'isdir(s)\\nReturn true if the pathname refers to an existing directory.'
    >>> complete('abs(', 4, {})[-1]
    'abs(number) -> number\\nReturn the absolute value of the argument.'
    """
    matches = []
    word = ''
    scrollback = ''
    match = RE_DEF_COMPLETE.search(line[:cursor])

    if match:
        word = match.group(1)
        func_word = match.group(2)
        try:
            func = eval(func_word, namespace)
        except Exception:
            func = None

        if func:
            doc = get_doc(func)
            argspec = get_argspec(func, doc=doc)
            scrollback = func_word.split('.')[-1] + (argspec or '()')
            if doc.startswith(scrollback):
                scrollback = doc
            elif doc:
                scrollback += '\n' + doc
            scrollback = reduce_newlines(scrollback)

    return matches, word, scrollback

scripts/modules/bl_console_utils/autocomplete/complete_import.py (new file, 179 lines)
@@ -0,0 +1,179 @@
# SPDX-License-Identifier: GPL-2.0-or-later

# Copyright (c) 2009 Fernando Perez, www.stani.be

# Original copyright (see docstring):
# ****************************************************************************
# Copyright (C) 2001-2006 Fernando Perez <fperez@colorado.edu>
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
# ****************************************************************************

"""Completer for import statements

Original code was from IPython/Extensions/ipy_completers.py. The following
changes have been made:
- ported to python3
- pep8 polishing
- limit list of modules to prefix in case of "from w"
- sorted modules
- added sphinx documentation
- complete() returns a blank list if the module isn't found
"""


import os
import sys

TIMEOUT_STORAGE = 3  # Time in secs after which the root-modules will be stored
TIMEOUT_GIVEUP = 20  # Time in secs after which we give up

ROOT_MODULES = None


def get_root_modules():
    """
    Returns a list containing the names of all the modules available in the
    folders of the python-path.

    :returns: modules
    :rtype: list
    """
    global ROOT_MODULES
    modules = []
    if not (ROOT_MODULES is None):
        return ROOT_MODULES
    from time import time
    t = time()
    store = False
    for path in sys.path:
        modules += module_list(path)
        if time() - t >= TIMEOUT_STORAGE and not store:
            # Caching the list of root modules, please wait!
            store = True
        if time() - t > TIMEOUT_GIVEUP:
            # This is taking too long, we give up.
            ROOT_MODULES = []
            return []

    modules += sys.builtin_module_names

    # needed for modules defined in C
    modules += sys.modules.keys()

    modules = list(set(modules))
    if '__init__' in modules:
        modules.remove('__init__')
    modules = sorted(modules)
    if store:
        ROOT_MODULES = modules
    return modules


def module_list(path):
    """
    Return the list containing the names of the modules available in
    the given folder.

    :arg path: folder path
    :type path: str
    :returns: modules
    :rtype: list
    """

    if os.path.isdir(path):
        folder_list = os.listdir(path)
    elif path.endswith('.egg'):
        from zipimport import zipimporter
        try:
            folder_list = [f for f in zipimporter(path)._files]
        except:
            folder_list = []
    else:
        folder_list = []
    # folder_list = glob.glob(os.path.join(path, '*'))
    folder_list = [
        p for p in folder_list
        if (os.path.exists(os.path.join(path, p, '__init__.py')) or
            p[-3:] in {'.py', '.so'} or
            p[-4:] in {'.pyc', '.pyo', '.pyd'})]

    folder_list = [os.path.basename(p).split('.')[0] for p in folder_list]
    return folder_list


def complete(line):
    """
    Returns a list containing the completion possibilities for an import line.

    :arg line:

       incomplete line which contains an import statement::

          import xml.d
          from xml.dom import

    :type line: str
    :returns: list of completion possibilities
    :rtype: list

    >>> complete('import weak')
    ['weakref']
    >>> complete('from weakref import C')
    ['CallableProxyType']
    """
    import inspect

    def try_import(mod, *, only_modules=False):

        def is_importable(module, attr):
            if only_modules:
                return inspect.ismodule(getattr(module, attr))
            else:
                return not (attr[:2] == '__' and attr[-2:] == '__')

        try:
            m = __import__(mod)
        except:
            return []
        mods = mod.split('.')
        for module in mods[1:]:
            m = getattr(m, module)
        if (not hasattr(m, '__file__')) or (not only_modules) or\
           (hasattr(m, '__file__') and '__init__' in m.__file__):
            completion_list = [attr for attr in dir(m)
                               if is_importable(m, attr)]
        else:
            completion_list = []
        completion_list.extend(getattr(m, '__all__', []))
        if hasattr(m, '__file__') and '__init__' in m.__file__:
            completion_list.extend(module_list(os.path.dirname(m.__file__)))
        completion_list = list(set(completion_list))
        if '__init__' in completion_list:
            completion_list.remove('__init__')
        return completion_list

    def filter_prefix(names, prefix):
        return [name for name in names if name.startswith(prefix)]

    words = line.split(' ')
    if len(words) == 3 and words[0] == 'from':
        return ['import ']
    if len(words) < 3 and (words[0] in {'import', 'from'}):
        if len(words) == 1:
            return get_root_modules()
        mod = words[1].split('.')
        if len(mod) < 2:
            return filter_prefix(get_root_modules(), words[-1])
        completion_list = try_import('.'.join(mod[:-1]), only_modules=True)
        completion_list = ['.'.join(mod[:-1] + [el]) for el in completion_list]
        return filter_prefix(completion_list, words[-1])
    if len(words) >= 3 and words[0] == 'from':
        mod = words[1]
        return filter_prefix(try_import(mod), words[-1])

    # get here if the import is not found
    # import invalidmodule
    # ^, in this case return nothing
    return []

@@ -0,0 +1,190 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
# Copyright (c) 2009 www.stani.be
|
||||
|
||||
"""Autocomplete with the standard library"""
|
||||
|
||||
import re
|
||||
import rlcompleter
|
||||
|
||||
|
||||
RE_INCOMPLETE_INDEX = re.compile(r'(.*?)\[[^\]]+$')
|
||||
|
||||
TEMP = '__tEmP__' # only \w characters are allowed!
|
||||
TEMP_N = len(TEMP)
|
||||
|
||||
|
||||
def is_dict(obj):
|
||||
"""Returns whether obj is a dictionary"""
|
||||
return hasattr(obj, 'keys') and hasattr(getattr(obj, 'keys'), '__call__')
|
||||
|
||||
|
||||
def is_struct_seq(obj):
|
||||
"""Returns whether obj is a structured sequence subclass: sys.float_info"""
|
||||
return isinstance(obj, tuple) and hasattr(obj, 'n_fields')
|
||||
|
||||
|
||||
def complete_names(word, namespace):
|
||||
"""Complete variable names or attributes
|
||||
|
||||
:arg word: word to be completed
|
||||
:type word: str
|
||||
:arg namespace: namespace
|
||||
:type namespace: dict
|
||||
:returns: completion matches
|
||||
:rtype: list of str
|
||||
|
||||
>>> complete_names('fo', {'foo': 'bar'})
|
||||
['foo', 'for', 'format(']
|
||||
"""
|
||||
# start completer
|
||||
completer = rlcompleter.Completer(namespace)
|
||||
# find matches with std library (don't try to implement this yourself)
|
||||
completer.complete(word, 0)
|
||||
return sorted(set(completer.matches))
|
||||
|
||||
|
||||
def complete_indices(word, namespace, *, obj=None, base=None):
|
||||
"""Complete a list or dictionary with its indices:
|
||||
|
||||
* integer numbers for list
|
||||
* any keys for dictionary
|
||||
|
||||
:arg word: word to be completed
|
||||
:type word: str
|
||||
:arg namespace: namespace
|
||||
:type namespace: dict
|
||||
:arg obj: object evaluated from base
|
||||
:arg base: sub-string which can be evaluated into an object.
|
||||
:type base: str
|
||||
:returns: completion matches
|
||||
:rtype: list of str
|
||||
|
||||
>>> complete_indices('foo', {'foo': range(5)})
|
||||
['foo[0]', 'foo[1]', 'foo[2]', 'foo[3]', 'foo[4]']
|
||||
>>> complete_indices('foo', {'foo': {'bar':0, 1:2}})
|
||||
['foo[1]', "foo['bar']"]
|
||||
>>> complete_indices("foo['b", {'foo': {'bar':0, 1:2}}, base='foo')
|
||||
["foo['bar']"]
|
||||
"""
|
||||
# FIXME: 'foo["b'
|
||||
if base is None:
|
||||
base = word
|
||||
if obj is None:
|
||||
try:
|
||||
obj = eval(base, namespace)
|
||||
except Exception:
|
||||
return []
|
||||
if not hasattr(obj, '__getitem__'):
|
||||
# obj is not a list or dictionary
|
||||
return []
|
||||
|
||||
obj_is_dict = is_dict(obj)
|
||||
|
||||
# rare objects have a __getitem__ but no __len__ (eg. BMEdge)
|
||||
if not obj_is_dict:
|
||||
try:
|
||||
obj_len = len(obj)
|
||||
except TypeError:
|
||||
return []
|
||||
|
||||
if obj_is_dict:
|
||||
# dictionary type
|
||||
matches = ['%s[%r]' % (base, key) for key in sorted(obj.keys())]
|
||||
else:
|
||||
# list type
|
||||
matches = ['%s[%d]' % (base, idx) for idx in range(obj_len)]
|
||||
if word != base:
|
||||
matches = [match for match in matches if match.startswith(word)]
|
||||
return matches
|
||||
|
||||
|
||||
def complete(word, namespace, *, private=True):
|
||||
"""Complete word within a namespace with the standard rlcompleter
|
||||
module. Also supports index or key access [].
|
||||
|
||||
:arg word: word to be completed
|
||||
:type word: str
|
||||
:arg namespace: namespace
|
||||
:type namespace: dict
|
||||
:arg private: whether private attribute/methods should be returned
|
||||
:type private: bool
|
||||
:returns: completion matches
|
||||
:rtype: list of str
|
||||
|
||||
>>> complete('foo[1', {'foo': range(14)})
|
||||
['foo[1]', 'foo[10]', 'foo[11]', 'foo[12]', 'foo[13]']
|
||||
>>> complete('foo[0]', {'foo': [range(5)]})
|
||||
['foo[0][0]', 'foo[0][1]', 'foo[0][2]', 'foo[0][3]', 'foo[0][4]']
|
||||
>>> complete('foo[0].i', {'foo': [range(5)]})
|
||||
['foo[0].index(', 'foo[0].insert(']
|
||||
>>> complete('rlcompleter', {'rlcompleter': rlcompleter})
|
||||
['rlcompleter.']
|
||||
"""
|
||||
#
|
||||
# if word is empty -> nothing to complete
|
||||
if not word:
|
||||
return []
|
||||
|
||||
re_incomplete_index = RE_INCOMPLETE_INDEX.search(word)
|
||||
if re_incomplete_index:
|
||||
# ignore incomplete index at the end, e.g 'a[1' -> 'a'
|
||||
matches = complete_indices(word, namespace,
|
||||
base=re_incomplete_index.group(1))
|
||||
|
||||
elif not ('[' in word):
|
||||
matches = complete_names(word, namespace)
|
||||
|
||||
elif word[-1] == ']':
|
||||
matches = [word]
|
||||
|
||||
elif '.' in word:
|
||||
# brackets are normally not allowed -> work around
|
||||
|
||||
# remove brackets by using a temp var without brackets
|
||||
obj, attr = word.rsplit('.', 1)
|
||||
try:
|
||||
# do not run the obj expression in the console
|
||||
namespace[TEMP] = eval(obj, namespace)
|
||||
except Exception:
|
||||
return []
|
||||
matches = complete_names(TEMP + '.' + attr, namespace)
|
||||
matches = [obj + match[TEMP_N:] for match in matches]
|
||||
del namespace[TEMP]
|
||||
|
||||
else:
|
||||
# safety net, but when would this occur?
|
||||
return []
|
||||
|
||||
if not matches:
|
||||
return []
|
||||
|
||||
# add '.', '(' or '[' when the only match is the word itself
|
||||
elif len(matches) == 1 and matches[0] == word:
|
||||
|
||||
# try to retrieve the object
|
||||
try:
|
||||
obj = eval(word, namespace)
|
||||
except Exception:
|
||||
return []
|
||||
# ignore basic types
|
||||
if type(obj) in {bool, float, int, str}:
|
||||
return []
|
||||
# an extra char '[', '(' or '.' will be added
|
||||
if hasattr(obj, '__getitem__') and not is_struct_seq(obj):
|
||||
# list or dictionary
|
||||
matches = complete_indices(word, namespace, obj=obj)
|
||||
elif hasattr(obj, '__call__'):
|
||||
# callables
|
||||
matches = [word + '(']
|
||||
else:
|
||||
# any other type
|
||||
matches = [word + '.']
|
||||
|
||||
# separate public from private
|
||||
public_matches = [match for match in matches if not ('._' in match)]
|
||||
if private:
|
||||
private_matches = [match for match in matches if '._' in match]
|
||||
return public_matches + private_matches
|
||||
else:
|
||||
return public_matches
|
||||
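# A minimal usage sketch (not part of the original file; the namespace dict
# is illustrative). Note how filtering against the partial word works:
#
# >>> ns = {'cfg': {'size': 10, 2: 'two'}}
# >>> complete_indices('cfg', ns)
# ['cfg[2]', "cfg['size']"]
# >>> complete_indices("cfg['s", ns, base='cfg')
# ["cfg['size']"]
# >>> complete('cfg.ke', ns)
# ['cfg.keys(']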
136
scripts/modules/bl_console_utils/autocomplete/intellisense.py
Normal file
@@ -0,0 +1,136 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
# Copyright (c) 2009 www.stani.be
|
||||
|
||||
"""This module provides intellisense features such as:
|
||||
|
||||
* autocompletion
|
||||
* calltips
|
||||
|
||||
It unifies all completion plugins and only loads them on demand.
|
||||
"""
|
||||
|
||||
# TODO: file complete if startswith quotes
|
||||
import os
|
||||
import re
|
||||
|
||||
# regular expressions to find out which completer we need
|
||||
|
||||
# line which starts with an import statement
|
||||
RE_MODULE = re.compile(r'''^import(\s|$)|from.+''')
|
||||
|
||||
# The following regular expression means an 'unquoted' word
|
||||
RE_UNQUOTED_WORD = re.compile(
|
||||
# don't start with a quote
|
||||
r'''(?:^|[^"'a-zA-Z0-9_])'''
|
||||
# start with a \w = [a-zA-Z0-9_]
|
||||
r'''((?:\w+'''
|
||||
# allow also dots and closed bracket pairs []
|
||||
r'''(?:\w|[.]|\[.+?\])*'''
|
||||
# allow empty string
|
||||
r'''|)'''
|
||||
# allow an unfinished index at the end (including quotes)
|
||||
r'''(?:\[[^\]]*$)?)$''',
|
||||
# allow unicode as theoretically this is possible
|
||||
re.UNICODE)
|
||||
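# A quick illustration (assumed examples, not from the original file) of what
# RE_UNQUOTED_WORD extracts from the text before the cursor:
#
#   RE_UNQUOTED_WORD.search('print(foo.bar[0].ba').group(1)  ->  'foo.bar[0].ba'
#   RE_UNQUOTED_WORD.search('foo["b').group(1)               ->  'foo["b'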
|
||||
|
||||
def complete(line, cursor, namespace, private):
|
||||
"""Returns a list of possible completions:
|
||||
|
||||
* name completion
|
||||
* attribute completion (obj.attr)
|
||||
* index completion for lists and dictionaries
|
||||
* module completion (from/import)
|
||||
|
||||
:arg line: incomplete text line
|
||||
:type line: str
|
||||
:arg cursor: current character position
|
||||
:type cursor: int
|
||||
:arg namespace: namespace
|
||||
:type namespace: dict
|
||||
:arg private: whether private variables should be listed
|
||||
:type private: bool
|
||||
:returns: list of completions, word
|
||||
:rtype: list, str
|
||||
|
||||
>>> complete('re.sr', 5, {'re': re}, private=True)
|
||||
(['re.sre_compile', 're.sre_parse'], 're.sr')
|
||||
"""
|
||||
re_unquoted_word = RE_UNQUOTED_WORD.search(line[:cursor])
|
||||
if re_unquoted_word:
|
||||
# unquoted word -> module or attribute completion
|
||||
word = re_unquoted_word.group(1)
|
||||
if RE_MODULE.match(line):
|
||||
from . import complete_import
|
||||
matches = complete_import.complete(line)
|
||||
if not private:
|
||||
matches[:] = [m for m in matches if m[:1] != "_"]
|
||||
matches.sort()
|
||||
else:
|
||||
from . import complete_namespace
|
||||
matches = complete_namespace.complete(word, namespace, private=private)
|
||||
else:
|
||||
# for now we don't have completers for strings
|
||||
# TODO: add file auto completer for strings
|
||||
word = ''
|
||||
matches = []
|
||||
return matches, word
|
||||
|
||||
|
||||
def expand(line, cursor, namespace, *, private=True):
|
||||
"""This method is invoked when the user asks autocompletion,
|
||||
e.g. when Ctrl+Space is pressed.
|
||||
|
||||
:arg line: incomplete text line
|
||||
:type line: str
|
||||
:arg cursor: current character position
|
||||
:type cursor: int
|
||||
:arg namespace: namespace
|
||||
:type namespace: dict
|
||||
:arg private: whether private variables should be listed
|
||||
:type private: bool
|
||||
:returns:
|
||||
|
||||
current expanded line, updated cursor position and scrollback
|
||||
|
||||
:rtype: str, int, str
|
||||
|
||||
>>> expand('os.path.isdir(', 14, {'os': os})[-1]
|
||||
'isdir(s)\\nReturn true if the pathname refers to an existing directory.'
|
||||
>>> expand('abs(', 4, {})[-1]
|
||||
'abs(number) -> number\\nReturn the absolute value of the argument.'
|
||||
"""
|
||||
if line[:cursor].strip().endswith('('):
|
||||
from . import complete_calltip
|
||||
matches, word, scrollback = complete_calltip.complete(
|
||||
line, cursor, namespace)
|
||||
prefix = os.path.commonprefix(matches)[len(word):]
|
||||
no_calltip = False
|
||||
else:
|
||||
matches, word = complete(line, cursor, namespace, private)
|
||||
prefix = os.path.commonprefix(matches)[len(word):]
|
||||
if len(matches) == 1:
|
||||
scrollback = ''
|
||||
else:
|
||||
# causes blender bug #27495 since string keys may contain '.'
|
||||
# scrollback = ' '.join([m.split('.')[-1] for m in matches])
|
||||
|
||||
# add white space to align with the cursor
|
||||
white_space = " " + (" " * (cursor + len(prefix)))
|
||||
word_prefix = word + prefix
|
||||
scrollback = '\n'.join(
|
||||
[white_space + m[len(word_prefix):]
|
||||
if (word_prefix and m.startswith(word_prefix))
|
||||
else
|
||||
white_space + m.split('.')[-1]
|
||||
for m in matches])
|
||||
|
||||
no_calltip = True
|
||||
|
||||
if prefix:
|
||||
line = line[:cursor] + prefix + line[cursor:]
|
||||
cursor += len(prefix.encode('utf-8'))
|
||||
if no_calltip and prefix.endswith('('):
|
||||
return expand(line, cursor, namespace, private=private)
|
||||
return line, cursor, scrollback
|
||||
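# A short sketch (not part of this module) of driving expand() the way a
# console would on an autocomplete key press:
#
# line, cursor, scrollback = expand('str.jo', 6, {'str': str})
# # line is now 'str.join(' -- the common prefix 'in(' was inserted, and since
# # it ends in '(', expand() recursed once to fetch the calltip into scrollback.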
3
scripts/modules/bl_i18n_utils/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
"""Package for translation (i18n) tools."""
|
||||
1137
scripts/modules/bl_i18n_utils/bl_extract_messages.py
Normal file
File diff suppressed because it is too large
130
scripts/modules/bl_i18n_utils/merge_po.py
Executable file
@@ -0,0 +1,130 @@
|
||||
#!/usr/bin/env python3
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
# Merge one or more .po files into the first dest one.
|
||||
# If a msgkey is present in more than one merged po, the one in the first file wins, unless
|
||||
# it’s marked as fuzzy and one later is not.
|
||||
# The fuzzy flag is removed if necessary.
|
||||
# All other comments are never modified.
|
||||
# However, commented messages in dst will always remain commented, and commented messages are
|
||||
# never merged from sources.
|
||||
|
||||
import sys
|
||||
|
||||
if __package__ is None:
|
||||
import settings
|
||||
import utils
|
||||
else:
|
||||
from . import (
|
||||
settings,
|
||||
utils,
|
||||
)
|
||||
|
||||
|
||||
# XXX This is a quick hack to make it work with new I18n... objects! To be reworked!
|
||||
def main():
|
||||
import argparse
|
||||
parser = argparse.ArgumentParser(
|
||||
description=(
|
||||
"Merge one or more .po files into the first dest one.\n"
|
||||
"If a msgkey (msgctxt, msgid) is present in more than one merged po, the one in the first file "
|
||||
"wins, unless it’s marked as fuzzy and one later is not.\n"
|
||||
"The fuzzy flag is removed if necessary.\n"
|
||||
"All other comments are never modified.\n"
|
||||
"Commented messages in dst will always remain commented, and commented messages are never merged "
|
||||
"from sources."
|
||||
),
|
||||
)
|
||||
parser.add_argument('-s', '--stats', action="store_true", help="Show statistics info.")
|
||||
parser.add_argument('-r', '--replace', action="store_true",
|
||||
help="Replace existing messages of same \"level\" already in dest po.")
|
||||
parser.add_argument('dst', metavar='dst.po', help="The dest po into which merge the others.")
|
||||
parser.add_argument('src', metavar='src.po', nargs='+', help="The po's to merge into the dst.po one.")
|
||||
args = parser.parse_args()
|
||||
|
||||
ret = 0
|
||||
done_msgkeys = set()
|
||||
done_fuzzy_msgkeys = set()
|
||||
nbr_merged = 0
|
||||
nbr_replaced = 0
|
||||
nbr_added = 0
|
||||
nbr_unfuzzied = 0
|
||||
|
||||
dst_msgs = utils.I18nMessages(kind='PO', src=args.dst)
|
||||
if dst_msgs.parsing_errors:
|
||||
print("Dest po is BROKEN, aborting.")
|
||||
return 1
|
||||
if args.stats:
|
||||
print("Dest po, before merging:")
|
||||
dst_msgs.print_stats(prefix="\t")
|
||||
# If we don’t want to replace existing valid translations, pre-populate done_msgkeys and done_fuzzy_msgkeys.
|
||||
if not args.replace:
|
||||
done_msgkeys = dst_msgs.trans_msgs.copy()
|
||||
done_fuzzy_msgkeys = dst_msgs.fuzzy_msgs.copy()
|
||||
for po in args.src:
|
||||
msgs = utils.I18nMessages(kind='PO', src=po)
|
||||
if msgs.parsing_errors:
|
||||
print("\tSrc po {} is BROKEN, skipping.".format(po))
|
||||
ret = 1
|
||||
continue
|
||||
print("\tMerging {}...".format(po))
|
||||
if args.stats:
|
||||
print("\t\tMerged po stats:")
|
||||
msgs.print_stats(prefix="\t\t\t")
|
||||
for msgkey, msg in msgs.msgs.items():
|
||||
msgctxt, msgid = msgkey
|
||||
# This msgkey has already been completely merged, or is a commented one,
|
||||
# or the new message is commented, skip it.
|
||||
if msgkey in (done_msgkeys | dst_msgs.comm_msgs | msgs.comm_msgs):
|
||||
continue
|
||||
is_ttip = msg.is_tooltip
|
||||
# New message does not yet exist in dest.
|
||||
if msgkey not in dst_msgs.msgs:
|
||||
dst_msgs[msgkey] = msgs.msgs[msgkey]
|
||||
if msgkey in msgs.fuzzy_msgs:
|
||||
done_fuzzy_msgkeys.add(msgkey)
|
||||
dst_msgs.fuzzy_msgs.add(msgkey)
|
||||
elif msgkey in msgs.trans_msgs:
|
||||
done_msgkeys.add(msgkey)
|
||||
dst_msgs.trans_msgs.add(msgkey)
|
||||
nbr_added += 1
|
||||
# From now on, the new message is already in dst.
|
||||
# New message is neither translated nor fuzzy, skip it.
|
||||
elif msgkey not in (msgs.trans_msgs | msgs.fuzzy_msgs):
|
||||
continue
|
||||
# From now on, the new message is either translated or fuzzy!
|
||||
# The new message is translated.
|
||||
elif msgkey in msgs.trans_msgs:
|
||||
dst_msgs.msgs[msgkey].msgstr = msg.msgstr
|
||||
done_msgkeys.add(msgkey)
|
||||
done_fuzzy_msgkeys.discard(msgkey)
|
||||
if msgkey in dst_msgs.fuzzy_msgs:
|
||||
dst_msgs.fuzzy_msgs.remove(msgkey)
|
||||
nbr_unfuzzied += 1
|
||||
if msgkey not in dst_msgs.trans_msgs:
|
||||
dst_msgs.trans_msgs.add(msgkey)
|
||||
else:
|
||||
nbr_replaced += 1
|
||||
nbr_merged += 1
|
||||
# The new message is fuzzy, the original one is fuzzy too, and this msgkey has not yet been merged.
|
||||
elif msgkey not in (dst_msgs.trans_msgs | done_fuzzy_msgkeys):
|
||||
dst_msgs[msgkey].msgstr = msg.msgstr
|
||||
done_fuzzy_msgkeys.add(msgkey)
|
||||
dst_msgs.fuzzy_msgs.add(msgkey)
|
||||
nbr_merged += 1
|
||||
nbr_replaced += 1
|
||||
|
||||
dst_msgs.write(kind='PO', dest=args.dst)
|
||||
|
||||
print("Merged completed. {} messages were merged (among which {} were replaced), {} were added, "
|
||||
"{} were \"un-fuzzied\".".format(nbr_merged, nbr_replaced, nbr_added, nbr_unfuzzied))
|
||||
if args.stats:
|
||||
dst_msgs.update_info()
|
||||
print("Final merged po stats:")
|
||||
dst_msgs.print_stats(prefix="\t")
|
||||
return ret
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
print("\n\n *** Running {} *** \n".format(__file__))
|
||||
sys.exit(main())
|
||||
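# Example invocations (illustrative file names only):
#
#   ./merge_po.py trunk_fr.po branch_a_fr.po branch_b_fr.po --stats
#   ./merge_po.py -r trunk_fr.po branch_fr.po    # also replace same-"level" messages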
710
scripts/modules/bl_i18n_utils/settings.py
Normal file
@@ -0,0 +1,710 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
# Global settings used by all scripts in this dir.
|
||||
# XXX Before any use of the tools in this dir, please make a copy of this file
|
||||
# named "settings_user.py"
|
||||
# XXX This is a template, most values should be OK, but some you’ll have to
|
||||
# edit (most probably, BLENDER_EXEC and SOURCE_DIR).
|
||||
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import types
|
||||
|
||||
try:
|
||||
import bpy
|
||||
except ModuleNotFoundError:
|
||||
print("Could not import bpy, some features are not available when not run from Blender.")
|
||||
bpy = None
|
||||
|
||||
###############################################################################
|
||||
# MISC
|
||||
###############################################################################
|
||||
|
||||
# The languages defined in Blender.
|
||||
LANGUAGES_CATEGORIES = (
|
||||
# Min completeness level, UI english label.
|
||||
(0.95, "Complete"),
|
||||
(0.33, "In Progress"),
|
||||
(-1.0, "Starting"),
|
||||
)
|
||||
LANGUAGES = (
|
||||
# ID, UI english label, ISO code.
|
||||
(0, "Automatic (Automatic)", "DEFAULT"),
|
||||
(1, "English (English)", "en_US"),
|
||||
(2, "Japanese (日本語)", "ja_JP"),
|
||||
(3, "Dutch (Nederlandse taal)", "nl_NL"),
|
||||
(4, "Italian (Italiano)", "it_IT"),
|
||||
(5, "German (Deutsch)", "de_DE"),
|
||||
(6, "Finnish (Suomi)", "fi_FI"),
|
||||
(7, "Swedish (Svenska)", "sv_SE"),
|
||||
(8, "French (Français)", "fr_FR"),
|
||||
(9, "Spanish (Español)", "es"),
|
||||
(10, "Catalan (Català)", "ca_AD"),
|
||||
(11, "Czech (Český)", "cs_CZ"),
|
||||
(12, "Portuguese (Português)", "pt_PT"),
|
||||
(13, "Simplified Chinese (简体中文)", "zh_CN"),
|
||||
(14, "Traditional Chinese (繁體中文)", "zh_TW"),
|
||||
(15, "Russian (Русский)", "ru_RU"),
|
||||
(16, "Croatian (Hrvatski)", "hr_HR"),
|
||||
(17, "Serbian (Српски)", "sr_RS"),
|
||||
(18, "Ukrainian (Українська)", "uk_UA"),
|
||||
(19, "Polish (Polski)", "pl_PL"),
|
||||
(20, "Romanian (Român)", "ro_RO"),
|
||||
# Using the utf8 flipped form of Arabic (العربية).
|
||||
(21, "Arabic (ﺔﻴﺑﺮﻌﻟﺍ)", "ar_EG"),
|
||||
(22, "Bulgarian (Български)", "bg_BG"),
|
||||
(23, "Greek (Ελληνικά)", "el_GR"),
|
||||
(24, "Korean (한국어)", "ko_KR"),
|
||||
(25, "Nepali (नेपाली)", "ne_NP"),
|
||||
# Using the utf8 flipped form of Persian (فارسی).
|
||||
(26, "Persian (ﯽﺳﺭﺎﻓ)", "fa_IR"),
|
||||
(27, "Indonesian (Bahasa indonesia)", "id_ID"),
|
||||
(28, "Serbian Latin (Srpski latinica)", "sr_RS@latin"),
|
||||
(29, "Kyrgyz (Кыргыз тили)", "ky_KG"),
|
||||
(30, "Turkish (Türkçe)", "tr_TR"),
|
||||
(31, "Hungarian (Magyar)", "hu_HU"),
|
||||
(32, "Brazilian Portuguese (Português do Brasil)", "pt_BR"),
|
||||
# Using the utf8 flipped form of Hebrew (עִבְרִית)).
|
||||
(33, "Hebrew (תירִבְעִ)", "he_IL"),
|
||||
(34, "Estonian (Eestlane)", "et_EE"),
|
||||
(35, "Esperanto (Esperanto)", "eo"),
|
||||
(36, "Spanish from Spain (Español de España)", "es_ES"),
|
||||
(37, "Amharic (አማርኛ)", "am_ET"),
|
||||
(38, "Uzbek (Oʻzbek)", "uz_UZ"),
|
||||
(39, "Uzbek Cyrillic (Ўзбек)", "uz_UZ@cyrillic"),
|
||||
(40, "Hindi (मानक हिन्दी)", "hi_IN"),
|
||||
(41, "Vietnamese (tiếng Việt)", "vi_VN"),
|
||||
(42, "Basque (Euskara)", "eu_EU"),
|
||||
(43, "Hausa (Hausa)", "ha"),
|
||||
(44, "Kazakh (қазақша)", "kk_KZ"),
|
||||
(45, "Abkhaz (Аԥсуа бызшәа)", "ab"),
|
||||
(46, "Thai (ภาษาไทย)", "th_TH"),
|
||||
(47, "Slovak (Slovenčina)", "sk_SK"),
|
||||
(48, "Georgian (ქართული)", "ka"),
|
||||
)
|
||||
|
||||
# Default context, in py (keep in sync with `BLT_translation.h`)!
|
||||
if bpy is not None:
|
||||
assert bpy.app.translations.contexts.default == "*"
|
||||
DEFAULT_CONTEXT = "*"
|
||||
|
||||
# Name of language file used by Blender to generate translations' menu.
|
||||
LANGUAGES_FILE = "languages"
|
||||
|
||||
# The min level of completeness for a po file to be imported from /branches into /trunk, as a factor (0.0 to 1.0).
|
||||
IMPORT_MIN_LEVEL = 0.0
|
||||
|
||||
# Languages in /branches we do not want to import in /trunk currently...
|
||||
IMPORT_LANGUAGES_SKIP = {
|
||||
'am_ET', 'bg_BG', 'el_GR', 'et_EE', 'ne_NP', 'ro_RO', 'uz_UZ', 'uz_UZ@cyrillic', 'kk_KZ', 'es_ES',
|
||||
}
|
||||
|
||||
# Languages that need RTL pre-processing.
|
||||
IMPORT_LANGUAGES_RTL = {
|
||||
'ar_EG', 'fa_IR', 'he_IL',
|
||||
}
|
||||
|
||||
# The comment prefix used in generated messages.txt file.
|
||||
MSG_COMMENT_PREFIX = "#~ "
|
||||
|
||||
# The context marker prefix used in generated messages.txt file.
|
||||
MSG_CONTEXT_PREFIX = "MSGCTXT:"
|
||||
|
||||
# The default comment prefix used in po's.
|
||||
PO_COMMENT_PREFIX = "# "
|
||||
|
||||
# The comment prefix used to mark sources of msgids, in po's.
|
||||
PO_COMMENT_PREFIX_SOURCE = "#: "
|
||||
|
||||
# The comment prefix used to mark custom sources of msgids, in po's.
|
||||
PO_COMMENT_PREFIX_SOURCE_CUSTOM = "#. :src: "
|
||||
|
||||
# The general "generated" comment prefix, in po's.
|
||||
PO_COMMENT_PREFIX_GENERATED = "#. "
|
||||
|
||||
# The comment prefix used to comment entries in po's.
|
||||
PO_COMMENT_PREFIX_MSG = "#~ "
|
||||
|
||||
# The comment prefix used to mark fuzzy msgids, in po's.
|
||||
PO_COMMENT_FUZZY = "#, fuzzy"
|
||||
|
||||
# The prefix used to define context, in po's.
|
||||
PO_MSGCTXT = "msgctxt "
|
||||
|
||||
# The prefix used to define msgid, in po's.
|
||||
PO_MSGID = "msgid "
|
||||
|
||||
# The prefix used to define msgstr, in po's.
|
||||
PO_MSGSTR = "msgstr "
|
||||
|
||||
# The 'header' key of po files.
|
||||
PO_HEADER_KEY = (DEFAULT_CONTEXT, "")
|
||||
|
||||
PO_HEADER_MSGSTR = (
|
||||
"Project-Id-Version: {blender_ver} ({blender_hash})\\n\n"
|
||||
"Report-Msgid-Bugs-To: \\n\n"
|
||||
"POT-Creation-Date: {time}\\n\n"
|
||||
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\\n\n"
|
||||
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\\n\n"
|
||||
"Language-Team: LANGUAGE <LL@li.org>\\n\n"
|
||||
"Language: {uid}\\n\n"
|
||||
"MIME-Version: 1.0\\n\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\\n\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
)
|
||||
PO_HEADER_COMMENT_COPYRIGHT = (
|
||||
"# Blender's translation file (po format).\n"
|
||||
"# Copyright (C) {year} The Blender Foundation.\n"
|
||||
"# This file is distributed under the same license as the Blender package.\n"
|
||||
"#\n"
|
||||
)
|
||||
PO_HEADER_COMMENT = (
|
||||
"# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.\n"
|
||||
"#"
|
||||
)
|
||||
|
||||
TEMPLATE_ISO_ID = "__TEMPLATE__"
|
||||
|
||||
# Num buttons report their label with a trailing ': '...
|
||||
NUM_BUTTON_SUFFIX = ": "
|
||||
|
||||
# Undocumented operator placeholder string.
|
||||
UNDOC_OPS_STR = "(undocumented operator)"
|
||||
|
||||
# The gettext domain.
|
||||
DOMAIN = "blender"
|
||||
|
||||
# Our own "gettext" stuff.
|
||||
# File type (ext) to parse.
|
||||
PYGETTEXT_ALLOWED_EXTS = {".c", ".cc", ".cpp", ".cxx", ".hh", ".hpp", ".hxx", ".h"}
|
||||
|
||||
# Max number of contexts into a BLT_I18N_MSGID_MULTI_CTXT macro...
|
||||
PYGETTEXT_MAX_MULTI_CTXT = 16
|
||||
|
||||
# Where to search contexts definitions, relative to SOURCE_DIR (defined below).
|
||||
PYGETTEXT_CONTEXTS_DEFSRC = os.path.join("source", "blender", "blentranslation", "BLT_translation.h")
|
||||
|
||||
# Regex to extract contexts defined in BLT_translation.h
|
||||
# XXX Not fool-proof, but should be enough here!
|
||||
PYGETTEXT_CONTEXTS = "#define\\s+(BLT_I18NCONTEXT_[A-Z_0-9]+)\\s+\"([^\"]*)\""
|
||||
|
||||
# autopep8: off
|
||||
|
||||
# Keywords' regex.
|
||||
# XXX Most unfortunately, we can't use named backreferences inside character sets,
|
||||
# which makes the regexes even more twisty... :/
|
||||
_str_base = (
|
||||
# Match void string
|
||||
"(?P<{_}1>[\"'])(?P={_}1)" # Get opening quote (' or "), and closing immediately.
|
||||
"|"
|
||||
# Or match non-void string
|
||||
"(?P<{_}2>[\"'])" # Get opening quote (' or ").
|
||||
"(?{capt}(?:"
|
||||
# This one is for crazy things like "hi \\\\\" folks!"...
|
||||
r"(?:(?!<\\)(?:\\\\)*\\(?=(?P={_}2)))|"
|
||||
# The most common case.
|
||||
".(?!(?P={_}2))"
|
||||
")+.)" # Don't forget the last char!
|
||||
"(?P={_}2)" # And closing quote.
|
||||
)
|
||||
str_clean_re = _str_base.format(_="g", capt="P<clean>")
|
||||
_inbetween_str_re = (
|
||||
# XXX Strings may have comments between their pieces too, not only spaces!
|
||||
r"(?:\s*(?:"
|
||||
# A C comment
|
||||
r"/\*.*(?!\*/).\*/|"
|
||||
# Or a C++ one!
|
||||
r"//[^\n]*\n"
|
||||
# And we are done!
|
||||
r")?)*"
|
||||
)
|
||||
# Here we have to consider two different cases (empty string and other).
|
||||
_str_whole_re = (
|
||||
_str_base.format(_="{_}1_", capt=":") +
|
||||
# Optional loop start, this handles "split" strings...
|
||||
"(?:(?<=[\"'])" + _inbetween_str_re + "(?=[\"'])(?:"
|
||||
+ _str_base.format(_="{_}2_", capt=":") +
|
||||
# End of loop.
|
||||
"))*"
|
||||
)
|
||||
_ctxt_re_gen = lambda uid : r"(?P<ctxt_raw{uid}>(?:".format(uid=uid) + \
|
||||
_str_whole_re.format(_="_ctxt{uid}".format(uid=uid)) + \
|
||||
r")|(?:[A-Z_0-9]+))"
|
||||
_ctxt_re = _ctxt_re_gen("")
|
||||
_msg_re = r"(?P<msg_raw>" + _str_whole_re.format(_="_msg") + r")"
|
||||
PYGETTEXT_KEYWORDS = (() +
|
||||
tuple((r"{}\(\s*" + _msg_re + r"\s*\)").format(it)
|
||||
for it in ("IFACE_", "TIP_", "DATA_", "N_")) +
|
||||
|
||||
tuple((r"{}\(\s*" + _ctxt_re + r"\s*,\s*" + _msg_re + r"\s*\)").format(it)
|
||||
for it in ("CTX_IFACE_", "CTX_TIP_", "CTX_DATA_", "CTX_N_")) +
|
||||
|
||||
tuple(("{}\\((?:[^\"',]+,){{1,2}}\\s*" + _msg_re + r"\s*(?:\)|,)").format(it)
|
||||
for it in ("BKE_report", "BKE_reportf", "BKE_reports_prepend", "BKE_reports_prependf",
|
||||
"CTX_wm_operator_poll_msg_set")) +
|
||||
|
||||
tuple(("{}\\((?:[^\"',]+,){{3}}\\s*" + _msg_re + r"\s*\)").format(it)
|
||||
for it in ("BMO_error_raise",)) +
|
||||
|
||||
tuple(("{}\\((?:[^\"',]+,){{2}}\\s*" + _msg_re + r"\s*(?:\)|,)").format(it)
|
||||
for it in ("BKE_modifier_set_error",)) +
|
||||
|
||||
# This one is a tad more risky, but in practice we would not expect a name/uid string parameter
|
||||
# (the second one in those functions) to ever have a comma in it, so this should be fine.
|
||||
tuple(("{}\\((?:[^,]+,){{2}}\\s*" + _msg_re + r"\s*(?:\)|,)").format(it)
|
||||
for it in ("modifier_subpanel_register", "gpencil_modifier_subpanel_register")) +
|
||||
|
||||
# bUnitDef unit names.
|
||||
# NOTE: regex is a bit more complex than it would need to be. Since the actual
|
||||
# identifier (`B_UNIT_DEF_`) is at the end, if it's simpler/too general it
|
||||
# becomes extremely slow to process some (unrelated) source files.
|
||||
((r"\{(?:(?:\s*\"[^\",]+\"\s*,)|(?:\s*\"\\\"\",)|(?:\s*NULL\s*,)){4}\s*" +
|
||||
_msg_re + r"\s*,(?:(?:\s*\"[^\"',]+\"\s*,)|(?:\s*NULL\s*,))(?:[^,]+,){2}"
|
||||
+ "(?:\|?\s*B_UNIT_DEF_[_A-Z]+\s*)+\}"),) +
|
||||
|
||||
tuple((r"{}\(\s*" + _msg_re + r"\s*,\s*(?:" +
|
||||
r"\s*,\s*)?(?:".join(_ctxt_re_gen(i) for i in range(PYGETTEXT_MAX_MULTI_CTXT)) + r")?\s*\)").format(it)
|
||||
for it in ("BLT_I18N_MSGID_MULTI_CTXT",))
|
||||
)
|
||||
|
||||
# autopep8: on
|
||||
|
||||
|
||||
# Check printf mismatches between msgid and msgstr.
|
||||
CHECK_PRINTF_FORMAT = (
|
||||
r"(?!<%)(?:%%)*%" # Beginning, with handling for crazy things like '%%%%%s'
|
||||
r"[-+#0]?" # Flags (note: do not add the ' ' (space) flag here, generates too much false positives!)
|
||||
r"(?:\*|[0-9]+)?" # Width
|
||||
r"(?:\.(?:\*|[0-9]+))?" # Precision
|
||||
r"(?:[hljztL]|hh|ll)?" # Length
|
||||
r"[tldiuoxXfFeEgGaAcspn]" # Specifiers (note we have Blender-specific %t and %l ones too)
|
||||
)
|
||||
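# A minimal sketch (the helper below is not part of the original file) of how
# CHECK_PRINTF_FORMAT can flag placeholder mismatches between msgid and msgstr:
import re as _re  # `re` is not imported at the top of this file

def _printf_specs_differ(msgid, msgstr):
    # Compare the sorted lists of format specifiers found on both sides.
    return (sorted(_re.findall(CHECK_PRINTF_FORMAT, msgid)) !=
            sorted(_re.findall(CHECK_PRINTF_FORMAT, msgstr)))

# _printf_specs_differ("Render %d frames", "Rendu de %s images")  -> True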
|
||||
# Should po parser warn when finding a first letter not capitalized?
|
||||
WARN_MSGID_NOT_CAPITALIZED = True
|
||||
|
||||
# Strings that should not raise above warning!
|
||||
WARN_MSGID_NOT_CAPITALIZED_ALLOWED = {
|
||||
"", # Simplifies things... :p
|
||||
"ac3",
|
||||
"along X",
|
||||
"along Y",
|
||||
"along Z",
|
||||
"along %s X",
|
||||
"along %s Y",
|
||||
"along %s Z",
|
||||
"along local Z",
|
||||
"arccos(A)",
|
||||
"arcsin(A)",
|
||||
"arctan(A)",
|
||||
"ascii",
|
||||
"author", # Addons' field. :/
|
||||
"bItasc",
|
||||
"blender.org",
|
||||
"color_index is invalid",
|
||||
"cos(A)",
|
||||
"cosh(A)",
|
||||
"dbl-", # Compacted for 'double', for keymap items.
|
||||
"description", # Addons' field. :/
|
||||
"dx",
|
||||
"fBM",
|
||||
"flac",
|
||||
"fps: %.2f",
|
||||
"fps: %i",
|
||||
"gimbal",
|
||||
"global",
|
||||
"glTF 2.0 (.glb/.gltf)",
|
||||
"glTF Binary (.glb)",
|
||||
"glTF Embedded (.gltf)",
|
||||
"glTF Material Output",
|
||||
"glTF Original PBR data",
|
||||
"glTF Separate (.gltf + .bin + textures)",
|
||||
"invoke() needs to be called before execute()",
|
||||
"iScale",
|
||||
"iso-8859-15",
|
||||
"iTaSC",
|
||||
"iTaSC parameters",
|
||||
"kb",
|
||||
"local",
|
||||
"location", # Addons' field. :/
|
||||
"locking %s X",
|
||||
"locking %s Y",
|
||||
"locking %s Z",
|
||||
"mkv",
|
||||
"mm",
|
||||
"mp2",
|
||||
"mp3",
|
||||
"normal",
|
||||
"ogg",
|
||||
"oneAPI",
|
||||
"p0",
|
||||
"px",
|
||||
"re",
|
||||
"res",
|
||||
"rv",
|
||||
"sin(A)",
|
||||
"sin(x) / x",
|
||||
"sinh(A)",
|
||||
"sqrt(x*x+y*y+z*z)",
|
||||
"sRGB",
|
||||
"sRGB display space",
|
||||
"sRGB display space with Filmic view transform",
|
||||
"tan(A)",
|
||||
"tanh(A)",
|
||||
"utf-8",
|
||||
"uv_on_emitter() requires a modifier from an evaluated object",
|
||||
"var",
|
||||
"vBVH",
|
||||
"view",
|
||||
"wav",
|
||||
"wmOwnerID '%s' not in workspace '%s'",
|
||||
"y",
|
||||
"y = (Ax + B)",
|
||||
# Sub-strings.
|
||||
"all",
|
||||
"all and invert unselected",
|
||||
"and AMD driver version 22.10 or newer",
|
||||
"and AMD Radeon Pro 21.Q4 driver or newer",
|
||||
"and Linux driver version xx.xx.23904 or newer",
|
||||
"and NVIDIA driver version 470 or newer",
|
||||
"and Windows driver version 101.3430 or newer",
|
||||
"available with",
|
||||
"brown fox",
|
||||
"can't save image while rendering",
|
||||
"category",
|
||||
"constructive modifier",
|
||||
"cursor",
|
||||
"custom",
|
||||
"custom matrix",
|
||||
"custom orientation",
|
||||
"edge data",
|
||||
"exp(A)",
|
||||
"expected a timeline/animation area to be active",
|
||||
"expected a view3d region",
|
||||
"expected a view3d region & editcurve",
|
||||
"expected a view3d region & editmesh",
|
||||
"face data",
|
||||
"gimbal",
|
||||
"global",
|
||||
"glTF Settings",
|
||||
"image file not found",
|
||||
"image format is read-only",
|
||||
"image path can't be written to",
|
||||
"in memory to enable editing!",
|
||||
"insufficient content",
|
||||
"into",
|
||||
"jumps over",
|
||||
"left",
|
||||
"local",
|
||||
"matrices", "no matrices",
|
||||
"multi-res modifier",
|
||||
"name",
|
||||
"non-triangle face",
|
||||
"normal",
|
||||
"or AMD with macOS 12.3 or newer",
|
||||
"performance impact!",
|
||||
"positions", "no positions",
|
||||
"read",
|
||||
"remove",
|
||||
"right",
|
||||
"selected",
|
||||
"selected and lock unselected",
|
||||
"selected and unlock unselected",
|
||||
"screen",
|
||||
"the lazy dog",
|
||||
"this legacy pose library to pose assets",
|
||||
"to the top level of the tree",
|
||||
"unable to load movie clip",
|
||||
"unable to load text",
|
||||
"unable to open the file",
|
||||
"unknown error reading file",
|
||||
"unknown error stating file",
|
||||
"unknown error writing file",
|
||||
"unselected",
|
||||
"unsupported font format",
|
||||
"unsupported format",
|
||||
"unsupported image format",
|
||||
"unsupported movie clip format",
|
||||
"untitled",
|
||||
"vertex data",
|
||||
"verts only",
|
||||
"view",
|
||||
"virtual parents",
|
||||
"which was replaced by the Asset Browser",
|
||||
"write",
|
||||
}
|
||||
WARN_MSGID_NOT_CAPITALIZED_ALLOWED |= set(lng[2] for lng in LANGUAGES)
|
||||
|
||||
WARN_MSGID_END_POINT_ALLOWED = {
|
||||
"Circle|Alt .",
|
||||
"Float Neg. Exp.",
|
||||
"Max Ext.",
|
||||
"Newer graphics drivers may be available to improve Blender support.",
|
||||
"Numpad .",
|
||||
"Pad.",
|
||||
" RNA Path: bpy.types.",
|
||||
"Temp. Diff.",
|
||||
"Temperature Diff.",
|
||||
"The program will now close.",
|
||||
"Your graphics card or driver has limited support. It may work, but with issues.",
|
||||
"Your graphics card or driver is not supported.",
|
||||
"Invalid surface UVs on %d curves.",
|
||||
}
|
||||
|
||||
PARSER_CACHE_HASH = 'sha1'
|
||||
|
||||
PARSER_TEMPLATE_ID = "__POT__"
|
||||
PARSER_PY_ID = "__PY__"
|
||||
|
||||
PARSER_PY_MARKER_BEGIN = "\n# ##### BEGIN AUTOGENERATED I18N SECTION #####\n"
|
||||
PARSER_PY_MARKER_END = "\n# ##### END AUTOGENERATED I18N SECTION #####\n"
|
||||
|
||||
PARSER_MAX_FILE_SIZE = 2 ** 24 # in bytes, i.e. 16 Mb.
|
||||
|
||||
###############################################################################
|
||||
# PATHS
|
||||
###############################################################################
|
||||
|
||||
# The Python 3 executable. You’ll likely have to edit it in your settings_user.py
|
||||
# if you’re under Windows.
|
||||
PYTHON3_EXEC = "python3"
|
||||
|
||||
# The Blender executable!
|
||||
# This is just an example, you’ll have to edit it in your settings_user.py!
|
||||
BLENDER_EXEC = os.path.abspath(os.path.join("foo", "bar", "blender"))
|
||||
# check for blender.bin
|
||||
if not os.path.exists(BLENDER_EXEC):
|
||||
if os.path.exists(BLENDER_EXEC + ".bin"):
|
||||
BLENDER_EXEC = BLENDER_EXEC + ".bin"
|
||||
|
||||
# The gettext msgfmt "compiler". You’ll likely have to edit it in your settings_user.py if you’re under Windows.
|
||||
GETTEXT_MSGFMT_EXECUTABLE = "msgfmt"
|
||||
|
||||
# The FriBidi C compiled library (.so under Linux, .dll under Windows...).
|
||||
# You’ll likely have to edit it in your settings_user.py if you’re under Windows, e.g. using the included one:
|
||||
# FRIBIDI_LIB = os.path.join(TOOLS_DIR, "libfribidi.dll")
|
||||
FRIBIDI_LIB = "libfribidi.so.0"
|
||||
|
||||
# The name of the (currently empty) file that must be present in a po's directory to enable rtl-preprocess.
|
||||
RTL_PREPROCESS_FILE = "is_rtl"
|
||||
|
||||
# The Blender source root path.
|
||||
# This is just an example, you’ll have to override it in your settings_user.py!
|
||||
SOURCE_DIR = os.path.abspath(os.path.join("blender"))
|
||||
|
||||
# The bf-translation repository (you'll have to override this in your settings_user.py).
|
||||
I18N_DIR = os.path.abspath(os.path.join("i18n"))
|
||||
|
||||
# The /branches path (relative to I18N_DIR).
|
||||
REL_BRANCHES_DIR = os.path.join("branches")
|
||||
|
||||
# The /trunk path (relative to I18N_DIR).
|
||||
REL_TRUNK_DIR = os.path.join("trunk")
|
||||
|
||||
# The /trunk/po path (relative to I18N_DIR).
|
||||
REL_TRUNK_PO_DIR = os.path.join(REL_TRUNK_DIR, "po")
|
||||
|
||||
# The /trunk/mo path (relative to I18N_DIR).
|
||||
REL_TRUNK_MO_DIR = os.path.join(REL_TRUNK_DIR, "locale")
|
||||
|
||||
|
||||
# The path to the *git* translation repository (relative to SOURCE_DIR).
|
||||
REL_GIT_I18N_DIR = os.path.join("locale")
|
||||
|
||||
|
||||
# The /po path of the *git* translation repository (relative to REL_GIT_I18N_DIR).
|
||||
REL_GIT_I18N_PO_DIR = os.path.join("po")
|
||||
|
||||
|
||||
# The Blender source path to check for i18n macros (relative to SOURCE_DIR).
|
||||
REL_POTFILES_SOURCE_DIR = os.path.join("source")
|
||||
|
||||
# Where to search for preset names (relative to SOURCE_DIR).
|
||||
REL_PRESETS_DIR = os.path.join("release", "scripts", "presets")
|
||||
|
||||
# Where to search for templates (relative to SOURCE_DIR).
|
||||
REL_TEMPLATES_DIR = os.path.join("release", "scripts", "startup",
|
||||
"bl_app_templates_system")
|
||||
|
||||
# The template messages file (relative to I18N_DIR).
|
||||
REL_FILE_NAME_POT = os.path.join(REL_BRANCHES_DIR, DOMAIN + ".pot")
|
||||
|
||||
# Mo root datapath.
|
||||
REL_MO_PATH_ROOT = os.path.join(REL_TRUNK_DIR, "locale")
|
||||
|
||||
# Mo path generator for a given language.
|
||||
REL_MO_PATH_TEMPLATE = os.path.join(REL_MO_PATH_ROOT, "{}", "LC_MESSAGES")
|
||||
|
||||
# Mo path generator for a given language (relative to any "locale" dir).
|
||||
MO_PATH_ROOT_RELATIVE = os.path.join("locale")
|
||||
MO_PATH_TEMPLATE_RELATIVE = os.path.join(MO_PATH_ROOT_RELATIVE, "{}", "LC_MESSAGES")
|
||||
|
||||
# Mo file name.
|
||||
MO_FILE_NAME = DOMAIN + ".mo"
|
||||
|
||||
# Where to search for py files that may contain ui strings (relative to one of the 'resource_path' of Blender).
|
||||
CUSTOM_PY_UI_FILES = [
|
||||
os.path.join("scripts", "startup", "bl_ui"),
|
||||
os.path.join("scripts", "startup", "bl_operators"),
|
||||
os.path.join("scripts", "modules", "rna_prop_ui.py"),
|
||||
os.path.join("scripts", "modules", "rna_keymap_ui.py"),
|
||||
os.path.join("scripts", "modules", "bpy_types.py"),
|
||||
os.path.join("scripts", "presets", "keyconfig"),
|
||||
]
|
||||
|
||||
# An optional text file listing files to force include/exclude from py_xgettext process.
|
||||
SRC_POTFILES = ""
|
||||
|
||||
# A cache storing validated msgids, to avoid re-spellchecking them.
|
||||
SPELL_CACHE = os.path.join("/tmp", ".spell_cache")
|
||||
|
||||
# Threshold defining whether a new msgid is similar enough to an old one to reuse its translation...
|
||||
SIMILAR_MSGID_THRESHOLD = 0.75
|
||||
|
||||
# Additional import paths to add to sys.path (';' separated)...
|
||||
INTERN_PY_SYS_PATHS = ""
|
||||
|
||||
# Custom override settings must be one dir above i18n tools itself!
|
||||
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
|
||||
try:
|
||||
from bl_i18n_settings_override import *
|
||||
except ImportError: # If no bl_i18n_settings_override available, it’s no error!
|
||||
pass
|
||||
|
||||
# Override with custom user settings, if available.
|
||||
try:
|
||||
from settings_user import *
|
||||
except ImportError: # If no settings_user available, it’s no error!
|
||||
pass
|
||||
|
||||
|
||||
for p in set(INTERN_PY_SYS_PATHS.split(";")):
|
||||
if p:
|
||||
sys.path.append(p)
|
||||
|
||||
|
||||
# The settings class itself!
|
||||
def _do_get(ref, path):
|
||||
return os.path.normpath(os.path.join(ref, path))
|
||||
|
||||
|
||||
def _do_set(ref, path):
|
||||
path = os.path.normpath(path)
|
||||
# If given path is absolute, make it relative to current ref one (else we consider it is already the case!)
|
||||
if os.path.isabs(path):
|
||||
# can't always find the relative path (between drive letters on windows)
|
||||
try:
|
||||
return os.path.relpath(path, ref)
|
||||
except ValueError:
|
||||
pass
|
||||
return path
|
||||
|
||||
|
||||
def _gen_get_set_path(ref, name):
|
||||
def _get(self):
|
||||
return _do_get(getattr(self, ref), getattr(self, name))
|
||||
|
||||
def _set(self, value):
|
||||
setattr(self, name, _do_set(getattr(self, ref), value))
|
||||
return _get, _set
|
||||
|
||||
|
||||
def _check_valid_data(uid, val):
|
||||
return not uid.startswith("_") and type(val) not in tuple(types.__dict__.values()) + (type,)
|
||||
|
||||
|
||||
class I18nSettings:
|
||||
"""
|
||||
Class allowing persistence of our settings!
|
||||
Saved in JSON format, so settings should be JSON-serializable objects!
|
||||
"""
|
||||
_settings = None
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
# Addon preferences are singleton by definition, so is this class!
|
||||
if not I18nSettings._settings:
|
||||
cls._settings = super(I18nSettings, cls).__new__(cls)
|
||||
cls._settings.__dict__ = {uid: val for uid, val in globals().items() if _check_valid_data(uid, val)}
|
||||
return I18nSettings._settings
|
||||
|
||||
def __getstate__(self):
|
||||
return self.to_dict()
|
||||
|
||||
def __setstate__(self, mapping):
|
||||
return self.from_dict(mapping)
|
||||
|
||||
def from_dict(self, mapping):
|
||||
# Special case... :/
|
||||
if "INTERN_PY_SYS_PATHS" in mapping:
|
||||
self.PY_SYS_PATHS = mapping["INTERN_PY_SYS_PATHS"]
|
||||
self.__dict__.update(mapping)
|
||||
|
||||
def to_dict(self):
|
||||
glob = globals()
|
||||
return {uid: val for uid, val in self.__dict__.items() if _check_valid_data(uid, val) and uid in glob}
|
||||
|
||||
def from_json(self, string):
|
||||
self.from_dict(dict(json.loads(string)))
|
||||
|
||||
def to_json(self):
|
||||
# Only save the diff from default i18n_settings!
|
||||
glob = globals()
|
||||
export_dict = {
|
||||
uid: val for uid, val in self.__dict__.items()
|
||||
if _check_valid_data(uid, val) and glob.get(uid) != val
|
||||
}
|
||||
return json.dumps(export_dict)
|
||||
|
||||
def load(self, fname, reset=False):
|
||||
reset = reset or fname is None
|
||||
if reset:
|
||||
self.__dict__ = {uid: data for uid, data in globals().items() if not uid.startswith("_")}
|
||||
if fname is None:
|
||||
return
|
||||
if isinstance(fname, str):
|
||||
if not os.path.isfile(fname):
|
||||
# Assume it is already a real JSON string...
|
||||
self.from_json(fname)
|
||||
return
|
||||
with open(fname, encoding="utf8") as f:
|
||||
self.from_json(f.read())
|
||||
# Else assume fname is already a file(like) object!
|
||||
else:
|
||||
self.from_json(fname.read())
|
||||
|
||||
def save(self, fname):
|
||||
if isinstance(fname, str):
|
||||
with open(fname, 'w', encoding="utf8") as f:
|
||||
f.write(self.to_json())
|
||||
# Else assume fname is already a file(like) object!
|
||||
else:
|
||||
fname.write(self.to_json())
|
||||
|
||||
BRANCHES_DIR = property(*(_gen_get_set_path("I18N_DIR", "REL_BRANCHES_DIR")))
|
||||
TRUNK_DIR = property(*(_gen_get_set_path("I18N_DIR", "REL_TRUNK_DIR")))
|
||||
TRUNK_PO_DIR = property(*(_gen_get_set_path("I18N_DIR", "REL_TRUNK_PO_DIR")))
|
||||
TRUNK_MO_DIR = property(*(_gen_get_set_path("I18N_DIR", "REL_TRUNK_MO_DIR")))
|
||||
GIT_I18N_ROOT = property(*(_gen_get_set_path("SOURCE_DIR", "REL_GIT_I18N_DIR")))
|
||||
GIT_I18N_PO_DIR = property(*(_gen_get_set_path("GIT_I18N_ROOT", "REL_GIT_I18N_PO_DIR")))
|
||||
POTFILES_SOURCE_DIR = property(*(_gen_get_set_path("SOURCE_DIR", "REL_POTFILES_SOURCE_DIR")))
|
||||
PRESETS_DIR = property(*(_gen_get_set_path("SOURCE_DIR", "REL_PRESETS_DIR")))
|
||||
TEMPLATES_DIR = property(*(_gen_get_set_path("SOURCE_DIR", "REL_TEMPLATES_DIR")))
|
||||
FILE_NAME_POT = property(*(_gen_get_set_path("I18N_DIR", "REL_FILE_NAME_POT")))
|
||||
MO_PATH_ROOT = property(*(_gen_get_set_path("I18N_DIR", "REL_MO_PATH_ROOT")))
|
||||
MO_PATH_TEMPLATE = property(*(_gen_get_set_path("I18N_DIR", "REL_MO_PATH_TEMPLATE")))
|
||||
|
||||
def _get_py_sys_paths(self):
|
||||
return self.INTERN_PY_SYS_PATHS
|
||||
|
||||
def _set_py_sys_paths(self, val):
|
||||
old_paths = set(self.INTERN_PY_SYS_PATHS.split(";")) - {""}
|
||||
new_paths = set(val.split(";")) - {""}
|
||||
for p in old_paths - new_paths:
|
||||
if p in sys.path:
|
||||
sys.path.remove(p)
|
||||
for p in new_paths - old_paths:
|
||||
sys.path.append(p)
|
||||
self.INTERN_PY_SYS_PATHS = val
|
||||
PY_SYS_PATHS = property(_get_py_sys_paths, _set_py_sys_paths)
|
||||
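# Usage sketch (not part of the original file; the JSON file name is made up):
# I18nSettings is a singleton, and load() accepts a file path, a raw JSON
# string, or a file-like object.
def _example_usage():
    settings = I18nSettings()          # every instance shares the same state
    settings.load("my_i18n_settings.json")
    # *_DIR properties resolve their REL_* counterpart against the ref dir.
    return settings.GIT_I18N_PO_DIR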
5
scripts/modules/bl_i18n_utils/settings_user.py
Normal file
@@ -0,0 +1,5 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
import os
|
||||
|
||||
import settings
|
||||
1592
scripts/modules/bl_i18n_utils/utils.py
Normal file
File diff suppressed because it is too large
142
scripts/modules/bl_i18n_utils/utils_cli.py
Normal file
@@ -0,0 +1,142 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
# Some useful operations from utils' I18nMessages class exposed as a CLI.
|
||||
|
||||
import os
|
||||
|
||||
if __package__ is None:
|
||||
import settings as settings_i18n
|
||||
import utils as utils_i18n
|
||||
import utils_languages_menu
|
||||
else:
|
||||
from . import settings as settings_i18n
|
||||
from . import utils as utils_i18n
|
||||
from . import utils_languages_menu
|
||||
|
||||
|
||||
def update_po(args, settings):
|
||||
pot = utils_i18n.I18nMessages(uid=None, kind='PO', src=args.template, settings=settings)
|
||||
if os.path.isfile(args.dst):
|
||||
uid = os.path.splitext(os.path.basename(args.dst))[0]
|
||||
po = utils_i18n.I18nMessages(uid=uid, kind='PO', src=args.dst, settings=settings)
|
||||
po.update(pot)
|
||||
else:
|
||||
po = pot
|
||||
po.write(kind="PO", dest=args.dst)
|
||||
|
||||
|
||||
def cleanup_po(args, settings):
|
||||
uid = os.path.splitext(os.path.basename(args.src))[0]
|
||||
if not args.dst:
|
||||
args.dst = args.src
|
||||
po = utils_i18n.I18nMessages(uid=uid, kind='PO', src=args.src, settings=settings)
|
||||
po.check(fix=True)
|
||||
po.clean_commented()
|
||||
po.write(kind="PO", dest=args.dst)
|
||||
|
||||
|
||||
def strip_po(args, settings):
|
||||
uid = os.path.splitext(os.path.basename(args.src))[0]
|
||||
if not args.dst:
|
||||
args.dst = args.src
|
||||
po = utils_i18n.I18nMessages(uid=uid, kind='PO', src=args.src, settings=settings)
|
||||
po.clean_commented()
|
||||
po.write(kind="PO_COMPACT", dest=args.dst)
|
||||
|
||||
|
||||
def rtl_process_po(args, settings):
|
||||
uid = os.path.splitext(os.path.basename(args.src))[0]
|
||||
if not args.dst:
|
||||
args.dst = args.src
|
||||
po = utils_i18n.I18nMessages(uid=uid, kind='PO', src=args.src, settings=settings)
|
||||
po.rtl_process()
|
||||
po.write(kind="PO", dest=args.dst)
|
||||
|
||||
|
||||
def language_menu(args, settings):
|
||||
# 'DEFAULT' and en_US are always valid, fully-translated "languages"!
|
||||
stats = {"DEFAULT": 1.0, "en_US": 1.0}
|
||||
|
||||
po_to_uid = {
|
||||
os.path.basename(po_path_branch): uid
|
||||
for can_use, uid, _num_id, _name, _isocode, po_path_branch
|
||||
in utils_i18n.list_po_dir(settings.BRANCHES_DIR, settings)
|
||||
if can_use
|
||||
}
|
||||
for po_dir in os.listdir(settings.BRANCHES_DIR):
|
||||
po_dir = os.path.join(settings.BRANCHES_DIR, po_dir)
|
||||
if not os.path.isdir(po_dir):
|
||||
continue
|
||||
for po_path in os.listdir(po_dir):
|
||||
uid = po_to_uid.get(po_path, None)
|
||||
#print("Checking %s, found uid %s" % (po_path, uid))
|
||||
po_path = os.path.join(settings.TRUNK_PO_DIR, po_path)
|
||||
if uid is not None:
|
||||
po = utils_i18n.I18nMessages(uid=uid, kind='PO', src=po_path, settings=settings)
|
||||
stats[uid] = po.nbr_trans_msgs / po.nbr_msgs if po.nbr_msgs > 0 else 0
|
||||
utils_languages_menu.gen_menu_file(stats, settings)
|
||||
|
||||
|
||||
def main():
|
||||
import sys
|
||||
import argparse
|
||||
|
||||
parser = argparse.ArgumentParser(description="Tool to perform common actions over PO/MO files.")
|
||||
parser.add_argument(
|
||||
'-s', '--settings', default=None,
|
||||
help="Override (some) default settings. Either a JSon file name, or a JSon string.",
|
||||
)
|
||||
sub_parsers = parser.add_subparsers()
|
||||
|
||||
sub_parser = sub_parsers.add_parser('update_po', help="Update a PO file from a given POT template file")
|
||||
sub_parser.add_argument(
|
||||
'--template', metavar='template.pot', required=True,
|
||||
help="The source pot file to use as template for the update.",
|
||||
)
|
||||
sub_parser.add_argument('--dst', metavar='dst.po', required=True, help="The destination po to update.")
|
||||
sub_parser.set_defaults(func=update_po)
|
||||
|
||||
sub_parser = sub_parsers.add_parser(
|
||||
'cleanup_po',
|
||||
help="Cleanup a PO file (check for and fix some common errors, remove commented messages).",
|
||||
)
|
||||
sub_parser.add_argument('--src', metavar='src.po', required=True, help="The source po file to clean up.")
|
||||
sub_parser.add_argument('--dst', metavar='dst.po', help="The destination po to write to.")
|
||||
sub_parser.set_defaults(func=cleanup_po)
|
||||
|
||||
sub_parser = sub_parsers.add_parser(
|
||||
'strip_po',
|
||||
help="Reduce all non-essential data from given PO file (reduce its size).",
|
||||
)
|
||||
sub_parser.add_argument('--src', metavar='src.po', required=True, help="The source po file to strip.")
|
||||
sub_parser.add_argument('--dst', metavar='dst.po', help="The destination po to write to.")
|
||||
sub_parser.set_defaults(func=strip_po)
|
||||
|
||||
sub_parser = sub_parsers.add_parser(
|
||||
'rtl_process_po',
|
||||
help="Pre-process PO files for RTL languages.",
|
||||
)
|
||||
sub_parser.add_argument('--src', metavar='src.po', required=True, help="The source po file to process.")
|
||||
sub_parser.add_argument('--dst', metavar='dst.po', help="The destination po to write to.")
|
||||
sub_parser.set_defaults(func=rtl_process_po)
|
||||
|
||||
sub_parser = sub_parsers.add_parser(
|
||||
'language_menu',
|
||||
help="Generate the text file used by Blender to create its language menu.",
|
||||
)
|
||||
sub_parser.set_defaults(func=language_menu)
|
||||
|
||||
args = parser.parse_args(sys.argv[1:])
|
||||
|
||||
settings = settings_i18n.I18nSettings()
|
||||
settings.load(args.settings)
|
||||
|
||||
if getattr(args, 'template', None) is not None:
|
||||
settings.FILE_NAME_POT = args.template
|
||||
|
||||
args.func(args=args, settings=settings)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
print("\n\n *** Running {} *** \n".format(__file__))
|
||||
main()
|
||||
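# Example invocations (sketches; the .po/.pot names are placeholders):
#
#   python3 utils_cli.py update_po --template blender.pot --dst fr_FR.po
#   python3 utils_cli.py cleanup_po --src fr_FR.po
#   python3 utils_cli.py --settings '{"IMPORT_MIN_LEVEL": 0.5}' language_menu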
81
scripts/modules/bl_i18n_utils/utils_languages_menu.py
Executable file
@@ -0,0 +1,81 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
# Update "languages" text file used by Blender at runtime to build translations menu.
|
||||
|
||||
|
||||
import os
|
||||
|
||||
|
||||
OK = 0
|
||||
MISSING = 1
|
||||
TOOLOW = 2
|
||||
SKIPPED = 3
|
||||
FLAG_MESSAGES = {
|
||||
OK: "",
|
||||
MISSING: "No translation yet.",
|
||||
TOOLOW: "Not complete enough to be included.",
|
||||
SKIPPED: "Skipped (see IMPORT_LANGUAGES_SKIP in settings.py).",
|
||||
}
|
||||
|
||||
|
||||
def gen_menu_file(stats, settings):
|
||||
# Generate languages file used by Blender's i18n system.
|
||||
# First, match all entries in LANGUAGES to a lang in stats, if possible!
|
||||
tmp = []
|
||||
for uid_num, label, uid in settings.LANGUAGES:
|
||||
if uid in stats:
|
||||
if uid in settings.IMPORT_LANGUAGES_SKIP:
|
||||
tmp.append((stats[uid], uid_num, label, uid, SKIPPED))
|
||||
else:
|
||||
tmp.append((stats[uid], uid_num, label, uid, OK))
|
||||
else:
|
||||
tmp.append((0.0, uid_num, label, uid, MISSING))
|
||||
stats = tmp
|
||||
limits = sorted(settings.LANGUAGES_CATEGORIES, key=lambda it: it[0], reverse=True)
|
||||
idx = 0
|
||||
stats = sorted(stats, key=lambda it: it[0], reverse=True)
|
||||
langs_cats = [[] for i in range(len(limits))]
|
||||
highest_uid = 0
|
||||
for lvl, uid_num, label, uid, flag in stats:
|
||||
if lvl < limits[idx][0]:
|
||||
# Sub-sort languages by iso-codes.
|
||||
langs_cats[idx].sort(key=lambda it: it[2])
|
||||
idx += 1
|
||||
if lvl < settings.IMPORT_MIN_LEVEL and flag == OK:
|
||||
flag = TOOLOW
|
||||
langs_cats[idx].append((uid_num, label, uid, flag))
|
||||
if abs(uid_num) > highest_uid:
|
||||
highest_uid = abs(uid_num)
|
||||
# Sub-sort last group of languages by iso-codes!
|
||||
langs_cats[idx].sort(key=lambda it: it[2])
|
||||
data_lines = [
|
||||
"# File used by Blender to know which languages (translations) are available, ",
|
||||
"# and to generate translation menu.",
|
||||
"#",
|
||||
"# File format:",
|
||||
"# ID:MENULABEL:ISOCODE",
|
||||
"# ID must be unique, except for 0 value (marks categories for menu).",
|
||||
"# Line starting with a # are comments!",
|
||||
"#",
|
||||
"# Automatically generated by bl_i18n_utils/update_languages_menu.py script.",
|
||||
"# Highest ID currently in use: {}".format(highest_uid),
|
||||
]
|
||||
for cat, langs_cat in zip(limits, langs_cats):
|
||||
data_lines.append("#")
|
||||
# Write "category menu label"...
|
||||
if langs_cat:
|
||||
data_lines.append("0:{}:".format(cat[1]))
|
||||
else:
|
||||
# Do not write the category if it has no language!
|
||||
data_lines.append("# Void category! #0:{}:".format(cat[1]))
|
||||
# ...and all matching language entries!
|
||||
for uid_num, label, uid, flag in langs_cat:
|
||||
if flag == OK:
|
||||
data_lines.append("{}:{}:{}".format(uid_num, label, uid))
|
||||
else:
|
||||
# Non-existing, commented entry!
|
||||
data_lines.append("# {} #{}:{}:{}".format(FLAG_MESSAGES[flag], uid_num, label, uid))
|
||||
with open(os.path.join(settings.TRUNK_MO_DIR, settings.LANGUAGES_FILE), 'w', encoding="utf8") as f:
|
||||
f.write("\n".join(data_lines))
|
||||
with open(os.path.join(settings.GIT_I18N_ROOT, settings.LANGUAGES_FILE), 'w', encoding="utf8") as f:
|
||||
f.write("\n".join(data_lines))
|
||||
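# Sketch (not part of the original file) of driving gen_menu_file() directly;
# stats maps a language uid to its fraction of translated messages, and the
# numbers below are made up.
def _example():
    from bl_i18n_utils.settings import I18nSettings  # assumed import path
    settings = I18nSettings()
    stats = {"DEFAULT": 1.0, "en_US": 1.0, "fr_FR": 0.97, "ja_JP": 0.42}
    gen_menu_file(stats, settings)  # writes the "languages" file(s)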
175
scripts/modules/bl_i18n_utils/utils_rtl.py
Executable file
@@ -0,0 +1,175 @@
|
||||
#!/usr/bin/env python3
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
# Pre-process right-to-left languages.
|
||||
# You can use it either standalone, or through import_po_from_branches or
|
||||
# update_trunk.
|
||||
#
|
||||
# Notes: This has been tested on Linux; not 100% sure it will work nicely on
|
||||
# Windows or macOS.
|
||||
# This uses ctypes, as there is no py3 binding for fribidi currently.
|
||||
# This implies you only need the compiled C library to run it.
|
||||
# Finally, note that it handles some formatting/escape codes (like
|
||||
# \", %s, %x12, %.4f, etc.), protecting them from ugly (evil) fribidi,
|
||||
# which seems completely unaware of such things (as unicode is...).
|
||||
|
||||
import ctypes
|
||||
import re
|
||||
|
||||
|
||||
# define FRIBIDI_MASK_NEUTRAL 0x00000040L /* Is neutral */
|
||||
FRIBIDI_PAR_ON = 0x00000040
|
||||
|
||||
|
||||
# define FRIBIDI_FLAG_SHAPE_MIRRORING 0x00000001
|
||||
# define FRIBIDI_FLAG_REORDER_NSM 0x00000002
|
||||
|
||||
# define FRIBIDI_FLAG_SHAPE_ARAB_PRES 0x00000100
|
||||
# define FRIBIDI_FLAG_SHAPE_ARAB_LIGA 0x00000200
|
||||
# define FRIBIDI_FLAG_SHAPE_ARAB_CONSOLE 0x00000400
|
||||
|
||||
# define FRIBIDI_FLAG_REMOVE_BIDI 0x00010000
|
||||
# define FRIBIDI_FLAG_REMOVE_JOINING 0x00020000
|
||||
# define FRIBIDI_FLAG_REMOVE_SPECIALS 0x00040000
|
||||
|
||||
# define FRIBIDI_FLAGS_DEFAULT ( \
|
||||
# FRIBIDI_FLAG_SHAPE_MIRRORING | \
|
||||
# FRIBIDI_FLAG_REORDER_NSM | \
|
||||
# FRIBIDI_FLAG_REMOVE_SPECIALS )
|
||||
|
||||
# define FRIBIDI_FLAGS_ARABIC ( \
|
||||
# FRIBIDI_FLAG_SHAPE_ARAB_PRES | \
|
||||
# FRIBIDI_FLAG_SHAPE_ARAB_LIGA )
|
||||
|
||||
FRIBIDI_FLAG_SHAPE_MIRRORING = 0x00000001
|
||||
FRIBIDI_FLAG_REORDER_NSM = 0x00000002
|
||||
FRIBIDI_FLAG_REMOVE_SPECIALS = 0x00040000
|
||||
|
||||
FRIBIDI_FLAG_SHAPE_ARAB_PRES = 0x00000100
|
||||
FRIBIDI_FLAG_SHAPE_ARAB_LIGA = 0x00000200
|
||||
|
||||
FRIBIDI_FLAGS_DEFAULT = FRIBIDI_FLAG_SHAPE_MIRRORING | FRIBIDI_FLAG_REORDER_NSM | FRIBIDI_FLAG_REMOVE_SPECIALS
|
||||
|
||||
FRIBIDI_FLAGS_ARABIC = FRIBIDI_FLAG_SHAPE_ARAB_PRES | FRIBIDI_FLAG_SHAPE_ARAB_LIGA
|
||||
|
||||
|
||||
MENU_DETECT_REGEX = re.compile("%x\\d+\\|")
|
||||
|
||||
|
||||
##### Kernel processing funcs. #####
|
||||
def protect_format_seq(msg):
|
||||
"""
|
||||
Find some specific escaping/formatting sequences (like \", %s, etc.),
|
||||
and protect them from any modification!
|
||||
"""
|
||||
# LRM = "\u200E"
|
||||
# RLM = "\u200F"
|
||||
LRE = "\u202A"
|
||||
# RLE = "\u202B"
|
||||
PDF = "\u202C"
|
||||
LRO = "\u202D"
|
||||
# RLO = "\u202E"
|
||||
# uctrl = {LRE, RLE, PDF, LRO, RLO}
|
||||
# Most likely incomplete, but seems to cover current needs.
|
||||
format_codes = set("tslfd")
|
||||
digits = set(".0123456789")
|
||||
|
||||
if not msg:
|
||||
return msg
|
||||
elif MENU_DETECT_REGEX.search(msg):
|
||||
# An ugly "menu" message, just force it whole LRE if not yet done.
|
||||
if msg[0] not in {LRE, LRO}:
|
||||
msg = LRE + msg
|
||||
|
||||
idx = 0
|
||||
ret = []
|
||||
ln = len(msg)
|
||||
while idx < ln:
|
||||
dlt = 1
|
||||
# # If we find a control char, skip any additional protection!
|
||||
# if msg[idx] in uctrl:
|
||||
# ret.append(msg[idx:])
|
||||
# break
|
||||
# \" or \'
|
||||
if idx < (ln - 1) and msg[idx] == '\\' and msg[idx + 1] in "\"\'":
|
||||
dlt = 2
|
||||
# %x12|
|
||||
elif idx < (ln - 2) and msg[idx] == '%' and msg[idx + 1] in "x" and msg[idx + 2] in digits:
|
||||
dlt = 2
|
||||
while (idx + dlt) < ln and msg[idx + dlt] in digits:
|
||||
dlt += 1
|
||||
if (idx + dlt) < ln and msg[idx + dlt] == '|':
|
||||
dlt += 1
|
||||
# %.4f
|
||||
elif idx < (ln - 3) and msg[idx] == '%' and msg[idx + 1] in digits:
|
||||
dlt = 2
|
||||
while (idx + dlt) < ln and msg[idx + dlt] in digits:
|
||||
dlt += 1
|
||||
if (idx + dlt) < ln and msg[idx + dlt] in format_codes:
|
||||
dlt += 1
|
||||
else:
|
||||
dlt = 1
|
||||
# %s
|
||||
elif idx < (ln - 1) and msg[idx] == '%' and msg[idx + 1] in format_codes:
|
||||
dlt = 2
|
||||
|
||||
if dlt > 1:
|
||||
ret.append(LRE)
|
||||
ret += msg[idx:idx + dlt]
|
||||
idx += dlt
|
||||
if dlt > 1:
|
||||
ret.append(PDF)
|
||||
|
||||
return "".join(ret)
|
||||
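# Illustration (a sketch, with the invisible control characters spelled out):
# each recognized sequence is wrapped in LRE ... PDF so fribidi leaves it alone:
#
#   protect_format_seq('Frame %d of %s')
#       -> 'Frame \u202A%d\u202C of \u202A%s\u202C'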
|
||||
|
||||
def log2vis(msgs, settings):
|
||||
"""
|
||||
Globally mimics deprecated fribidi_log2vis.
|
||||
msgs should be an iterable of messages to rtl-process.
|
||||
"""
|
||||
fbd = ctypes.CDLL(settings.FRIBIDI_LIB)
|
||||
|
||||
for msg in msgs:
|
||||
msg = protect_format_seq(msg)
|
||||
|
||||
fbc_str = ctypes.create_unicode_buffer(msg)
|
||||
ln = len(fbc_str) - 1
|
||||
# print(fbc_str.value, ln)
|
||||
btypes = (ctypes.c_int * ln)()
|
||||
embed_lvl = (ctypes.c_uint8 * ln)()
|
||||
pbase_dir = ctypes.c_int(FRIBIDI_PAR_ON)
|
||||
jtypes = (ctypes.c_uint8 * ln)()
|
||||
flags = FRIBIDI_FLAGS_DEFAULT | FRIBIDI_FLAGS_ARABIC
|
||||
|
||||
# Find out direction of each char.
|
||||
fbd.fribidi_get_bidi_types(fbc_str, ln, ctypes.byref(btypes))
|
||||
|
||||
# print(*btypes)
|
||||
|
||||
fbd.fribidi_get_par_embedding_levels(btypes, ln,
|
||||
ctypes.byref(pbase_dir),
|
||||
embed_lvl)
|
||||
|
||||
# print(*embed_lvl)
|
||||
|
||||
# Joinings for arabic chars.
|
||||
fbd.fribidi_get_joining_types(fbc_str, ln, jtypes)
|
||||
# print(*jtypes)
|
||||
fbd.fribidi_join_arabic(btypes, ln, embed_lvl, jtypes)
|
||||
# print(*jtypes)
|
||||
|
||||
# Final Shaping!
|
||||
fbd.fribidi_shape(flags, embed_lvl, ln, jtypes, fbc_str)
|
||||
|
||||
# print(fbc_str.value)
|
||||
# print(*(ord(c) for c in fbc_str))
|
||||
# And now, the reordering.
|
||||
# Note that here, we expect a single line, so no need to do
|
||||
# fancy things...
|
||||
fbd.fribidi_reorder_line(flags, btypes, ln, 0, pbase_dir, embed_lvl,
|
||||
fbc_str, None)
|
||||
# print(fbc_str.value)
|
||||
# print(*(ord(c) for c in fbc_str))
|
||||
|
||||
yield fbc_str.value
|
||||
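# Usage sketch (not part of the original file): `settings` must provide
# FRIBIDI_LIB, the soname/path of the compiled FriBidi C library; the message
# below is a placeholder.
def _example(settings):
    for visual in log2vis(["مرحبا %s"], settings):
        print(visual)  # reordered/shaped string, format sequences protected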
885
scripts/modules/bl_i18n_utils/utils_spell_check.py
Normal file
@@ -0,0 +1,885 @@
# SPDX-License-Identifier: GPL-2.0-or-later

import enchant
import os
import pickle
import re


class SpellChecker:
    """
    A basic spell checker.
    """

    # These must be all lower case for comparisons
    uimsgs = {
        # OK words
        "adaptively", "adaptivity",
        "aren",  # aren't
        "betweens",  # yuck! in-betweens!
        "boolean", "booleans", "chamfer",
        "couldn",  # couldn't
        "customizable", "decrement", "derivate", "deterministically",
        "doesn",  # doesn't
        "duplications", "effector",
        "equi",  # equi-angular, etc.
        "fader", "globbing", "gridded", "haptics",
        "hasn",  # hasn't
        "hetero",
        "hoc",  # ad-hoc
        "incompressible", "indices", "instantiation", "iridas",
        "isn",  # isn't
        "iterable", "kyrgyz", "latin", "merchantability", "mplayer",
        "ons",  # add-ons
        "pong",  # ping pong
        "resumable", "runtimes", "scalable", "shadeless",
        "shouldn",  # shouldn't
        "smoothen", "spacings", "teleport", "teleporting", "tangency", "vertices",
        "wasn",  # wasn't
        "zig", "zag",

        # Brands etc.
        "htc", "huawei", "radeon", "vive", "xbox",

        # Merged words
        "antialiasing", "antialias",
        "arcsine", "arccosine", "arctangent",
        "autoclip", "autocomplete", "autoexec", "autoexecution", "autogenerated",
        "autolock", "automask", "automasking", "automerge", "autoname", "autopack",
        "autosave", "autoscale", "autosmooth", "autosplit",
        "backface", "backfacing", "backimage", "backscattered",
        "bandnoise", "bindcode", "bitdepth", "bitflag", "bitflags", "bitrate",
        "blackbody", "blendfile", "blendin", "bonesize", "boundbox", "boxpack",
        "buffersize", "builtin", "builtins", "bytecode", "chunksize", "codebase",
        "customdata", "dataset", "datasets", "de", "deadzone", "deconstruct",
        "defocus", "denoise", "denoised", "denoising", "denoiser",
        "deselect", "deselecting", "deselection", "despill", "despilling",
        "dirtree", "editcurve", "editmesh", "faceforward", "filebrowser",
        "filelist", "filename", "filenames", "filepath", "filepaths",
        "forcefield", "forcefields", "framerange", "frontmost",
        "fulldome", "fulldomes", "fullscreen", "gamepad", "gridline", "gridlines",
        "hardlight", "hemi", "hostname", "inbetween", "inscatter", "inscattering",
        "libdata", "lightcache", "lightgroup", "lightgroups",
        "lightprobe", "lightprobes", "lightless", "lineset",
        "linestyle", "linestyles", "localview", "lookup", "lookups", "mathutils",
        "micropolygon", "midlevel", "midground", "mixdown", "monospaced",
        "multi", "multifractal", "multiframe", "multilayer", "multipaint",
        "multires", "multiresolution", "multisampling", "multiscatter",
        "multitexture", "multithreaded", "multiuser", "multiview",
        "namespace", "nodetree", "nodetrees", "keyconfig", "offscreen", "online",
        "playhead", "popup", "popups", "pointcloud",
        "pre", "precache", "precaching", "precalculate", "precomputing",
        "prefetch", "prefilter", "prefiltering", "preload",
        "premultiply", "premultiplied", "prepass", "prepend",
        "preprocess", "preprocessing", "preprocessor", "preprocessed",
        "preseek", "preselect", "preselected", "promillage", "pushdown",
        "raytree", "readonly", "realtime", "reinject", "reinjected",
        "rekey", "relink", "remesh", "reprojection", "reproject", "reprojecting",
        "resample", "resize", "restpose", "resync", "resynced",
        "retarget", "retargets", "retargeting", "retargeted", "retiming",
        "rigidbody", "ringnoise", "rolloff", "runtime", "scanline",
        "screenshot", "screenshots", "seekability", "selfcollision",
        "shadowbuffer", "shadowbuffers", "singletexture", "softbox",
        "spellcheck", "spellchecking", "startup", "stateful", "starfield",
        "studiolight", "subflare", "subflares", "subframe", "subframes",
        "subclass", "subclasses", "subclassing",
        "subdirectory", "subdirectories", "subdir", "subdirs",
        "subitem", "submode", "submodule", "submodules", "subpath", "subsize",
        "substep", "substeps", "substring", "targetless", "textbox", "textboxes",
        "tilemode", "timestamp", "timestamps", "timestep", "timesteps",
        "todo", "tradeoff",
        "un", "unadjust", "unadjusted", "unassociate", "unassociated",
        "unbake", "uncheck", "unclosed", "uncomment", "unculled", "undeformed",
        "undistort", "undistorted", "undistortion", "ungroup", "ungrouped",
        "unhide", "unindent", "unitless", "unkeyed", "unlink", "unlinked",
        "unmute", "unphysical", "unpremultiply", "unprojected", "unprotect",
        "unreacted", "unreferenced", "unregister",
        "unselect", "unselected", "unselectable", "unsets", "unshadowed",
        "unspill", "unstitchable", "unstitch", "unsubdivided", "unsubdivide",
        "untrusted", "vectorscope", "whitespace", "whitespaces",
        "worldspace", "workflow", "workspace", "workspaces",

        # Neologisms, slangs
        "affectable", "animatable", "automagic", "automagically", "blobby",
        "blockiness", "blocky", "collider", "colliders", "deformer", "deformers",
        "determinator", "editability", "effectors", "expander", "instancer",
        "keyer", "lacunarity", "linkable", "numerics", "occluder", "occluders",
        "overridable", "passepartout", "perspectively", "pixelate", "pointiness",
        "polycount",
        "polygonization", "polygonalization",  # yuck!
        "scalings", "selectable", "selectability", "shaper",
        "smoothen", "smoothening", "spherize", "spherized", "stitchable",
        "symmetrize", "trackability", "transmissivity",
        "rasterized", "rasterization", "rasterizer",
        "renderer", "renderers", "renderable", "renderability",

        # Really bad!!!
        "convertor", "fullscr",

        # Abbreviations
        "aero", "amb", "anim", "aov", "app", "bbox", "bboxes",
        "bksp",  # Backspace
        "bool", "calc", "cfl", "config", "configs", "const", "coord", "coords",
        "degr", "diff", "dof", "dupli", "duplis", "eg", "esc", "expr",
        "fac", "fra", "fract", "frs", "grless", "http", "init",
        "irr",  # Irradiance
        "kbit", "kb", "lang", "langs", "lclick", "rclick", "lensdist",
        "loc", "rot", "pos", "lorem", "luma",
        "mbs",  # mouse button 'select'.
        "mem", "multicam", "num", "ok", "orco", "ortho", "pano", "persp",
        "pref", "prefs", "prev", "param", "premul", "quad", "quads",
        "quat", "quats", "recalc", "recalcs", "refl", "sce", "sel", "spec",
        "struct", "structs", "subdiv", "sys", "tex", "texcoord",
        "tmr",  # timer
        "tri", "tris", "udim", "udims",
        "upres",  # Upresolution
        "usd", "uv", "uvs", "uvw", "uw", "uvmap", "ve", "vec",
        "vel",  # velocity!
        "vert", "verts", "vis", "vram", "xor",
        "xyz", "xzy", "yxz", "yzx", "zxy", "zyx",
        "xy", "xz", "yx", "yz", "zx", "zy",

        # General computer/science terms
        "affine", "albedo", "anamorphic", "anisotropic", "anisotropy",
        "arcminute", "arcminutes", "arcsecond", "arcseconds",
        "bimanual",  # OpenXR?
        "bitangent", "boid", "boids", "ceil",
        "centum",  # From 'centum weight'
        "compressibility", "coplanar", "curvilinear", "dekameter", "dekameters",
        "equiangular", "equisolid", "euler", "eulers", "fribidi", "gettext",
        "hashable", "hotspot", "hydrostatic", "interocular", "intrinsics",
        "irradiance", "isosurface", "jitter", "jittering", "jittered",
        "keymap", "keymaps", "lambertian", "laplacian", "metadata",
        "microwatt", "microwatts", "milliwatt", "milliwatts", "msgfmt",
        "nand", "xnor", "nanowatt", "nanowatts", "normals", "numpad",
        "octahedral", "octree", "omnidirectional", "opengl", "openmp",
        "parametrization", "photoreceptor", "poly", "polyline", "polylines",
        "probabilistically", "pulldown", "pulldowns", "quadratically",
        "quantized", "quartic", "quaternion", "quaternions", "quintic",
        "samplerate", "sawtooth", "scrollback", "scrollbar", "scroller",
        "searchable", "spacebar", "subtractive", "superellipse", "thumbstick",
        "tooltip", "tooltips", "touchpad", "trackpad", "tuple", "unicode",
        "viewport", "viewports", "viscoelastic", "vorticity",
        "waveform", "waveforms", "wildcard", "wildcards",
        "wintab",  # Some Windows tablet API

        # General computer graphics terms
        "anaglyph", "bezier", "beziers", "bicubic", "bilinear", "bindpose",
        "binormal", "blackpoint", "whitepoint", "blinn", "bokeh", "catadioptric",
        "centroid", "chroma", "chrominance", "clearcoat", "codec", "codecs",
        "collada", "compositing", "crossfade", "cubemap", "cubemaps", "cuda",
        "deinterlace", "dropoff", "duotone", "dv", "eigenvectors", "emissive",
        "equirectangular", "filmlike", "fisheye", "framerate", "gimbal",
        "grayscale", "icosahedron", "icosphere", "inpaint", "kerning",
        "lightmap", "linearlight", "lossless", "lossy", "luminance", "mantaflow",
        "matcap", "microfacet", "midtones", "mipmap", "mipmaps", "mip",
        "ngon", "ngons", "ntsc", "nurb", "nurbs", "perlin", "phong",
        "photorealistic", "pinlight", "posterize", "qi", "radiosity",
        "raycast", "raycasting", "raytrace", "raytracing", "raytraced",
        "refractions", "remesher", "remeshing", "remesh", "renderfarm",
        "scanfill", "shader", "shaders", "shadowmap", "shadowmaps", "softlight",
        "specular", "specularity", "spillmap", "sobel", "stereoscopy", "texel",
        "timecode", "tonemap", "toon", "transmissive", "uvproject", "vividlight",
        "volumetrics", "voronoi", "voxel", "voxels", "vsync", "vulkan",
        "wireframe", "zmask", "ztransp",

        # Blender terms
        "audaspace",
        "azone",  # action zone
        "backwire", "bbone",
        "bendy",  # bones
        "bmesh", "breakdowner", "bspline", "bweight", "colorband", "crazyspace",
        "datablock", "datablocks", "despeckle", "depsgraph", "dopesheet",
        "dupliface", "duplifaces", "dupliframe", "dupliframes",
        "dupliobject", "dupliob", "dupligroup", "duplivert", "dyntopo",
        "editbone", "editmode", "eevee", "fcurve", "fcurves", "fedge", "fedges",
        "filmic", "fluidsim", "freestyle", "enum", "enums", "gizmogroup",
        "gon", "gons",  # N-Gon(s)
        "gpencil", "idcol",
        "keyframe", "keyframes", "keyframing", "keyframed",
        "lookdev", "luminocity", "mathvis",
        "metaball", "metaballs", "mball", "metaelement", "metaelements",
        "metastrip", "metastrips", "movieclip", "mpoly", "mtex", "nabla",
        "navmesh", "outliner", "overscan", "paintmap", "paintmaps",
        "polygroup", "polygroups", "poselib", "pushpull",
        "pyconstraint", "pyconstraints",
        "qe",  # keys...
        "shaderfx", "shaderfxs", "shapekey", "shapekeys",
        "shrinkfatten", "shrinkwrap", "softbody", "stucci", "subdiv", "subtype",
        "sunsky", "tessface", "tessfaces", "texface", "timeline", "timelines",
        "tosphere", "uilist", "userpref", "vcol", "vcols", "vgroup", "vgroups",
        "vinterlace", "vse",
        "wasd", "wasdqe",  # keys...
        "wetmap", "wetmaps", "wpaint", "uvwarp",

        # UOC (Ugly Operator Categories)
        "cachefile", "paintcurve", "ptcache", "dpaint",

        # Algorithm/library names
        "ashikhmin",  # Ashikhmin-Shirley
        "arsloe",  # Texel-Marsen-Arsloe
        "beckmann",
        "blackman",  # Blackman-Harris
        "blosc",
        "burley",  # Christensen-Burley
        "catmull", "catrom", "chebychev",
        "conrady",  # Brown-Conrady
        "courant", "cryptomatte", "crypto", "embree", "gmp", "hosek", "kutta",
        "lennard",
        "marsen",  # Texel-Marsen-Arsloe
        "mikktspace", "minkowski", "minnaert",
        "mises",  # von Mises-Fisher
        "moskowitz",  # Pierson-Moskowitz
        "musgrave", "nayar", "netravali", "nishita", "ogawa", "oren",
        "peucker",  # Ramer-Douglas-Peucker
        "pierson",  # Pierson-Moskowitz
        "preetham", "prewitt",
        "ramer",  # Ramer-Douglas-Peucker
        "runge", "sobol", "verlet",
        "von",  # von Mises-Fisher
        "wilkie", "worley",

        # Acronyms
        "aa", "msaa",
        "acescg",  # ACEScg color space.
        "ao", "aov", "aovs", "api",
        "apic",  # Affine Particle-In-Cell
        "asc", "cdl", "ascii", "atrac", "avx", "bsdf", "bsdfs", "bssrdf",
        "bw", "ccd", "cmd", "cmos", "cpus", "ctrl", "cw", "ccw", "dev",
        "dls", "djv", "dpi", "dvar", "dx", "eo", "ewa", "fh", "fk", "fov",
        "fft", "futura", "fx", "gfx", "ggx", "gl", "glsl", "gpl", "gpu", "gpus",
        "hc", "hdc", "hdr", "hdri", "hdris",
        "hh", "mm", "ss", "ff",  # hh:mm:ss:ff timecode
        "hpg",  # Intel Xe-HPG architecture
        "hsv", "hsva", "hsl", "id", "ies", "ior", "itu", "jonswap", "lfe",
        "lhs", "lmb", "mmb", "rmb", "lscm",
        "lx",  # Lux light unit
        "kb", "mis", "mocap", "msgid", "msgids", "mux", "ndof",
        "pbr",  # Physically Based Rendering
        "ppc", "precisa", "px", "qmc", "rdna", "rdp", "rgb", "rgba", "rhs",
        "rv", "sdl", "sdls", "sl", "smpte", "ssao", "ssr", "svn", "tma",
        "ui", "unix", "uuid", "vbo", "vbos", "vfx", "vmm", "vr", "wxyz", "xr",
        "ycc", "ycca", "yrgb", "yuv", "yuva",

        # Blender acronyms
        "bli", "bpy", "bvh", "dbvt",
        "dop",  # BLI K-Dop BVH
        "ik", "nla", "py", "qbvh", "rna", "rvo", "simd", "sph", "svbvh",

        # Files types/formats
        "aac", "avi", "attrac", "autocad", "autodesk", "bmp", "btx", "cineon",
        "dpx", "dwaa", "dwab", "dxf", "eps", "exr", "fbx", "fbxnode", "ffmpeg",
        "flac", "gltf", "gzip", "ico", "jpg", "jpeg", "jpegs", "json", "lzw",
        "matroska", "mdd", "mkv", "mpeg", "mjpeg", "mtl", "ogg", "openjpeg",
        "osl", "oso", "pcm", "piz", "png", "pngs", "po", "quicktime", "rle",
        "sgi", "stl", "svg", "targa", "tga", "tiff", "theora", "vorbis", "vp9",
        "wav", "webm", "xiph", "xml", "xna", "xvid",
    }

    _valid_before = "(?<=[\\s*'\"`])|(?<=[a-zA-Z][/-])|(?<=^)"
    _valid_after = "(?=[\\s'\"`.!?,;:])|(?=[/-]\\s*[a-zA-Z])|(?=$)"
    _valid_words = "(?:{})(?:(?:[A-Z]+[a-z]*)|[A-Z]*|[a-z]*)(?:{})".format(_valid_before, _valid_after)
    _split_words = re.compile(_valid_words).findall

    @classmethod
    def split_words(cls, text):
        return [w for w in cls._split_words(text) if w]
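    # e.g. (illustrative): SpellChecker.split_words("re-usable UVs")
    # returns ['re', 'usable', 'UVs']; hyphen/slash-separated parts are
    # checked as separate words, and empty regex matches are filtered out.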

    def __init__(self, settings, lang="en_US"):
        self.settings = settings
        self.dict_spelling = enchant.Dict(lang)
        self.cache = set(self.uimsgs)

        cache = self.settings.SPELL_CACHE
        if cache and os.path.exists(cache):
            with open(cache, 'rb') as f:
                self.cache |= set(pickle.load(f))

    def __del__(self):
        cache = self.settings.SPELL_CACHE
        if cache and os.path.exists(cache):
            with open(cache, 'wb') as f:
                pickle.dump(self.cache, f)

    def check(self, txt):
        ret = []

        if txt in self.cache:
            return ret

        for w in self.split_words(txt):
            w_lower = w.lower()
            if w_lower in self.cache:
                continue
            if not self.dict_spelling.check(w):
                ret.append((w, self.dict_spelling.suggest(w)))
            else:
                self.cache.add(w_lower)

        if not ret:
            self.cache.add(txt)

        return ret
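    # Usage sketch (illustrative; assumes the pyenchant package and an
    # "en_US" dictionary are installed, and a settings object exposing a
    # SPELL_CACHE path, here a falsy value that disables the pickle cache):
    #
    #     class _FakeSettings:
    #         SPELL_CACHE = ""
    #
    #     checker = SpellChecker(_FakeSettings())
    #     checker.check("Colour of the viewport")
    #     # -> [('Colour', ['Color', ...])]; "viewport" passes via uimsgs.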
7  scripts/modules/bl_keymap_utils/__init__.py  Normal file
@@ -0,0 +1,7 @@
# SPDX-License-Identifier: GPL-2.0-or-later

__all__ = (
    "io",
    "keymap_from_toolbar",
    "keymap_hierarchy",
)
306  scripts/modules/bl_keymap_utils/io.py  Normal file
@@ -0,0 +1,306 @@
# SPDX-License-Identifier: GPL-2.0-or-later

# -----------------------------------------------------------------------------
# Export Functions

__all__ = (
    "_init_properties_from_data",  # Shared with gizmo default property initialization.
    "keyconfig_export_as_data",
    "keyconfig_import_from_data",
    "keyconfig_init_from_data",
    "keyconfig_merge",
    "keymap_init_from_data",
)


def indent(levels):
    return levels * "    "


def round_float_32(f):
    from struct import pack, unpack
    return unpack("f", pack("f", f))[0]


def repr_f32(f):
    f_round = round_float_32(f)
    f_str = repr(f)
    f_str_frac = f_str.partition(".")[2]
    if not f_str_frac:
        return f_str
    for i in range(1, len(f_str_frac)):
        f_test = round(f, i)
        f_test_round = round_float_32(f_test)
        if f_test_round == f_round:
            return "%.*f" % (i, f_test)
    return f_str
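# A quick sketch of the round-trip logic above (digits illustrative):
#
#     f = 1.0 / 3.0
#     repr(f)      # '0.3333333333333333', full 64-bit precision
#     repr_f32(f)  # '0.33333333', the shortest form that still matches
#                  # after a 32-bit pack/unpack round-trip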


def kmi_args_as_data(kmi):
    s = [
        f"\"type\": '{kmi.type}'",
        f"\"value\": '{kmi.value}'"
    ]

    if kmi.any:
        s.append("\"any\": True")
    else:
        for attr in ("shift", "ctrl", "alt", "oskey"):
            if mod := getattr(kmi, attr):
                s.append(f"\"{attr:s}\": " + ("-1" if mod == -1 else "True"))
        if (mod := kmi.key_modifier) and (mod != 'NONE'):
            s.append(f"\"key_modifier\": '{mod:s}'")
        if (direction := kmi.direction) and (direction != 'ANY'):
            s.append(f"\"direction\": '{direction:s}'")

    if kmi.repeat:
        if (
                (kmi.map_type == 'KEYBOARD' and kmi.value in {'PRESS', 'ANY'}) or
                (kmi.map_type == 'TEXTINPUT')
        ):
            s.append("\"repeat\": True")

    return "{" + ", ".join(s) + "}"
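# For a Ctrl-Shift-A press event the string built above would read
# (a sketch; field order follows the code):
#
#     {"type": 'A', "value": 'PRESS', "shift": True, "ctrl": True}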


def _kmi_properties_to_lines_recursive(level, properties, lines):
    from bpy.types import OperatorProperties

    def string_value(value):
        if isinstance(value, (str, bool, int, set)):
            return repr(value)
        elif isinstance(value, float):
            return repr_f32(value)
        elif getattr(value, '__len__', False):
            return repr(tuple(value))
        raise Exception(f"Export key configuration: can't write {value!r}")

    for pname in properties.bl_rna.properties.keys():
        if pname != "rna_type":
            value = getattr(properties, pname)
            if isinstance(value, OperatorProperties):
                lines_test = []
                _kmi_properties_to_lines_recursive(level + 2, value, lines_test)
                if lines_test:
                    lines.append(f"(")
                    lines.append(f"\"{pname}\",\n")
                    lines.append(f"{indent(level + 3)}" "[")
                    lines.extend(lines_test)
                    lines.append("],\n")
                    lines.append(f"{indent(level + 3)}" "),\n" f"{indent(level + 2)}")
                del lines_test
            elif properties.is_property_set(pname):
                value = string_value(value)
                lines.append((f"(\"{pname}\", {value:s}),\n" f"{indent(level + 2)}"))


def _kmi_properties_to_lines(level, kmi_props, lines):
    if kmi_props is None:
        return

    lines_test = [f"\"properties\":\n" f"{indent(level + 1)}" "["]
    _kmi_properties_to_lines_recursive(level, kmi_props, lines_test)
    if len(lines_test) > 1:
        lines_test.append("],\n")
        lines.extend(lines_test)


def _kmi_attrs_or_none(level, kmi):
    lines = []
    _kmi_properties_to_lines(level + 1, kmi.properties, lines)
    if kmi.active is False:
        lines.append(f"{indent(level)}\"active\":" "False,\n")
    if not lines:
        return None
    return "".join(lines)


def keyconfig_export_as_data(wm, kc, filepath, *, all_keymaps=False):
    # Alternate format

    # Generate a list of keymaps to export:
    #
    # First add all user_modified keymaps (found in keyconfigs.user.keymaps list),
    # then add all remaining keymaps from the currently active custom keyconfig.
    #
    # Sort the resulting list according to top context name,
    # while this isn't essential, it makes comparing keymaps simpler.
    #
    # This will create a final list of keymaps that can be used as a "diff" against
    # the default blender keyconfig, recreating the current setup from a fresh blender
    # without needing to export keymaps which haven't been edited.

    class FakeKeyConfig:
        keymaps = []
    edited_kc = FakeKeyConfig()
    for km in wm.keyconfigs.user.keymaps:
        if all_keymaps or km.is_user_modified:
            edited_kc.keymaps.append(km)
    # merge edited keymaps with non-default keyconfig, if it exists
    if kc != wm.keyconfigs.default:
        export_keymaps = keyconfig_merge(edited_kc, kc)
    else:
        export_keymaps = keyconfig_merge(edited_kc, edited_kc)

    # Sort the keymap list by top context name before exporting,
    # not essential, just convenient to order them predictably.
    export_keymaps.sort(key=lambda k: k[0].name)

    with open(filepath, "w", encoding="utf-8") as fh:
        fw = fh.write

        # Use the file version since it includes the sub-version
        # which we can bump multiple times between releases.
        from bpy.app import version_file
        fw(f"keyconfig_version = {version_file!r}\n")
        del version_file

        fw("keyconfig_data = \\\n[")

        for km, _kc_x in export_keymaps:
            km = km.active()
            fw("(")
            fw(f"\"{km.name:s}\",\n")
            fw(f"{indent(2)}" "{")
            fw(f"\"space_type\": '{km.space_type:s}'")
            fw(f", \"region_type\": '{km.region_type:s}'")
            # We can detect from the kind of items.
            if km.is_modal:
                fw(", \"modal\": True")
            fw("},\n")
            fw(f"{indent(2)}" "{")
            is_modal = km.is_modal
            fw(f"\"items\":\n")
            fw(f"{indent(3)}[")
            for kmi in km.keymap_items:
                if is_modal:
                    kmi_id = kmi.propvalue
                else:
                    kmi_id = kmi.idname
                fw(f"(")
                kmi_args = kmi_args_as_data(kmi)
                kmi_data = _kmi_attrs_or_none(4, kmi)
                fw(f"\"{kmi_id:s}\"")
                if kmi_data is None:
                    fw(f", ")
                else:
                    fw(",\n" f"{indent(5)}")

                fw(kmi_args)
                if kmi_data is None:
                    fw(", None),\n")
                else:
                    fw(",\n")
                    fw(f"{indent(5)}" "{")
                    fw(kmi_data)
                    fw(f"{indent(6)}")
                    fw("},\n" f"{indent(5)}")
                    fw("),\n")
                fw(f"{indent(4)}")
            fw("],\n" f"{indent(3)}")
            fw("},\n" f"{indent(2)}")
            fw("),\n" f"{indent(1)}")

        fw("]\n")
        fw("\n\n")
        fw("if __name__ == \"__main__\":\n")

        # We could remove this in the future, as loading new key-maps in older Blender versions
        # makes less and less sense as Blender changes.
        fw("    # Only add keywords that are supported.\n")
        fw("    from bpy.app import version as blender_version\n")
        fw("    keywords = {}\n")
        fw("    if blender_version >= (2, 92, 0):\n")
        fw("        keywords[\"keyconfig_version\"] = keyconfig_version\n")

        fw("    import os\n")
        fw("    from bl_keymap_utils.io import keyconfig_import_from_data\n")
        fw("    keyconfig_import_from_data(\n")
        fw("        os.path.splitext(os.path.basename(__file__))[0],\n")
        fw("        keyconfig_data,\n")
        fw("        **keywords,\n")
        fw("    )\n")


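# Usage sketch (run from within Blender; the output path is hypothetical):
#
#     import bpy
#     wm = bpy.context.window_manager
#     keyconfig_export_as_data(wm, wm.keyconfigs.active, "/tmp/my_keyconfig.py")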
# -----------------------------------------------------------------------------
# Import Functions
#
# NOTE: unlike export, this runs on startup.
# Take care making changes that could impact performance.

def _init_properties_from_data(base_props, base_value):
    assert type(base_value) is list
    for attr, value in base_value:
        if type(value) is list:
            base_props.property_unset(attr)
            props = getattr(base_props, attr)
            _init_properties_from_data(props, value)
        else:
            try:
                setattr(base_props, attr, value)
            except AttributeError:
                print(f"Warning: property '{attr}' not found in item '{base_props.__class__.__name__}'")
            except Exception as ex:
                print(f"Warning: {ex!r}")
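# The nested list format consumed here mirrors the exporter output above;
# one entry might look like (names illustrative):
#
#     [("name", "builtin.move"),            # plain property
#      ("macro_step", [("offset", 1.0)])]   # nested properties -> recurse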


def keymap_init_from_data(km, km_items, is_modal=False):
    new_fn = getattr(km.keymap_items, "new_modal" if is_modal else "new")
    for (kmi_idname, kmi_args, kmi_data) in km_items:
        kmi = new_fn(kmi_idname, **kmi_args)
        if kmi_data is not None:
            if not kmi_data.get("active", True):
                kmi.active = False
            kmi_props_data = kmi_data.get("properties", None)
            if kmi_props_data is not None:
                kmi_props = kmi.properties
                assert type(kmi_props_data) is list
                _init_properties_from_data(kmi_props, kmi_props_data)


def keyconfig_init_from_data(kc, keyconfig_data):
    # Load data in the format defined above.
    #
    # Runs at load time, keep this fast!
    for (km_name, km_args, km_content) in keyconfig_data:
        km = kc.keymaps.new(km_name, **km_args)
        km_items = km_content["items"]
        # Check here instead of inside 'keymap_init_from_data'
        # because we want to allow both tuple & list types in that case.
        #
        # For full keymaps, ensure these are always lists to allow for extending them
        # in a generic way that doesn't have to check for the type each time.
        assert type(km_items) is list
        keymap_init_from_data(km, km_items, is_modal=km_args.get("modal", False))
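# For reference, the shape of `keyconfig_data` (one keymap, one item;
# values illustrative):
#
#     keyconfig_data = [
#         ("Window", {"space_type": 'EMPTY', "region_type": 'WINDOW'},
#          {"items": [("wm.open_mainfile",
#                      {"type": 'O', "value": 'PRESS', "ctrl": True}, None)]}),
#     ]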


def keyconfig_import_from_data(name, keyconfig_data, *, keyconfig_version=(0, 0, 0)):
    # Load data in the format defined above.
    #
    # Runs at load time, keep this fast!

    import bpy
    wm = bpy.context.window_manager
    kc = wm.keyconfigs.new(name)
    if keyconfig_version is not None:
        from .versioning import keyconfig_update
        keyconfig_data = keyconfig_update(keyconfig_data, keyconfig_version)
    keyconfig_init_from_data(kc, keyconfig_data)
    return kc


# -----------------------------------------------------------------------------
# Utility Functions

def keyconfig_merge(kc1, kc2):
    """ note: kc1 takes priority over kc2
    """
    kc1_names = {km.name for km in kc1.keymaps}
    merged_keymaps = [(km, kc1) for km in kc1.keymaps]
    if kc1 != kc2:
        merged_keymaps.extend(
            (km, kc2)
            for km in kc2.keymaps
            if km.name not in kc1_names
        )
    return merged_keymaps
436  scripts/modules/bl_keymap_utils/keymap_from_toolbar.py  Normal file
@@ -0,0 +1,436 @@
# SPDX-License-Identifier: GPL-2.0-or-later

# Dynamically create a keymap which is used by the popup toolbar
# for accelerator key access.

__all__ = (
    "generate",
)


def generate(context, space_type, *, use_fallback_keys=True, use_reset=True):
    """
    Keymap for popup toolbar, currently generated each time.
    """
    from bl_ui.space_toolsystem_common import ToolSelectPanelHelper

    def modifier_keywords_from_item(kmi):
        kw = {}
        for (attr, default) in (
                ("any", False),
                ("shift", False),
                ("ctrl", False),
                ("alt", False),
                ("oskey", False),
                ("key_modifier", 'NONE'),
        ):
            val = getattr(kmi, attr)
            if val != default:
                kw[attr] = val
        return kw

    def dict_as_tuple(d):
        return tuple((k, v) for (k, v) in sorted(d.items()))
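    # For example (illustrative values):
    #   modifier_keywords_from_item(kmi)            -> {"ctrl": True} for a Ctrl-only item
    #   dict_as_tuple({"type": 'A', "ctrl": True})  -> (('ctrl', True), ('type', 'A'))
    # The sorted-tuple form is hashable, so argument combinations can be
    # deduplicated in a set below.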

    cls = ToolSelectPanelHelper._tool_class_from_space_type(space_type)

    items_all = [
        # 0: tool
        # 1: keymap item (direct access)
        # 2: keymap item (newly calculated for toolbar)
        [item, None, None]
        for item in ToolSelectPanelHelper._tools_flatten(cls.tools_from_context(context))
        if item is not None
    ]
    items_all_id = {item_container[0].idname for item_container in items_all}

    # Press the toolbar popup key again to set the default tool,
    # this is useful because the select box tool is useful as a way
    # to 'drop' currently active tools (it's basically a 'none' tool).
    # so this allows us to quickly go back to a state that allows
    # a shortcut based workflow (before the tool system was added).
    use_tap_reset = use_reset
    # TODO: support other tools for modes which don't use this tool.
    tap_reset_tool = "builtin.cursor"
    # Check the tool is available in the current context.
    if tap_reset_tool not in items_all_id:
        use_tap_reset = False

    # Pie-menu style release to activate.
    use_release_confirm = use_reset

    # Generate items when no keys are mapped.
    use_auto_keymap_alpha = False  # Map manually in the default key-map.
    use_auto_keymap_num = use_fallback_keys

    # Temporary, only create so we can pass 'properties' to find_item_from_operator.
    use_hack_properties = True

    km_name_default = "Toolbar Popup"
    km_name = km_name_default + " <temp>"
    wm = context.window_manager
    keyconf_user = wm.keyconfigs.user
    keyconf_active = wm.keyconfigs.active

    keymap = keyconf_active.keymaps.get(km_name)
    if keymap is None:
        keymap = keyconf_active.keymaps.new(km_name, space_type='EMPTY', region_type='TEMPORARY')
    for kmi in keymap.keymap_items:
        keymap.keymap_items.remove(kmi)

    keymap_src = keyconf_user.keymaps.get(km_name_default)
    if keymap_src is not None:
        for kmi_src in keymap_src.keymap_items:
            # Skip tools that aren't currently shown.
            if (
                    (kmi_src.idname == "wm.tool_set_by_id") and
                    (kmi_src.properties.name not in items_all_id)
            ):
                continue
            keymap.keymap_items.new_from_item(kmi_src)
    del keymap_src
    del items_all_id

    kmi_unique_args = set()

    def kmi_unique_or_pass(kmi_args):
        kmi_unique_len = len(kmi_unique_args)
        kmi_unique_args.add(dict_as_tuple(kmi_args))
        return kmi_unique_len != len(kmi_unique_args)

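    # kmi_unique_or_pass returns True when the args were not seen before
    # (the set grew), False for a duplicate, e.g. (illustrative):
    #   kmi_unique_or_pass({"type": 'A'})  # -> True, plain 'A' is now taken
    #   kmi_unique_or_pass({"type": 'A'})  # -> False, already taken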
    cls = ToolSelectPanelHelper._tool_class_from_space_type(space_type)

    if use_hack_properties:
        kmi_hack = keymap.keymap_items.new("wm.tool_set_by_id", 'NONE', 'PRESS')
        kmi_hack_properties = kmi_hack.properties
        kmi_hack.active = False

        kmi_hack_brush_select = keymap.keymap_items.new("paint.brush_select", 'NONE', 'PRESS')
        kmi_hack_brush_select_properties = kmi_hack_brush_select.properties
        kmi_hack_brush_select.active = False

    if use_release_confirm or use_tap_reset:
        kmi_toolbar = wm.keyconfigs.find_item_from_operator(
            idname="wm.toolbar",
        )[1]
        kmi_toolbar_type = None if not kmi_toolbar else kmi_toolbar.type
        if use_tap_reset and kmi_toolbar_type is not None:
            kmi_toolbar_args_type_only = {"type": kmi_toolbar_type}
            kmi_toolbar_args = {**kmi_toolbar_args_type_only, **modifier_keywords_from_item(kmi_toolbar)}
        else:
            use_tap_reset = False
        del kmi_toolbar

    if use_tap_reset:
        kmi_found = None
        if use_hack_properties:
            # First check for direct assignment, if this tool already has a key, no need to add a new one.
            kmi_hack_properties.name = tap_reset_tool
            kmi_found = wm.keyconfigs.find_item_from_operator(
                idname="wm.tool_set_by_id",
                context='INVOKE_REGION_WIN',
                # properties={"name": item.idname},
                properties=kmi_hack_properties,
                include={'KEYBOARD'},
            )[1]
            if kmi_found:
                use_tap_reset = False
        del kmi_found

    if use_tap_reset:
        use_tap_reset = kmi_unique_or_pass(kmi_toolbar_args)

    if use_tap_reset:
        items_all[:] = [
            item_container
            for item_container in items_all
            if item_container[0].idname != tap_reset_tool
        ]

    # -----------------------
    # Begin Keymap Generation

    # -------------------------------------------------------------------------
    # Direct Tool Assignment & Brushes

    for item_container in items_all:
        item = item_container[0]
        # Only check the first item in the tools key-map (a little arbitrary).
        if use_hack_properties:
            # First check for direct assignment.
            kmi_hack_properties.name = item.idname
            kmi_found = wm.keyconfigs.find_item_from_operator(
                idname="wm.tool_set_by_id",
                context='INVOKE_REGION_WIN',
                # properties={"name": item.idname},
                properties=kmi_hack_properties,
                include={'KEYBOARD'},
            )[1]

            if kmi_found is None:
                if item.data_block:
                    # PAINT_OT_brush_select
                    mode = context.active_object.mode
                    # See: BKE_paint_get_tool_prop_id_from_paintmode
                    if space_type == 'IMAGE_EDITOR':
                        if context.space_data.mode == 'PAINT':
                            attr = "image_tool"
                        else:
                            attr = None
                    elif space_type == 'VIEW_3D':
                        attr = {
                            'SCULPT': "sculpt_tool",
                            'VERTEX_PAINT': "vertex_tool",
                            'WEIGHT_PAINT': "weight_tool",
                            'TEXTURE_PAINT': "image_tool",
                            'PAINT_GPENCIL': "gpencil_tool",
                            'VERTEX_GPENCIL': "gpencil_vertex_tool",
                            'SCULPT_GPENCIL': "gpencil_sculpt_tool",
                            'WEIGHT_GPENCIL': "gpencil_weight_tool",
                            'SCULPT_CURVES': "curves_sculpt_tool",
                        }.get(mode, None)
                    else:
                        attr = None

                    if attr is not None:
                        setattr(kmi_hack_brush_select_properties, attr, item.data_block)
                        kmi_found = wm.keyconfigs.find_item_from_operator(
                            idname="paint.brush_select",
                            context='INVOKE_REGION_WIN',
                            properties=kmi_hack_brush_select_properties,
                            include={'KEYBOARD'},
                        )[1]
                    elif mode in {'EDIT', 'PARTICLE_EDIT', 'SCULPT_GPENCIL'}:
                        # Doesn't use brushes
                        pass
                    else:
                        print("Unsupported mode:", mode)
                    del mode, attr

        else:
            kmi_found = None

        if kmi_found is not None:
            pass
        elif item.operator is not None:
            kmi_found = wm.keyconfigs.find_item_from_operator(
                idname=item.operator,
                context='INVOKE_REGION_WIN',
                include={'KEYBOARD'},
            )[1]
        elif item.keymap is not None:
            km = keyconf_user.keymaps.get(item.keymap[0])
            if km is None:
                print("Keymap", repr(item.keymap[0]), "not found for tool", item.idname)
                kmi_found = None
            else:
                kmi_first = km.keymap_items
                kmi_first = kmi_first[0] if kmi_first else None
                if kmi_first is not None:
                    kmi_found = wm.keyconfigs.find_item_from_operator(
                        idname=kmi_first.idname,
                        # properties=kmi_first.properties,  # prevents matches, don't use.
                        context='INVOKE_REGION_WIN',
                        include={'KEYBOARD'},
                    )[1]
                    if kmi_found is None:
                        # We need non-keyboard events so keys with a 'key_modifier' are found.
                        kmi_found = wm.keyconfigs.find_item_from_operator(
                            idname=kmi_first.idname,
                            # properties=kmi_first.properties,  # prevents matches, don't use.
                            context='INVOKE_REGION_WIN',
                            exclude={'KEYBOARD'},
                        )[1]
                        if kmi_found is not None:
                            if kmi_found.key_modifier == 'NONE':
                                kmi_found = None
                else:
                    kmi_found = None
                del kmi_first
            del km
        else:
            kmi_found = None
        item_container[1] = kmi_found

    # -------------------------------------------------------------------------
    # Single Key Access

    # More complex multi-pass test.
    for item_container in items_all:
        item, kmi_found = item_container[:2]
        if kmi_found is None:
            continue
        kmi_found_type = kmi_found.type

        # Only for single keys.
        if (
                (len(kmi_found_type) == 1) or
                # When a tool is being activated instead of running an operator, just copy the shortcut.
                (kmi_found.idname in {"wm.tool_set_by_id", "WM_OT_tool_set_by_id"})
        ):
            kmi_args = {"type": kmi_found_type, **modifier_keywords_from_item(kmi_found)}
            if kmi_unique_or_pass(kmi_args):
                kmi = keymap.keymap_items.new(idname="wm.tool_set_by_id", value='PRESS', **kmi_args)
                kmi.properties.name = item.idname
                item_container[2] = kmi

    # -------------------------------------------------------------------------
    # Single Key Modifier
    #
    # Test for key_modifier, where alpha key is used as a 'key_modifier'
    # (grease pencil holding 'D' for example).

    for item_container in items_all:
        item, kmi_found, kmi_exist = item_container
        if kmi_found is None or kmi_exist:
            continue

        kmi_found_type = kmi_found.type
        if kmi_found_type in {
                'LEFTMOUSE',
                'RIGHTMOUSE',
                'MIDDLEMOUSE',
                'BUTTON4MOUSE',
                'BUTTON5MOUSE',
                'BUTTON6MOUSE',
                'BUTTON7MOUSE',
        }:
            kmi_found_type = kmi_found.key_modifier
            # excludes 'NONE'
            if len(kmi_found_type) == 1:
                kmi_args = {"type": kmi_found_type, **modifier_keywords_from_item(kmi_found)}
                del kmi_args["key_modifier"]
                if kmi_unique_or_pass(kmi_args):
                    kmi = keymap.keymap_items.new(idname="wm.tool_set_by_id", value='PRESS', **kmi_args)
                    kmi.properties.name = item.idname
                    item_container[2] = kmi

    # -------------------------------------------------------------------------
    # Assign A-Z to Keys
    #
    # When the keys are free.

    if use_auto_keymap_alpha:
        # Map all unmapped keys to numbers;
        # while this is a bit strange, it means users will not confuse regular key bindings with ordered bindings.

        # First map A-Z.
        kmi_type_alpha_char = [chr(i) for i in range(65, 91)]
        kmi_type_alpha_args = {c: {"type": c} for c in kmi_type_alpha_char}
        kmi_type_alpha_args_tuple = {c: dict_as_tuple(kmi_type_alpha_args[c]) for c in kmi_type_alpha_char}
        for item_container in items_all:
            item, kmi_found, kmi_exist = item_container
            if kmi_exist:
                continue
            kmi_type = item.label[0].upper()
            kmi_tuple = kmi_type_alpha_args_tuple.get(kmi_type)
            if kmi_tuple and kmi_tuple not in kmi_unique_args:
                kmi_unique_args.add(kmi_tuple)
                kmi = keymap.keymap_items.new(
                    idname="wm.tool_set_by_id",
                    value='PRESS',
                    **kmi_type_alpha_args[kmi_type],
                )
                kmi.properties.name = item.idname
                item_container[2] = kmi
        del kmi_type_alpha_char, kmi_type_alpha_args, kmi_type_alpha_args_tuple

    # -------------------------------------------------------------------------
    # Assign Numbers to Keys

    if use_auto_keymap_num:
        # Free events (last used first).
        kmi_type_auto = ('ONE', 'TWO', 'THREE', 'FOUR', 'FIVE', 'SIX', 'SEVEN', 'EIGHT', 'NINE', 'ZERO')
        # Map both numbers and num-pad.
        kmi_type_dupe = {
            'ONE': 'NUMPAD_1',
            'TWO': 'NUMPAD_2',
            'THREE': 'NUMPAD_3',
            'FOUR': 'NUMPAD_4',
            'FIVE': 'NUMPAD_5',
            'SIX': 'NUMPAD_6',
            'SEVEN': 'NUMPAD_7',
            'EIGHT': 'NUMPAD_8',
            'NINE': 'NUMPAD_9',
            'ZERO': 'NUMPAD_0',
        }

        def iter_free_events():
            for mod in ({}, {"shift": True}, {"ctrl": True}, {"alt": True}):
                for e in kmi_type_auto:
                    yield (e, mod)

        iter_events = iter(iter_free_events())

        for item_container in items_all:
            item, kmi_found, kmi_exist = item_container
            if kmi_exist:
                continue
            kmi_args = None
            while True:
                key, mod = next(iter_events, (None, None))
                if key is None:
                    break
                kmi_args = {"type": key, **mod}
                kmi_tuple = dict_as_tuple(kmi_args)
                if kmi_tuple in kmi_unique_args:
                    kmi_args = None
                else:
                    break

            if kmi_args is not None:
                kmi = keymap.keymap_items.new(idname="wm.tool_set_by_id", value='PRESS', **kmi_args)
                kmi.properties.name = item.idname
                item_container[2] = kmi
                kmi_unique_args.add(kmi_tuple)

                key = kmi_type_dupe.get(kmi_args["type"])
                if key is not None:
                    kmi_args["type"] = key
                    kmi_tuple = dict_as_tuple(kmi_args)
                    if kmi_tuple not in kmi_unique_args:
                        kmi = keymap.keymap_items.new(idname="wm.tool_set_by_id", value='PRESS', **kmi_args)
                        kmi.properties.name = item.idname
                        kmi_unique_args.add(kmi_tuple)

    # ---------------------
    # End Keymap Generation

    if use_hack_properties:
        keymap.keymap_items.remove(kmi_hack)
        keymap.keymap_items.remove(kmi_hack_brush_select)

    # Keep last so we can try to add a key without any modifiers
    # in the case this toolbar was activated with modifiers.
    if use_tap_reset:
        if len(kmi_toolbar_args_type_only) == len(kmi_toolbar_args):
            kmi_toolbar_args_available = kmi_toolbar_args
        else:
            # We have modifiers, see if we have a free key w/o modifiers.
            kmi_toolbar_tuple = dict_as_tuple(kmi_toolbar_args_type_only)
            if kmi_toolbar_tuple not in kmi_unique_args:
                kmi_toolbar_args_available = kmi_toolbar_args_type_only
                kmi_unique_args.add(kmi_toolbar_tuple)
            else:
                kmi_toolbar_args_available = kmi_toolbar_args
            del kmi_toolbar_tuple

        kmi = keymap.keymap_items.new(
            "wm.tool_set_by_id",
            value='DOUBLE_CLICK',
            **kmi_toolbar_args_available,
        )
        kmi.properties.name = tap_reset_tool

    if use_release_confirm and (kmi_toolbar_type is not None):
        kmi = keymap.keymap_items.new(
            "ui.button_execute",
            type=kmi_toolbar_type,
            value='RELEASE',
            any=True,
        )
        kmi.properties.skip_depressed = True

    wm.keyconfigs.update()
    return keymap
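# Usage sketch (run from within Blender; the printed name is illustrative):
#
#     import bpy
#     from bl_keymap_utils.keymap_from_toolbar import generate
#     km = generate(bpy.context, 'VIEW_3D')
#     print(km.name)  # "Toolbar Popup <temp>"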
229  scripts/modules/bl_keymap_utils/keymap_hierarchy.py  Normal file
@@ -0,0 +1,229 @@
# SPDX-License-Identifier: GPL-2.0-or-later

__all__ = (
    "generate",
)


def _km_expand_from_toolsystem(space_type, context_mode):
    def _fn():
        from bl_ui.space_toolsystem_common import ToolSelectPanelHelper
        for cls in ToolSelectPanelHelper.__subclasses__():
            if cls.bl_space_type == space_type:
                return cls.keymap_ui_hierarchy(context_mode)
        raise Exception("keymap not found")
    return _fn


def _km_hierarchy_iter_recursive(items):
    for sub in items:
        if callable(sub):
            yield from sub()
        else:
            yield (*sub[:3], list(_km_hierarchy_iter_recursive(sub[3])))


def generate():
    return list(_km_hierarchy_iter_recursive(_km_hierarchy))


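# Each hierarchy entry below is (km.name, km.space_type, km.region_type,
# children); callable children are expanded lazily by the functions above.
# A sketch of what generate() yields (values illustrative):
#
#     ('Window', 'EMPTY', 'WINDOW', [])
#     ('3D View', 'VIEW_3D', 'WINDOW',
#      [('Object Mode', 'EMPTY', 'WINDOW', [...]), ...])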
# bpy.type.KeyMap: (km.name, km.space_type, km.region_type, [...])

# ('Script', 'EMPTY', 'WINDOW', []),


# Access via 'km_hierarchy'.
_km_hierarchy = [
    ('Window', 'EMPTY', 'WINDOW', []),  # file save, window change, exit
    ('Screen', 'EMPTY', 'WINDOW', [     # full screen, undo, screenshot
        ('Screen Editing', 'EMPTY', 'WINDOW', []),  # re-sizing, action corners
        ('Region Context Menu', 'EMPTY', 'WINDOW', []),  # header/footer/navigation_bar stuff (per region)
    ]),

    ('View2D', 'EMPTY', 'WINDOW', []),  # view 2d navigation (per region)
    ('View2D Buttons List', 'EMPTY', 'WINDOW', []),  # view 2d with buttons navigation

    ('User Interface', 'EMPTY', 'WINDOW', []),

    ('3D View', 'VIEW_3D', 'WINDOW', [  # view 3d navigation and generic stuff (select, transform)
        ('Object Mode', 'EMPTY', 'WINDOW', [
            _km_expand_from_toolsystem('VIEW_3D', 'OBJECT'),
        ]),
        ('Mesh', 'EMPTY', 'WINDOW', [
            _km_expand_from_toolsystem('VIEW_3D', 'EDIT_MESH'),
        ]),
        ('Curve', 'EMPTY', 'WINDOW', [
            _km_expand_from_toolsystem('VIEW_3D', 'EDIT_CURVE'),
        ]),
        ('Curves', 'EMPTY', 'WINDOW', []),
        ('Armature', 'EMPTY', 'WINDOW', [
            _km_expand_from_toolsystem('VIEW_3D', 'EDIT_ARMATURE'),
        ]),
        ('Metaball', 'EMPTY', 'WINDOW', [
            _km_expand_from_toolsystem('VIEW_3D', 'EDIT_METABALL'),
        ]),
        ('Lattice', 'EMPTY', 'WINDOW', [
            _km_expand_from_toolsystem('VIEW_3D', 'EDIT_LATTICE'),
        ]),
        ('Font', 'EMPTY', 'WINDOW', [
            _km_expand_from_toolsystem('VIEW_3D', 'EDIT_TEXT'),
        ]),

        ('Pose', 'EMPTY', 'WINDOW', [
            _km_expand_from_toolsystem('VIEW_3D', 'POSE'),
        ]),

        ('Vertex Paint', 'EMPTY', 'WINDOW', [
            _km_expand_from_toolsystem('VIEW_3D', 'PAINT_VERTEX'),
        ]),
        ('Weight Paint', 'EMPTY', 'WINDOW', [
            _km_expand_from_toolsystem('VIEW_3D', 'PAINT_WEIGHT'),
        ]),
        ('Paint Vertex Selection (Weight, Vertex)', 'EMPTY', 'WINDOW', []),
        ('Paint Face Mask (Weight, Vertex, Texture)', 'EMPTY', 'WINDOW', []),
        # image and view3d
        ('Image Paint', 'EMPTY', 'WINDOW', [
            _km_expand_from_toolsystem('VIEW_3D', 'PAINT_TEXTURE'),
        ]),
        ('Sculpt', 'EMPTY', 'WINDOW', [
            _km_expand_from_toolsystem('VIEW_3D', 'SCULPT'),
        ]),

        ('Sculpt Curves', 'EMPTY', 'WINDOW', [
            _km_expand_from_toolsystem('VIEW_3D', 'CURVES_SCULPT'),
        ]),

        ('Particle', 'EMPTY', 'WINDOW', [
            _km_expand_from_toolsystem('VIEW_3D', 'PARTICLE'),
        ]),

        ('Knife Tool Modal Map', 'EMPTY', 'WINDOW', []),
        ('Custom Normals Modal Map', 'EMPTY', 'WINDOW', []),
        ('Bevel Modal Map', 'EMPTY', 'WINDOW', []),
        ('Paint Stroke Modal', 'EMPTY', 'WINDOW', []),
        ('Sculpt Expand Modal', 'EMPTY', 'WINDOW', []),
        ('Paint Curve', 'EMPTY', 'WINDOW', []),
        ('Curve Pen Modal Map', 'EMPTY', 'WINDOW', []),

        ('Object Non-modal', 'EMPTY', 'WINDOW', []),  # mode change

        ('View3D Placement Modal', 'EMPTY', 'WINDOW', []),
        ('View3D Walk Modal', 'EMPTY', 'WINDOW', []),
        ('View3D Fly Modal', 'EMPTY', 'WINDOW', []),
        ('View3D Rotate Modal', 'EMPTY', 'WINDOW', []),
        ('View3D Move Modal', 'EMPTY', 'WINDOW', []),
        ('View3D Zoom Modal', 'EMPTY', 'WINDOW', []),
        ('View3D Dolly Modal', 'EMPTY', 'WINDOW', []),

        # toolbar and properties
        ('3D View Generic', 'VIEW_3D', 'WINDOW', [
            _km_expand_from_toolsystem('VIEW_3D', None),
        ]),
    ]),

    ('Graph Editor', 'GRAPH_EDITOR', 'WINDOW', [
        ('Graph Editor Generic', 'GRAPH_EDITOR', 'WINDOW', []),
    ]),
    ('Dopesheet', 'DOPESHEET_EDITOR', 'WINDOW', [
        ('Dopesheet Generic', 'DOPESHEET_EDITOR', 'WINDOW', []),
    ]),
    ('NLA Editor', 'NLA_EDITOR', 'WINDOW', [
        ('NLA Channels', 'NLA_EDITOR', 'WINDOW', []),
        ('NLA Generic', 'NLA_EDITOR', 'WINDOW', []),
    ]),
    ('Timeline', 'TIMELINE', 'WINDOW', []),

    ('Image', 'IMAGE_EDITOR', 'WINDOW', [
        # Image (reverse order, UVEdit before Image).
        ('UV Editor', 'EMPTY', 'WINDOW', [
            _km_expand_from_toolsystem('IMAGE_EDITOR', 'UV'),
        ]),
        ('UV Sculpt', 'EMPTY', 'WINDOW', []),
        # Image and view3d.
        ('Image Paint', 'EMPTY', 'WINDOW', [
            _km_expand_from_toolsystem('IMAGE_EDITOR', 'PAINT'),
        ]),
        ('Image View', 'IMAGE_EDITOR', 'WINDOW', [
            _km_expand_from_toolsystem('IMAGE_EDITOR', 'VIEW'),
        ]),
        ('Image Generic', 'IMAGE_EDITOR', 'WINDOW', [
            _km_expand_from_toolsystem('IMAGE_EDITOR', None),
        ]),
    ]),

    ('Outliner', 'OUTLINER', 'WINDOW', []),

    ('Node Editor', 'NODE_EDITOR', 'WINDOW', [
        ('Node Generic', 'NODE_EDITOR', 'WINDOW', []),
    ]),
    ('SequencerCommon', 'SEQUENCE_EDITOR', 'WINDOW', [
        ('Sequencer', 'SEQUENCE_EDITOR', 'WINDOW', [
            _km_expand_from_toolsystem('SEQUENCE_EDITOR', 'SEQUENCER'),
        ]),
        ('SequencerPreview', 'SEQUENCE_EDITOR', 'WINDOW', [
            _km_expand_from_toolsystem('SEQUENCE_EDITOR', 'PREVIEW'),
        ]),
    ]),

    ('File Browser', 'FILE_BROWSER', 'WINDOW', [
        ('File Browser Main', 'FILE_BROWSER', 'WINDOW', []),
        ('File Browser Buttons', 'FILE_BROWSER', 'WINDOW', []),
    ]),

    ('Info', 'INFO', 'WINDOW', []),

    ('Property Editor', 'PROPERTIES', 'WINDOW', []),  # align context menu

    ('Text', 'TEXT_EDITOR', 'WINDOW', [
        ('Text Generic', 'TEXT_EDITOR', 'WINDOW', []),
    ]),
    ('Console', 'CONSOLE', 'WINDOW', []),
    ('Clip', 'CLIP_EDITOR', 'WINDOW', [
        ('Clip Editor', 'CLIP_EDITOR', 'WINDOW', []),
        ('Clip Graph Editor', 'CLIP_EDITOR', 'WINDOW', []),
        ('Clip Dopesheet Editor', 'CLIP_EDITOR', 'WINDOW', []),
    ]),

    ('Grease Pencil', 'EMPTY', 'WINDOW', [  # grease pencil stuff (per region)
        ('Grease Pencil Stroke Curve Edit Mode', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Edit Mode', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Paint (Draw brush)', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Paint (Fill)', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Paint (Erase)', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Paint (Tint)', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Paint Mode', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Sculpt Mode', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Sculpt (Smooth)', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Sculpt (Thickness)', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Sculpt (Strength)', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Sculpt (Grab)', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Sculpt (Push)', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Sculpt (Twist)', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Sculpt (Pinch)', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Sculpt (Randomize)', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Sculpt (Clone)', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Weight Mode', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Weight (Draw)', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Vertex Mode', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Vertex (Draw)', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Vertex (Blur)', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Vertex (Average)', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Vertex (Smear)', 'EMPTY', 'WINDOW', []),
        ('Grease Pencil Stroke Vertex (Replace)', 'EMPTY', 'WINDOW', []),
    ]),
    ('Mask Editing', 'EMPTY', 'WINDOW', []),
    ('Frames', 'EMPTY', 'WINDOW', []),  # frame navigation (per region)
    ('Markers', 'EMPTY', 'WINDOW', []),  # markers (per region)
    ('Animation', 'EMPTY', 'WINDOW', []),  # frame change on click, preview range (per region)
    ('Animation Channels', 'EMPTY', 'WINDOW', []),

    ('View3D Gesture Circle', 'EMPTY', 'WINDOW', []),
    ('Gesture Straight Line', 'EMPTY', 'WINDOW', []),
    ('Gesture Zoom Border', 'EMPTY', 'WINDOW', []),
    ('Gesture Box', 'EMPTY', 'WINDOW', []),

    ('Standard Modal Map', 'EMPTY', 'WINDOW', []),
    ('Transform Modal Map', 'EMPTY', 'WINDOW', []),
    ('Eyedropper Modal Map', 'EMPTY', 'WINDOW', []),
    ('Eyedropper ColorRamp PointSampling Map', 'EMPTY', 'WINDOW', []),
]
48  scripts/modules/bl_keymap_utils/platform_helpers.py  Normal file
@@ -0,0 +1,48 @@
# SPDX-License-Identifier: GPL-2.0-or-later


def keyconfig_data_oskey_from_ctrl(keyconfig_data_src, *, filter_fn=None):
    keyconfig_data_dst = []
    for km_name, km_parms, km_items_data_src in keyconfig_data_src:
        km_items_data_dst = km_items_data_src.copy()
        items_dst = []
        km_items_data_dst["items"] = items_dst
        for item_src in km_items_data_src["items"]:
            item_op, item_event, item_prop = item_src
            if "ctrl" in item_event:
                if filter_fn is None or filter_fn(item_event):
                    item_event = item_event.copy()
                    item_event["oskey"] = item_event["ctrl"]
                    del item_event["ctrl"]
                    items_dst.append((item_op, item_event, item_prop))
            items_dst.append(item_src)
        keyconfig_data_dst.append((km_name, km_parms, km_items_data_dst))
    return keyconfig_data_dst


def keyconfig_data_oskey_from_ctrl_for_macos(keyconfig_data_src):
    """Use for apple since Cmd is typically used in-place of Ctrl."""
    def filter_fn(item_event):
        if item_event.get("ctrl"):
            event_type = item_event["type"]
            # Ctrl-{Key}
            if (event_type in {
                    'H',
                    'M',
                    'SPACE',
                    'W',
                    'ACCENT_GRAVE',
                    'PERIOD',
                    'TAB',
            }):
                if (not item_event.get("alt")) and (not item_event.get("shift")):
                    return False
            # Ctrl-Alt-{Key}
            if (event_type in {
                    'Q',
            }):
                if item_event.get("alt") and (not item_event.get("shift")):
                    return False
        return True

    return keyconfig_data_oskey_from_ctrl(keyconfig_data_src, filter_fn=filter_fn)
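# A sketch of the transformation (data follows the io.py export format;
# values illustrative):
#
#     src = [("Window", {"space_type": 'EMPTY', "region_type": 'WINDOW'},
#             {"items": [("wm.search_menu",
#                         {"type": 'F', "value": 'PRESS', "ctrl": True}, None)]})]
#     dst = keyconfig_data_oskey_from_ctrl(src)
#     # dst's item list holds the oskey (Cmd) copy first, then the
#     # original Ctrl binding.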
63  scripts/modules/bl_keymap_utils/versioning.py  Normal file
@@ -0,0 +1,63 @@
# SPDX-License-Identifier: GPL-2.0-or-later

# Update Blender version this key-map was written in:
#
# When the version is `(0, 0, 0)`, the key-map being loaded didn't contain any versioning information.
# This will be older than `(2, 92, 0)`.

def keyconfig_update(keyconfig_data, keyconfig_version):
    from bpy.app import version_file as blender_version
    if keyconfig_version >= blender_version:
        return keyconfig_data

    # Version the key-map.
    import copy
    # Only copy once.
    has_copy = False

    # Default repeat to false.
    if keyconfig_version <= (2, 92, 0):
        if not has_copy:
            keyconfig_data = copy.deepcopy(keyconfig_data)
            has_copy = True

        for _km_name, _km_parms, km_items_data in keyconfig_data:
            for (_item_op, item_event, _item_prop) in km_items_data["items"]:
                if item_event.get("value") == 'PRESS':
                    # Unfortunately we don't know the 'map_type' at this point.
                    # Setting repeat true on other kinds of events is harmless.
                    item_event["repeat"] = True

    if keyconfig_version <= (3, 2, 5):
        if not has_copy:
            keyconfig_data = copy.deepcopy(keyconfig_data)
            has_copy = True

        for _km_name, _km_parms, km_items_data in keyconfig_data:
            for (_item_op, item_event, _item_prop) in km_items_data["items"]:
                if ty_new := {
                        'EVT_TWEAK_L': 'LEFTMOUSE',
                        'EVT_TWEAK_M': 'MIDDLEMOUSE',
                        'EVT_TWEAK_R': 'RIGHTMOUSE',
                }.get(item_event.get("type")):
                    item_event["type"] = ty_new
                    if (value := item_event["value"]) != 'ANY':
                        item_event["direction"] = value
                    item_event["value"] = 'CLICK_DRAG'

    if keyconfig_version <= (3, 2, 6):
        if not has_copy:
            keyconfig_data = copy.deepcopy(keyconfig_data)
            has_copy = True

        for _km_name, _km_parms, km_items_data in keyconfig_data:
            for (_item_op, item_event, _item_prop) in km_items_data["items"]:
                if ty_new := {
                        'NDOF_BUTTON_ESC': 'ESC',
                        'NDOF_BUTTON_ALT': 'LEFT_ALT',
                        'NDOF_BUTTON_SHIFT': 'LEFT_SHIFT',
                        'NDOF_BUTTON_CTRL': 'LEFT_CTRL',
                }.get(item_event.get("type")):
                    item_event["type"] = ty_new

    return keyconfig_data
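A short sketch of how this versioning hook is meant to be driven: pass the key-map data together with the Blender version it was saved in, and outdated items are upgraded in a copy (the tweak item below is illustrative):

from bl_keymap_utils.versioning import keyconfig_update

# Illustrative pre-3.2.6 data: a tweak event type that no longer exists.
keyconfig_data = [
    ("3D View", {"space_type": 'VIEW_3D', "region_type": 'WINDOW'}, {
        "items": [("transform.translate", {"type": 'EVT_TWEAK_L', "value": 'ANY'}, None)],
    }),
]

updated = keyconfig_update(keyconfig_data, (3, 0, 0))
# The tweak item becomes {'type': 'LEFTMOUSE', 'value': 'CLICK_DRAG'} in the returned copy.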
531
scripts/modules/bl_previews_utils/bl_previews_render.py
Normal file
@@ -0,0 +1,531 @@
# SPDX-License-Identifier: GPL-2.0-or-later

# Generate/clear previews for the data-blocks of the current blend file.
# Note: This script is meant to be used from inside Blender!

import os

import bpy
from mathutils import (
    Euler,
    Matrix,
    Vector,
)


OBJECT_TYPES_RENDER = {'MESH', 'CURVE', 'SURFACE', 'META', 'FONT'}


def ids_nolib(bids):
    return (bid for bid in bids if not bid.library)


def ids_nolib_with_preview(bids):
    return (bid for bid in bids if (not bid.library and bid.preview))


def rna_backup_gen(data, include_props=None, exclude_props=None, root=()):
    # only writable properties...
    for p in data.bl_rna.properties:
        pid = p.identifier
        if pid == "rna_type" or pid == "original":
            continue
        path = root + (pid,)
        if include_props is not None and path not in include_props:
            continue
        if exclude_props is not None and path in exclude_props:
            continue
        val = getattr(data, pid)
        if val is not None and p.type == 'POINTER':
            # recurse!
            yield from rna_backup_gen(val, include_props, exclude_props, root=path)
        elif data.is_property_readonly(pid):
            continue
        else:
            yield path, val


def rna_backup_restore(data, backup):
    for path, val in backup:
        dt = data
        for pid in path[:-1]:
            dt = getattr(dt, pid)
        setattr(dt, path[-1], val)


def do_previews(do_objects, do_collections, do_scenes, do_data_intern):
    import collections

    # Helpers.
    RenderContext = collections.namedtuple("RenderContext", (
        "scene", "world", "camera", "light", "camera_data", "light_data", "image",  # All those are names!
        "backup_scene", "backup_world", "backup_camera", "backup_light", "backup_camera_data", "backup_light_data",
    ))

    RENDER_PREVIEW_SIZE = bpy.app.render_preview_size

    def render_context_create(engine, objects_ignored):
        if engine == '__SCENE':
            backup_scene, backup_world, backup_camera, backup_light, backup_camera_data, backup_light_data = [()] * 6
            scene = bpy.context.window.scene
            exclude_props = {('world',), ('camera',), ('tool_settings',), ('preview',)}
            backup_scene = tuple(rna_backup_gen(scene, exclude_props=exclude_props))
            world = scene.world
            camera = scene.camera
            if camera:
                camera_data = camera.data
            else:
                backup_camera, backup_camera_data = [None] * 2
                camera_data = bpy.data.cameras.new("TEMP_preview_render_camera")
                camera = bpy.data.objects.new("TEMP_preview_render_camera", camera_data)
                camera.rotation_euler = Euler((1.1635528802871704, 0.0, 0.7853981852531433), 'XYZ')  # (66.67, 0, 45)
                scene.camera = camera
                scene.collection.objects.link(camera)
            # TODO: add light if none found in scene?
            light = None
            light_data = None
        else:
            backup_scene, backup_world, backup_camera, backup_light, backup_camera_data, backup_light_data = [None] * 6

            scene = bpy.data.scenes.new("TEMP_preview_render_scene")
            world = bpy.data.worlds.new("TEMP_preview_render_world")
            camera_data = bpy.data.cameras.new("TEMP_preview_render_camera")
            camera = bpy.data.objects.new("TEMP_preview_render_camera", camera_data)
            light_data = bpy.data.lights.new("TEMP_preview_render_light", 'SPOT')
            light = bpy.data.objects.new("TEMP_preview_render_light", light_data)

            objects_ignored.add((camera.name, light.name))

            scene.world = world

            camera.rotation_euler = Euler((1.1635528802871704, 0.0, 0.7853981852531433), 'XYZ')  # (66.67, 0, 45)
            scene.camera = camera
            scene.collection.objects.link(camera)

            light.rotation_euler = Euler((0.7853981852531433, 0.0, 1.7453292608261108), 'XYZ')  # (45, 0, 100)
            light_data.falloff_type = 'CONSTANT'
            light_data.spot_size = 1.0471975803375244  # 60
            scene.collection.objects.link(light)

            scene.render.engine = 'CYCLES'
            scene.render.film_transparent = True
            # TODO: define Cycles world?

        scene.render.image_settings.file_format = 'PNG'
        scene.render.image_settings.color_depth = '8'
        scene.render.image_settings.color_mode = 'RGBA'
        scene.render.image_settings.compression = 25
        scene.render.resolution_x = RENDER_PREVIEW_SIZE
        scene.render.resolution_y = RENDER_PREVIEW_SIZE
        scene.render.resolution_percentage = 100
        scene.render.filepath = os.path.join(bpy.app.tempdir, 'TEMP_preview_render.png')
        scene.render.use_overwrite = True
        scene.render.use_stamp = False
        scene.render.threads_mode = 'AUTO'

        image = bpy.data.images.new("TEMP_render_image", RENDER_PREVIEW_SIZE, RENDER_PREVIEW_SIZE, alpha=True)
        image.source = 'FILE'
        image.filepath = scene.render.filepath

        return RenderContext(
            scene.name, world.name if world else None, camera.name, light.name if light else None,
            camera_data.name, light_data.name if light_data else None, image.name,
            backup_scene, backup_world, backup_camera, backup_light, backup_camera_data, backup_light_data,
        )

    def render_context_delete(render_context):
        # We use try/except blocks here to avoid crashes, too many things can go wrong, and we want to leave the
        # current .blend as clean as possible!
        success = True

        scene = bpy.data.scenes[render_context.scene, None]
        try:
            if render_context.backup_scene is None:
                scene.world = None
                scene.camera = None
                if render_context.camera:
                    scene.collection.objects.unlink(bpy.data.objects[render_context.camera, None])
                if render_context.light:
                    scene.collection.objects.unlink(bpy.data.objects[render_context.light, None])
                bpy.data.scenes.remove(scene, do_unlink=True)
                scene = None
            else:
                rna_backup_restore(scene, render_context.backup_scene)
        except Exception as e:
            print("ERROR:", e)
            success = False

        if render_context.world is not None:
            try:
                world = bpy.data.worlds[render_context.world, None]
                if render_context.backup_world is None:
                    if scene is not None:
                        scene.world = None
                    world.user_clear()
                    bpy.data.worlds.remove(world)
                else:
                    rna_backup_restore(world, render_context.backup_world)
            except Exception as e:
                print("ERROR:", e)
                success = False

        if render_context.camera:
            try:
                camera = bpy.data.objects[render_context.camera, None]
                if render_context.backup_camera is None:
                    if scene is not None:
                        scene.camera = None
                        scene.collection.objects.unlink(camera)
                    camera.user_clear()
                    bpy.data.objects.remove(camera)
                    bpy.data.cameras.remove(bpy.data.cameras[render_context.camera_data, None])
                else:
                    rna_backup_restore(camera, render_context.backup_camera)
                    rna_backup_restore(bpy.data.cameras[render_context.camera_data, None],
                                       render_context.backup_camera_data)
            except Exception as e:
                print("ERROR:", e)
                success = False

        if render_context.light:
            try:
                light = bpy.data.objects[render_context.light, None]
                if render_context.backup_light is None:
                    if scene is not None:
                        scene.collection.objects.unlink(light)
                    light.user_clear()
                    bpy.data.objects.remove(light)
                    bpy.data.lights.remove(bpy.data.lights[render_context.light_data, None])
                else:
                    rna_backup_restore(light, render_context.backup_light)
                    rna_backup_restore(bpy.data.lights[render_context.light_data, None],
                                       render_context.backup_light_data)
            except Exception as e:
                print("ERROR:", e)
                success = False

        try:
            image = bpy.data.images[render_context.image, None]
            image.user_clear()
            bpy.data.images.remove(image)
        except Exception as e:
            print("ERROR:", e)
            success = False

        return success

    def object_bbox_merge(bbox, ob, ob_space, offset_matrix):
        # Take collection instances into account (including linked ones in this case).
        if ob.type == 'EMPTY' and ob.instance_type == 'COLLECTION':
            grp_objects = tuple((ob.name, ob.library.filepath if ob.library else None)
                                for ob in ob.instance_collection.all_objects)
            if (len(grp_objects) == 0):
                ob_bbox = ob.bound_box
            else:
                coords = objects_bbox_calc(ob_space, grp_objects,
                                           Matrix.Translation(ob.instance_collection.instance_offset).inverted())
                ob_bbox = ((coords[0], coords[1], coords[2]), (coords[21], coords[22], coords[23]))
        elif ob.bound_box:
            ob_bbox = ob.bound_box
        else:
            ob_bbox = ((-ob.scale.x, -ob.scale.y, -ob.scale.z), (ob.scale.x, ob.scale.y, ob.scale.z))

        for v in ob_bbox:
            v = offset_matrix @ Vector(v) if offset_matrix is not None else Vector(v)
            v = ob_space.matrix_world.inverted() @ ob.matrix_world @ v
            if bbox[0].x > v.x:
                bbox[0].x = v.x
            if bbox[0].y > v.y:
                bbox[0].y = v.y
            if bbox[0].z > v.z:
                bbox[0].z = v.z
            if bbox[1].x < v.x:
                bbox[1].x = v.x
            if bbox[1].y < v.y:
                bbox[1].y = v.y
            if bbox[1].z < v.z:
                bbox[1].z = v.z

    def objects_bbox_calc(camera, objects, offset_matrix):
        bbox = (Vector((1e24, 1e24, 1e24)), Vector((-1e24, -1e24, -1e24)))
        for obname, libpath in objects:
            ob = bpy.data.objects[obname, libpath]
            object_bbox_merge(bbox, ob, camera, offset_matrix)
        # Our bbox has been generated in camera local space, bring it back into world space.
        bbox[0][:] = camera.matrix_world @ bbox[0]
        bbox[1][:] = camera.matrix_world @ bbox[1]
        cos = (
            bbox[0].x, bbox[0].y, bbox[0].z,
            bbox[0].x, bbox[0].y, bbox[1].z,
            bbox[0].x, bbox[1].y, bbox[0].z,
            bbox[0].x, bbox[1].y, bbox[1].z,
            bbox[1].x, bbox[0].y, bbox[0].z,
            bbox[1].x, bbox[0].y, bbox[1].z,
            bbox[1].x, bbox[1].y, bbox[0].z,
            bbox[1].x, bbox[1].y, bbox[1].z,
        )
        return cos

    def preview_render_do(render_context, item_container, item_name, objects, offset_matrix=None):
        # Unused.
        # scene = bpy.data.scenes[render_context.scene, None]
        if objects is not None:
            camera = bpy.data.objects[render_context.camera, None]
            light = bpy.data.objects[render_context.light, None] if render_context.light is not None else None
            cos = objects_bbox_calc(camera, objects, offset_matrix)
            depsgraph = bpy.context.evaluated_depsgraph_get()
            loc, _ortho_scale = camera.camera_fit_coords(depsgraph, cos)
            camera.location = loc
            # Set camera clipping according to the computed bbox.
            min_dist = 1e24
            max_dist = -1e24
            for co in zip(*(iter(cos),) * 3):
                dist = (Vector(co) - loc).length
                if dist < min_dist:
                    min_dist = dist
                if dist > max_dist:
                    max_dist = dist
            camera.data.clip_start = min_dist / 2
            camera.data.clip_end = max_dist * 2
            if light:
                loc, _ortho_scale = light.camera_fit_coords(depsgraph, cos)
                light.location = loc
            bpy.context.view_layer.update()

        bpy.ops.render.render(write_still=True)

        image = bpy.data.images[render_context.image, None]
        item = getattr(bpy.data, item_container)[item_name, None]
        image.reload()
        preview = item.preview_ensure()
        preview.image_size = (RENDER_PREVIEW_SIZE, RENDER_PREVIEW_SIZE)
        preview.image_pixels_float[:] = image.pixels

    # And now, main code!
    do_save = True

    if do_data_intern:
        bpy.ops.wm.previews_clear(id_type={'SHADING'})
        bpy.ops.wm.previews_ensure()

    render_contexts = {}

    objects_ignored = set()
    collections_ignored = set()

    prev_scenename = bpy.context.window.scene.name

    if do_objects:
        prev_shown = {ob.name: ob.hide_render for ob in ids_nolib(bpy.data.objects)}
        for ob in ids_nolib(bpy.data.objects):
            if ob in objects_ignored:
                continue
            ob.hide_render = True
        for root in ids_nolib(bpy.data.objects):
            if root.name in objects_ignored:
                continue
            if root.type not in OBJECT_TYPES_RENDER:
                continue
            objects = ((root.name, None),)

            render_context = render_contexts.get('CYCLES', None)
            if render_context is None:
                render_context = render_context_create('CYCLES', objects_ignored)
                render_contexts['CYCLES'] = render_context

            scene = bpy.data.scenes[render_context.scene, None]
            bpy.context.window.scene = scene

            for obname, libpath in objects:
                ob = bpy.data.objects[obname, libpath]
                if obname not in scene.objects:
                    scene.collection.objects.link(ob)
                ob.hide_render = False
            bpy.context.view_layer.update()

            preview_render_do(render_context, 'objects', root.name, objects)

            # XXX Hyper Super Uber Suspicious Hack!
            #     Without this, on Windows builds, the script excepts with the following message:
            #         Traceback (most recent call last):
            #             File "<string>", line 1, in <module>
            #             File "<string>", line 451, in <module>
            #             File "<string>", line 443, in main
            #             File "<string>", line 327, in do_previews
            #         OverflowError: Python int too large to convert to C long
            #     ... :(
            scene = bpy.data.scenes[render_context.scene, None]
            for obname, libpath in objects:
                ob = bpy.data.objects[obname, libpath]
                scene.collection.objects.unlink(ob)
                ob.hide_render = True

        for ob in ids_nolib(bpy.data.objects):
            is_rendered = prev_shown.get(ob.name, ...)
            if is_rendered is not ...:
                ob.hide_render = is_rendered

    if do_collections:
        for grp in ids_nolib(bpy.data.collections):
            if grp.name in collections_ignored:
                continue
            # Here too, we do want to keep linked objects that are members of a local collection...
            objects = tuple((ob.name, ob.library.filepath if ob.library else None) for ob in grp.objects)

            render_context = render_contexts.get('CYCLES', None)
            if render_context is None:
                render_context = render_context_create('CYCLES', objects_ignored)
                render_contexts['CYCLES'] = render_context

            scene = bpy.data.scenes[render_context.scene, None]
            bpy.context.window.scene = scene

            bpy.ops.object.collection_instance_add(collection=grp.name)
            grp_ob = next((
                ob for ob in scene.objects
                if ob.instance_collection and ob.instance_collection.name == grp.name
            ))
            grp_obname = grp_ob.name
            bpy.context.view_layer.update()

            offset_matrix = Matrix.Translation(grp.instance_offset).inverted()

            preview_render_do(render_context, 'collections', grp.name, objects, offset_matrix)

            scene = bpy.data.scenes[render_context.scene, None]
            scene.collection.objects.unlink(bpy.data.objects[grp_obname, None])

    bpy.context.window.scene = bpy.data.scenes[prev_scenename, None]
    for render_context in render_contexts.values():
        if not render_context_delete(render_context):
            do_save = False  # Do not save file if something went wrong here, we could 'pollute' it with temp data...

    if do_scenes:
        for scene in ids_nolib(bpy.data.scenes):
            has_camera = scene.camera is not None
            bpy.context.window.scene = scene
            render_context = render_context_create('__SCENE', objects_ignored)
            bpy.context.view_layer.update()

            objects = None
            if not has_camera:
                # We had to add a temp camera, now we need to place it to see interesting objects!
                objects = tuple((ob.name, ob.library.filepath if ob.library else None) for ob in scene.objects
                                if (not ob.hide_render) and (ob.type in OBJECT_TYPES_RENDER))

            preview_render_do(render_context, 'scenes', scene.name, objects)

            if not render_context_delete(render_context):
                do_save = False

    bpy.context.window.scene = bpy.data.scenes[prev_scenename, None]
    if do_save:
        print("Saving %s..." % bpy.data.filepath)
        try:
            bpy.ops.wm.save_mainfile()
        except Exception as e:
            # Might fail in some odd cases, like e.g. in regression files we have glsl/ram_glsl.blend which
            # references a non-existent texture... Better not break in this case, just spit the error to the console.
            print("ERROR:", e)
    else:
        print("*NOT* Saving %s, because some error(s) happened while deleting temp render data..." % bpy.data.filepath)


def do_clear_previews(do_objects, do_collections, do_scenes, do_data_intern):
    if do_data_intern:
        bpy.ops.wm.previews_clear(id_type={'SHADING'})

    if do_objects:
        for ob in ids_nolib_with_preview(bpy.data.objects):
            ob.preview.image_size = (0, 0)

    if do_collections:
        for grp in ids_nolib_with_preview(bpy.data.collections):
            grp.preview.image_size = (0, 0)

    if do_scenes:
        for scene in ids_nolib_with_preview(bpy.data.scenes):
            scene.preview.image_size = (0, 0)

    print("Saving %s..." % bpy.data.filepath)
    bpy.ops.wm.save_mainfile()


def main():
    try:
        import bpy
    except ImportError:
        print("This script must run from inside blender")
        return

    import sys
    import argparse

    # Get rid of Blender args!
    argv = sys.argv[sys.argv.index("--") + 1:] if "--" in sys.argv else []

    parser = argparse.ArgumentParser(
        description="Use Blender to generate previews for currently open Blender file's items.",
    )
    parser.add_argument(
        '--clear',
        default=False,
        action="store_true",
        help="Clear previews instead of generating them.",
    )
    parser.add_argument(
        '--no_backups',
        default=False,
        action="store_true",
        help="Do not generate a backup .blend1 file when saving processed ones.",
    )
    parser.add_argument(
        '--no_scenes',
        default=True,
        action="store_false",
        help="Do not generate/clear previews for scene IDs.",
    )
    parser.add_argument(
        '--no_collections',
        default=True,
        action="store_false",
        help="Do not generate/clear previews for collection IDs.",
    )
    parser.add_argument(
        '--no_objects',
        default=True,
        action="store_false",
        help="Do not generate/clear previews for object IDs.",
    )
    parser.add_argument(
        '--no_data_intern',
        default=True,
        action="store_false",
        help="Do not generate/clear previews for mat/tex/image/etc. IDs (those handled by core Blender code).",
    )
    args = parser.parse_args(argv)

    orig_save_version = bpy.context.preferences.filepaths.save_version
    if args.no_backups:
        bpy.context.preferences.filepaths.save_version = 0
    elif orig_save_version < 1:
        bpy.context.preferences.filepaths.save_version = 1

    if args.clear:
        print("clear!")
        do_clear_previews(do_objects=args.no_objects, do_collections=args.no_collections, do_scenes=args.no_scenes,
                          do_data_intern=args.no_data_intern)
    else:
        print("render!")
        do_previews(do_objects=args.no_objects, do_collections=args.no_collections, do_scenes=args.no_scenes,
                    do_data_intern=args.no_data_intern)

    # Not really necessary, but better be consistent.
    bpy.context.preferences.filepaths.save_version = orig_save_version


if __name__ == "__main__":
    print("\n\n *** Running %s *** \n" % __file__)
    print(" *** Blend file %s *** \n" % bpy.data.filepath)
    main()
    bpy.ops.wm.quit_blender()
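Since the script parses its own arguments after "--" (see main() above), one plausible way to drive it headless is via a Blender subprocess; the blender and .blend paths below are placeholders:

import subprocess

subprocess.run([
    "/path/to/blender", "--background", "library.blend",
    "--python", "scripts/modules/bl_previews_utils/bl_previews_render.py",
    "--",            # Everything after "--" goes to the script's argparse.
    "--no_backups",  # Skip .blend1 backups when re-saving.
], check=True)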
0
scripts/modules/bl_rna_utils/__init__.py
Normal file
74
scripts/modules/bl_rna_utils/data_path.py
Normal file
@@ -0,0 +1,74 @@
# SPDX-License-Identifier: GPL-2.0-or-later

__all__ = (
    "property_definition_from_data_path",
    "decompose_data_path",
)


class _TokenizeDataPath:
    """
    Class to split up tokens of a data-path.

    Note that almost all access generates new objects with additional paths,
    with the exception of iteration which is the intended way to access the resulting data."""
    __slots__ = (
        "data_path",
    )

    def __init__(self, attrs):
        self.data_path = attrs

    def __getattr__(self, attr):
        return _TokenizeDataPath(self.data_path + ((".%s" % attr),))

    def __getitem__(self, key):
        return _TokenizeDataPath(self.data_path + (("[%r]" % (key,)),))

    def __call__(self, *args, **kw):
        value_str = ", ".join([
            val for val in (
                ", ".join(repr(value) for value in args),
                ", ".join(["%s=%r" % (key, value) for key, value in kw.items()]),
            ) if val])
        return _TokenizeDataPath(self.data_path + ('(%s)' % value_str, ))

    def __iter__(self):
        return iter(self.data_path)


def decompose_data_path(data_path):
    """
    Return the components of a data path split into a list.
    """
    ns = {"base": _TokenizeDataPath(())}
    return list(eval("base" + data_path, ns, ns))


def property_definition_from_data_path(base, data_path):
    """
    Return an RNA property definition from an object and a data path.

    In Blender this is often used with ``context`` as the base and a
    path that it references, for example ``.space_data.lock_camera``.
    """
    data = decompose_data_path(data_path)
    while data and (not data[-1].startswith(".")):
        data.pop()

    if (not data) or (not data[-1].startswith(".")) or (len(data) < 2):
        return None

    data_path_head = "".join(data[:-1])
    data_path_tail = data[-1]

    value_head = eval("base" + data_path_head)
    value_head_rna = getattr(value_head, "bl_rna", None)
    if value_head_rna is None:
        return None

    value_tail = value_head.bl_rna.properties.get(data_path_tail[1:])
    if not value_tail:
        return None

    return value_tail
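A quick sketch of the tokenizer in action; the data path below is illustrative:

from bl_rna_utils.data_path import decompose_data_path

print(decompose_data_path('.scene.objects["Cube"].location'))
# -> ['.scene', '.objects', "['Cube']", '.location']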
0
scripts/modules/bl_ui_utils/__init__.py
Normal file
66
scripts/modules/bl_ui_utils/bug_report_url.py
Normal file
@@ -0,0 +1,66 @@
# SPDX-License-Identifier: GPL-2.0-or-later


def url_prefill_from_blender(*, addon_info=None):
    import bpy
    import gpu
    import struct
    import platform
    import urllib.parse
    import io

    fh = io.StringIO()

    fh.write("**System Information**\n")
    fh.write(
        "Operating system: %s %d Bits\n" % (
            platform.platform(),
            struct.calcsize("P") * 8,
        )
    )
    fh.write(
        "Graphics card: %s %s %s\n" % (
            gpu.platform.renderer_get(),
            gpu.platform.vendor_get(),
            gpu.platform.version_get(),
        )
    )
    fh.write(
        "\n"
        "**Blender Version**\n"
    )
    fh.write(
        "Broken: version: %s, branch: %s, commit date: %s %s, hash: `%s`\n" % (
            bpy.app.version_string,
            bpy.app.build_branch.decode('utf-8', 'replace'),
            bpy.app.build_commit_date.decode('utf-8', 'replace'),
            bpy.app.build_commit_time.decode('utf-8', 'replace'),
            bpy.app.build_hash.decode('ascii'),
        )
    )
    fh.write(
        "Worked: (newest version of Blender that worked as expected)\n"
    )
    if addon_info:
        fh.write(
            "\n"
            "**Addon Information**\n"
        )
        fh.write(addon_info)

    fh.write(
        "\n"
        "**Short description of error**\n"
        "[Please fill out a short description of the error here]\n"
        "\n"
        "**Exact steps for others to reproduce the error**\n"
        "[Please describe the exact steps needed to reproduce the issue]\n"
        "[Based on the default startup or an attached .blend file (as simple as possible)]\n"
        "\n"
    )

    form_number = 2 if addon_info else 1
    return (
        "https://developer.blender.org/maniphest/task/edit/form/%i?description=" % form_number +
        urllib.parse.quote(fh.getvalue())
    )
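For illustration, the helper is typically paired with Blender's URL-opening operator (run from inside Blender):

import bpy
from bl_ui_utils.bug_report_url import url_prefill_from_blender

bpy.ops.wm.url_open(url=url_prefill_from_blender())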
145
scripts/modules/blend_render_info.py
Executable file
@@ -0,0 +1,145 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0-or-later

# This module can get render info without running from inside Blender.
#
# This struct won't change according to Ton.
# Note that the size differs on 32/64bit.
#
# typedef struct BHead {
#     int code, len;
#     void *old;
#     int SDNAnr, nr;
# } BHead;

__all__ = (
    "read_blend_rend_chunk",
)


class RawBlendFileReader:
    """
    Return a file handle to the raw blend file data (abstracting compressed formats).
    """
    __slots__ = (
        # The path to load.
        "_filepath",
        # The base file handler or None (only set for compressed formats).
        "_blendfile_base",
        # The file handler to return to the caller (always uncompressed data).
        "_blendfile",
    )

    def __init__(self, filepath):
        self._filepath = filepath
        self._blendfile_base = None
        self._blendfile = None

    def __enter__(self):
        blendfile = open(self._filepath, "rb")
        blendfile_base = None
        head = blendfile.read(4)
        blendfile.seek(0)
        if head[0:2] == b'\x1f\x8b':  # GZIP magic.
            import gzip
            blendfile_base = blendfile
            blendfile = gzip.open(blendfile, "rb")
        elif head[0:4] == b'\x28\xb5\x2f\xfd':  # Z-standard magic.
            import zstandard
            blendfile_base = blendfile
            blendfile = zstandard.open(blendfile, "rb")

        self._blendfile_base = blendfile_base
        self._blendfile = blendfile

        return self._blendfile

    def __exit__(self, exc_type, exc_value, exc_traceback):
        self._blendfile.close()
        if self._blendfile_base is not None:
            self._blendfile_base.close()

        return False


def _read_blend_rend_chunk_from_file(blendfile, filepath):
    import struct
    import sys

    from os import SEEK_CUR

    head = blendfile.read(7)
    if head != b'BLENDER':
        sys.stderr.write("Not a blend file: %s\n" % filepath)
        return []

    is_64_bit = (blendfile.read(1) == b'-')

    # True for PPC, false for X86.
    is_big_endian = (blendfile.read(1) == b'V')

    # Now read the bhead chunk!
    blendfile.seek(3, SEEK_CUR)  # Skip the version.

    scenes = []

    sizeof_bhead = 24 if is_64_bit else 20

    # Should always be 4, but a malformed/corrupt file may be less.
    while (bhead_id := blendfile.read(4)) != b'ENDB':

        if len(bhead_id) != 4:
            sys.stderr.write("Unable to read until ENDB block (corrupt file): %s\n" % filepath)
            break

        sizeof_data_left = struct.unpack('>i' if is_big_endian else '<i', blendfile.read(4))[0]
        if sizeof_data_left < 0:
            # Very unlikely, but prevent other errors.
            sys.stderr.write("Negative block size found (corrupt file): %s\n" % filepath)
            break

        # 4 from the `head_id`, another 4 for the size of the BHEAD.
        sizeof_bhead_left = sizeof_bhead - 8

        # The remainder of the BHEAD struct is not used.
        blendfile.seek(sizeof_bhead_left, SEEK_CUR)

        if bhead_id == b'REND':
            # Now we want the scene name, start and end frame. This is 32bits long.
            start_frame, end_frame = struct.unpack('>2i' if is_big_endian else '<2i', blendfile.read(8))
            sizeof_data_left -= 8

            scene_name = blendfile.read(64)
            sizeof_data_left -= 64

            scene_name = scene_name[:scene_name.index(b'\0')]
            # It's possible old blend files are not UTF8 compliant, use `surrogateescape`.
            scene_name = scene_name.decode("utf8", errors='surrogateescape')

            scenes.append((start_frame, end_frame, scene_name))

        if sizeof_data_left > 0:
            blendfile.seek(sizeof_data_left, SEEK_CUR)
        elif sizeof_data_left < 0:
            # Very unlikely, but prevent attempting to further parse corrupt data.
            sys.stderr.write("Error calculating next block (corrupt file): %s\n" % filepath)
            break

    return scenes


def read_blend_rend_chunk(filepath):
    with RawBlendFileReader(filepath) as blendfile:
        return _read_blend_rend_chunk_from_file(blendfile, filepath)


def main():
    import sys

    for filepath in sys.argv[1:]:
        for value in read_blend_rend_chunk(filepath):
            print("%d %d %s" % value)


if __name__ == '__main__':
    main()
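A small sketch of the public entry point; "shot_010.blend" is a placeholder path:

from blend_render_info import read_blend_rend_chunk

for start_frame, end_frame, scene_name in read_blend_rend_chunk("shot_010.blend"):
    print("%s: frames %d-%d" % (scene_name, start_frame, end_frame))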
63
scripts/modules/bpy/__init__.py
Normal file
@@ -0,0 +1,63 @@
# SPDX-License-Identifier: GPL-2.0-or-later

"""
Give access to blender data and utility functions.
"""

__all__ = (
    "app",
    "context",
    "data",
    "ops",
    "path",
    "props",
    "types",
    "utils",
)


# internal blender C module
from _bpy import (
    app,
    context,
    data,
    msgbus,
    props,
    types,
)

# python modules
from . import (
    ops,
    path,
    utils,
)


def main():
    import sys

    # Possibly temp. addons path.
    from os.path import join, dirname
    sys.path.extend([
        join(dirname(dirname(dirname(__file__))), "addons", "modules"),
        join(utils.user_resource('SCRIPTS'), "addons", "modules"),
    ])

    # fake module to allow:
    #   from bpy.types import Panel
    sys.modules.update({
        "bpy.app": app,
        "bpy.app.handlers": app.handlers,
        "bpy.app.translations": app.translations,
        "bpy.types": types,
    })

    # Initializes Python classes.
    # (good place to run a profiler or trace).
    utils.load_scripts()


main()

del main
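The sys.modules entries registered above are what make submodule-style imports of the C-defined types work, for example:

from bpy.types import Panel  # Resolved through the "bpy.types" entry added to sys.modules.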
185
scripts/modules/bpy/ops.py
Normal file
@@ -0,0 +1,185 @@
# SPDX-License-Identifier: GPL-2.0-or-later

# for slightly faster access
from _bpy import ops as _ops_module

# op_add = _ops_module.add
_op_dir = _ops_module.dir
_op_poll = _ops_module.poll
_op_call = _ops_module.call
_op_as_string = _ops_module.as_string
_op_get_rna_type = _ops_module.get_rna_type
_op_get_bl_options = _ops_module.get_bl_options

_ModuleType = type(_ops_module)


# -----------------------------------------------------------------------------
# Callable Operator Wrapper

class _BPyOpsSubModOp:
    """
    Utility class to fake submodule operators.

    eg. bpy.ops.object.somefunc
    """

    __slots__ = ("_module", "_func")

    def _get_doc(self):
        idname = self.idname()
        sig = _op_as_string(self.idname())
        # XXX You never quite know what you get from bpy.types,
        # with operators... Operator and OperatorProperties
        # are shadowing each other, and not in the same way for
        # native ops and py ones! See #39158.
        # op_class = getattr(bpy.types, idname)
        op_class = _op_get_rna_type(idname)
        descr = op_class.description
        return "%s\n%s" % (sig, descr)

    @staticmethod
    def _parse_args(args):
        C_dict = None
        C_exec = 'EXEC_DEFAULT'
        C_undo = False

        is_dict = is_exec = is_undo = False

        for arg in args:
            if is_dict is False and isinstance(arg, dict):
                if is_exec is True or is_undo is True:
                    raise ValueError("dict arg must come first")
                C_dict = arg
                is_dict = True
            elif is_exec is False and isinstance(arg, str):
                if is_undo is True:
                    raise ValueError("string arg must come before the boolean")
                C_exec = arg
                is_exec = True
            elif is_undo is False and isinstance(arg, int):
                C_undo = arg
                is_undo = True
            else:
                raise ValueError("1-3 args execution context is supported")

        return C_dict, C_exec, C_undo

    @staticmethod
    def _view_layer_update(context):
        view_layer = context.view_layer
        if view_layer:  # None in background mode
            view_layer.update()
        else:
            import bpy
            for scene in bpy.data.scenes:
                for view_layer in scene.view_layers:
                    view_layer.update()

    __doc__ = property(_get_doc)

    def __init__(self, module, func):
        self._module = module
        self._func = func

    def poll(self, *args):
        C_dict, C_exec, _C_undo = _BPyOpsSubModOp._parse_args(args)
        return _op_poll(self.idname_py(), C_dict, C_exec)

    def idname(self):
        # submod.foo -> SUBMOD_OT_foo
        return self._module.upper() + "_OT_" + self._func

    def idname_py(self):
        return self._module + "." + self._func

    def __call__(self, *args, **kw):
        import bpy
        context = bpy.context

        # Get the operator from blender
        wm = context.window_manager

        # Run to account for any RNA values the user changes.
        # NOTE: We only update the active view-layer, since that's what
        # operators are supposed to operate on. There might be some
        # corner cases when an operator needs a full scene update though.
        _BPyOpsSubModOp._view_layer_update(context)

        if args:
            C_dict, C_exec, C_undo = _BPyOpsSubModOp._parse_args(args)
            ret = _op_call(self.idname_py(), C_dict, kw, C_exec, C_undo)
        else:
            ret = _op_call(self.idname_py(), None, kw)

        if 'FINISHED' in ret and context.window_manager == wm:
            _BPyOpsSubModOp._view_layer_update(context)

        return ret

    def get_rna_type(self):
        """Internal function for introspection"""
        return _op_get_rna_type(self.idname())

    @property
    def bl_options(self):
        return _op_get_bl_options(self.idname())

    def __repr__(self):  # useful display, repr(op)
        return _op_as_string(self.idname())

    def __str__(self):  # used for print(...)
        return ("<function bpy.ops.%s.%s at 0x%x'>" %
                (self._module, self._func, id(self)))


# -----------------------------------------------------------------------------
# Sub-Module Access

def _bpy_ops_submodule__getattr__(module, func):
    # Return a value from `bpy.ops.{module}.{func}`
    if func.startswith("__"):
        raise AttributeError(func)
    return _BPyOpsSubModOp(module, func)


def _bpy_ops_submodule__dir__(module):
    functions = set()
    module_upper = module.upper()

    for id_name in _op_dir():
        id_split = id_name.split("_OT_", 1)
        if len(id_split) == 2 and module_upper == id_split[0]:
            functions.add(id_split[1])

    return list(functions)


def _bpy_ops_submodule(module):
    result = _ModuleType("bpy.ops." + module)
    result.__getattr__ = lambda func: _bpy_ops_submodule__getattr__(module, func)
    result.__dir__ = lambda: _bpy_ops_submodule__dir__(module)
    return result


# -----------------------------------------------------------------------------
# Module Access

def __getattr__(module):
    # Return a value from `bpy.ops.{module}`.
    if module.startswith("__"):
        raise AttributeError(module)
    return _bpy_ops_submodule(module)


def __dir__():
    submodules = set()
    for id_name in _op_dir():
        id_split = id_name.split("_OT_", 1)

        if len(id_split) == 2:
            submodules.add(id_split[0].lower())
        else:
            submodules.add(id_split[0])

    return list(submodules)
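To make the dynamic dispatch above concrete, a sketch using a standard operator (run inside Blender):

import bpy

op = bpy.ops.mesh.primitive_cube_add  # A _BPyOpsSubModOp wrapper; nothing runs yet.
print(op.idname())                    # -> "MESH_OT_primitive_cube_add"
if op.poll():
    op('INVOKE_DEFAULT', size=2.0)    # The leading string is parsed by _parse_args() as C_exec.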
446
scripts/modules/bpy/path.py
Normal file
@@ -0,0 +1,446 @@
# SPDX-License-Identifier: GPL-2.0-or-later

"""
This module has a similar scope to os.path, containing utility
functions for dealing with paths in Blender.
"""

__all__ = (
    "abspath",
    "basename",
    "clean_name",
    "display_name",
    "display_name_to_filepath",
    "display_name_from_filepath",
    "ensure_ext",
    "extensions_image",
    "extensions_movie",
    "extensions_audio",
    "is_subdir",
    "module_names",
    "native_pathsep",
    "reduce_dirs",
    "relpath",
    "resolve_ncase",
)

import bpy as _bpy
import os as _os

from _bpy_path import (
    extensions_audio,
    extensions_movie,
    extensions_image,
)


def _getattr_bytes(var, attr):
    return var.path_resolve(attr, False).as_bytes()


def abspath(path, *, start=None, library=None):
    """
    Returns the absolute path relative to the current blend file
    using the "//" prefix.

    :arg start: Relative to this path,
       when not set the current filename is used.
    :type start: string or bytes
    :arg library: The library this path is from. This is only included for
       convenience, when the library is not None its path replaces *start*.
    :type library: :class:`bpy.types.Library`
    :return: The absolute path.
    :rtype: string
    """
    if isinstance(path, bytes):
        if path.startswith(b"//"):
            if library:
                start = _os.path.dirname(
                    abspath(_getattr_bytes(library, "filepath")))
            return _os.path.join(
                _os.path.dirname(_getattr_bytes(_bpy.data, "filepath"))
                if start is None else start,
                path[2:],
            )
    else:
        if path.startswith("//"):
            if library:
                start = _os.path.dirname(
                    abspath(library.filepath))
            return _os.path.join(
                _os.path.dirname(_bpy.data.filepath)
                if start is None else start,
                path[2:],
            )

    return path


def relpath(path, *, start=None):
    """
    Returns the path relative to the current blend file using the "//" prefix.

    :arg path: An absolute path.
    :type path: string or bytes
    :arg start: Relative to this path,
       when not set the current filename is used.
    :type start: string or bytes
    :return: The relative path.
    :rtype: string
    """
    if isinstance(path, bytes):
        if not path.startswith(b"//"):
            if start is None:
                start = _os.path.dirname(_getattr_bytes(_bpy.data, "filepath"))
            return b"//" + _os.path.relpath(path, start)
    else:
        if not path.startswith("//"):
            if start is None:
                start = _os.path.dirname(_bpy.data.filepath)
            return "//" + _os.path.relpath(path, start)

    return path


def is_subdir(path, directory):
    """
    Returns true if *path* is a subdirectory of *directory*.
    Both paths must be absolute.

    :arg path: An absolute path.
    :type path: string or bytes
    :return: Whether or not the path is a subdirectory.
    :rtype: boolean
    """
    from os.path import normpath, normcase, sep
    path = normpath(normcase(path))
    directory = normpath(normcase(directory))
    if len(path) > len(directory):
        sep = sep.encode('ascii') if isinstance(directory, bytes) else sep
        if path.startswith(directory.rstrip(sep) + sep):
            return True
    return False


def clean_name(name, *, replace="_"):
    """
    Returns a name with characters replaced that
    may cause problems under various circumstances,
    such as writing to a file.
    All characters besides A-Z/a-z, 0-9 are replaced with "_"
    or the *replace* argument if defined.

    :arg name: The path name.
    :type name: string or bytes
    :arg replace: The replacement for non-valid characters.
    :type replace: string
    :return: The cleaned name.
    :rtype: string
    """

    if replace != "_":
        if len(replace) != 1 or ord(replace) > 255:
            raise ValueError("Value must be a single ascii character")

    def maketrans_init():
        trans_cache = clean_name._trans_cache
        trans = trans_cache.get(replace)
        if trans is None:
            bad_chars = (
                0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
                0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
                0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
                0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
                0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
                0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2e, 0x2f, 0x3a,
                0x3b, 0x3c, 0x3d, 0x3e, 0x3f, 0x40, 0x5b, 0x5c,
                0x5d, 0x5e, 0x60, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f,
                0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
                0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
                0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
                0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
                0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
                0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf,
                0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7,
                0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf,
                0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
                0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf,
                0xd0, 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7,
                0xd8, 0xd9, 0xda, 0xdb, 0xdc, 0xdd, 0xde, 0xdf,
                0xe0, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7,
                0xe8, 0xe9, 0xea, 0xeb, 0xec, 0xed, 0xee, 0xef,
                0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7,
                0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe,
            )
            trans = str.maketrans({char: replace for char in bad_chars})
            trans_cache[replace] = trans
        return trans

    trans = maketrans_init()
    return name.translate(trans)


clean_name._trans_cache = {}


def _clean_utf8(name):
    if type(name) == bytes:
        return name.decode("utf8", "replace")
    else:
        return name.encode("utf8", "replace").decode("utf8")


_display_name_literals = {
    ":": "_colon_",
    "+": "_plus_",
    "/": "_slash_",
}


def display_name(name, *, has_ext=True, title_case=True):
    """
    Creates a display string from name to be used in menus and the user interface.
    Intended for use with filenames and module names.

    :arg name: The name to be used for displaying the user interface.
    :type name: string
    :arg has_ext: Remove file extension from name.
    :type has_ext: boolean
    :arg title_case: Convert lowercase names to title case.
    :type title_case: boolean
    :return: The display string.
    :rtype: string
    """

    if has_ext:
        name = _os.path.splitext(basename(name))[0]

    # string replacements
    for disp_value, file_value in _display_name_literals.items():
        name = name.replace(file_value, disp_value)

    # strip to allow underscore prefix
    # (when paths can't start with numbers for eg).
    name = name.replace("_", " ").lstrip(" ")

    if title_case and name.islower():
        name = name.lower().title()

    name = _clean_utf8(name)
    return name


def display_name_to_filepath(name):
    """
    Performs the reverse of display_name using literal versions of characters
    which aren't supported in a filepath.

    :arg name: The display name to convert.
    :type name: string
    :return: The file path.
    :rtype: string
    """
    for disp_value, file_value in _display_name_literals.items():
        name = name.replace(disp_value, file_value)
    return name


def display_name_from_filepath(name):
    """
    Returns the path stripped of directory and extension,
    ensured to be utf8 compatible.

    :arg name: The file path to convert.
    :type name: string
    :return: The display name.
    :rtype: string
    """

    name = _os.path.splitext(basename(name))[0]
    name = _clean_utf8(name)
    return name


def resolve_ncase(path):
    """
    Resolve a case insensitive path on a case sensitive system,
    returning a string with the path if found else return the original path.

    :arg path: The path name to resolve.
    :type path: string
    :return: The resolved path.
    :rtype: string
    """

    def _ncase_path_found(path):
        if not path or _os.path.exists(path):
            return path, True

        # filename may be a directory or a file
        filename = _os.path.basename(path)
        dirpath = _os.path.dirname(path)

        suffix = path[:0]  # "" but ensure byte/str match
        if not filename:  # dir ends with a slash?
            if len(dirpath) < len(path):
                suffix = path[:len(path) - len(dirpath)]

            filename = _os.path.basename(dirpath)
            dirpath = _os.path.dirname(dirpath)

        if not _os.path.exists(dirpath):
            if dirpath == path:
                return path, False

            dirpath, found = _ncase_path_found(dirpath)

            if not found:
                return path, False

        # at this point, the directory exists but not the file

        # we are expecting 'dirpath' to be a directory, but it could be a file
        if _os.path.isdir(dirpath):
            try:
                files = _os.listdir(dirpath)
            except PermissionError:
                # We might not have the permission to list dirpath...
                return path, False
        else:
            return path, False

        filename_low = filename.lower()
        f_iter_nocase = None

        for f_iter in files:
            if f_iter.lower() == filename_low:
                f_iter_nocase = f_iter
                break

        if f_iter_nocase:
            return _os.path.join(dirpath, f_iter_nocase) + suffix, True
        else:
            # can't find the right one, just return the path as is.
            return path, False

    ncase_path, found = _ncase_path_found(path)
    return ncase_path if found else path


def ensure_ext(filepath, ext, *, case_sensitive=False):
    """
    Return the path with the extension added if it is not already set.

    :arg filepath: The file path.
    :type filepath: string
    :arg ext: The extension to check for, can be a compound extension. Should
              start with a dot, such as '.blend' or '.tar.gz'.
    :type ext: string
    :arg case_sensitive: Check for matching case when comparing extensions.
    :type case_sensitive: boolean
    :return: The file path with the given extension.
    :rtype: string
    """

    if case_sensitive:
        if filepath.endswith(ext):
            return filepath
    else:
        if filepath[-len(ext):].lower().endswith(ext.lower()):
            return filepath

    return filepath + ext


def module_names(path, *, recursive=False):
    """
    Return a list of modules which can be imported from *path*.

    :arg path: a directory to scan.
    :type path: string
    :arg recursive: Also return submodule names for packages.
    :type recursive: bool
    :return: a list of string pairs (module_name, module_file).
    :rtype: list of strings
    """

    from os.path import join, isfile

    modules = []

    for filename in sorted(_os.listdir(path)):
        if filename == "modules":
            pass  # XXX, hard coded exception.
        elif filename.endswith(".py") and filename != "__init__.py":
            fullpath = join(path, filename)
            modules.append((filename[0:-3], fullpath))
        elif not filename.startswith("."):
            # Skip hidden files since they are used for version control.
            directory = join(path, filename)
            fullpath = join(directory, "__init__.py")
            if isfile(fullpath):
                modules.append((filename, fullpath))
                if recursive:
                    for mod_name, mod_path in module_names(directory, recursive=True):
                        modules.append(("%s.%s" % (filename, mod_name),
                                        mod_path,
                                        ))

    return modules


def basename(path):
    """
    Equivalent to ``os.path.basename``, but skips a "//" prefix.

    Use for Windows compatibility.

    :return: The base name of the given path.
    :rtype: string
    """
    return _os.path.basename(path[2:] if path[:2] in {"//", b"//"} else path)


def native_pathsep(path):
    """
    Replace the path separator with the systems native ``os.sep``.

    :arg path: The path to replace.
    :type path: string
    :return: The path with system native separators.
    :rtype: string
    """
    if type(path) is str:
        if _os.sep == "/":
            return path.replace("\\", "/")
        else:
            if path.startswith("//"):
                return "//" + path[2:].replace("/", "\\")
            else:
                return path.replace("/", "\\")
    else:  # bytes
        if _os.sep == "/":
            return path.replace(b"\\", b"/")
        else:
            if path.startswith(b"//"):
                return b"//" + path[2:].replace(b"/", b"\\")
            else:
                return path.replace(b"/", b"\\")


def reduce_dirs(dirs):
    """
    Given a sequence of directories, remove duplicates and
    any directories nested in one of the other paths.
    (Useful for recursive path searching).

    :arg dirs: Sequence of directory paths.
    :type dirs: sequence of strings
    :return: A unique list of paths.
    :rtype: list of strings
    """
    dirs = list({_os.path.normpath(_os.path.abspath(d)) for d in dirs})
    dirs.sort(key=lambda d: len(d))
    for i in range(len(dirs) - 1, -1, -1):
        for j in range(i):
            if len(dirs[i]) == len(dirs[j]):
                break
            elif is_subdir(dirs[i], dirs[j]):
                del dirs[i]
                break
    return dirs
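A brief sketch of the "//" (blend-file relative) convention shared by these helpers (run inside Blender; the texture path is a placeholder):

import bpy

print(bpy.path.abspath("//textures/wood.png"))  # Expands "//" against the open .blend's directory.
print(bpy.path.ensure_ext("render", ".png"))    # -> "render.png"
print(bpy.path.clean_name("my file+01.png"))    # -> "my_file_01_png"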
1110
scripts/modules/bpy/utils/__init__.py
Normal file
File diff suppressed because it is too large
137
scripts/modules/bpy/utils/previews.py
Normal file
@@ -0,0 +1,137 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
"""
|
||||
This module contains utility functions to handle custom previews.
|
||||
|
||||
It behaves as a high-level 'cached' previews manager.
|
||||
|
||||
This allows scripts to generate their own previews, and use them as icons in UI widgets
|
||||
('icon_value' for UILayout functions).
|
||||
|
||||
|
||||
Custom Icon Example
|
||||
-------------------
|
||||
|
||||
.. literalinclude:: __/__/__/scripts/templates_py/ui_previews_custom_icon.py
|
||||
"""
|
||||
|
||||
__all__ = (
|
||||
"new",
|
||||
"remove",
|
||||
"ImagePreviewCollection",
|
||||
)
|
||||
|
||||
import _bpy
|
||||
_utils_previews = _bpy._utils_previews
|
||||
del _bpy
|
||||
|
||||
|
||||
_uuid_open = set()
|
||||
|
||||
|
||||
# High-level previews manager.
|
||||
# not accessed directly
|
||||
class ImagePreviewCollection(dict):
|
||||
"""
|
||||
Dictionary-like class of previews.
|
||||
|
||||
This is a subclass of Python's built-in dict type,
|
||||
used to store multiple image previews.
|
||||
|
||||
.. note::
|
||||
|
||||
- instance with :mod:`bpy.utils.previews.new`
|
||||
- keys must be ``str`` type.
|
||||
- values will be :class:`bpy.types.ImagePreview`
|
||||
"""
|
||||
|
||||
# Internal notes:
|
||||
# - Blender's internal 'PreviewImage' struct uses 'self._uuid' prefix.
|
||||
# - Blender's preview.new/load return the data if it exists,
|
||||
# don't do this for the Python API as it allows accidental re-use of names,
|
||||
# anyone who wants to reuse names can use dict.get() to check if it exists.
|
||||
# We could use this for the C API too (would need some investigation).
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self._uuid = hex(id(self))
|
||||
_uuid_open.add(self._uuid)
|
||||
|
||||
def __del__(self):
|
||||
if self._uuid not in _uuid_open:
|
||||
return
|
||||
|
||||
raise ResourceWarning(
|
||||
"%r: left open, remove with 'bpy.utils.previews.remove()'" % self
|
||||
)
|
||||
self.close()
|
||||
|
||||
def _gen_key(self, name):
|
||||
return ":".join((self._uuid, name))
|
||||
|
||||
def new(self, name):
|
||||
if name in self:
|
||||
raise KeyError("key %r already exists" % name)
|
||||
p = self[name] = _utils_previews.new(
|
||||
self._gen_key(name))
|
||||
return p
|
||||
new.__doc__ = _utils_previews.new.__doc__
|
||||
|
||||
def load(self, name, path, path_type, force_reload=False):
|
||||
if name in self:
|
||||
raise KeyError("key %r already exists" % name)
|
||||
p = self[name] = _utils_previews.load(
|
||||
self._gen_key(name), path, path_type, force_reload)
|
||||
return p
|
||||
load.__doc__ = _utils_previews.load.__doc__
|
||||
|
||||
def clear(self):
|
||||
"""Clear all previews."""
|
||||
for name in self.keys():
|
||||
_utils_previews.release(self._gen_key(name))
|
||||
super().clear()
|
||||
|
||||
def close(self):
|
||||
"""Close the collection and clear all previews."""
|
||||
self.clear()
|
||||
_uuid_open.remove(self._uuid)
|
||||
|
||||
def __delitem__(self, key):
|
||||
_utils_previews.release(self._gen_key(key))
|
||||
super().__delitem__(key)
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s id=%s[%d], %r>" % (
|
||||
self.__class__.__name__, self._uuid, len(self), super()
|
||||
)
|
||||
|
||||
|
||||
def new():
|
||||
"""
|
||||
:return: a new preview collection.
|
||||
:rtype: :class:`ImagePreviewCollection`
|
||||
"""
|
||||
|
||||
return ImagePreviewCollection()
|
||||
|
||||
|
||||
def remove(pcoll):
|
||||
"""
|
||||
Remove the specified previews collection.
|
||||
|
||||
:arg pcoll: Preview collection to close.
|
||||
:type pcoll: :class:`ImagePreviewCollection`
|
||||
"""
|
||||
pcoll.close()
|
||||
|
||||
|
||||
# don't complain about resources on exit (only unregister)
|
||||
import atexit
|
||||
|
||||
|
||||
def exit_clear_warning():
|
||||
del ImagePreviewCollection.__del__
|
||||
|
||||
|
||||
atexit.register(exit_clear_warning)
|
||||
del atexit, exit_clear_warning
|
||||
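A minimal usage sketch of the previews API above (not part of the commit; the icon name and file path are hypothetical):

import bpy
import bpy.utils.previews

pcoll = bpy.utils.previews.new()
# Load an image once; its icon_id can be passed as 'icon_value' to UILayout calls.
pcoll.load("my_icon", "/path/to/icon.png", 'IMAGE')  # hypothetical path
icon_id = pcoll["my_icon"].icon_id

# Release the collection when done (e.g. in an add-on's unregister()).
bpy.utils.previews.remove(pcoll)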
5
scripts/modules/bpy/utils/toolsystem.py
Normal file
@@ -0,0 +1,5 @@
# SPDX-License-Identifier: GPL-2.0-or-later

# Until we untangle ToolDef from bl_ui internals,
# use this module to document ToolDef.
from bl_ui.space_toolsystem_common import ToolDef
18
scripts/modules/bpy_extras/__init__.py
Normal file
@@ -0,0 +1,18 @@
# SPDX-License-Identifier: GPL-2.0-or-later

"""
Utility modules associated with the bpy module.
"""

__all__ = (
    "anim_utils",
    "asset_utils",
    "object_utils",
    "io_utils",
    "image_utils",
    "keyconfig_utils",
    "mesh_utils",
    "node_utils",
    "view3d_utils",
    "id_map_utils",
)
574
scripts/modules/bpy_extras/anim_utils.py
Normal file
@@ -0,0 +1,574 @@
# SPDX-License-Identifier: GPL-2.0-or-later

__all__ = (
    "bake_action",
    "bake_action_objects",

    "bake_action_iter",
    "bake_action_objects_iter",
)

import bpy
from bpy.types import Action

from typing import (
    List,
    Mapping,
    Sequence,
    Tuple,
)

FCurveKey = Tuple[
    # `fcurve.data_path`.
    str,
    # `fcurve.array_index`.
    int,
]

# List of `[frame0, value0, frame1, value1, ...]` pairs.
ListKeyframes = List[float]


def bake_action(
        obj,
        *,
        action, frames,
        **kwargs
):
    """
    :arg obj: Object to bake.
    :type obj: :class:`bpy.types.Object`
    :arg action: An action to bake the data into, or None for a new action
        to be created.
    :type action: :class:`bpy.types.Action` or None
    :arg frames: Frames to bake.
    :type frames: iterable of int

    :return: an action or None
    :rtype: :class:`bpy.types.Action`
    """
    if not (kwargs.get("do_pose") or kwargs.get("do_object")):
        return None

    action, = bake_action_objects(
        [(obj, action)],
        frames=frames,
        **kwargs,
    )
    return action
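A short usage sketch for bake_action (illustrative only; assumes the active object has animation worth baking):

import bpy
from bpy_extras import anim_utils

obj = bpy.context.object
# Bake object-level transforms over frames 1..100 into a fresh action.
baked = anim_utils.bake_action(
    obj,
    action=None,            # None creates a new action
    frames=range(1, 101),
    do_object=True,
    do_pose=False,
)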
def bake_action_objects(
        object_action_pairs,
        *,
        frames,
        **kwargs
):
    """
    A version of :func:`bake_action_objects_iter` that takes frames and returns the output.

    :arg frames: Frames to bake.
    :type frames: iterable of int

    :return: A sequence of Action or None types (aligned with `object_action_pairs`)
    :rtype: sequence of :class:`bpy.types.Action`
    """
    iter = bake_action_objects_iter(object_action_pairs, **kwargs)
    iter.send(None)
    for frame in frames:
        iter.send(frame)
    return iter.send(None)


def bake_action_objects_iter(
        object_action_pairs,
        **kwargs
):
    """
    A coroutine that bakes actions for multiple objects.

    :arg object_action_pairs: Sequence of object action tuples,
        action is the destination for the baked data. When None a new action will be created.
    :type object_action_pairs: Sequence of (:class:`bpy.types.Object`, :class:`bpy.types.Action`)
    """
    scene = bpy.context.scene
    frame_back = scene.frame_current
    iter_all = tuple(
        bake_action_iter(obj, action=action, **kwargs)
        for (obj, action) in object_action_pairs
    )
    for iter in iter_all:
        iter.send(None)
    while True:
        frame = yield None
        if frame is None:
            break
        scene.frame_set(frame)
        bpy.context.view_layer.update()
        for iter in iter_all:
            iter.send(frame)
    scene.frame_set(frame_back)
    yield tuple(iter.send(None) for iter in iter_all)
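The iterator variants follow a send-based coroutine protocol; a sketch of driving one by hand (illustrative, mirroring what bake_action_objects does internally):

import bpy
from bpy_extras import anim_utils

pairs = [(bpy.context.object, None)]
coro = anim_utils.bake_action_objects_iter(pairs, do_object=True, do_pose=False)
coro.send(None)            # prime the coroutine
for frame in range(1, 51):
    coro.send(frame)       # the coroutine itself calls scene.frame_set(frame)
actions = coro.send(None)  # None signals completion; returns the baked actions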
# XXX visual keying is actually always considered as True in this code...
def bake_action_iter(
        obj,
        *,
        action,
        only_selected=False,
        do_pose=True,
        do_object=True,
        do_visual_keying=True,
        do_constraint_clear=False,
        do_parents_clear=False,
        do_clean=False
):
    """
    A coroutine that bakes an action for a single object.

    :arg obj: Object to bake.
    :type obj: :class:`bpy.types.Object`
    :arg action: An action to bake the data into, or None for a new action
        to be created.
    :type action: :class:`bpy.types.Action` or None
    :arg only_selected: Only bake selected bones.
    :type only_selected: bool
    :arg do_pose: Bake pose channels.
    :type do_pose: bool
    :arg do_object: Bake objects.
    :type do_object: bool
    :arg do_visual_keying: Use the final transformations for baking ('visual keying')
    :type do_visual_keying: bool
    :arg do_constraint_clear: Remove constraints after baking.
    :type do_constraint_clear: bool
    :arg do_parents_clear: Unparent after baking objects.
    :type do_parents_clear: bool
    :arg do_clean: Remove redundant keyframes after baking.
    :type do_clean: bool

    :return: an action or None
    :rtype: :class:`bpy.types.Action`
    """
    # -------------------------------------------------------------------------
    # Helper Functions and vars

    # Note: BBONE_PROPS is a list so we can preserve the ordering
    BBONE_PROPS = [
        "bbone_curveinx", "bbone_curveoutx",
        "bbone_curveinz", "bbone_curveoutz",
        "bbone_rollin", "bbone_rollout",
        "bbone_scalein", "bbone_scaleout",
        "bbone_easein", "bbone_easeout",
    ]
    BBONE_PROPS_LENGTHS = {
        "bbone_curveinx": 1,
        "bbone_curveoutx": 1,
        "bbone_curveinz": 1,
        "bbone_curveoutz": 1,
        "bbone_rollin": 1,
        "bbone_rollout": 1,
        "bbone_scalein": 3,
        "bbone_scaleout": 3,
        "bbone_easein": 1,
        "bbone_easeout": 1,
    }

    def pose_frame_info(obj):
        matrix = {}
        bbones = {}
        for name, pbone in obj.pose.bones.items():
            if do_visual_keying:
                # Get the final transform of the bone in its own local space...
                matrix[name] = obj.convert_space(pose_bone=pbone, matrix=pbone.matrix,
                                                 from_space='POSE', to_space='LOCAL')
            else:
                matrix[name] = pbone.matrix_basis.copy()

            # Bendy Bones
            if pbone.bone.bbone_segments > 1:
                bbones[name] = {bb_prop: getattr(pbone, bb_prop) for bb_prop in BBONE_PROPS}
        return matrix, bbones

    if do_parents_clear:
        if do_visual_keying:
            def obj_frame_info(obj):
                return obj.matrix_world.copy()
        else:
            def obj_frame_info(obj):
                parent = obj.parent
                matrix = obj.matrix_basis
                if parent:
                    return parent.matrix_world @ matrix
                else:
                    return matrix.copy()
    else:
        if do_visual_keying:
            def obj_frame_info(obj):
                parent = obj.parent
                matrix = obj.matrix_world
                if parent:
                    return parent.matrix_world.inverted_safe() @ matrix
                else:
                    return matrix.copy()
        else:
            def obj_frame_info(obj):
                return obj.matrix_basis.copy()

    # -------------------------------------------------------------------------
    # Setup the Context

    if obj.pose is None:
        do_pose = False

    if not (do_pose or do_object):
        raise Exception("Pose and object baking is disabled, no action needed")

    pose_info = []
    obj_info = []

    # -------------------------------------------------------------------------
    # Collect transformations

    while True:
        # Caller is responsible for setting the frame and updating the scene.
        frame = yield None

        # Signal we're done!
        if frame is None:
            break

        if do_pose:
            pose_info.append((frame, *pose_frame_info(obj)))
        if do_object:
            obj_info.append((frame, obj_frame_info(obj)))

    # -------------------------------------------------------------------------
    # Clean (store initial data)
    if do_clean and action is not None:
        clean_orig_data = {fcu: {p.co[1] for p in fcu.keyframe_points} for fcu in action.fcurves}
    else:
        clean_orig_data = {}

    # -------------------------------------------------------------------------
    # Create action

    # in case animation data hasn't been created
    atd = obj.animation_data_create()
    is_new_action = action is None
    if is_new_action:
        action = bpy.data.actions.new("Action")

    # Only leave tweak mode if we actually need to modify the action (#57159)
    if action != atd.action:
        # Leave tweak mode before trying to modify the action (#48397)
        if atd.use_tweak_mode:
            atd.use_tweak_mode = False

        atd.action = action

    # Baking the action only makes sense in Replace mode, so force it (#69105)
    if not atd.use_tweak_mode:
        atd.action_blend_type = 'REPLACE'

    # -------------------------------------------------------------------------
    # Apply transformations to action

    # pose
    lookup_fcurves = {(fcurve.data_path, fcurve.array_index): fcurve for fcurve in action.fcurves}
    if do_pose:
        for name, pbone in obj.pose.bones.items():
            if only_selected and not pbone.bone.select:
                continue

            if do_constraint_clear:
                while pbone.constraints:
                    pbone.constraints.remove(pbone.constraints[0])

            # Create compatible eulers, quats.
            euler_prev = None
            quat_prev = None

            base_fcurve_path = pbone.path_from_id() + "."
            path_location = base_fcurve_path + "location"
            path_quaternion = base_fcurve_path + "rotation_quaternion"
            path_axis_angle = base_fcurve_path + "rotation_axis_angle"
            path_euler = base_fcurve_path + "rotation_euler"
            path_scale = base_fcurve_path + "scale"
            paths_bbprops = [(base_fcurve_path + bbprop) for bbprop in BBONE_PROPS]

            keyframes = KeyframesCo()
            keyframes.add_paths(path_location, 3)
            keyframes.add_paths(path_quaternion, 4)
            keyframes.add_paths(path_axis_angle, 4)
            keyframes.add_paths(path_euler, 3)
            keyframes.add_paths(path_scale, 3)

            if pbone.bone.bbone_segments > 1:
                for prop_name, path in zip(BBONE_PROPS, paths_bbprops):
                    keyframes.add_paths(path, BBONE_PROPS_LENGTHS[prop_name])

            rotation_mode = pbone.rotation_mode
            total_new_keys = len(pose_info)
            for (f, matrix, bbones) in pose_info:
                pbone.matrix_basis = matrix[name].copy()

                keyframes.extend_co_values(path_location, 3, f, pbone.location)

                if rotation_mode == 'QUATERNION':
                    if quat_prev is not None:
                        quat = pbone.rotation_quaternion.copy()
                        quat.make_compatible(quat_prev)
                        pbone.rotation_quaternion = quat
                        quat_prev = quat
                        del quat
                    else:
                        quat_prev = pbone.rotation_quaternion.copy()
                    keyframes.extend_co_values(path_quaternion, 4, f, pbone.rotation_quaternion)
                elif rotation_mode == 'AXIS_ANGLE':
                    keyframes.extend_co_values(path_axis_angle, 4, f, pbone.rotation_axis_angle)
                else:  # euler, XYZ, ZXY etc
                    if euler_prev is not None:
                        euler = pbone.matrix_basis.to_euler(pbone.rotation_mode, euler_prev)
                        pbone.rotation_euler = euler
                        del euler
                    euler_prev = pbone.rotation_euler.copy()
                    keyframes.extend_co_values(path_euler, 3, f, pbone.rotation_euler)

                keyframes.extend_co_values(path_scale, 3, f, pbone.scale)

                # Bendy Bones
                if pbone.bone.bbone_segments > 1:
                    bbone_shape = bbones[name]
                    for prop_index, prop_name in enumerate(BBONE_PROPS):
                        prop_len = BBONE_PROPS_LENGTHS[prop_name]
                        if prop_len > 1:
                            keyframes.extend_co_values(
                                paths_bbprops[prop_index], prop_len, f, bbone_shape[prop_name]
                            )
                        else:
                            keyframes.extend_co_value(
                                paths_bbprops[prop_index], f, bbone_shape[prop_name]
                            )

            if is_new_action:
                keyframes.insert_keyframes_into_new_action(total_new_keys, action, name)
            else:
                keyframes.insert_keyframes_into_existing_action(lookup_fcurves, total_new_keys, action, name)

    # object. TODO. multiple objects
    if do_object:
        if do_constraint_clear:
            while obj.constraints:
                obj.constraints.remove(obj.constraints[0])

        # Create compatible eulers, quats.
        euler_prev = None
        quat_prev = None

        path_location = "location"
        path_quaternion = "rotation_quaternion"
        path_axis_angle = "rotation_axis_angle"
        path_euler = "rotation_euler"
        path_scale = "scale"

        keyframes = KeyframesCo()
        keyframes.add_paths(path_location, 3)
        keyframes.add_paths(path_quaternion, 4)
        keyframes.add_paths(path_axis_angle, 4)
        keyframes.add_paths(path_euler, 3)
        keyframes.add_paths(path_scale, 3)

        rotation_mode = obj.rotation_mode
        total_new_keys = len(obj_info)
        for (f, matrix) in obj_info:
            name = "Action Bake"  # XXX: placeholder
            obj.matrix_basis = matrix

            keyframes.extend_co_values(path_location, 3, f, obj.location)

            if rotation_mode == 'QUATERNION':
                if quat_prev is not None:
                    quat = obj.rotation_quaternion.copy()
                    quat.make_compatible(quat_prev)
                    obj.rotation_quaternion = quat
                    quat_prev = quat
                    del quat
                else:
                    quat_prev = obj.rotation_quaternion.copy()
                keyframes.extend_co_values(path_quaternion, 4, f, obj.rotation_quaternion)

            elif rotation_mode == 'AXIS_ANGLE':
                keyframes.extend_co_values(path_axis_angle, 4, f, obj.rotation_axis_angle)
            else:  # euler, XYZ, ZXY etc
                if euler_prev is not None:
                    obj.rotation_euler = matrix.to_euler(obj.rotation_mode, euler_prev)
                euler_prev = obj.rotation_euler.copy()
                keyframes.extend_co_values(path_euler, 3, f, obj.rotation_euler)

            keyframes.extend_co_values(path_scale, 3, f, obj.scale)

        if is_new_action:
            keyframes.insert_keyframes_into_new_action(total_new_keys, action, name)
        else:
            keyframes.insert_keyframes_into_existing_action(lookup_fcurves, total_new_keys, action, name)

        if do_parents_clear:
            obj.parent = None

    # -------------------------------------------------------------------------
    # Clean

    if do_clean:
        for fcu in action.fcurves:
            fcu_orig_data = clean_orig_data.get(fcu, set())

            keyframe_points = fcu.keyframe_points
            i = 1
            while i < len(keyframe_points) - 1:
                val = keyframe_points[i].co[1]

                if val in fcu_orig_data:
                    i += 1
                    continue

                val_prev = keyframe_points[i - 1].co[1]
                val_next = keyframe_points[i + 1].co[1]

                if abs(val - val_prev) + abs(val - val_next) < 0.0001:
                    keyframe_points.remove(keyframe_points[i])
                else:
                    i += 1

    yield action


class KeyframesCo:
    """
    A buffer for keyframe Co unpacked values per ``FCurveKey``. ``FCurveKeys`` are added using
    ``add_paths()``, Co values stored using ``extend_co_values()``, then finally use
    ``insert_keyframes_into_*_action()`` for efficiently inserting keys into the F-curves.

    Users are limited to one Action Group per instance.
    """
    __slots__ = (
        "keyframes_from_fcurve",
    )

    # `keyframes[(rna_path, array_index)] = list(time0,value0, time1,value1,...)`.
    keyframes_from_fcurve: Mapping[FCurveKey, ListKeyframes]

    def __init__(self):
        self.keyframes_from_fcurve = {}

    def add_paths(
        self,
        rna_path: str,
        total_indices: int,
    ) -> None:
        keyframes_from_fcurve = self.keyframes_from_fcurve
        for array_index in range(0, total_indices):
            keyframes_from_fcurve[(rna_path, array_index)] = []

    def extend_co_values(
        self,
        rna_path: str,
        total_indices: int,
        frame: float,
        values: Sequence[float],
    ) -> None:
        keyframes_from_fcurve = self.keyframes_from_fcurve
        for array_index in range(0, total_indices):
            keyframes_from_fcurve[(rna_path, array_index)].extend((frame, values[array_index]))

    def extend_co_value(
        self,
        rna_path: str,
        frame: float,
        value: float,
    ) -> None:
        self.keyframes_from_fcurve[(rna_path, 0)].extend((frame, value))

    def insert_keyframes_into_new_action(
        self,
        total_new_keys: int,
        action: Action,
        action_group_name: str,
    ) -> None:
        """
        Assumes the action is new, that it has no F-curves. Otherwise, the only difference between versions is
        performance and implementation simplicity.

        :arg action_group_name: Name of Action Group that F-curves are added to.
        :type action_group_name: str
        """
        linear_enum_values = [
            bpy.types.Keyframe.bl_rna.properties["interpolation"].enum_items["LINEAR"].value
        ] * total_new_keys

        for fc_key, key_values in self.keyframes_from_fcurve.items():
            if len(key_values) == 0:
                continue

            data_path, array_index = fc_key
            keyframe_points = action.fcurves.new(
                data_path, index=array_index, action_group=action_group_name
            ).keyframe_points

            keyframe_points.add(total_new_keys)
            keyframe_points.foreach_set("co", key_values)
            keyframe_points.foreach_set("interpolation", linear_enum_values)

            # There's no need to do fcurve.update() because the keys are already ordered, have
            # no duplicates and all handles are Linear.

    def insert_keyframes_into_existing_action(
        self,
        lookup_fcurves: Mapping[FCurveKey, bpy.types.FCurve],
        total_new_keys: int,
        action: Action,
        action_group_name: str,
    ) -> None:
        """
        Assumes the action already exists, that it might already have F-curves. Otherwise, the
        only difference between versions is performance and implementation simplicity.

        :arg lookup_fcurves: This is only used for efficiency.
            It's a substitute for ``action.fcurves.find()`` which is a potentially expensive linear search.
        :type lookup_fcurves: ``Mapping[FCurveKey, bpy.types.FCurve]``
        :arg action_group_name: Name of Action Group that F-curves are added to.
        :type action_group_name: str
        """
        linear_enum_values = [
            bpy.types.Keyframe.bl_rna.properties["interpolation"].enum_items["LINEAR"].value
        ] * total_new_keys

        for fc_key, key_values in self.keyframes_from_fcurve.items():
            if len(key_values) == 0:
                continue

            fcurve = lookup_fcurves.get(fc_key, None)
            if fcurve is None:
                data_path, array_index = fc_key
                fcurve = action.fcurves.new(
                    data_path, index=array_index, action_group=action_group_name
                )

            keyframe_points = fcurve.keyframe_points

            co_buffer = [0] * (2 * len(keyframe_points))
            keyframe_points.foreach_get("co", co_buffer)
            co_buffer.extend(key_values)

            ipo_buffer = [None] * len(keyframe_points)
            keyframe_points.foreach_get("interpolation", ipo_buffer)
            ipo_buffer.extend(linear_enum_values)

            # XXX: Currently baking inserts the same number of keys for all baked properties.
            # This block of code breaks if that's no longer true since we then will not be properly
            # initializing all the data.
            keyframe_points.add(total_new_keys)
            keyframe_points.foreach_set("co", co_buffer)
            keyframe_points.foreach_set("interpolation", ipo_buffer)

            fcurve.update()
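A compact sketch of the KeyframesCo buffering pattern used above (illustrative; KeyframesCo is an internal helper not exported via __all__, and the action group name is hypothetical):

import bpy
from bpy_extras.anim_utils import KeyframesCo

keyframes = KeyframesCo()
keyframes.add_paths("location", 3)  # one buffer per array index
for frame in range(1, 11):
    keyframes.extend_co_values("location", 3, float(frame), (0.0, 1.0, 2.0))

action = bpy.data.actions.new("BufferedKeys")
# 10 keys per F-curve, inserted in one foreach_set pass per curve.
keyframes.insert_keyframes_into_new_action(10, action, "Object Transforms")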
52
scripts/modules/bpy_extras/asset_utils.py
Normal file
@@ -0,0 +1,52 @@
# SPDX-License-Identifier: GPL-2.0-or-later

"""
Helpers for asset management tasks.
"""

import bpy
from bpy.types import (
    Context,
)

__all__ = (
    "SpaceAssetInfo",
)


class SpaceAssetInfo:
    @classmethod
    def is_asset_browser(cls, space_data: bpy.types.Space):
        return space_data and space_data.type == 'FILE_BROWSER' and space_data.browse_mode == 'ASSETS'

    @classmethod
    def is_asset_browser_poll(cls, context: Context):
        return cls.is_asset_browser(context.space_data)

    @classmethod
    def get_active_asset(cls, context: Context):
        if hasattr(context, "active_file"):
            active_file = context.active_file
            return active_file.asset_data if active_file else None


class AssetBrowserPanel:
    bl_space_type = 'FILE_BROWSER'

    @classmethod
    def asset_browser_panel_poll(cls, context):
        return SpaceAssetInfo.is_asset_browser_poll(context)

    @classmethod
    def poll(cls, context):
        return cls.asset_browser_panel_poll(context)


class AssetMetaDataPanel:
    bl_space_type = 'FILE_BROWSER'
    bl_region_type = 'TOOL_PROPS'

    @classmethod
    def poll(cls, context):
        active_file = context.active_file
        return SpaceAssetInfo.is_asset_browser_poll(context) and active_file and active_file.asset_data
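A hedged sketch of how these mixins are typically combined with bpy.types.Panel (the class name and label are hypothetical):

import bpy
from bpy_extras.asset_utils import AssetBrowserPanel

class ASSETBROWSER_PT_example(bpy.types.Panel, AssetBrowserPanel):
    # Only draws when the file browser is in asset mode, via the mixin's poll().
    bl_label = "Example"            # hypothetical label
    bl_region_type = 'TOOL_PROPS'

    def draw(self, context):
        self.layout.label(text="Asset browser is active")

bpy.utils.register_class(ASSETBROWSER_PT_example)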
56
scripts/modules/bpy_extras/bmesh_utils.py
Normal file
@@ -0,0 +1,56 @@
# SPDX-License-Identifier: GPL-2.0-or-later

__all__ = (
    "bmesh_linked_uv_islands",
)

import bmesh


def match_uv(face, vert, uv, uv_layer):
    for loop in face.loops:
        if loop.vert == vert:
            return uv == loop[uv_layer].uv
    return False


def bmesh_linked_uv_islands(bm, uv_layer):
    """
    Returns lists of faces connected by UV islands.

    For meshes use :func:`bpy_extras.mesh_utils.mesh_linked_uv_islands` instead.

    :arg bm: the bmesh used to group with.
    :type bm: :class:`BMesh`
    :arg uv_layer: the UV layer to source UVs from.
    :type uv_layer: :class:`BMLayerItem`
    :return: list of lists containing faces.
    :rtype: list
    """

    result = []
    used = set()
    for seed_face in bm.faces:
        if seed_face in used:
            continue  # Face has already been processed.
        used.add(seed_face)
        island = [seed_face]
        stack = [seed_face]  # Faces still to consider on this island.
        while stack:
            current_face = stack.pop()
            for loop in current_face.loops:
                v = loop.vert
                uv = loop[uv_layer].uv
                for f in v.link_faces:
                    if f is current_face or f in used:
                        continue
                    if not match_uv(f, v, uv, uv_layer):
                        continue

                    # `f` is part of island, add to island and stack
                    used.add(f)
                    island.append(f)
                    stack.append(f)
        result.append(island)

    return result
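An illustrative usage sketch (assumes an active mesh object in Edit Mode with an active UV layer):

import bmesh
import bpy
from bpy_extras.bmesh_utils import bmesh_linked_uv_islands

bm = bmesh.from_edit_mesh(bpy.context.object.data)
uv_layer = bm.loops.layers.uv.active
islands = bmesh_linked_uv_islands(bm, uv_layer)
print("UV islands:", len(islands))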
47
scripts/modules/bpy_extras/id_map_utils.py
Normal file
@@ -0,0 +1,47 @@
# SPDX-License-Identifier: GPL-2.0-or-later

from typing import Dict, Set
import bpy
from bpy.types import ID


__all__ = (
    "get_id_reference_map",
    "get_all_referenced_ids",
)


def get_id_reference_map() -> Dict[ID, Set[ID]]:
    """Return a dictionary of direct datablock references for every datablock in the blend file."""
    inv_map = {}
    for key, values in bpy.data.user_map().items():
        for value in values:
            if value == key:
                # So an object is not considered to be referencing itself.
                continue
            inv_map.setdefault(value, set()).add(key)
    return inv_map


def recursive_get_referenced_ids(
    ref_map: Dict[ID, Set[ID]], id: ID, referenced_ids: Set, visited: Set
):
    """Recursively populate referenced_ids with IDs referenced by id."""
    if id in visited:
        # Avoid infinite recursion from circular references.
        return
    visited.add(id)
    for ref in ref_map.get(id, []):
        referenced_ids.add(ref)
        recursive_get_referenced_ids(
            ref_map=ref_map, id=ref, referenced_ids=referenced_ids, visited=visited
        )


def get_all_referenced_ids(id: ID, ref_map: Dict[ID, Set[ID]]) -> Set[ID]:
    """Return a set of IDs directly or indirectly referenced by id."""
    referenced_ids = set()
    recursive_get_referenced_ids(
        ref_map=ref_map, id=id, referenced_ids=referenced_ids, visited=set()
    )
    return referenced_ids
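A brief usage sketch (run inside Blender; assumes at least one object exists in the file):

import bpy
from bpy_extras.id_map_utils import get_id_reference_map, get_all_referenced_ids

ref_map = get_id_reference_map()
obj = bpy.context.object
# Everything this object depends on, directly or transitively (mesh, materials, images...).
deps = get_all_referenced_ids(obj, ref_map)
print([id.name for id in deps])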
192
scripts/modules/bpy_extras/image_utils.py
Normal file
@@ -0,0 +1,192 @@
# SPDX-License-Identifier: GPL-2.0-or-later

__all__ = (
    "load_image",
)


# limited replacement for BPyImage.comprehensiveImageLoad
def load_image(
        imagepath,
        dirname="",
        place_holder=False,
        recursive=False,
        ncase_cmp=True,
        convert_callback=None,
        verbose=False,
        relpath=None,
        check_existing=False,
        force_reload=False,
):
    """
    Return an image from the file path with options to search multiple paths
    and return a placeholder if it's not found.

    :arg imagepath: The image filename
        If a path precedes it, this will be searched as well.
    :type imagepath: string
    :arg dirname: is the directory where the image may be located - any file at
        the end will be ignored.
    :type dirname: string
    :arg place_holder: if True a new place holder image will be created.
        this is useful so later you can relink the image to its original data.
    :type place_holder: bool
    :arg recursive: If True, directories will be recursively searched.
        Be careful with this if you have files in your root directory because
        it may take a long time.
    :type recursive: bool
    :arg ncase_cmp: on non-Windows systems, find the correct case for the file.
    :type ncase_cmp: bool
    :arg convert_callback: a function that takes an existing path and returns
        a new one. Use this when loading image formats Blender may not support,
        the CONVERT_CALLBACK can take the path for a GIF (for example),
        convert it to a PNG and return the PNG's path.
        For formats Blender can read, simply return the path that is given.
    :type convert_callback: function
    :arg relpath: If not None, make the file relative to this path.
    :type relpath: None or string
    :arg check_existing: If true,
        returns already loaded image datablock if possible
        (based on file path).
    :type check_existing: bool
    :arg force_reload: If true,
        force reloading of image (only useful when `check_existing`
        is also enabled).
    :type force_reload: bool
    :return: an image or None
    :rtype: :class:`bpy.types.Image`
    """
    import os
    import bpy

    # -------------------------------------------------------------------------
    # Utility Functions

    def _image_load_placeholder(path):
        name = path
        if type(path) is str:
            name = name.encode("utf-8", "replace")
        name = name.decode("utf-8", "replace")
        name = os.path.basename(name)

        image = bpy.data.images.new(name, 128, 128)
        # allow the path to be resolved later
        image.filepath = path
        image.source = 'FILE'
        return image

    def _image_load(path):
        import bpy

        if convert_callback:
            path = convert_callback(path)

        # Ensure we're not relying on the 'CWD' to resolve the path.
        if not os.path.isabs(path):
            path = os.path.abspath(path)

        try:
            image = bpy.data.images.load(path, check_existing=check_existing)
        except RuntimeError:
            image = None

        if verbose:
            if image:
                print("    image loaded '%s'" % path)
            else:
                print("    image load failed '%s'" % path)

        # image path has been checked so the path could not be read for some
        # reason, so be sure to return a placeholder
        if place_holder and image is None:
            image = _image_load_placeholder(path)

        if image:
            if force_reload:
                image.reload()
            if relpath is not None:
                # make relative
                from bpy.path import relpath as relpath_fn
                # can't always find the relative path
                # (between drive letters on windows)
                try:
                    filepath_rel = relpath_fn(path, start=relpath)
                except ValueError:
                    filepath_rel = None

                if filepath_rel is not None:
                    image.filepath_raw = filepath_rel

        return image

    def _recursive_search(paths, filename_check):
        for path in paths:
            for dirpath, _dirnames, filenames in os.walk(path):

                # skip '.svn'
                if dirpath[0] in {".", b'.'}:
                    continue

                for filename in filenames:
                    if filename_check(filename):
                        yield os.path.join(dirpath, filename)

    # -------------------------------------------------------------------------

    imagepath = bpy.path.native_pathsep(imagepath)

    if verbose:
        print("load_image('%s', '%s', ...)" % (imagepath, dirname))

    if os.path.exists(imagepath):
        return _image_load(imagepath)

    variants = [imagepath]

    if dirname:
        variants += [
            os.path.join(dirname, imagepath),
            os.path.join(dirname, bpy.path.basename(imagepath)),
        ]

    for filepath_test in variants:
        if ncase_cmp:
            ncase_variants = (
                filepath_test,
                bpy.path.resolve_ncase(filepath_test),
            )
        else:
            ncase_variants = (filepath_test, )

        for nfilepath in ncase_variants:
            if os.path.exists(nfilepath):
                return _image_load(nfilepath)

    if recursive:
        search_paths = []

        for dirpath_test in (os.path.dirname(imagepath), dirname):
            if os.path.exists(dirpath_test):
                search_paths.append(dirpath_test)
        search_paths[:] = bpy.path.reduce_dirs(search_paths)

        imagepath_base = bpy.path.basename(imagepath)
        if ncase_cmp:
            imagepath_base = imagepath_base.lower()

            def image_filter(fn):
                return (imagepath_base == fn.lower())
        else:
            def image_filter(fn):
                return (imagepath_base == fn)

        nfilepath = next(_recursive_search(search_paths, image_filter), None)
        if nfilepath is not None:
            return _image_load(nfilepath)

    # None of the paths exist so return placeholder
    if place_holder:
        return _image_load_placeholder(imagepath)

    # TODO comprehensiveImageLoad also searched in bpy.config.textureDir
    return None
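A hedged usage sketch (the filename and directory below are hypothetical):

from bpy_extras.image_utils import load_image

# Search a known directory, fall back to a placeholder datablock on failure.
image = load_image(
    "texture.png",            # hypothetical filename
    dirname="/tmp/textures",  # hypothetical directory to search
    place_holder=True,
    check_existing=True,
)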
576
scripts/modules/bpy_extras/io_utils.py
Normal file
@@ -0,0 +1,576 @@
# SPDX-License-Identifier: GPL-2.0-or-later

__all__ = (
    "ExportHelper",
    "ImportHelper",
    "orientation_helper",
    "axis_conversion",
    "axis_conversion_ensure",
    "create_derived_objects",
    "unpack_list",
    "unpack_face_list",
    "path_reference",
    "path_reference_copy",
    "path_reference_mode",
    "unique_name",
)

import bpy
from bpy.props import (
    BoolProperty,
    EnumProperty,
    StringProperty,
)
from bpy.app.translations import pgettext_data as data_


def _check_axis_conversion(op):
    if hasattr(op, "axis_forward") and hasattr(op, "axis_up"):
        return axis_conversion_ensure(
            op,
            "axis_forward",
            "axis_up",
        )
    return False


class ExportHelper:
    filepath: StringProperty(
        name="File Path",
        description="Filepath used for exporting the file",
        maxlen=1024,
        subtype='FILE_PATH',
    )
    check_existing: BoolProperty(
        name="Check Existing",
        description="Check and warn on overwriting existing files",
        default=True,
        options={'HIDDEN'},
    )

    # subclasses can override with decorator
    # True == use ext, False == no ext, None == do nothing.
    check_extension = True

    def invoke(self, context, _event):
        import os
        if not self.filepath:
            blend_filepath = context.blend_data.filepath
            if not blend_filepath:
                blend_filepath = data_("untitled")
            else:
                blend_filepath = os.path.splitext(blend_filepath)[0]

            self.filepath = blend_filepath + self.filename_ext

        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def check(self, _context):
        import os
        change_ext = False
        change_axis = _check_axis_conversion(self)

        check_extension = self.check_extension

        if check_extension is not None:
            filepath = self.filepath
            if os.path.basename(filepath):
                if check_extension:
                    filepath = bpy.path.ensure_ext(
                        os.path.splitext(filepath)[0],
                        self.filename_ext,
                    )
                if filepath != self.filepath:
                    self.filepath = filepath
                    change_ext = True

        return (change_ext or change_axis)


class ImportHelper:
    filepath: StringProperty(
        name="File Path",
        description="Filepath used for importing the file",
        maxlen=1024,
        subtype='FILE_PATH',
    )

    def invoke(self, context, _event):
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def check(self, _context):
        return _check_axis_conversion(self)


def orientation_helper(axis_forward='Y', axis_up='Z'):
    """
    A decorator for import/export classes, generating properties needed by the axis conversion system and IO helpers,
    with specified default values (axes).
    """
    def wrapper(cls):
        # Without that, we may end up adding those fields to some **parent** class' __annotations__ property
        # (like the ImportHelper or ExportHelper ones)! See #58772.
        if "__annotations__" not in cls.__dict__:
            setattr(cls, "__annotations__", {})

        def _update_axis_forward(self, _context):
            if self.axis_forward[-1] == self.axis_up[-1]:
                self.axis_up = (
                    self.axis_up[0:-1] +
                    'XYZ'[('XYZ'.index(self.axis_up[-1]) + 1) % 3]
                )

        cls.__annotations__['axis_forward'] = EnumProperty(
            name="Forward",
            items=(
                ('X', "X Forward", ""),
                ('Y', "Y Forward", ""),
                ('Z', "Z Forward", ""),
                ('-X', "-X Forward", ""),
                ('-Y', "-Y Forward", ""),
                ('-Z', "-Z Forward", ""),
            ),
            default=axis_forward,
            update=_update_axis_forward,
        )

        def _update_axis_up(self, _context):
            if self.axis_up[-1] == self.axis_forward[-1]:
                self.axis_forward = (
                    self.axis_forward[0:-1] +
                    'XYZ'[('XYZ'.index(self.axis_forward[-1]) + 1) % 3]
                )

        cls.__annotations__['axis_up'] = EnumProperty(
            name="Up",
            items=(
                ('X', "X Up", ""),
                ('Y', "Y Up", ""),
                ('Z', "Z Up", ""),
                ('-X', "-X Up", ""),
                ('-Y', "-Y Up", ""),
                ('-Z', "-Z Up", ""),
            ),
            default=axis_up,
            update=_update_axis_up,
        )

        return cls

    return wrapper
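An illustrative sketch of combining these helpers in an exporter operator (class name, idname and extension are hypothetical):

import bpy
from bpy_extras.io_utils import ExportHelper, orientation_helper

@orientation_helper(axis_forward='-Z', axis_up='Y')
class ExportSomeFormat(bpy.types.Operator, ExportHelper):
    """Hypothetical exporter showing the helper mixins."""
    bl_idname = "export_scene.some_format"  # hypothetical
    bl_label = "Export Some Format"
    filename_ext = ".fmt"                   # hypothetical extension

    def execute(self, context):
        # self.filepath, self.axis_forward and self.axis_up are provided
        # by ExportHelper and the orientation_helper decorator.
        print("would export to", self.filepath, self.axis_forward, self.axis_up)
        return {'FINISHED'}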
# Axis conversion function, not pretty LUT
# use lookup table to convert between any axis
_axis_convert_matrix = (
    ((-1.0, 0.0, 0.0), (0.0, -1.0, 0.0), (0.0, 0.0, 1.0)),
    ((-1.0, 0.0, 0.0), (0.0, 0.0, -1.0), (0.0, -1.0, 0.0)),
    ((-1.0, 0.0, 0.0), (0.0, 0.0, 1.0), (0.0, 1.0, 0.0)),
    ((-1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.0, 0.0, -1.0)),
    ((0.0, -1.0, 0.0), (-1.0, 0.0, 0.0), (0.0, 0.0, -1.0)),
    ((0.0, 0.0, 1.0), (-1.0, 0.0, 0.0), (0.0, -1.0, 0.0)),
    ((0.0, 0.0, -1.0), (-1.0, 0.0, 0.0), (0.0, 1.0, 0.0)),
    ((0.0, 1.0, 0.0), (-1.0, 0.0, 0.0), (0.0, 0.0, 1.0)),
    ((0.0, -1.0, 0.0), (0.0, 0.0, 1.0), (-1.0, 0.0, 0.0)),
    ((0.0, 0.0, -1.0), (0.0, -1.0, 0.0), (-1.0, 0.0, 0.0)),
    ((0.0, 0.0, 1.0), (0.0, 1.0, 0.0), (-1.0, 0.0, 0.0)),
    ((0.0, 1.0, 0.0), (0.0, 0.0, -1.0), (-1.0, 0.0, 0.0)),
    ((0.0, -1.0, 0.0), (0.0, 0.0, -1.0), (1.0, 0.0, 0.0)),
    ((0.0, 0.0, 1.0), (0.0, -1.0, 0.0), (1.0, 0.0, 0.0)),
    ((0.0, 0.0, -1.0), (0.0, 1.0, 0.0), (1.0, 0.0, 0.0)),
    ((0.0, 1.0, 0.0), (0.0, 0.0, 1.0), (1.0, 0.0, 0.0)),
    ((0.0, -1.0, 0.0), (1.0, 0.0, 0.0), (0.0, 0.0, 1.0)),
    ((0.0, 0.0, -1.0), (1.0, 0.0, 0.0), (0.0, -1.0, 0.0)),
    ((0.0, 0.0, 1.0), (1.0, 0.0, 0.0), (0.0, 1.0, 0.0)),
    ((0.0, 1.0, 0.0), (1.0, 0.0, 0.0), (0.0, 0.0, -1.0)),
    ((1.0, 0.0, 0.0), (0.0, -1.0, 0.0), (0.0, 0.0, -1.0)),
    ((1.0, 0.0, 0.0), (0.0, 0.0, 1.0), (0.0, -1.0, 0.0)),
    ((1.0, 0.0, 0.0), (0.0, 0.0, -1.0), (0.0, 1.0, 0.0)),
)

# store args as a single int
# (X Y Z -X -Y -Z) --> (0, 1, 2, 3, 4, 5)
# each value is ((src_forward, src_up), (dst_forward, dst_up))
# where all 4 values are or'd into a single value...
# (i1<<0 | i1<<3 | i1<<6 | i1<<9)
_axis_convert_lut = (
    {0x8C8, 0x4D0, 0x2E0, 0xAE8, 0x701, 0x511, 0x119, 0xB29, 0x682, 0x88A,
     0x09A, 0x2A2, 0x80B, 0x413, 0x223, 0xA2B, 0x644, 0x454, 0x05C, 0xA6C,
     0x745, 0x94D, 0x15D, 0x365},
    {0xAC8, 0x8D0, 0x4E0, 0x2E8, 0x741, 0x951, 0x159, 0x369, 0x702, 0xB0A,
     0x11A, 0x522, 0xA0B, 0x813, 0x423, 0x22B, 0x684, 0x894, 0x09C, 0x2AC,
     0x645, 0xA4D, 0x05D, 0x465},
    {0x4C8, 0x2D0, 0xAE0, 0x8E8, 0x681, 0x291, 0x099, 0x8A9, 0x642, 0x44A,
     0x05A, 0xA62, 0x40B, 0x213, 0xA23, 0x82B, 0x744, 0x354, 0x15C, 0x96C,
     0x705, 0x50D, 0x11D, 0xB25},
    {0x2C8, 0xAD0, 0x8E0, 0x4E8, 0x641, 0xA51, 0x059, 0x469, 0x742, 0x34A,
     0x15A, 0x962, 0x20B, 0xA13, 0x823, 0x42B, 0x704, 0xB14, 0x11C, 0x52C,
     0x685, 0x28D, 0x09D, 0x8A5},
    {0x708, 0xB10, 0x120, 0x528, 0x8C1, 0xAD1, 0x2D9, 0x4E9, 0x942, 0x74A,
     0x35A, 0x162, 0x64B, 0xA53, 0x063, 0x46B, 0x804, 0xA14, 0x21C, 0x42C,
     0x885, 0x68D, 0x29D, 0x0A5},
    {0xB08, 0x110, 0x520, 0x728, 0x941, 0x151, 0x359, 0x769, 0x802, 0xA0A,
     0x21A, 0x422, 0xA4B, 0x053, 0x463, 0x66B, 0x884, 0x094, 0x29C, 0x6AC,
     0x8C5, 0xACD, 0x2DD, 0x4E5},
    {0x508, 0x710, 0xB20, 0x128, 0x881, 0x691, 0x299, 0x0A9, 0x8C2, 0x4CA,
     0x2DA, 0xAE2, 0x44B, 0x653, 0xA63, 0x06B, 0x944, 0x754, 0x35C, 0x16C,
     0x805, 0x40D, 0x21D, 0xA25},
    {0x108, 0x510, 0x720, 0xB28, 0x801, 0x411, 0x219, 0xA29, 0x882, 0x08A,
     0x29A, 0x6A2, 0x04B, 0x453, 0x663, 0xA6B, 0x8C4, 0x4D4, 0x2DC, 0xAEC,
     0x945, 0x14D, 0x35D, 0x765},
    {0x748, 0x350, 0x160, 0x968, 0xAC1, 0x2D1, 0x4D9, 0x8E9, 0xA42, 0x64A,
     0x45A, 0x062, 0x68B, 0x293, 0x0A3, 0x8AB, 0xA04, 0x214, 0x41C, 0x82C,
     0xB05, 0x70D, 0x51D, 0x125},
    {0x948, 0x750, 0x360, 0x168, 0xB01, 0x711, 0x519, 0x129, 0xAC2, 0x8CA,
     0x4DA, 0x2E2, 0x88B, 0x693, 0x2A3, 0x0AB, 0xA44, 0x654, 0x45C, 0x06C,
     0xA05, 0x80D, 0x41D, 0x225},
    {0x348, 0x150, 0x960, 0x768, 0xA41, 0x051, 0x459, 0x669, 0xA02, 0x20A,
     0x41A, 0x822, 0x28B, 0x093, 0x8A3, 0x6AB, 0xB04, 0x114, 0x51C, 0x72C,
     0xAC5, 0x2CD, 0x4DD, 0x8E5},
    {0x148, 0x950, 0x760, 0x368, 0xA01, 0x811, 0x419, 0x229, 0xB02, 0x10A,
     0x51A, 0x722, 0x08B, 0x893, 0x6A3, 0x2AB, 0xAC4, 0x8D4, 0x4DC, 0x2EC,
     0xA45, 0x04D, 0x45D, 0x665},
    {0x688, 0x890, 0x0A0, 0x2A8, 0x4C1, 0x8D1, 0xAD9, 0x2E9, 0x502, 0x70A,
     0xB1A, 0x122, 0x74B, 0x953, 0x163, 0x36B, 0x404, 0x814, 0xA1C, 0x22C,
     0x445, 0x64D, 0xA5D, 0x065},
    {0x888, 0x090, 0x2A0, 0x6A8, 0x501, 0x111, 0xB19, 0x729, 0x402, 0x80A,
     0xA1A, 0x222, 0x94B, 0x153, 0x363, 0x76B, 0x444, 0x054, 0xA5C, 0x66C,
     0x4C5, 0x8CD, 0xADD, 0x2E5},
    {0x288, 0x690, 0x8A0, 0x0A8, 0x441, 0x651, 0xA59, 0x069, 0x4C2, 0x2CA,
     0xADA, 0x8E2, 0x34B, 0x753, 0x963, 0x16B, 0x504, 0x714, 0xB1C, 0x12C,
     0x405, 0x20D, 0xA1D, 0x825},
    {0x088, 0x290, 0x6A0, 0x8A8, 0x401, 0x211, 0xA19, 0x829, 0x442, 0x04A,
     0xA5A, 0x662, 0x14B, 0x353, 0x763, 0x96B, 0x4C4, 0x2D4, 0xADC, 0x8EC,
     0x505, 0x10D, 0xB1D, 0x725},
    {0x648, 0x450, 0x060, 0xA68, 0x2C1, 0x4D1, 0x8D9, 0xAE9, 0x282, 0x68A,
     0x89A, 0x0A2, 0x70B, 0x513, 0x123, 0xB2B, 0x204, 0x414, 0x81C, 0xA2C,
     0x345, 0x74D, 0x95D, 0x165},
    {0xA48, 0x650, 0x460, 0x068, 0x341, 0x751, 0x959, 0x169, 0x2C2, 0xACA,
     0x8DA, 0x4E2, 0xB0B, 0x713, 0x523, 0x12B, 0x284, 0x694, 0x89C, 0x0AC,
     0x205, 0xA0D, 0x81D, 0x425},
    {0x448, 0x050, 0xA60, 0x668, 0x281, 0x091, 0x899, 0x6A9, 0x202, 0x40A,
     0x81A, 0xA22, 0x50B, 0x113, 0xB23, 0x72B, 0x344, 0x154, 0x95C, 0x76C,
     0x2C5, 0x4CD, 0x8DD, 0xAE5},
    {0x048, 0xA50, 0x660, 0x468, 0x201, 0xA11, 0x819, 0x429, 0x342, 0x14A,
     0x95A, 0x762, 0x10B, 0xB13, 0x723, 0x52B, 0x2C4, 0xAD4, 0x8DC, 0x4EC,
     0x285, 0x08D, 0x89D, 0x6A5},
    {0x808, 0xA10, 0x220, 0x428, 0x101, 0xB11, 0x719, 0x529, 0x142, 0x94A,
     0x75A, 0x362, 0x8CB, 0xAD3, 0x2E3, 0x4EB, 0x044, 0xA54, 0x65C, 0x46C,
     0x085, 0x88D, 0x69D, 0x2A5},
    {0xA08, 0x210, 0x420, 0x828, 0x141, 0x351, 0x759, 0x969, 0x042, 0xA4A,
     0x65A, 0x462, 0xACB, 0x2D3, 0x4E3, 0x8EB, 0x084, 0x294, 0x69C, 0x8AC,
     0x105, 0xB0D, 0x71D, 0x525},
    {0x408, 0x810, 0xA20, 0x228, 0x081, 0x891, 0x699, 0x2A9, 0x102, 0x50A,
     0x71A, 0xB22, 0x4CB, 0x8D3, 0xAE3, 0x2EB, 0x144, 0x954, 0x75C, 0x36C,
     0x045, 0x44D, 0x65D, 0xA65},
)

_axis_convert_num = {'X': 0, 'Y': 1, 'Z': 2, '-X': 3, '-Y': 4, '-Z': 5}


def axis_conversion(from_forward='Y', from_up='Z', to_forward='Y', to_up='Z'):
    """
    Each argument is an axis in ['X', 'Y', 'Z', '-X', '-Y', '-Z']
    where the first 2 are a source and the second 2 are the target.
    """
    from mathutils import Matrix
    from functools import reduce

    if from_forward == to_forward and from_up == to_up:
        return Matrix().to_3x3()

    if from_forward[-1] == from_up[-1] or to_forward[-1] == to_up[-1]:
        raise Exception("Invalid axis arguments passed, "
                        "can't use up/forward on the same axis")

    value = reduce(int.__or__, (_axis_convert_num[a] << (i * 3)
                                for i, a in enumerate((from_forward,
                                                       from_up,
                                                       to_forward,
                                                       to_up,
                                                       ))))

    for i, axis_lut in enumerate(_axis_convert_lut):
        if value in axis_lut:
            return Matrix(_axis_convert_matrix[i])
    assert 0
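A quick sketch of applying the conversion matrix (illustrative; assumes an active object):

import bpy
from bpy_extras.io_utils import axis_conversion

# From Blender's Y-forward / Z-up convention to a -Z-forward / Y-up one.
conv = axis_conversion(from_forward='Y', from_up='Z',
                       to_forward='-Z', to_up='Y').to_4x4()

obj = bpy.context.object
matrix_converted = conv @ obj.matrix_world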
def axis_conversion_ensure(operator, forward_attr, up_attr):
    """
    Function to ensure an operator has valid axis conversion settings, intended
    to be used from :class:`bpy.types.Operator.check`.

    :arg operator: the operator to access axis attributes from.
    :type operator: :class:`bpy.types.Operator`
    :arg forward_attr: attribute storing the forward axis
    :type forward_attr: string
    :arg up_attr: attribute storing the up axis
    :type up_attr: string
    :return: True if the value was modified.
    :rtype: boolean
    """
    def validate(axis_forward, axis_up):
        if axis_forward[-1] == axis_up[-1]:
            axis_up = axis_up[0:-1] + 'XYZ'[('XYZ'.index(axis_up[-1]) + 1) % 3]

        return axis_forward, axis_up

    axis = getattr(operator, forward_attr), getattr(operator, up_attr)
    axis_new = validate(*axis)

    if axis != axis_new:
        setattr(operator, forward_attr, axis_new[0])
        setattr(operator, up_attr, axis_new[1])

        return True
    else:
        return False


def create_derived_objects(depsgraph, objects):
    """
    This function takes a sequence of objects, returning their instances.

    :arg depsgraph: The evaluated depsgraph.
    :type depsgraph: :class:`bpy.types.Depsgraph`
    :arg objects: A sequence of objects.
    :type objects: sequence of :class:`bpy.types.Object`
    :return: A dictionary where each key is an object from `objects`,
        values are lists of (:class:`bpy.types.Object`, :class:`mathutils.Matrix`) tuples representing instances.
    :rtype: dict
    """
    result = {}
    for ob in objects:
        ob_parent = ob.parent
        if ob_parent and ob_parent.instance_type in {'VERTS', 'FACES'}:
            continue
        result[ob] = [] if ob.is_instancer else [(ob, ob.matrix_world.copy())]

    if result:
        for dup in depsgraph.object_instances:
            dup_parent = dup.parent
            if dup_parent is None:
                continue
            dup_parent_original = dup_parent.original
            if not dup_parent_original.is_instancer:
                # The instance has already been added (on assignment).
                continue
            instance_list = result.get(dup_parent_original)
            if instance_list is None:
                continue
            instance_list.append((dup.instance_object.original, dup.matrix_world.copy()))
    return result


def unpack_list(list_of_tuples):
    flat_list = []
    flat_list_extend = flat_list.extend  # a tiny bit faster
    for t in list_of_tuples:
        flat_list_extend(t)
    return flat_list


# same as above except that it adds 0 for triangle faces
def unpack_face_list(list_of_tuples):
    # allocate the entire list
    flat_ls = [0] * (len(list_of_tuples) * 4)
    i = 0

    for t in list_of_tuples:
        if len(t) == 3:
            if t[2] == 0:
                t = t[1], t[2], t[0]
        else:  # assume quad
            if t[3] == 0 or t[2] == 0:
                t = t[2], t[3], t[0], t[1]

        flat_ls[i:i + len(t)] = t
        i += 4
    return flat_ls


path_reference_mode = EnumProperty(
    name="Path Mode",
    description="Method used to reference paths",
    items=(
        ('AUTO', "Auto", "Use relative paths with subdirectories only"),
        ('ABSOLUTE', "Absolute", "Always write absolute paths"),
        ('RELATIVE', "Relative", "Always write relative paths "
                     "(where possible)"),
        ('MATCH', "Match", "Match absolute/relative "
                  "setting with input path"),
        ('STRIP', "Strip Path", "Filename only"),
        ('COPY', "Copy", "Copy the file to the destination path "
                 "(or subdirectory)"),
    ),
    default='AUTO',
)


def path_reference(
        filepath,
        base_src,
        base_dst,
        mode='AUTO',
        copy_subdir="",
        copy_set=None,
        library=None,
):
    """
    Return a filepath relative to a destination directory, for use with
    exporters.

    :arg filepath: the file path to return,
        supporting Blender's relative '//' prefix.
    :type filepath: string
    :arg base_src: the directory the *filepath* is relative to
        (normally the blend file).
    :type base_src: string
    :arg base_dst: the directory the *filepath* will be referenced from
        (normally the export path).
    :type base_dst: string
    :arg mode: the method used to get the path in
        ['AUTO', 'ABSOLUTE', 'RELATIVE', 'MATCH', 'STRIP', 'COPY']
    :type mode: string
    :arg copy_subdir: the subdirectory of *base_dst* to use when mode='COPY'.
    :type copy_subdir: string
    :arg copy_set: collect from/to pairs when mode='COPY',
        pass to *path_reference_copy* when exporting is done.
    :type copy_set: set
    :arg library: The library this path is relative to.
    :type library: :class:`bpy.types.Library` or None
    :return: the new filepath.
    :rtype: string
    """
    import os
    is_relative = filepath.startswith("//")
    filepath_abs = bpy.path.abspath(filepath, start=base_src, library=library)
    filepath_abs = os.path.normpath(filepath_abs)

    if mode in {'ABSOLUTE', 'RELATIVE', 'STRIP'}:
        pass
    elif mode == 'MATCH':
        mode = 'RELATIVE' if is_relative else 'ABSOLUTE'
    elif mode == 'AUTO':
        mode = ('RELATIVE'
                if bpy.path.is_subdir(filepath_abs, base_dst)
                else 'ABSOLUTE')
    elif mode == 'COPY':
        subdir_abs = os.path.normpath(base_dst)
        if copy_subdir:
            subdir_abs = os.path.join(subdir_abs, copy_subdir)

        filepath_cpy = os.path.join(subdir_abs, os.path.basename(filepath_abs))

        copy_set.add((filepath_abs, filepath_cpy))

        filepath_abs = filepath_cpy
        mode = 'RELATIVE'
    else:
        raise Exception("invalid mode given %r" % mode)

    if mode == 'ABSOLUTE':
        return filepath_abs
    elif mode == 'RELATIVE':
        # can't always find the relative path
        # (between drive letters on windows)
        try:
            return os.path.relpath(filepath_abs, base_dst)
        except ValueError:
            return filepath_abs
    elif mode == 'STRIP':
        return os.path.basename(filepath_abs)
def path_reference_copy(copy_set, report=print):
    """
    Execute copying files of path_reference

    :arg copy_set: set of (from, to) pairs to copy.
    :type copy_set: set
    :arg report: function used for reporting warnings, takes a string argument.
    :type report: function
    """
    if not copy_set:
        return

    import os
    import shutil

    for file_src, file_dst in copy_set:
        if not os.path.exists(file_src):
            report("missing %r, not copying" % file_src)
        elif os.path.exists(file_dst) and os.path.samefile(file_src, file_dst):
            pass
        else:
            dir_to = os.path.dirname(file_dst)

            try:
                os.makedirs(dir_to, exist_ok=True)
            except:
                import traceback
                traceback.print_exc()

            try:
                shutil.copy(file_src, file_dst)
            except:
                import traceback
                traceback.print_exc()
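A hedged sketch of the export-time pattern these two functions support (all paths are hypothetical):

from bpy_extras.io_utils import path_reference, path_reference_copy

copy_set = set()
# While writing the export file, resolve each texture reference.
ref = path_reference(
    "//textures/wood.png",          # hypothetical blend-relative path
    base_src="/home/user/project",  # hypothetical blend file directory
    base_dst="/home/user/export",   # hypothetical export directory
    mode='COPY',
    copy_subdir="textures",
    copy_set=copy_set,
)
# After writing, perform the queued copies in one pass.
path_reference_copy(copy_set)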
def unique_name(key, name, name_dict, name_max=-1, clean_func=None, sep="."):
    """
    Helper function for storing unique names which may have special characters
    stripped and restricted to a maximum length.

    :arg key: unique item this name belongs to, name_dict[key] will be reused
        when available.
        This can be the object, mesh, material, etc instance itself.
    :type key: any hashable object associated with the *name*.
    :arg name: The name used to create a unique value in *name_dict*.
    :type name: string
    :arg name_dict: This is used to cache namespace to ensure no collisions
        occur, this should be an empty dict initially and only modified by this
        function.
    :type name_dict: dict
    :arg clean_func: Function to call on *name* before creating a unique value.
    :type clean_func: function
    :arg sep: Separator to use between the name and a number when a
        duplicate name is found.
    :type sep: string
    """
    name_new = name_dict.get(key)
    if name_new is None:
        count = 1
        name_dict_values = name_dict.values()
        name_new = name_new_orig = (
            name if clean_func is None
            else clean_func(name)
        )

        if name_max == -1:
            while name_new in name_dict_values:
                name_new = "%s%s%03d" % (
                    name_new_orig,
                    sep,
                    count,
                )
                count += 1
        else:
            name_new = name_new[:name_max]
            while name_new in name_dict_values:
                count_str = "%03d" % count
                name_new = "%.*s%s%s" % (
                    name_max - (len(count_str) + 1),
                    name_new_orig,
                    sep,
                    count_str,
                )
                count += 1

        name_dict[key] = name_new

    return name_new
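A short usage sketch (keys and names here are hypothetical):

from bpy_extras.io_utils import unique_name

name_dict = {}
a = unique_name("key_a", "Material", name_dict)        # -> "Material"
b = unique_name("key_b", "Material", name_dict)        # -> "Material.001"
a_again = unique_name("key_a", "Material", name_dict)  # cached -> "Material"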
127
scripts/modules/bpy_extras/keyconfig_utils.py
Normal file
@@ -0,0 +1,127 @@
# SPDX-License-Identifier: GPL-2.0-or-later


# -----------------------------------------------------------------------------
# Add-on helpers to properly (un)register their own keymaps.

def addon_keymap_register(keymap_data):
    """
    Register a set of keymaps for addons using a list of keymaps.

    See 'blender_defaults.py' for examples of the format this takes.
    """
    import bpy
    wm = bpy.context.window_manager

    from bl_keymap_utils.io import keymap_init_from_data

    kconf = wm.keyconfigs.addon
    if not kconf:
        return  # happens in background mode...
    for km_name, km_args, km_content in keymap_data:
        km_space_type = km_args["space_type"]
        km_region_type = km_args["region_type"]
        km_modal = km_args.get("modal", False)
        kmap = next(iter(
            k for k in kconf.keymaps
            if k.name == km_name and
            k.region_type == km_region_type and
            k.space_type == km_space_type and
            k.is_modal == km_modal
        ), None)
        if kmap is None:
            kmap = kconf.keymaps.new(km_name, **km_args)
        keymap_init_from_data(kmap, km_content["items"], is_modal=km_modal)


def addon_keymap_unregister(keymap_data):
    """
    Unregister a set of keymaps for addons.
    """
    # NOTE: We must also clean up the user keyconfig, else, if the user has customized one of the add-on's
    # shortcuts, this customization remains in memory, and comes back when re-enabling the addon,
    # causing a segfault... :/
    import bpy
    wm = bpy.context.window_manager

    kconfs = wm.keyconfigs
    for kconf in (kconfs.user, kconfs.addon):
        for km_name, km_args, km_content in keymap_data:
            km_space_type = km_args["space_type"]
            km_region_type = km_args["region_type"]
            km_modal = km_args.get("modal", False)
            kmaps = (
                k for k in kconf.keymaps
                if k.name == km_name and
                k.region_type == km_region_type and
                k.space_type == km_space_type and
                k.is_modal == km_modal
            )
            for kmap in kmaps:
                for kmi_idname, _, _ in km_content["items"]:
                    for kmi in kmap.keymap_items:
                        if kmi.idname == kmi_idname:
                            kmap.keymap_items.remove(kmi)
    # NOTE: We won't remove addons keymaps themselves, other addons might also use them!
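A sketch of the keymap_data structure these helpers consume (the operator idname and key choice are hypothetical, and the exact item-tuple layout follows bl_keymap_utils.io conventions, so treat this as illustrative):

# Each entry: (keymap name, keymap arguments, {"items": [...]})
keymap_data = [(
    "3D View",
    {"space_type": 'VIEW_3D', "region_type": 'WINDOW'},
    {"items": [
        # (operator idname, key event args, extra data or None)
        ("object.select_all", {"type": 'A', "value": 'PRESS'}, None),
    ]},
)]

addon_keymap_register(keymap_data)    # in an add-on's register()
addon_keymap_unregister(keymap_data)  # in its unregister()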
# -----------------------------------------------------------------------------
|
||||
# Utility Functions
|
||||
|
||||
def keyconfig_test(kc):
|
||||
|
||||
def testEntry(kc, entry, src=None, parent=None):
|
||||
result = False
|
||||
|
||||
idname, spaceid, regionid, children = entry
|
||||
|
||||
km = kc.keymaps.find(idname, space_type=spaceid, region_type=regionid)
|
||||
|
||||
if km:
|
||||
km = km.active()
|
||||
is_modal = km.is_modal
|
||||
|
||||
if src:
|
||||
for item in km.keymap_items:
|
||||
if src.compare(item):
|
||||
print("===========")
|
||||
print(parent.name)
|
||||
print(_kmistr(src, is_modal).strip())
|
||||
print(km.name)
|
||||
print(_kmistr(item, is_modal).strip())
|
||||
result = True
|
||||
|
||||
for child in children:
|
||||
if testEntry(kc, child, src, parent):
|
||||
result = True
|
||||
else:
|
||||
for i in range(len(km.keymap_items)):
|
||||
src = km.keymap_items[i]
|
||||
|
||||
for child in children:
|
||||
if testEntry(kc, child, src, km):
|
||||
result = True
|
||||
|
||||
for j in range(len(km.keymap_items) - i - 1):
|
||||
item = km.keymap_items[j + i + 1]
|
||||
if src.compare(item):
|
||||
print("===========")
|
||||
print(km.name)
|
||||
print(_kmistr(src, is_modal).strip())
|
||||
print(_kmistr(item, is_modal).strip())
|
||||
result = True
|
||||
|
||||
for child in children:
|
||||
if testEntry(kc, child):
|
||||
result = True
|
||||
|
||||
return result
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Function body
|
||||
|
||||
from bl_keymap_utils import keymap_hierarchy
|
||||
result = False
|
||||
for entry in keymap_hierarchy.generate():
|
||||
if testEntry(kc, entry):
|
||||
result = True
|
||||
return result
|
||||
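
A hedged usage sketch for the register/unregister pair above. The keymap_data layout mirrors what keymap_init_from_data() consumes; the "3D View" entry, the chosen operator, and the _addon_keymaps name are illustrative assumptions, not taken from this file:

    # Hypothetical add-on keymap, one (name, args, content) entry.
    _addon_keymaps = [(
        "3D View",
        {"space_type": 'VIEW_3D', "region_type": 'WINDOW'},
        {"items": [(
            "object.select_all",
            {"type": 'A', "value": 'PRESS'},
            {"properties": [("action", 'TOGGLE')]},
        )]},
    )]

    def register():
        from bpy_extras import keyconfig_utils
        keyconfig_utils.addon_keymap_register(_addon_keymaps)

    def unregister():
        from bpy_extras import keyconfig_utils
        keyconfig_utils.addon_keymap_unregister(_addon_keymaps)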
463
scripts/modules/bpy_extras/mesh_utils.py
Normal file
@@ -0,0 +1,463 @@
# SPDX-License-Identifier: GPL-2.0-or-later

__all__ = (
    "mesh_linked_uv_islands",
    "mesh_linked_triangles",
    "edge_face_count_dict",
    "edge_face_count",
    "edge_loops_from_edges",
    "ngon_tessellate",
    "triangle_random_points",
)


def mesh_linked_uv_islands(mesh):
    """
    Returns lists of polygon indices connected by UV islands.

    :arg mesh: the mesh used to group with.
    :type mesh: :class:`bpy.types.Mesh`
    :return: list of lists containing polygon indices
    :rtype: list
    """

    if mesh.polygons and not mesh.uv_layers.active.data:
        # Currently, when in edit mode, UV layer data will always be empty
        # when accessed through RNA. This may change in the future.
        raise ValueError(
            "UV Layers are not currently available from Python in Edit Mode. "
            "Use bmesh and bpy_extras.bmesh_utils.bmesh_linked_uv_islands instead."
        )

    uv_loops = [luv.uv[:] for luv in mesh.uv_layers.active.data]
    poly_loops = [poly.loop_indices for poly in mesh.polygons]
    luv_hash = {}
    luv_hash_get = luv_hash.get
    luv_hash_ls = [None] * len(uv_loops)
    for pi, poly_indices in enumerate(poly_loops):
        for li in poly_indices:
            uv = uv_loops[li]
            uv_hub = luv_hash_get(uv)
            if uv_hub is None:
                uv_hub = luv_hash[uv] = [pi]
            else:
                uv_hub.append(pi)
            luv_hash_ls[li] = uv_hub

    poly_islands = []

    # 0 = none, 1 = added, 2 = searched
    poly_tag = [0] * len(poly_loops)

    while True:
        poly_index = -1
        for i in range(len(poly_loops)):
            if poly_tag[i] == 0:
                poly_index = i
                break

        if poly_index != -1:
            island = [poly_index]
            poly_tag[poly_index] = 1
            poly_islands.append(island)
        else:
            break  # we're done

        added = True
        while added:
            added = False
            for poly_index in island[:]:
                if poly_tag[poly_index] == 1:
                    for li in poly_loops[poly_index]:
                        for poly_index_shared in luv_hash_ls[li]:
                            if poly_tag[poly_index_shared] == 0:
                                added = True
                                poly_tag[poly_index_shared] = 1
                                island.append(poly_index_shared)
                    poly_tag[poly_index] = 2

    return poly_islands
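
A brief usage sketch, assuming an active mesh object with a UV map (object and mode handling are up to the caller):

    import bpy
    from bpy_extras import mesh_utils

    mesh = bpy.context.object.data  # assumes the active object is a mesh in Object Mode
    for island in mesh_utils.mesh_linked_uv_islands(mesh):
        print("island with %d polygons:" % len(island), island)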


def mesh_linked_triangles(mesh):
    """
    Splits the mesh into connected triangles; use this for separating cubes from
    other mesh elements within one mesh data-block.

    :arg mesh: the mesh used to group with.
    :type mesh: :class:`bpy.types.Mesh`
    :return: lists of lists containing triangles.
    :rtype: list
    """

    # Build vert face connectivity
    vert_tris = [[] for i in range(len(mesh.vertices))]
    for t in mesh.loop_triangles:
        for v in t.vertices:
            vert_tris[v].append(t)

    # sort triangles into connectivity groups
    tri_groups = [[t] for t in mesh.loop_triangles]
    # map old, new tri location
    tri_mapping = list(range(len(mesh.loop_triangles)))

    # Now clump triangles iteratively
    ok = True
    while ok:
        ok = False

        for t in mesh.loop_triangles:
            mapped_index = tri_mapping[t.index]
            mapped_group = tri_groups[mapped_index]

            for v in t.vertices:
                for nxt_t in vert_tris[v]:
                    if nxt_t != t:
                        nxt_mapped_index = tri_mapping[nxt_t.index]

                        # We are not a part of the same group
                        if mapped_index != nxt_mapped_index:
                            ok = True

                            # Assign mapping to this group so they
                            # all map to this group
                            for grp_t in tri_groups[nxt_mapped_index]:
                                tri_mapping[grp_t.index] = mapped_index

                            # Move triangles into this group
                            mapped_group.extend(tri_groups[nxt_mapped_index])

                            # remove reference to the list
                            tri_groups[nxt_mapped_index] = None

    # Return all tri groups that are not null:
    # these are all the triangles that are connected in their own lists.
    return [tg for tg in tri_groups if tg]


def edge_face_count_dict(mesh):
    """
    :return: dict of edge keys with their value set to the number of
        faces using each edge.
    :rtype: dict
    """

    face_edge_count = {}
    loops = mesh.loops
    edges = mesh.edges
    for poly in mesh.polygons:
        for i in poly.loop_indices:
            key = edges[loops[i].edge_index].key
            try:
                face_edge_count[key] += 1
            except KeyError:
                face_edge_count[key] = 1

    return face_edge_count


def edge_face_count(mesh):
    """
    :return: list of face users for each item in mesh.edges.
    :rtype: list
    """
    edge_face_count = edge_face_count_dict(mesh)
    get = dict.get
    return [get(edge_face_count, ed.key, 0) for ed in mesh.edges]
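
The per-edge counts make a quick manifold check; a sketch assuming 'mesh' is any bpy.types.Mesh:

    from bpy_extras import mesh_utils

    counts = mesh_utils.edge_face_count(mesh)
    boundary_edges = [ed.index for ed, n in zip(mesh.edges, counts) if n == 1]
    non_manifold = [ed.index for ed, n in zip(mesh.edges, counts) if n > 2]
    print("boundary:", len(boundary_edges), "non-manifold:", len(non_manifold))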


def edge_loops_from_edges(mesh, edges=None):
    """
    Edge loops defined by edges.

    Takes mesh.edges or a list of edges and returns the edge loops.

    Returns a list of vertex index lists:
    [[1, 6, 7, 2], ...]

    Closed loops have matching start and end values.
    """
    line_polys = []

    # Get edges not used by a face
    if edges is None:
        edges = mesh.edges

    if not hasattr(edges, "pop"):
        edges = edges[:]

    while edges:
        current_edge = edges.pop()
        vert_end, vert_start = current_edge.vertices[:]
        line_poly = [vert_start, vert_end]

        ok = True
        while ok:
            ok = False
            # for i, ed in enumerate(edges):
            i = len(edges)
            while i:
                i -= 1
                ed = edges[i]
                v1, v2 = ed.vertices
                if v1 == vert_end:
                    line_poly.append(v2)
                    vert_end = line_poly[-1]
                    ok = True
                    del edges[i]
                    # break
                elif v2 == vert_end:
                    line_poly.append(v1)
                    vert_end = line_poly[-1]
                    ok = True
                    del edges[i]
                    # break
                elif v1 == vert_start:
                    line_poly.insert(0, v2)
                    vert_start = line_poly[0]
                    ok = True
                    del edges[i]
                    # break
                elif v2 == vert_start:
                    line_poly.insert(0, v1)
                    vert_start = line_poly[0]
                    ok = True
                    del edges[i]
                    # break
        line_polys.append(line_poly)

    return line_polys
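
A short sketch of pulling loose-edge loops out of a mesh; filtering to wire edges first is an assumption about typical use:

    from bpy_extras import mesh_utils

    wire_edges = [ed for ed in mesh.edges if ed.is_loose]  # 'mesh' is a bpy.types.Mesh
    for loop in mesh_utils.edge_loops_from_edges(mesh, wire_edges):
        closed = (loop[0] == loop[-1])
        print("loop of %d verts, closed=%r" % (len(loop), closed))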


def ngon_tessellate(from_data, indices, fix_loops=True, debug_print=True):
    """
    Takes a polyline of indices (ngon) and returns a list of face
    index lists. Designed to be used for importers that need indices for an
    ngon to create from existing verts.

    :arg from_data: either a mesh, or a list/tuple of vectors.
    :type from_data: list or :class:`bpy.types.Mesh`
    :arg indices: a list of indices to use; this list
        is the ordered closed polyline
        to fill, and can be a subset of the data given.
    :type indices: list
    :arg fix_loops: If this is enabled, polylines
        that use loops to make multiple
        polylines are dealt with correctly.
    :type fix_loops: bool
    """

    from mathutils.geometry import tessellate_polygon
    from mathutils import Vector
    vector_to_tuple = Vector.to_tuple

    if not indices:
        return []

    def mlen(co):
        # Manhattan length of a vector, faster than length.
        return abs(co[0]) + abs(co[1]) + abs(co[2])

    def vert_from_vector_with_extra_data(v, i):
        # Calculate data per-vector, for reuse.
        return v, vector_to_tuple(v, 6), i, mlen(v)

    def ed_key_mlen(v1, v2):
        if v1[3] > v2[3]:
            return v2[1], v1[1]
        else:
            return v1[1], v2[1]

    if not fix_loops:
        # Normal single concave loop filling.

        if type(from_data) in {tuple, list}:
            verts = [Vector(from_data[i]) for ii, i in enumerate(indices)]
        else:
            verts = [from_data.vertices[i].co for ii, i in enumerate(indices)]

        # same as reversed(range(1, len(verts))):
        for i in range(len(verts) - 1, 0, -1):
            if verts[i][1] == verts[i - 1][0]:
                verts.pop(i - 1)

        fill = tessellate_polygon([verts])

    else:
        # Separate this loop into multiple loops by finding edges that are
        # used twice. This is used by LightWave LWO files a lot.

        if type(from_data) in {tuple, list}:
            verts = [
                vert_from_vector_with_extra_data(Vector(from_data[i]), ii)
                for ii, i in enumerate(indices)
            ]
        else:
            verts = [
                vert_from_vector_with_extra_data(from_data.vertices[i].co, ii)
                for ii, i in enumerate(indices)
            ]

        edges = [(i, i - 1) for i in range(len(verts))]
        if edges:
            edges[0] = (0, len(verts) - 1)

        if not verts:
            return []

        edges_used = set()
        edges_doubles = set()
        # We need to check if any edges are used twice, location based.
        for ed in edges:
            edkey = ed_key_mlen(verts[ed[0]], verts[ed[1]])
            if edkey in edges_used:
                edges_doubles.add(edkey)
            else:
                edges_used.add(edkey)

        # Store a list of unconnected loop segments split by double edges.
        # Will join later.
        loop_segments = []

        v_prev = verts[0]
        context_loop = [v_prev]
        loop_segments = [context_loop]

        for v in verts:
            if v != v_prev:
                # Are we crossing an edge we removed?
                if ed_key_mlen(v, v_prev) in edges_doubles:
                    context_loop = [v]
                    loop_segments.append(context_loop)
                else:
                    if context_loop and context_loop[-1][1] == v[1]:
                        pass
                    else:
                        context_loop.append(v)

                v_prev = v
        # Now join loop segments

        def join_seg(s1, s2):
            if s2[-1][1] == s1[0][1]:
                s1, s2 = s2, s1
            elif s1[-1][1] == s2[0][1]:
                pass
            else:
                return False

            # If we're still here, s1 and s2 are 2 segments in the same poly-line.
            s1.pop()  # remove the last vert from s1
            s1.extend(s2)  # add segment 2 to segment 1

            if s1[0][1] == s1[-1][1]:  # remove endpoints double
                s1.pop()

            del s2[:]  # Empty this segment s2, so we don't use it again.
            return True

        joining_segments = True
        while joining_segments:
            joining_segments = False
            segcount = len(loop_segments)

            for j in range(segcount - 1, -1, -1):  # reversed(range(segcount)):
                seg_j = loop_segments[j]
                if seg_j:
                    for k in range(j - 1, -1, -1):  # reversed(range(j)):
                        if not seg_j:
                            break
                        seg_k = loop_segments[k]

                        if seg_k and join_seg(seg_j, seg_k):
                            joining_segments = True

        loop_list = loop_segments

        for verts in loop_list:
            while verts and verts[0][1] == verts[-1][1]:
                verts.pop()

        loop_list = [verts for verts in loop_list if len(verts) > 2]
        # DONE DEALING WITH LOOP FIXING

        # vert mapping
        vert_map = [None] * len(indices)
        ii = 0
        for verts in loop_list:
            if len(verts) > 2:
                for i, vert in enumerate(verts):
                    vert_map[i + ii] = vert[2]
                ii += len(verts)

        fill = tessellate_polygon([[v[0] for v in loop] for loop in loop_list])
        # draw_loops(loop_list)
        # raise Exception("done loop")
        # map to original indices
        fill = [[vert_map[i] for i in f] for f in fill]

    if not fill:
        if debug_print:
            print('Warning: cannot scan-fill, fallback on a triangle fan.')
        fill = [[0, i - 1, i] for i in range(2, len(indices))]
    else:
        # Use real scan-fill.
        # See if it's flipped the wrong way.
        flip = None
        for fi in fill:
            if flip is not None:
                break
            for i, vi in enumerate(fi):
                if vi == 0 and fi[i - 1] == 1:
                    flip = False
                    break
                elif vi == 1 and fi[i - 1] == 0:
                    flip = True
                    break

        if not flip:
            for i, fi in enumerate(fill):
                fill[i] = tuple([ii for ii in reversed(fi)])

    return fill
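
A small sketch of feeding the tessellator raw coordinates; the unit square below is illustrative, any closed polyline works, and the exact triangle order in the output may differ:

    from bpy_extras import mesh_utils

    coords = [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (1.0, 1.0, 0.0), (0.0, 1.0, 0.0)]
    tris = mesh_utils.ngon_tessellate(coords, list(range(len(coords))))
    # e.g. [(0, 1, 2), (0, 2, 3)] -- indices into 'coords', ready for an importer's face list.
    print(tris)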


def triangle_random_points(num_points, loop_triangles):
    """
    Generates a list of random points over mesh loop triangles.

    :arg num_points: the number of random points to generate on each triangle.
    :type num_points: int
    :arg loop_triangles: list of the triangles to generate points on.
    :type loop_triangles: :class:`bpy.types.MeshLoopTriangle`, sequence
    :return: list of random points over all triangles.
    :rtype: list
    """

    from random import random

    # For each triangle, generate the required number of random points
    sampled_points = [None] * (num_points * len(loop_triangles))
    for i, lt in enumerate(loop_triangles):
        # Get triangle vertex coordinates
        verts = lt.id_data.vertices
        ltv = lt.vertices[:]
        tv = (verts[ltv[0]].co, verts[ltv[1]].co, verts[ltv[2]].co)

        for k in range(num_points):
            u1 = random()
            u2 = random()
            u_tot = u1 + u2

            if u_tot > 1:
                u1 = 1.0 - u1
                u2 = 1.0 - u2

            side1 = tv[1] - tv[0]
            side2 = tv[2] - tv[0]

            p = tv[0] + u1 * side1 + u2 * side2

            sampled_points[num_points * i + k] = p

    return sampled_points
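
Usage is straightforward once the mesh has its triangulation calculated; calc_loop_triangles() must run before loop_triangles is populated:

    from bpy_extras import mesh_utils

    mesh.calc_loop_triangles()  # 'mesh' is a bpy.types.Mesh
    points = mesh_utils.triangle_random_points(10, mesh.loop_triangles)
    print("sampled %d points, first:" % len(points), points[0])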
814
scripts/modules/bpy_extras/node_shader_utils.py
Normal file
@@ -0,0 +1,814 @@
# SPDX-License-Identifier: GPL-2.0-or-later

from mathutils import Color, Vector

__all__ = (
    "PrincipledBSDFWrapper",
)


def _set_check(func):
    from functools import wraps

    @wraps(func)
    def wrapper(self, *args, **kwargs):
        if self.is_readonly:
            assert not "Trying to set value to read-only shader!"
            return
        return func(self, *args, **kwargs)
    return wrapper


def rgb_to_rgba(rgb):
    return list(rgb) + [1.0]


def rgba_to_rgb(rgba):
    return Color((rgba[0], rgba[1], rgba[2]))


# All clamping values shall follow Blender's defined min/max (check the relevant node definition .c file).
def values_clamp(val, minv, maxv):
    if hasattr(val, "__iter__"):
        return tuple(max(minv, min(maxv, v)) for v in val)
    else:
        return max(minv, min(maxv, val))
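
values_clamp handles scalars and iterables uniformly; a quick illustration:

    from bpy_extras.node_shader_utils import values_clamp

    print(values_clamp(1.5, 0.0, 1.0))               # 1.0
    print(values_clamp((-0.2, 0.5, 2.0), 0.0, 1.0))  # (0.0, 0.5, 1.0)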


class ShaderWrapper():
    """
    Base class with minimal common ground for all types of shader interfaces we may want/need to implement.
    """

    # The two mandatory nodes any children class should support.
    NODES_LIST = (
        "node_out",

        "_node_texcoords",
    )

    __slots__ = (
        "is_readonly",
        "material",
        "_textures",
        "_grid_locations",
        *NODES_LIST,
    )

    _col_size = 300
    _row_size = 300

    def _grid_to_location(self, x, y, dst_node=None, ref_node=None):
        if ref_node is not None:  # x and y are relative to this node location.
            nx = round(ref_node.location.x / self._col_size)
            ny = round(ref_node.location.y / self._row_size)
            x += nx
            y += ny
        loc = None
        while True:
            loc = (x * self._col_size, y * self._row_size)
            if loc not in self._grid_locations:
                break
            loc = (x * self._col_size, (y - 1) * self._row_size)
            if loc not in self._grid_locations:
                break
            loc = (x * self._col_size, (y - 2) * self._row_size)
            if loc not in self._grid_locations:
                break
            x -= 1
        self._grid_locations.add(loc)
        if dst_node is not None:
            dst_node.location = loc
            dst_node.width = min(dst_node.width, self._col_size - 20)
        return loc

    def __init__(self, material, is_readonly=True, use_nodes=True):
        self.is_readonly = is_readonly
        self.material = material
        if not is_readonly:
            self.use_nodes = use_nodes
        self.update()

    def update(self):  # Should be re-implemented by children classes...
        for node in self.NODES_LIST:
            setattr(self, node, None)
        self._textures = {}
        self._grid_locations = set()

    def use_nodes_get(self):
        return self.material.use_nodes

    @_set_check
    def use_nodes_set(self, val):
        self.material.use_nodes = val
        self.update()

    use_nodes = property(use_nodes_get, use_nodes_set)

    def node_texcoords_get(self):
        if not self.use_nodes:
            return None
        if self._node_texcoords is ...:
            # Running only once, trying to find a valid texcoords node.
            for n in self.material.node_tree.nodes:
                if n.bl_idname == 'ShaderNodeTexCoord':
                    self._node_texcoords = n
                    self._grid_to_location(0, 0, ref_node=n)
                    break
            if self._node_texcoords is ...:
                self._node_texcoords = None
        if self._node_texcoords is None and not self.is_readonly:
            tree = self.material.node_tree
            nodes = tree.nodes
            # links = tree.links

            node_texcoords = nodes.new(type='ShaderNodeTexCoord')
            node_texcoords.label = "Texture Coords"
            self._grid_to_location(-5, 1, dst_node=node_texcoords)
            self._node_texcoords = node_texcoords
        return self._node_texcoords

    node_texcoords = property(node_texcoords_get)


class PrincipledBSDFWrapper(ShaderWrapper):
    """
    Hard coded shader setup, based on the Principled BSDF.
    Should cover most common cases on import, and gives basic nodal shader support for export.
    Supports basic: diffuse/spec/reflect/transparency/normal, with texturing.
    """
    NODES_LIST = (
        "node_out",
        "node_principled_bsdf",

        "_node_normalmap",
        "_node_texcoords",
    )

    __slots__ = (
        "is_readonly",
        "material",
        *NODES_LIST,
    )

    NODES_LIST = ShaderWrapper.NODES_LIST + NODES_LIST

    def __init__(self, material, is_readonly=True, use_nodes=True):
        super(PrincipledBSDFWrapper, self).__init__(material, is_readonly, use_nodes)

    def update(self):
        super(PrincipledBSDFWrapper, self).update()

        if not self.use_nodes:
            return

        tree = self.material.node_tree

        nodes = tree.nodes
        links = tree.links

        # --------------------------------------------------------------------
        # Main output and shader.
        node_out = None
        node_principled = None
        for n in nodes:
            if n.bl_idname == 'ShaderNodeOutputMaterial' and n.inputs[0].is_linked:
                node_out = n
                node_principled = n.inputs[0].links[0].from_node
            elif n.bl_idname == 'ShaderNodeBsdfPrincipled' and n.outputs[0].is_linked:
                node_principled = n
                for lnk in n.outputs[0].links:
                    node_out = lnk.to_node
                    if node_out.bl_idname == 'ShaderNodeOutputMaterial':
                        break
            if (
                    node_out is not None and node_principled is not None and
                    node_out.bl_idname == 'ShaderNodeOutputMaterial' and
                    node_principled.bl_idname == 'ShaderNodeBsdfPrincipled'
            ):
                break
            node_out = node_principled = None  # Could not find a valid pair, let's try again

        if node_out is not None:
            self._grid_to_location(0, 0, ref_node=node_out)
        elif not self.is_readonly:
            node_out = nodes.new(type='ShaderNodeOutputMaterial')
            node_out.label = "Material Out"
            node_out.target = 'ALL'
            self._grid_to_location(1, 1, dst_node=node_out)
        self.node_out = node_out

        if node_principled is not None:
            self._grid_to_location(0, 0, ref_node=node_principled)
        elif not self.is_readonly:
            node_principled = nodes.new(type='ShaderNodeBsdfPrincipled')
            node_principled.label = "Principled BSDF"
            self._grid_to_location(0, 1, dst_node=node_principled)
            # Link
            links.new(node_principled.outputs["BSDF"], self.node_out.inputs["Surface"])
        self.node_principled_bsdf = node_principled

        # --------------------------------------------------------------------
        # Normal Map, lazy initialization...
        self._node_normalmap = ...

        # --------------------------------------------------------------------
        # Tex Coords, lazy initialization...
        self._node_texcoords = ...

    def node_normalmap_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            return None
        node_principled = self.node_principled_bsdf
        if self._node_normalmap is ...:
            # Running only once, trying to find a valid normalmap node.
            if node_principled.inputs["Normal"].is_linked:
                node_normalmap = node_principled.inputs["Normal"].links[0].from_node
                if node_normalmap.bl_idname == 'ShaderNodeNormalMap':
                    self._node_normalmap = node_normalmap
                    self._grid_to_location(0, 0, ref_node=node_normalmap)
            if self._node_normalmap is ...:
                self._node_normalmap = None
        if self._node_normalmap is None and not self.is_readonly:
            tree = self.material.node_tree
            nodes = tree.nodes
            links = tree.links

            node_normalmap = nodes.new(type='ShaderNodeNormalMap')
            node_normalmap.label = "Normal/Map"
            self._grid_to_location(-1, -2, dst_node=node_normalmap, ref_node=node_principled)
            # Link
            links.new(node_normalmap.outputs["Normal"], node_principled.inputs["Normal"])
            self._node_normalmap = node_normalmap
        return self._node_normalmap

    node_normalmap = property(node_normalmap_get)

    # --------------------------------------------------------------------
    # Base Color.

    def base_color_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            return self.material.diffuse_color
        return rgba_to_rgb(self.node_principled_bsdf.inputs["Base Color"].default_value)

    @_set_check
    def base_color_set(self, color):
        color = values_clamp(color, 0.0, 1.0)
        color = rgb_to_rgba(color)
        self.material.diffuse_color = color
        if self.use_nodes and self.node_principled_bsdf is not None:
            self.node_principled_bsdf.inputs["Base Color"].default_value = color

    base_color = property(base_color_get, base_color_set)
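
A hedged sketch of the wrapper's read/write split: read-only for inspecting a material, writable when an importer builds one (the material names are illustrative):

    import bpy
    from bpy_extras.node_shader_utils import PrincipledBSDFWrapper

    # Reading: never creates nodes.
    mat = bpy.data.materials["Material"]  # assumes this material exists
    print(PrincipledBSDFWrapper(mat, is_readonly=True).base_color[:])

    # Writing: lazily creates the Principled BSDF / Output pair as needed.
    mat_new = bpy.data.materials.new("ImportedMaterial")
    wrap = PrincipledBSDFWrapper(mat_new, is_readonly=False)
    wrap.base_color = (0.8, 0.2, 0.1)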

    def base_color_texture_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            return None
        return ShaderImageTextureWrapper(
            self, self.node_principled_bsdf,
            self.node_principled_bsdf.inputs["Base Color"],
            grid_row_diff=1,
        )

    base_color_texture = property(base_color_texture_get)

    # --------------------------------------------------------------------
    # Specular.

    def specular_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            return self.material.specular_intensity
        return self.node_principled_bsdf.inputs["Specular"].default_value

    @_set_check
    def specular_set(self, value):
        value = values_clamp(value, 0.0, 1.0)
        self.material.specular_intensity = value
        if self.use_nodes and self.node_principled_bsdf is not None:
            self.node_principled_bsdf.inputs["Specular"].default_value = value

    specular = property(specular_get, specular_set)

    def specular_tint_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            return 0.0
        return self.node_principled_bsdf.inputs["Specular Tint"].default_value

    @_set_check
    def specular_tint_set(self, value):
        value = values_clamp(value, 0.0, 1.0)
        if self.use_nodes and self.node_principled_bsdf is not None:
            self.node_principled_bsdf.inputs["Specular Tint"].default_value = value

    specular_tint = property(specular_tint_get, specular_tint_set)

    # Will only be used as a gray-scale one...
    def specular_texture_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            print("NO NODES!")
            return None
        return ShaderImageTextureWrapper(
            self, self.node_principled_bsdf,
            self.node_principled_bsdf.inputs["Specular"],
            grid_row_diff=0,
            colorspace_name='Non-Color',
        )

    specular_texture = property(specular_texture_get)

    # --------------------------------------------------------------------
    # Roughness (also sort of inverse of specular hardness...).

    def roughness_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            return self.material.roughness
        return self.node_principled_bsdf.inputs["Roughness"].default_value

    @_set_check
    def roughness_set(self, value):
        value = values_clamp(value, 0.0, 1.0)
        self.material.roughness = value
        if self.use_nodes and self.node_principled_bsdf is not None:
            self.node_principled_bsdf.inputs["Roughness"].default_value = value

    roughness = property(roughness_get, roughness_set)

    # Will only be used as a gray-scale one...
    def roughness_texture_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            return None
        return ShaderImageTextureWrapper(
            self, self.node_principled_bsdf,
            self.node_principled_bsdf.inputs["Roughness"],
            grid_row_diff=0,
            colorspace_name='Non-Color',
        )

    roughness_texture = property(roughness_texture_get)

    # --------------------------------------------------------------------
    # Metallic (a.k.a. reflection, mirror).

    def metallic_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            return self.material.metallic
        return self.node_principled_bsdf.inputs["Metallic"].default_value

    @_set_check
    def metallic_set(self, value):
        value = values_clamp(value, 0.0, 1.0)
        self.material.metallic = value
        if self.use_nodes and self.node_principled_bsdf is not None:
            self.node_principled_bsdf.inputs["Metallic"].default_value = value

    metallic = property(metallic_get, metallic_set)

    # Will only be used as a gray-scale one...
    def metallic_texture_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            return None
        return ShaderImageTextureWrapper(
            self, self.node_principled_bsdf,
            self.node_principled_bsdf.inputs["Metallic"],
            grid_row_diff=0,
            colorspace_name='Non-Color',
        )

    metallic_texture = property(metallic_texture_get)

    # --------------------------------------------------------------------
    # Transparency settings.

    def ior_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            return 1.0
        return self.node_principled_bsdf.inputs["IOR"].default_value

    @_set_check
    def ior_set(self, value):
        value = values_clamp(value, 0.0, 1000.0)
        if self.use_nodes and self.node_principled_bsdf is not None:
            self.node_principled_bsdf.inputs["IOR"].default_value = value

    ior = property(ior_get, ior_set)

    # Will only be used as a gray-scale one...
    def ior_texture_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            return None
        return ShaderImageTextureWrapper(
            self, self.node_principled_bsdf,
            self.node_principled_bsdf.inputs["IOR"],
            grid_row_diff=-1,
            colorspace_name='Non-Color',
        )

    ior_texture = property(ior_texture_get)

    def transmission_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            return 0.0
        return self.node_principled_bsdf.inputs["Transmission"].default_value

    @_set_check
    def transmission_set(self, value):
        value = values_clamp(value, 0.0, 1.0)
        if self.use_nodes and self.node_principled_bsdf is not None:
            self.node_principled_bsdf.inputs["Transmission"].default_value = value

    transmission = property(transmission_get, transmission_set)

    # Will only be used as a gray-scale one...
    def transmission_texture_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            return None
        return ShaderImageTextureWrapper(
            self, self.node_principled_bsdf,
            self.node_principled_bsdf.inputs["Transmission"],
            grid_row_diff=-1,
            colorspace_name='Non-Color',
        )

    transmission_texture = property(transmission_texture_get)

    def alpha_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            return 1.0
        return self.node_principled_bsdf.inputs["Alpha"].default_value

    @_set_check
    def alpha_set(self, value):
        value = values_clamp(value, 0.0, 1.0)
        if self.use_nodes and self.node_principled_bsdf is not None:
            self.node_principled_bsdf.inputs["Alpha"].default_value = value

    alpha = property(alpha_get, alpha_set)

    # Will only be used as a gray-scale one...
    def alpha_texture_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            return None
        return ShaderImageTextureWrapper(
            self, self.node_principled_bsdf,
            self.node_principled_bsdf.inputs["Alpha"],
            use_alpha=True,
            grid_row_diff=-1,
            colorspace_name='Non-Color',
        )

    alpha_texture = property(alpha_texture_get)

    # --------------------------------------------------------------------
    # Emission color.

    def emission_color_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            return Color((0.0, 0.0, 0.0))
        return rgba_to_rgb(self.node_principled_bsdf.inputs["Emission"].default_value)

    @_set_check
    def emission_color_set(self, color):
        if self.use_nodes and self.node_principled_bsdf is not None:
            color = values_clamp(color, 0.0, 1000000.0)
            color = rgb_to_rgba(color)
            self.node_principled_bsdf.inputs["Emission"].default_value = color

    emission_color = property(emission_color_get, emission_color_set)

    def emission_color_texture_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            return None
        return ShaderImageTextureWrapper(
            self, self.node_principled_bsdf,
            self.node_principled_bsdf.inputs["Emission"],
            grid_row_diff=1,
        )

    emission_color_texture = property(emission_color_texture_get)

    def emission_strength_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            return 1.0
        return self.node_principled_bsdf.inputs["Emission Strength"].default_value

    @_set_check
    def emission_strength_set(self, value):
        value = values_clamp(value, 0.0, 1000000.0)
        if self.use_nodes and self.node_principled_bsdf is not None:
            self.node_principled_bsdf.inputs["Emission Strength"].default_value = value

    emission_strength = property(emission_strength_get, emission_strength_set)

    def emission_strength_texture_get(self):
        if not self.use_nodes or self.node_principled_bsdf is None:
            return None
        return ShaderImageTextureWrapper(
            self, self.node_principled_bsdf,
            self.node_principled_bsdf.inputs["Emission Strength"],
            grid_row_diff=-1,
            colorspace_name='Non-Color',
        )

    emission_strength_texture = property(emission_strength_texture_get)

    # --------------------------------------------------------------------
    # Normal map.

    def normalmap_strength_get(self):
        if not self.use_nodes or self.node_normalmap is None:
            return 0.0
        return self.node_normalmap.inputs["Strength"].default_value

    @_set_check
    def normalmap_strength_set(self, value):
        value = values_clamp(value, 0.0, 10.0)
        if self.use_nodes and self.node_normalmap is not None:
            self.node_normalmap.inputs["Strength"].default_value = value

    normalmap_strength = property(normalmap_strength_get, normalmap_strength_set)

    def normalmap_texture_get(self):
        if not self.use_nodes or self.node_normalmap is None:
            return None
        return ShaderImageTextureWrapper(
            self, self.node_normalmap,
            self.node_normalmap.inputs["Color"],
            grid_row_diff=-2,
            colorspace_is_data=True,
        )

    normalmap_texture = property(normalmap_texture_get)


class ShaderImageTextureWrapper():
    """
    Generic 'image texture'-like wrapper, handling the image node, some mapping (texture coordinate
    transformations), and the texture coordinates source.
    """

    # Note: this class assumes we are using nodes, otherwise it should never be used...

    NODES_LIST = (
        "node_dst",
        "socket_dst",

        "_node_image",
        "_node_mapping",
    )

    __slots__ = (
        "owner_shader",
        "is_readonly",
        "grid_row_diff",
        "use_alpha",
        "colorspace_is_data",
        "colorspace_name",
        *NODES_LIST,
    )

    def __new__(cls, owner_shader: ShaderWrapper, node_dst, socket_dst, *_args, **_kwargs):
        instance = owner_shader._textures.get((node_dst, socket_dst), None)
        if instance is not None:
            return instance
        instance = super(ShaderImageTextureWrapper, cls).__new__(cls)
        owner_shader._textures[(node_dst, socket_dst)] = instance
        return instance

    def __init__(self, owner_shader: ShaderWrapper, node_dst, socket_dst, grid_row_diff=0,
                 use_alpha=False, colorspace_is_data=..., colorspace_name=...):
        self.owner_shader = owner_shader
        self.is_readonly = owner_shader.is_readonly
        self.node_dst = node_dst
        self.socket_dst = socket_dst
        self.grid_row_diff = grid_row_diff
        self.use_alpha = use_alpha
        self.colorspace_is_data = colorspace_is_data
        self.colorspace_name = colorspace_name

        self._node_image = ...
        self._node_mapping = ...

        # tree = node_dst.id_data
        # nodes = tree.nodes
        # links = tree.links

        if socket_dst.is_linked:
            from_node = socket_dst.links[0].from_node
            if from_node.bl_idname == 'ShaderNodeTexImage':
                self._node_image = from_node

        if self.node_image is not None:
            socket_dst = self.node_image.inputs["Vector"]
            if socket_dst.is_linked:
                from_node = socket_dst.links[0].from_node
                if from_node.bl_idname == 'ShaderNodeMapping':
                    self._node_mapping = from_node

    def copy_from(self, tex):
        # Avoid generating any node in the source texture.
        is_readonly_back = tex.is_readonly
        tex.is_readonly = True

        if tex.node_image is not None:
            self.image = tex.image
            self.projection = tex.projection
            self.texcoords = tex.texcoords
            self.copy_mapping_from(tex)

        tex.is_readonly = is_readonly_back

    def copy_mapping_from(self, tex):
        # Avoid generating any node in the source texture.
        is_readonly_back = tex.is_readonly
        tex.is_readonly = True

        if tex.node_mapping is None:  # Used to actually remove the mapping node.
            if self.has_mapping_node():
                # We assume node_image can never be None in that case...
                # Find a potential existing link into the image's Vector input.
                socket_dst = socket_src = None
                if self.node_mapping.inputs["Vector"].is_linked:
                    socket_dst = self.node_image.inputs["Vector"]
                    socket_src = self.node_mapping.inputs["Vector"].links[0].from_socket

                tree = self.owner_shader.material.node_tree
                tree.nodes.remove(self.node_mapping)
                self._node_mapping = None

                # If previously existing, re-link texcoords -> image
                if socket_src is not None:
                    tree.links.new(socket_src, socket_dst)
        elif self.node_mapping is not None:
            self.translation = tex.translation
            self.rotation = tex.rotation
            self.scale = tex.scale

        tex.is_readonly = is_readonly_back

    # --------------------------------------------------------------------
    # Image.

    def node_image_get(self):
        if self._node_image is ...:
            # Running only once, trying to find a valid image node.
            if self.socket_dst.is_linked:
                node_image = self.socket_dst.links[0].from_node
                if node_image.bl_idname == 'ShaderNodeTexImage':
                    self._node_image = node_image
                    self.owner_shader._grid_to_location(0, 0, ref_node=node_image)
            if self._node_image is ...:
                self._node_image = None
        if self._node_image is None and not self.is_readonly:
            tree = self.owner_shader.material.node_tree

            node_image = tree.nodes.new(type='ShaderNodeTexImage')
            self.owner_shader._grid_to_location(
                -1, 0 + self.grid_row_diff,
                dst_node=node_image, ref_node=self.node_dst,
            )

            tree.links.new(node_image.outputs["Alpha" if self.use_alpha else "Color"], self.socket_dst)
            if self.use_alpha:
                self.owner_shader.material.blend_method = 'BLEND'

            self._node_image = node_image
        return self._node_image

    node_image = property(node_image_get)

    def image_get(self):
        return self.node_image.image if self.node_image is not None else None

    @_set_check
    def image_set(self, image):
        if self.colorspace_is_data is not ...:
            if image.colorspace_settings.is_data != self.colorspace_is_data and image.users >= 1:
                image = image.copy()
            image.colorspace_settings.is_data = self.colorspace_is_data
        if self.colorspace_name is not ...:
            if image.colorspace_settings.name != self.colorspace_name and image.users >= 1:
                image = image.copy()
            image.colorspace_settings.name = self.colorspace_name
        if self.use_alpha:
            # Try to be smart, and only use the image's alpha output if the image actually has alpha data.
            tree = self.owner_shader.material.node_tree
            if image.channels < 4 or image.depth in {24, 8}:
                tree.links.new(self.node_image.outputs["Color"], self.socket_dst)
            else:
                tree.links.new(self.node_image.outputs["Alpha"], self.socket_dst)
        self.node_image.image = image

    image = property(image_get, image_set)

    def projection_get(self):
        return self.node_image.projection if self.node_image is not None else 'FLAT'

    @_set_check
    def projection_set(self, projection):
        self.node_image.projection = projection

    projection = property(projection_get, projection_set)

    def texcoords_get(self):
        if self.node_image is not None:
            socket = (self.node_mapping if self.has_mapping_node() else self.node_image).inputs["Vector"]
            if socket.is_linked:
                return socket.links[0].from_socket.name
        return 'UV'

    @_set_check
    def texcoords_set(self, texcoords):
        # The image texture node already defaults to UVs, no extra node needed.
        # ONLY in case we do not have any texcoords mapping!!!
        if texcoords == 'UV' and not self.has_mapping_node():
            return
        tree = self.node_image.id_data
        links = tree.links
        node_dst = self.node_mapping if self.has_mapping_node() else self.node_image
        socket_src = self.owner_shader.node_texcoords.outputs[texcoords]
        links.new(socket_src, node_dst.inputs["Vector"])

    texcoords = property(texcoords_get, texcoords_set)

    def extension_get(self):
        return self.node_image.extension if self.node_image is not None else 'REPEAT'

    @_set_check
    def extension_set(self, extension):
        self.node_image.extension = extension

    extension = property(extension_get, extension_set)

    # --------------------------------------------------------------------
    # Mapping.

    def has_mapping_node(self):
        return self._node_mapping not in {None, ...}

    def node_mapping_get(self):
        if self._node_mapping is ...:
            # Running only once, trying to find a valid mapping node.
            if self.node_image is None:
                return None
            if self.node_image.inputs["Vector"].is_linked:
                node_mapping = self.node_image.inputs["Vector"].links[0].from_node
                if node_mapping.bl_idname == 'ShaderNodeMapping':
                    self._node_mapping = node_mapping
                    self.owner_shader._grid_to_location(0, 0 + self.grid_row_diff, ref_node=node_mapping)
            if self._node_mapping is ...:
                self._node_mapping = None
        if self._node_mapping is None and not self.is_readonly:
            # Find a potential existing link into the image's Vector input.
            socket_dst = self.node_image.inputs["Vector"]
            # If not already existing, we need to create the texcoords -> mapping link (from UV).
            socket_src = (socket_dst.links[0].from_socket if socket_dst.is_linked
                          else self.owner_shader.node_texcoords.outputs['UV'])

            tree = self.owner_shader.material.node_tree
            node_mapping = tree.nodes.new(type='ShaderNodeMapping')
            node_mapping.vector_type = 'TEXTURE'
            self.owner_shader._grid_to_location(-1, 0, dst_node=node_mapping, ref_node=self.node_image)

            # Link mapping -> image node.
            tree.links.new(node_mapping.outputs["Vector"], socket_dst)
            # Link texcoords -> mapping.
            tree.links.new(socket_src, node_mapping.inputs["Vector"])

            self._node_mapping = node_mapping
        return self._node_mapping

    node_mapping = property(node_mapping_get)

    def translation_get(self):
        if self.node_mapping is None:
            return Vector((0.0, 0.0, 0.0))
        return self.node_mapping.inputs['Location'].default_value

    @_set_check
    def translation_set(self, translation):
        self.node_mapping.inputs['Location'].default_value = translation

    translation = property(translation_get, translation_set)

    def rotation_get(self):
        if self.node_mapping is None:
            return Vector((0.0, 0.0, 0.0))
        return self.node_mapping.inputs['Rotation'].default_value

    @_set_check
    def rotation_set(self, rotation):
        self.node_mapping.inputs['Rotation'].default_value = rotation

    rotation = property(rotation_get, rotation_set)

    def scale_get(self):
        if self.node_mapping is None:
            return Vector((1.0, 1.0, 1.0))
        return self.node_mapping.inputs['Scale'].default_value

    @_set_check
    def scale_set(self, scale):
        self.node_mapping.inputs['Scale'].default_value = scale

    scale = property(scale_get, scale_set)
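
Putting the texture wrapper to work: a sketch of how an importer typically assigns an image through a sub-wrapper (the image path is a hypothetical example):

    import bpy
    from bpy_extras.node_shader_utils import PrincipledBSDFWrapper

    mat = bpy.data.materials.new("TexturedMaterial")
    wrap = PrincipledBSDFWrapper(mat, is_readonly=False)
    tex = wrap.base_color_texture  # creates the Image Texture node on demand
    tex.image = bpy.data.images.load("//textures/diffuse.png")  # hypothetical path
    tex.scale = (2.0, 2.0, 1.0)    # creates the Mapping node on demand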
14
scripts/modules/bpy_extras/node_utils.py
Normal file
@@ -0,0 +1,14 @@
# SPDX-License-Identifier: GPL-2.0-or-later

__all__ = (
    "find_node_input",
)


# XXX Names are not unique. Returns the first match.
def find_node_input(node, name):
    for input in node.inputs:
        if input.name == name:
            return input

    return None
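
For instance, UI code can use this to draw one named input of a shader node; a minimal sketch assuming 'mat' is a node-based material:

    from bpy_extras.node_utils import find_node_input

    node = mat.node_tree.get_output_node('EEVEE')
    surface_input = find_node_input(node, "Surface")
    if surface_input is not None:
        print(surface_input.is_linked)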
262
scripts/modules/bpy_extras/object_utils.py
Normal file
@@ -0,0 +1,262 @@
# SPDX-License-Identifier: GPL-2.0-or-later
from __future__ import annotations

__all__ = (
    "add_object_align_init",
    "object_data_add",
    "AddObjectHelper",
    "object_add_grid_scale",
    "object_add_grid_scale_apply_operator",
    "world_to_camera_view",
)


import bpy

from bpy.props import (
    FloatVectorProperty,
    EnumProperty,
)


def add_object_align_init(context, operator):
    """
    Return a matrix using the operator settings and view context.

    :arg context: The context to use.
    :type context: :class:`bpy.types.Context`
    :arg operator: The operator, checked for location and rotation properties.
    :type operator: :class:`bpy.types.Operator`
    :return: the matrix from the context and settings.
    :rtype: :class:`mathutils.Matrix`
    """

    from mathutils import Matrix, Vector
    properties = operator.properties if operator is not None else None

    space_data = context.space_data
    if space_data and space_data.type != 'VIEW_3D':
        space_data = None

    # location
    if operator and properties.is_property_set("location"):
        location = Matrix.Translation(Vector(properties.location))
    else:
        location = Matrix.Translation(context.scene.cursor.location)

        if operator:
            properties.location = location.to_translation()

    # rotation
    add_align_preference = context.preferences.edit.object_align
    if operator:
        if not properties.is_property_set("rotation"):
            # So one of "align" and "rotation" will be set
            properties.align = add_align_preference

        if properties.align == 'WORLD':
            rotation = properties.rotation.to_matrix().to_4x4()
        elif properties.align == 'VIEW':
            rotation = space_data.region_3d.view_matrix.to_3x3().inverted()
            rotation.resize_4x4()
            properties.rotation = rotation.to_euler()
        elif properties.align == 'CURSOR':
            rotation = context.scene.cursor.matrix
            rotation.col[3][0:3] = 0.0, 0.0, 0.0
            properties.rotation = rotation.to_euler()
        else:
            rotation = properties.rotation.to_matrix().to_4x4()
    else:
        if (add_align_preference == 'VIEW') and space_data:
            rotation = space_data.region_3d.view_matrix.to_3x3().inverted()
            rotation.resize_4x4()
        elif add_align_preference == 'CURSOR':
            rotation = context.scene.cursor.rotation_euler.to_matrix().to_4x4()
        else:
            rotation = Matrix()

    return location @ rotation


def object_data_add(context, obdata, operator=None, name=None):
    """
    Add an object using the view context and preference to initialize the
    location, rotation and layer.

    :arg context: The context to use.
    :type context: :class:`bpy.types.Context`
    :arg obdata: the data used for the new object.
    :type obdata: valid object data type or None.
    :arg operator: The operator, checked for location and rotation properties.
    :type operator: :class:`bpy.types.Operator`
    :arg name: Optional name
    :type name: string
    :return: the newly created object in the scene.
    :rtype: :class:`bpy.types.Object`
    """
    layer = context.view_layer
    layer_collection = context.layer_collection or layer.active_layer_collection
    scene_collection = layer_collection.collection

    for ob in layer.objects:
        ob.select_set(False)

    if name is None:
        name = "Object" if obdata is None else obdata.name

    obj_act = layer.objects.active
    obj_new = bpy.data.objects.new(name, obdata)
    scene_collection.objects.link(obj_new)
    obj_new.select_set(True)
    obj_new.matrix_world = add_object_align_init(context, operator)

    space_data = context.space_data
    if space_data and space_data.type != 'VIEW_3D':
        space_data = None

    if space_data:
        if space_data.local_view:
            obj_new.local_view_set(space_data, True)

    if obj_act and obj_act.mode == 'EDIT' and obj_act.type == obj_new.type:
        bpy.ops.mesh.select_all(action='DESELECT')
        obj_act.select_set(True)
        bpy.ops.object.mode_set(mode='OBJECT')

        obj_act.select_set(True)
        layer.update()  # apply location
        # layer.objects.active = obj_new

        # Match up UV layers, this is needed so adding an object with UVs
        # doesn't create new layers when there happens to be a naming mismatch.
        uv_new = obdata.uv_layers.active
        if uv_new is not None:
            uv_act = obj_act.data.uv_layers.active
            if uv_act is not None:
                uv_new.name = uv_act.name

        bpy.ops.object.join()  # join into the active.
        if obdata:
            bpy.data.meshes.remove(obdata)

        bpy.ops.object.mode_set(mode='EDIT')
    else:
        layer.objects.active = obj_new
        if context.preferences.edit.use_enter_edit_mode:
            if obdata and obdata.library is None:
                obtype = obj_new.type
                mode = None
                if obtype in {'ARMATURE', 'CURVE', 'CURVES', 'FONT', 'LATTICE', 'MESH', 'META', 'SURFACE'}:
                    mode = 'EDIT'
                elif obtype == 'GPENCIL':
                    mode = 'EDIT_GPENCIL'

                if mode is not None:
                    bpy.ops.object.mode_set(mode=mode)

    return obj_new
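
An operator's execute() typically funnels its freshly built mesh through this helper; a minimal sketch where the plane geometry is illustrative:

    import bpy
    from bpy_extras import object_utils

    def execute(self, context):
        mesh = bpy.data.meshes.new("Plane")
        mesh.from_pydata(
            [(-1, -1, 0), (1, -1, 0), (1, 1, 0), (-1, 1, 0)],  # verts
            [],                                                # edges (implied by the face)
            [(0, 1, 2, 3)],                                    # faces
        )
        object_utils.object_data_add(context, mesh, operator=self)
        return {'FINISHED'}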


class AddObjectHelper:
    def align_update_callback(self, _context):
        if self.align == 'WORLD':
            self.rotation.zero()

    align: EnumProperty(
        name="Align",
        items=(
            ('WORLD', "World", "Align the new object to the world"),
            ('VIEW', "View", "Align the new object to the view"),
            ('CURSOR', "3D Cursor", "Use the 3D cursor orientation for the new object"),
        ),
        default='WORLD',
        update=AddObjectHelper.align_update_callback,
    )
    location: FloatVectorProperty(
        name="Location",
        subtype='TRANSLATION',
    )
    rotation: FloatVectorProperty(
        name="Rotation",
        subtype='EULER',
    )

    @classmethod
    def poll(cls, context):
        return context.scene.library is None


def object_add_grid_scale(context):
    """
    Return the scale which should be applied on object
    data to align it to grid scale.
    """

    space_data = context.space_data

    if space_data and space_data.type == 'VIEW_3D':
        return space_data.overlay.grid_scale_unit

    return 1.0


def object_add_grid_scale_apply_operator(operator, context):
    """
    Scale an operator's distance values by the grid size.
    """
    # This is a Python version of the C function `WM_operator_view3d_unit_defaults`.
    grid_scale = object_add_grid_scale(context)

    properties = operator.properties
    properties_def = properties.bl_rna.properties
    for prop_id in properties_def.keys():
        if not properties.is_property_set(prop_id, ghost=False):
            prop_def = properties_def[prop_id]
            if prop_def.unit == 'LENGTH' and prop_def.subtype == 'DISTANCE':
                setattr(operator, prop_id,
                        getattr(operator, prop_id) * grid_scale)


def world_to_camera_view(scene, obj, coord):
    """
    Returns the camera space coords for a 3d point.
    (also known as: normalized device coordinates - NDC).

    Where (0, 0) is the bottom left and (1, 1)
    is the top right of the camera frame.
    Values outside 0-1 are also supported.
    A negative 'z' value means the point is behind the camera.

    Takes shift-x/y, lens angle and sensor size into account
    as well as perspective/ortho projections.

    :arg scene: Scene to use for frame size.
    :type scene: :class:`bpy.types.Scene`
    :arg obj: Camera object.
    :type obj: :class:`bpy.types.Object`
    :arg coord: World space location.
    :type coord: :class:`mathutils.Vector`
    :return: a vector where X and Y map to the view plane and
        Z is the depth on the view axis.
    :rtype: :class:`mathutils.Vector`
    """
    from mathutils import Vector

    co_local = obj.matrix_world.normalized().inverted() @ coord
    z = -co_local.z

    camera = obj.data
    frame = [v for v in camera.view_frame(scene=scene)[:3]]
    if camera.type != 'ORTHO':
        if z == 0.0:
            return Vector((0.5, 0.5, 0.0))
        else:
            frame = [-(v / (v.z / z)) for v in frame]

    min_x, max_x = frame[2].x, frame[1].x
    min_y, max_y = frame[1].y, frame[0].y

    x = (co_local.x - min_x) / (max_x - min_x)
    y = (co_local.y - min_y) / (max_y - min_y)

    return Vector((x, y, z))
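
For example, checking whether a world-space point lands inside the camera frame; a sketch assuming the scene has an active camera:

    import bpy
    from mathutils import Vector
    from bpy_extras.object_utils import world_to_camera_view

    scene = bpy.context.scene
    ndc = world_to_camera_view(scene, scene.camera, Vector((0.0, 0.0, 0.0)))
    visible = (0.0 <= ndc.x <= 1.0) and (0.0 <= ndc.y <= 1.0) and (ndc.z > 0.0)
    print("NDC:", ndc[:], "in frame:", visible)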
179
scripts/modules/bpy_extras/view3d_utils.py
Normal file
@@ -0,0 +1,179 @@
# SPDX-License-Identifier: GPL-2.0-or-later

__all__ = (
    "region_2d_to_vector_3d",
    "region_2d_to_origin_3d",
    "region_2d_to_location_3d",
    "location_3d_to_region_2d",
)


def region_2d_to_vector_3d(region, rv3d, coord):
    """
    Return a direction vector from the viewport at the specific 2d region
    coordinate.

    :arg region: region of the 3D viewport, typically bpy.context.region.
    :type region: :class:`bpy.types.Region`
    :arg rv3d: 3D region data, typically bpy.context.space_data.region_3d.
    :type rv3d: :class:`bpy.types.RegionView3D`
    :arg coord: 2d coordinates relative to the region:
        (event.mouse_region_x, event.mouse_region_y) for example.
    :type coord: 2d vector
    :return: normalized 3d vector.
    :rtype: :class:`mathutils.Vector`
    """
    from mathutils import Vector

    viewinv = rv3d.view_matrix.inverted()
    if rv3d.is_perspective:
        persinv = rv3d.perspective_matrix.inverted()

        out = Vector((
            (2.0 * coord[0] / region.width) - 1.0,
            (2.0 * coord[1] / region.height) - 1.0,
            -0.5
        ))

        w = out.dot(persinv[3].xyz) + persinv[3][3]

        view_vector = ((persinv @ out) / w) - viewinv.translation
    else:
        view_vector = -viewinv.col[2].xyz

    view_vector.normalize()

    return view_vector
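
In a modal operator this pairs with region_2d_to_origin_3d below to build a mouse ray; a sketch using names from a typical invoke/modal handler:

    from bpy_extras import view3d_utils

    def mouse_ray(context, event):
        region = context.region
        rv3d = context.space_data.region_3d
        coord = (event.mouse_region_x, event.mouse_region_y)
        origin = view3d_utils.region_2d_to_origin_3d(region, rv3d, coord)
        direction = view3d_utils.region_2d_to_vector_3d(region, rv3d, coord)
        return origin, direction  # e.g. feed into scene.ray_cast(depsgraph, origin, direction)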


def region_2d_to_origin_3d(region, rv3d, coord, *, clamp=None):
    """
    Return the 3d view origin from the region relative 2d coords.

    .. note::

        Orthographic views have a less obvious origin,
        the far clip is used to define the viewport near/far extents.
        Since far clip can be a very large value,
        the result may suffer from numeric precision issues.

        To avoid this problem, you can optionally clamp the far clip to a
        smaller value based on the data you're operating on.

    :arg region: region of the 3D viewport, typically bpy.context.region.
    :type region: :class:`bpy.types.Region`
    :arg rv3d: 3D region data, typically bpy.context.space_data.region_3d.
    :type rv3d: :class:`bpy.types.RegionView3D`
    :arg coord: 2d coordinates relative to the region;
        (event.mouse_region_x, event.mouse_region_y) for example.
    :type coord: 2d vector
    :arg clamp: Clamp the maximum far-clip value used.
        (negative value will move the offset away from the view_location)
    :type clamp: float or None
    :return: The origin of the viewpoint in 3d space.
    :rtype: :class:`mathutils.Vector`
    """
    viewinv = rv3d.view_matrix.inverted()

    if rv3d.is_perspective:
        origin_start = viewinv.translation.copy()
    else:
        persmat = rv3d.perspective_matrix.copy()
        dx = (2.0 * coord[0] / region.width) - 1.0
        dy = (2.0 * coord[1] / region.height) - 1.0
        persinv = persmat.inverted()
        origin_start = (
            (persinv.col[0].xyz * dx) +
            (persinv.col[1].xyz * dy) +
            persinv.translation
        )

        if clamp != 0.0:
            if rv3d.view_perspective != 'CAMERA':
                # this value is scaled to the far clip already
                origin_offset = persinv.col[2].xyz
                if clamp is not None:
                    if clamp < 0.0:
                        origin_offset.negate()
                        clamp = -clamp
                    if origin_offset.length > clamp:
                        origin_offset.length = clamp

                origin_start -= origin_offset

    return origin_start


def region_2d_to_location_3d(region, rv3d, coord, depth_location):
    """
    Return a 3d location from the region relative 2d coords, aligned with
    *depth_location*.

    :arg region: region of the 3D viewport, typically bpy.context.region.
    :type region: :class:`bpy.types.Region`
    :arg rv3d: 3D region data, typically bpy.context.space_data.region_3d.
    :type rv3d: :class:`bpy.types.RegionView3D`
    :arg coord: 2d coordinates relative to the region;
        (event.mouse_region_x, event.mouse_region_y) for example.
    :type coord: 2d vector
    :arg depth_location: the returned vectors depth is aligned with this since
        there is no defined depth with a 2d region input.
    :type depth_location: 3d vector
    :return: normalized 3d vector.
    :rtype: :class:`mathutils.Vector`
    """
    from mathutils import Vector

    coord_vec = region_2d_to_vector_3d(region, rv3d, coord)
    depth_location = Vector(depth_location)

    origin_start = region_2d_to_origin_3d(region, rv3d, coord)
    origin_end = origin_start + coord_vec

    if rv3d.is_perspective:
        from mathutils.geometry import intersect_line_plane
        viewinv = rv3d.view_matrix.inverted()
        view_vec = viewinv.col[2].copy()
        return intersect_line_plane(
            origin_start,
            origin_end,
            depth_location,
            view_vec, 1,
        )
    else:
        from mathutils.geometry import intersect_point_line
|
||||
return intersect_point_line(
|
||||
depth_location,
|
||||
origin_start,
|
||||
origin_end,
|
||||
)[0]
|
||||
|
||||
|
||||
def location_3d_to_region_2d(region, rv3d, coord, *, default=None):
|
||||
"""
|
||||
Return the *region* relative 2d location of a 3d position.
|
||||
|
||||
:arg region: region of the 3D viewport, typically bpy.context.region.
|
||||
:type region: :class:`bpy.types.Region`
|
||||
:arg rv3d: 3D region data, typically bpy.context.space_data.region_3d.
|
||||
:type rv3d: :class:`bpy.types.RegionView3D`
|
||||
:arg coord: 3d worldspace location.
|
||||
:type coord: 3d vector
|
||||
:arg default: Return this value if ``coord``
|
||||
is behind the origin of a perspective view.
|
||||
:return: 2d location
|
||||
:rtype: :class:`mathutils.Vector` or ``default`` argument.
|
||||
"""
|
||||
from mathutils import Vector
|
||||
|
||||
prj = rv3d.perspective_matrix @ Vector((coord[0], coord[1], coord[2], 1.0))
|
||||
if prj.w > 0.0:
|
||||
width_half = region.width / 2.0
|
||||
height_half = region.height / 2.0
|
||||
|
||||
return Vector((
|
||||
width_half + width_half * (prj.x / prj.w),
|
||||
height_half + height_half * (prj.y / prj.w),
|
||||
))
|
||||
else:
|
||||
return default
|
||||
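Taken together, the first two helpers are the standard way to turn a mouse position into a scene ray. A minimal sketch of how they combine inside a hypothetical operator's invoke(), using the standard scene.ray_cast API:

def invoke(self, context, event):  # hypothetical modal operator method
    from bpy_extras import view3d_utils

    region = context.region
    rv3d = context.region_data
    coord = (event.mouse_region_x, event.mouse_region_y)

    # Build a ray under the mouse cursor and cast it into the scene.
    direction = view3d_utils.region_2d_to_vector_3d(region, rv3d, coord)
    origin = view3d_utils.region_2d_to_origin_3d(region, rv3d, coord)
    depsgraph = context.evaluated_depsgraph_get()
    hit, location, normal, index, obj, matrix = context.scene.ray_cast(
        depsgraph, origin, direction)
    if hit:
        self.report({'INFO'}, "Hit %r at %s" % (obj.name, location))
    return {'FINISHED'}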
149 scripts/modules/bpy_extras/wm_utils/progress_report.py Normal file
@@ -0,0 +1,149 @@
# SPDX-License-Identifier: GPL-2.0-or-later

import time


class ProgressReport:
    """
    A basic 'progress report' using either simple prints in console, or WindowManager's 'progress' API.

    This object can be used as a context manager.

    It supports multiple levels of 'substeps' - you shall always enter at least one substep (because level 0
    has only one single step, representing the whole 'area' of the progress stuff).

    You should give the expected number of substeps each time you enter a new one (you may then step more or less than
    the given number, but this will give an incoherent progression).

    Leaving a substep automatically steps the parent level by one.

        with ProgressReport() as progress:  # Not giving a WindowManager here will default to console printing.
            progress.enter_substeps(10)
            for i in range(10):
                progress.enter_substeps(100)
                for j in range(100):
                    progress.step()
                progress.leave_substeps()  # No need to step here, this implicitly does it.
            progress.leave_substeps("Finished!")  # You may pass some message too.
    """
    __slots__ = ('wm', 'running', 'steps', 'curr_step', 'start_time')

    def __init__(self, wm=None):
        self_wm = getattr(self, 'wm', None)
        if self_wm:
            self.finalize()
        self.running = False

        self.wm = wm
        self.steps = [100000]
        self.curr_step = [0]

    initialize = __init__

    def __enter__(self):
        self.start_time = [time.time()]
        if self.wm:
            self.wm.progress_begin(0, self.steps[0])
        self.update()
        self.running = True
        return self

    def __exit__(self, exc_type=None, exc_value=None, traceback=None):
        self.running = False
        if self.wm:
            self.wm.progress_end()
            self.wm = None
        print("\n")
        self.steps = [100000]
        self.curr_step = [0]
        self.start_time = [time.time()]

    def start(self):
        self.__enter__()

    def finalize(self):
        self.__exit__()

    def update(self, msg=""):
        steps = sum(s * cs for (s, cs) in zip(self.steps, self.curr_step))
        steps_percent = steps / self.steps[0] * 100.0
        tm = time.time()
        loc_tm = tm - self.start_time[-1]
        tm -= self.start_time[0]
        if self.wm and self.running:
            self.wm.progress_update(steps)
        if msg:
            prefix = "  " * (len(self.steps) - 1)
            print(prefix + "(%8.4f sec | %8.4f sec) %s\nProgress: %6.2f%%\r" %
                  (tm, loc_tm, msg, steps_percent), end='')
        else:
            print("Progress: %6.2f%%\r" % (steps_percent,), end='')

    def enter_substeps(self, nbr, msg=""):
        if msg:
            self.update(msg)
        self.steps.append(self.steps[-1] / max(nbr, 1))
        self.curr_step.append(0)
        self.start_time.append(time.time())

    def step(self, msg="", nbr=1):
        self.curr_step[-1] += nbr
        self.update(msg)

    def leave_substeps(self, msg=""):
        if msg:
            self.update(msg)
        assert len(self.steps) > 1
        del self.steps[-1]
        del self.curr_step[-1]
        del self.start_time[-1]
        self.step()


class ProgressReportSubstep:
    """
    A sub-step context manager for ProgressReport.

    It can be used to generate other sub-step contexts too, and can act as a (limited) proxy of its real ProgressReport.

    Its exit method always ensures the ProgressReport is back at the 'level' it was at before entering this context.
    This means it is especially useful to ensure coherent behavior around code that could return/continue/break
    from many places, without having to bother to explicitly leave the substep in each and every possible place!

        with ProgressReport() as progress:  # Not giving a WindowManager here will default to console printing.
            with ProgressReportSubstep(progress, 10, final_msg="Finished!") as subprogress1:
                for i in range(10):
                    with ProgressReportSubstep(subprogress1, 100) as subprogress2:
                        for j in range(100):
                            subprogress2.step()
    """
    __slots__ = ('progress', 'nbr', 'msg', 'final_msg', 'level')

    def __init__(self, progress, nbr, msg="", final_msg=""):
        # Allows to generate a subprogress context handler from another one.
        progress = getattr(progress, 'progress', progress)

        self.progress = progress
        self.nbr = nbr
        self.msg = msg
        self.final_msg = final_msg

    def __enter__(self):
        self.level = len(self.progress.steps)
        self.progress.enter_substeps(self.nbr, self.msg)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        assert len(self.progress.steps) > self.level
        while len(self.progress.steps) > self.level + 1:
            self.progress.leave_substeps()
        self.progress.leave_substeps(self.final_msg)

    def enter_substeps(self, nbr, msg=""):
        self.progress.enter_substeps(nbr, msg)

    def step(self, msg="", nbr=1):
        self.progress.step(msg, nbr)

    def leave_substeps(self, msg=""):
        self.progress.leave_substeps(msg)
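Inside an operator, the same API drives the window-manager progress cursor instead of console prints. A sketch of a hypothetical export operator body (ProgressReport and ProgressReportSubstep as defined above; context.window_manager is standard bpy):

from bpy_extras.wm_utils.progress_report import (
    ProgressReport,
    ProgressReportSubstep,
)

def execute(self, context):  # hypothetical operator method
    objects = context.selected_objects
    with ProgressReport(context.window_manager) as progress:
        with ProgressReportSubstep(progress, max(len(objects), 1),
                                   final_msg="Export done") as sub:
            for ob in objects:
                # ... do the per-object work here ...
                sub.step("Exported %s" % ob.name)
    return {'FINISHED'}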
48 scripts/modules/bpy_restrict_state.py Normal file
@@ -0,0 +1,48 @@
# SPDX-License-Identifier: GPL-2.0-or-later

"""
This module contains the RestrictBlend context manager.
"""

__all__ = (
    "RestrictBlend",
)

import bpy as _bpy


class _RestrictContext:
    __slots__ = ()
    _real_data = _bpy.data
    # safe, the pointer never changes
    _real_pref = _bpy.context.preferences

    @property
    def window_manager(self):
        return self._real_data.window_managers[0]

    @property
    def preferences(self):
        return self._real_pref


class _RestrictData:
    __slots__ = ()


_context_restrict = _RestrictContext()
_data_restrict = _RestrictData()


class RestrictBlend:
    __slots__ = ("context", "data")

    def __enter__(self):
        self.data = _bpy.data
        self.context = _bpy.context
        _bpy.data = _data_restrict
        _bpy.context = _context_restrict

    def __exit__(self, type, value, traceback):
        _bpy.data = self.data
        _bpy.context = self.context
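Blender enters this context manager around script registration at startup, so registration code cannot rely on the current blend data. The effect is easy to observe (a sketch; the module imports as-is inside Blender):

import bpy
from bpy_restrict_state import RestrictBlend

with RestrictBlend():
    # bpy.data and bpy.context are swapped for restricted placeholders here,
    # so code in this block cannot depend on the loaded .blend file.
    print(type(bpy.data).__name__)   # _RestrictData
print(type(bpy.data).__name__)       # the real BlendData again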
1169 scripts/modules/bpy_types.py Normal file
File diff suppressed because it is too large
337 scripts/modules/console_python.py Normal file
@@ -0,0 +1,337 @@
# SPDX-License-Identifier: GPL-2.0-or-later
import sys
import bpy

language_id = "python"

# store our own __main__ module, not 100% needed
# but python expects this in some places
_BPY_MAIN_OWN = True


def add_scrollback(text, text_type):
    for l in text.split("\n"):
        bpy.ops.console.scrollback_append(text=l.replace("\t", "    "),
                                          type=text_type)


def replace_help(namespace):
    def _help(*args):
        # Because of how the console works, we need our own help() pager func.
        # Replace the bold function because it adds crazy chars.
        import pydoc
        pydoc.getpager = lambda: pydoc.plainpager
        pydoc.Helper.getline = lambda self, prompt: None
        pydoc.TextDoc.use_bold = lambda self, text: text

        pydoc.help(*args)

    namespace["help"] = _help


def get_console(console_id):
    """
    helper function for console operators
    currently each text data block gets its own
    console - code.InteractiveConsole()
    ...which is stored in this function.

    console_id can be any hashable type
    """
    from code import InteractiveConsole

    consoles = getattr(get_console, "consoles", None)
    hash_next = hash(bpy.context.window_manager)

    if consoles is None:
        consoles = get_console.consoles = {}
        get_console.consoles_namespace_hash = hash_next
    else:
        # check if clearing the namespace is needed to avoid a memory leak.
        # the window manager is normally loaded with new blend files
        # so this is a reasonable way to deal with namespace clearing.
        # bpy.data hashing is reset by undo so can't be used.
        hash_prev = getattr(get_console, "consoles_namespace_hash", 0)

        if hash_prev != hash_next:
            get_console.consoles_namespace_hash = hash_next
            consoles.clear()

    console_data = consoles.get(console_id)

    if console_data:
        console, stdout, stderr = console_data

        # XXX, bug in python 3.1.2, 3.2 ? (worked in 3.1.1)
        # seems there is no way to clear StringIO objects for writing, have to
        # make new ones each time.
        import io
        stdout = io.StringIO()
        stderr = io.StringIO()
    else:
        if _BPY_MAIN_OWN:
            import types
            bpy_main_mod = types.ModuleType("__main__")
            namespace = bpy_main_mod.__dict__
        else:
            namespace = {}

        namespace["__builtins__"] = sys.modules["builtins"]
        namespace["bpy"] = bpy

        # weak! - but highly convenient
        namespace["C"] = bpy.context
        namespace["D"] = bpy.data

        replace_help(namespace)

        console = InteractiveConsole(locals=namespace,
                                     filename="<blender_console>")

        console.push("from mathutils import *")
        console.push("from math import *")

        if _BPY_MAIN_OWN:
            console._bpy_main_mod = bpy_main_mod

        import io
        stdout = io.StringIO()
        stderr = io.StringIO()

    consoles[console_id] = console, stdout, stderr

    return console, stdout, stderr


# Both prompts must be the same length
PROMPT = '>>> '
PROMPT_MULTI = '... '


def execute(context, is_interactive):
    sc = context.space_data

    try:
        line_object = sc.history[-1]
    except:
        return {'CANCELLED'}

    console, stdout, stderr = get_console(hash(context.region))

    if _BPY_MAIN_OWN:
        main_mod_back = sys.modules["__main__"]
        sys.modules["__main__"] = console._bpy_main_mod

    # redirect output
    from contextlib import (
        redirect_stdout,
        redirect_stderr,
    )

    # not included with Python
    class redirect_stdin(redirect_stdout.__base__):
        _stream = "stdin"

    # don't allow the stdin to be used, can lock blender.
    with redirect_stdout(stdout), \
            redirect_stderr(stderr), \
            redirect_stdin(None):

        # in case an exception happens
        line = ""  # in case of encoding error
        is_multiline = False

        try:
            line = line_object.body

            # run the console, "\n" executes a multi line statement
            line_exec = line if line.strip() else "\n"

            is_multiline = console.push(line_exec)
        except:
            # unlikely, but this can happen with unicode errors for example.
            import traceback
            stderr.write(traceback.format_exc())

    if _BPY_MAIN_OWN:
        sys.modules["__main__"] = main_mod_back

    output = stdout.getvalue()
    output_err = stderr.getvalue()

    # cleanup
    sys.last_traceback = None

    # So we can reuse, clear all data
    stdout.truncate(0)
    stderr.truncate(0)

    # special exception. it's possible the command loaded a new user interface
    if hash(sc) != hash(context.space_data):
        return {'FINISHED'}

    bpy.ops.console.scrollback_append(text=sc.prompt + line, type='INPUT')

    if is_multiline:
        sc.prompt = PROMPT_MULTI
        if is_interactive:
            indent = line[:len(line) - len(line.lstrip())]
            if line.rstrip().endswith(":"):
                indent += "    "
        else:
            indent = ""
    else:
        sc.prompt = PROMPT
        indent = ""

    # insert a new blank line
    bpy.ops.console.history_append(text=indent, current_character=0,
                                   remove_duplicates=True)
    sc.history[-1].current_character = len(indent)

    # Insert the output into the editor
    # not quite correct because the order might have changed,
    # but ok 99% of the time.
    if output:
        add_scrollback(output, 'OUTPUT')
    if output_err:
        add_scrollback(output_err, 'ERROR')

    # execute any hooks
    for func, args in execute.hooks:
        func(*args)

    return {'FINISHED'}


execute.hooks = []


def autocomplete(context):
    from bl_console_utils.autocomplete import intellisense

    sc = context.space_data

    console = get_console(hash(context.region))[0]

    if not console:
        return {'CANCELLED'}

    # don't allow the stdin to be used, can lock blender.
    # note: unlikely stdin would be used for autocomplete, but it's possible.
    stdin_backup = sys.stdin
    sys.stdin = None

    scrollback = ""
    scrollback_error = ""

    if _BPY_MAIN_OWN:
        main_mod_back = sys.modules["__main__"]
        sys.modules["__main__"] = console._bpy_main_mod

    try:
        current_line = sc.history[-1]
        line = current_line.body

        # This function isn't aware of the text editor or being an operator
        # just does the autocomplete then copies its results back
        result = intellisense.expand(
            line=line,
            cursor=current_line.current_character,
            namespace=console.locals,
            private=bpy.app.debug_python)

        line_new = result[0]
        current_line.body, current_line.current_character, scrollback = result
        del result

        # update selection. setting body should really do this!
        ofs = len(line_new) - len(line)
        sc.select_start += ofs
        sc.select_end += ofs
    except:
        # unlikely, but this can happen with unicode errors for example,
        # or if the api attribute access itself causes an error.
        import traceback
        scrollback_error = traceback.format_exc()

    if _BPY_MAIN_OWN:
        sys.modules["__main__"] = main_mod_back

    # Separate autocomplete output by command prompts
    if scrollback != '':
        bpy.ops.console.scrollback_append(text=sc.prompt + current_line.body,
                                          type='INPUT')

    # Now we need to copy back the line from blender back into the
    # text editor. This will change when we don't use the text editor
    # anymore
    if scrollback:
        add_scrollback(scrollback, 'INFO')

    if scrollback_error:
        add_scrollback(scrollback_error, 'ERROR')

    # restore the stdin
    sys.stdin = stdin_backup

    context.area.tag_redraw()

    return {'FINISHED'}


def copy_as_script(context):
    sc = context.space_data
    lines = [
        "import bpy",
        "from bpy import data as D",
        "from bpy import context as C",
        "from mathutils import *",
        "from math import *",
        "",
    ]

    for line in sc.scrollback:
        text = line.body
        type = line.type

        if type == 'INFO':  # ignore autocomp.
            continue
        if type == 'INPUT':
            if text.startswith(PROMPT):
                text = text[len(PROMPT):]
            elif text.startswith(PROMPT_MULTI):
                text = text[len(PROMPT_MULTI):]
        elif type == 'OUTPUT':
            text = "#~ " + text
        elif type == 'ERROR':
            text = "#! " + text

        lines.append(text)

    context.window_manager.clipboard = "\n".join(lines)

    return {'FINISHED'}


def banner(context):
    sc = context.space_data
    version_string = sys.version.strip().replace('\n', ' ')

    message = (
        "PYTHON INTERACTIVE CONSOLE %s" % version_string,
        "",
        "Builtin Modules:       "
        "bpy, bpy.data, bpy.ops, bpy.props, bpy.types, bpy.context, bpy.utils, bgl, gpu, blf, mathutils",
        "Convenience Imports:   from mathutils import *; from math import *",
        "Convenience Variables: C = bpy.context, D = bpy.data",
        "",
    )

    for line in message:
        add_scrollback(line, 'OUTPUT')

    sc.prompt = PROMPT

    return {'FINISHED'}
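The execute.hooks list at the end of execute() is the module's extension point: any (function, args) pair appended to it runs after each console command. A hypothetical use, logging whenever a command finishes:

import console_python

def _on_console_command():  # hypothetical callback
    print("console command finished")

console_python.execute.hooks.append((_on_console_command, ()))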
61 scripts/modules/console_shell.py Normal file
@@ -0,0 +1,61 @@
# SPDX-License-Identifier: GPL-2.0-or-later
import os
import bpy

language_id = "shell"


def add_scrollback(text, text_type):
    for l in text.split("\n"):
        bpy.ops.console.scrollback_append(text=l.replace("\t", "    "),
                                          type=text_type)


def shell_run(text):
    import subprocess
    val, output = subprocess.getstatusoutput(text)

    if not val:
        style = 'OUTPUT'
    else:
        style = 'ERROR'

    add_scrollback(output, style)


PROMPT = "$ "


def execute(context, _is_interactive):
    sc = context.space_data

    try:
        line = sc.history[-1].body
    except:
        return {'CANCELLED'}

    bpy.ops.console.scrollback_append(text=sc.prompt + line, type='INPUT')

    shell_run(line)

    # insert a new blank line
    bpy.ops.console.history_append(text="", current_character=0,
                                   remove_duplicates=True)

    sc.prompt = os.getcwd() + PROMPT
    return {'FINISHED'}


def autocomplete(_context):
    # sc = context.space_data
    # TODO
    return {'CANCELLED'}


def banner(context):
    sc = context.space_data

    shell_run("bash --version")
    sc.prompt = os.getcwd() + PROMPT

    return {'FINISHED'}
6 scripts/modules/gpu_extras/__init__.py Normal file
@@ -0,0 +1,6 @@
# SPDX-License-Identifier: GPL-2.0-or-later

__all__ = (
    "batch",
    "presets",
)
76 scripts/modules/gpu_extras/batch.py Normal file
@@ -0,0 +1,76 @@
# SPDX-License-Identifier: GPL-2.0-or-later

__all__ = (
    "batch_for_shader",
)


def batch_for_shader(shader, type, content, *, indices=None):
    """
    Return a batch already configured and compatible with the shader.

    :arg shader: shader for which a compatible format will be computed.
    :type shader: :class:`gpu.types.GPUShader`
    :arg type: "'POINTS', 'LINES', 'TRIS' or 'LINES_ADJ'".
    :type type: str
    :arg content: Maps the name of the shader attribute to the data to fill the vertex buffer.
    :type content: dict
    :return: compatible batch
    :rtype: :class:`gpu.types.Batch`
    """
    from gpu.types import (
        GPUBatch,
        GPUIndexBuf,
        GPUVertBuf,
        GPUVertFormat,
    )

    def recommended_comp_type(attr_type):
        if attr_type in {'FLOAT', 'VEC2', 'VEC3', 'VEC4', 'MAT3', 'MAT4'}:
            return 'F32'
        if attr_type in {'UINT', 'UVEC2', 'UVEC3', 'UVEC4'}:
            return 'U32'
        # `attr_type` in {'INT', 'IVEC2', 'IVEC3', 'IVEC4', 'BOOL'}.
        return 'I32'

    def recommended_attr_len(attr_name):
        attr_len = 1
        try:
            item = content[attr_name][0]
            while True:
                attr_len *= len(item)
                item = item[0]
        except (TypeError, IndexError):
            pass
        return attr_len

    def recommended_fetch_mode(comp_type):
        if comp_type == 'F32':
            return 'FLOAT'
        return 'INT'

    for data in content.values():
        vbo_len = len(data)
        break
    else:
        raise ValueError("Empty 'content'")

    vbo_format = GPUVertFormat()
    attrs_info = shader.attrs_info_get()
    for name, attr_type in attrs_info:
        comp_type = recommended_comp_type(attr_type)
        attr_len = recommended_attr_len(name)
        vbo_format.attr_add(id=name, comp_type=comp_type, len=attr_len, fetch_mode=recommended_fetch_mode(comp_type))

    vbo = GPUVertBuf(vbo_format, vbo_len)

    for id, data in content.items():
        if len(data) != vbo_len:
            raise ValueError("Length mismatch for 'content' values")
        vbo.attr_fill(id, data)

    if indices is None:
        return GPUBatch(type=type, buf=vbo)
    else:
        ibo = GPUIndexBuf(type=type, seq=indices)
        return GPUBatch(type=type, buf=vbo, elem=ibo)
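batch_for_shader() is usually paired with a draw handler. A minimal sketch drawing a red line in the 3D viewport (standard gpu and bpy draw-handler API; the coordinates are arbitrary):

import bpy
import gpu
from gpu_extras.batch import batch_for_shader

coords = [(0, 0, 0), (1, 1, 1)]
shader = gpu.shader.from_builtin('UNIFORM_COLOR')
batch = batch_for_shader(shader, 'LINES', {"pos": coords})

def draw():
    shader.uniform_float("color", (1.0, 0.0, 0.0, 1.0))
    batch.draw(shader)

# Keep the handle around so the handler can be removed later.
handle = bpy.types.SpaceView3D.draw_handler_add(draw, (), 'WINDOW', 'POST_VIEW')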
91 scripts/modules/gpu_extras/presets.py Normal file
@@ -0,0 +1,91 @@
# SPDX-License-Identifier: GPL-2.0-or-later

def draw_circle_2d(position, color, radius, *, segments=None):
    """
    Draw a circle.

    :arg position: Position where the circle will be drawn.
    :type position: 2D Vector
    :arg color: Color of the circle. To use transparency GL_BLEND has to be enabled.
    :type color: tuple containing RGBA values
    :arg radius: Radius of the circle.
    :type radius: float
    :arg segments: How many segments will be used to draw the circle.
        Higher values give better results but the drawing will take longer.
        If None or not specified, an automatic value will be calculated.
    :type segments: int or None
    """
    from math import sin, cos, pi, ceil, acos
    import gpu
    from gpu.types import (
        GPUBatch,
        GPUVertBuf,
        GPUVertFormat,
    )

    if segments is None:
        max_pixel_error = 0.25  # TODO: multiply 0.5 by display dpi
        segments = int(ceil(pi / acos(1.0 - max_pixel_error / radius)))
        segments = max(segments, 8)
        segments = min(segments, 1000)

    if segments <= 0:
        raise ValueError("Amount of segments must be greater than 0.")

    with gpu.matrix.push_pop():
        gpu.matrix.translate(position)
        gpu.matrix.scale_uniform(radius)
        mul = (1.0 / (segments - 1)) * (pi * 2)
        verts = [(sin(i * mul), cos(i * mul)) for i in range(segments)]
        fmt = GPUVertFormat()
        pos_id = fmt.attr_add(id="pos", comp_type='F32', len=2, fetch_mode='FLOAT')
        vbo = GPUVertBuf(len=len(verts), format=fmt)
        vbo.attr_fill(id=pos_id, data=verts)
        batch = GPUBatch(type='LINE_STRIP', buf=vbo)
        shader = gpu.shader.from_builtin('UNIFORM_COLOR')
        batch.program_set(shader)
        shader.uniform_float("color", color)
        batch.draw()


def draw_texture_2d(texture, position, width, height):
    """
    Draw a 2d texture.

    :arg texture: GPUTexture to draw (e.g. gpu.texture.from_image(image) for :class:`bpy.types.Image`).
    :type texture: :class:`gpu.types.GPUTexture`
    :arg position: Position of the lower left corner.
    :type position: 2D Vector
    :arg width: Width of the image when drawn (not necessarily
        the original width of the texture).
    :type width: float
    :arg height: Height of the image when drawn.
    :type height: float
    """
    import gpu
    from . batch import batch_for_shader

    coords = ((0, 0), (1, 0), (1, 1), (0, 1))

    shader = gpu.shader.from_builtin('IMAGE')
    batch = batch_for_shader(
        shader, 'TRI_FAN',
        {"pos": coords, "texCoord": coords},
    )

    with gpu.matrix.push_pop():
        gpu.matrix.translate(position)
        gpu.matrix.scale((width, height))

        shader = gpu.shader.from_builtin('IMAGE')

        if isinstance(texture, int):
            # Call the legacy bgl to not break the existing API
            import bgl
            bgl.glActiveTexture(bgl.GL_TEXTURE0)
            bgl.glBindTexture(bgl.GL_TEXTURE_2D, texture)
            shader.uniform_int("image", 0)
        else:
            shader.uniform_sampler("image", texture)

        batch.draw(shader)
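The automatic segment count in draw_circle_2d() follows from the standard chord-error (sagitta) bound: a circle of radius r drawn with n straight segments deviates from the true circle by at most r * (1 - cos(pi / n)), so solving for an error of max_pixel_error pixels gives n = ceil(pi / acos(1 - max_pixel_error / r)). A quick check of the numbers (plain Python, no Blender required):

from math import pi, acos, ceil

radius = 50.0          # pixels, after gpu.matrix.scale_uniform(radius)
max_pixel_error = 0.25
segments = int(ceil(pi / acos(1.0 - max_pixel_error / radius)))
print(segments)  # 32; larger circles automatically get more segments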
191 scripts/modules/graphviz_export.py Normal file
@@ -0,0 +1,191 @@
# SPDX-License-Identifier: GPL-2.0-or-later

import bpy

header = '''
digraph ancestors {
graph [fontsize=30 labelloc="t" label="" splines=false overlap=true, rankdir=BT];
ratio = "auto" ;
'''

footer = '''
}
'''


def compat_str(text, line_length=0):

    if line_length:
        text_ls = []
        while len(text) > line_length:
            text_ls.append(text[:line_length])
            text = text[line_length:]

        if text:
            text_ls.append(text)
        text = '\n  '.join(text_ls)

    # text = text.replace('.', '.\n')
    # text = text.replace(']', ']\n')
    text = text.replace("\n", "\\n")
    text = text.replace('"', '\\"')
    return text


def graph_armature(obj, filepath, FAKE_PARENT=True, CONSTRAINTS=True, DRIVERS=True, XTRA_INFO=True):
    CONSTRAINTS = DRIVERS = True

    fileobject = open(filepath, "w")
    fw = fileobject.write
    fw(header)
    fw('label = "%s::%s" ;' % (bpy.data.filepath.split("/")[-1].split("\\")[-1], obj.name))

    arm = obj.data

    bones = [bone.name for bone in arm.bones]
    bones.sort()
    print("")
    for bone in bones:
        b = arm.bones[bone]
        print(">>", bone, ["*>", "->"][b.use_connect], getattr(getattr(b, "parent", ""), "name", ""))
        label = [bone]
        bone = arm.bones[bone]

        for key, value in obj.pose.bones[bone.name].items():
            if key.startswith("_"):
                continue

            if type(value) == float:
                value = "%.3f" % value
            elif type(value) == str:
                value = compat_str(value)

            label.append("%s = %s" % (key, value))

        opts = [
            "shape=box",
            "regular=1",
            "style=filled",
            "fixedsize=false",
            'label="%s"' % compat_str('\n'.join(label)),
        ]

        if bone.name.startswith('ORG'):
            opts.append("fillcolor=yellow")
        else:
            opts.append("fillcolor=white")

        fw('"%s" [%s];\n' % (bone.name, ','.join(opts)))

    fw('\n\n# Hierarchy:\n')

    # Root node.
    if FAKE_PARENT:
        fw('"Object::%s" [];\n' % obj.name)

    for bone in bones:
        bone = arm.bones[bone]

        parent = bone.parent
        if parent:
            parent_name = parent.name
            connected = bone.use_connect
        elif FAKE_PARENT:
            parent_name = 'Object::%s' % obj.name
            connected = False
        else:
            continue

        opts = ["dir=forward", "weight=2", "arrowhead=normal"]
        if not connected:
            opts.append("style=dotted")

        fw('"%s" -> "%s" [%s] ;\n' % (bone.name, parent_name, ','.join(opts)))
    del bone

    # constraints
    if CONSTRAINTS:
        fw('\n\n# Constraints:\n')
        for bone in bones:
            pbone = obj.pose.bones[bone]
            # must be ordered
            for constraint in pbone.constraints:
                subtarget = getattr(constraint, "subtarget", "")
                if subtarget:
                    # TODO, not internal links
                    opts = [
                        'dir=forward',
                        "weight=1",
                        "arrowhead=normal",
                        "arrowtail=none",
                        "constraint=false",
                        'color="red"',
                        'labelfontsize=4',
                    ]
                    if XTRA_INFO:
                        label = "%s\n%s" % (constraint.type, constraint.name)
                        opts.append('label="%s"' % compat_str(label))
                    fw('"%s" -> "%s" [%s] ;\n' % (pbone.name, subtarget, ','.join(opts)))

    # Drivers
    if DRIVERS:
        fw('\n\n# Drivers:\n')

        def rna_path_as_pbone(rna_path):
            if not rna_path.startswith("pose.bones["):
                return None

            # rna_path_bone = rna_path[:rna_path.index("]") + 1]
            # return obj.path_resolve(rna_path_bone)
            bone_name = rna_path.split("[")[1].split("]")[0]
            return obj.pose.bones[bone_name[1:-1]]

        animation_data = obj.animation_data
        if animation_data:

            fcurve_drivers = [fcurve_driver for fcurve_driver in animation_data.drivers]
            fcurve_drivers.sort(key=lambda fcurve_driver: fcurve_driver.data_path)

            for fcurve_driver in fcurve_drivers:
                rna_path = fcurve_driver.data_path
                pbone = rna_path_as_pbone(rna_path)

                if pbone:
                    for var in fcurve_driver.driver.variables:
                        for target in var.targets:
                            pbone_target = rna_path_as_pbone(target.data_path)
                            rna_path_target = target.data_path
                            if pbone_target:
                                opts = [
                                    'dir=forward',
                                    "weight=1",
                                    "arrowhead=normal",
                                    "arrowtail=none",
                                    "constraint=false",
                                    'color="blue"',
                                    "labelfontsize=4",
                                ]
                                display_source = rna_path.replace("pose.bones", "")
                                display_target = rna_path_target.replace("pose.bones", "")
                                if XTRA_INFO:
                                    label = "%s\\n%s" % (display_source, display_target)
                                    opts.append('label="%s"' % compat_str(label))
                                fw('"%s" -> "%s" [%s] ;\n' % (pbone_target.name, pbone.name, ','.join(opts)))

    fw(footer)
    fileobject.close()

    '''
    print(".", end="")
    import sys
    sys.stdout.flush()
    '''
    print("\nSaved:", filepath)
    return True


if __name__ == "__main__":
    import os
    tmppath = "/tmp/test.dot"
    graph_armature(bpy.context.object, tmppath, CONSTRAINTS=True, DRIVERS=True)
    os.system("dot -Tpng %s > %s; eog %s &" % (tmppath, tmppath + '.png', tmppath + '.png'))
281 scripts/modules/keyingsets_utils.py Normal file
@@ -0,0 +1,281 @@
# SPDX-License-Identifier: GPL-2.0-or-later

# This file defines a set of methods that are useful for various
# Relative Keying Set (RKS) related operations, such as: callbacks
# for polling, iterator callbacks, and also 'generate' callbacks.
# All of these can be used in conjunction with the others.

__all__ = (
    "path_add_property",
    "RKS_POLL_selected_objects",
    "RKS_POLL_selected_bones",
    "RKS_POLL_selected_items",
    "RKS_ITER_selected_objects",
    "RKS_ITER_selected_bones",
    "RKS_ITER_selected_item",
    "RKS_GEN_available",
    "RKS_GEN_location",
    "RKS_GEN_rotation",
    "RKS_GEN_scaling",
    "RKS_GEN_bendy_bones",
)

import bpy

###########################
# General Utilities


# Append the specified property name onto the existing path
def path_add_property(path, prop):
    if path:
        return path + "." + prop
    else:
        return prop

###########################
# Poll Callbacks


# selected objects (active object must be in object mode)
def RKS_POLL_selected_objects(_ksi, context):
    ob = context.active_object
    if ob:
        return ob.mode == 'OBJECT'
    else:
        return bool(context.selected_objects)


# selected bones
def RKS_POLL_selected_bones(_ksi, context):
    # we must be in Pose Mode, and there must be some bones selected
    ob = context.active_object
    if ob and ob.mode == 'POSE':
        if context.active_pose_bone or context.selected_pose_bones:
            return True

    # nothing selected
    return False


# selected bones or objects
def RKS_POLL_selected_items(ksi, context):
    return (RKS_POLL_selected_bones(ksi, context) or
            RKS_POLL_selected_objects(ksi, context))

###########################
# Iterator Callbacks


# All selected objects or pose bones, depending on which we've got.
def RKS_ITER_selected_item(ksi, context, ks):
    ob = context.active_object
    if ob and ob.mode == 'POSE':
        for bone in context.selected_pose_bones:
            ksi.generate(context, ks, bone)
    else:
        for ob in context.selected_objects:
            ksi.generate(context, ks, ob)


# All selected objects only.
def RKS_ITER_selected_objects(ksi, context, ks):
    for ob in context.selected_objects:
        ksi.generate(context, ks, ob)


# All selected bones only.
def RKS_ITER_selected_bones(ksi, context, ks):
    for bone in context.selected_pose_bones:
        ksi.generate(context, ks, bone)

###########################
# Generate Callbacks


# 'Available' F-Curves
def RKS_GEN_available(_ksi, _context, ks, data):
    # try to get the animation data associated with the closest
    # ID-block to the data (neither of which may exist/be easy to find)
    id_block = data.id_data
    adt = getattr(id_block, "animation_data", None)

    # there must also be an active action...
    if adt is None or adt.action is None:
        return

    # if we haven't got an ID-block as 'data', try to restrict
    # paths added to only those which branch off from here
    # i.e. for bones
    if id_block != data:
        basePath = data.path_from_id()
    else:
        basePath = None  # this is not needed...

    # for each F-Curve, include a path to key it
    # NOTE: we don't need to set the group settings here
    for fcu in adt.action.fcurves:
        if basePath:
            if basePath in fcu.data_path:
                ks.paths.add(id_block, fcu.data_path, index=fcu.array_index)
        else:
            ks.paths.add(id_block, fcu.data_path, index=fcu.array_index)

# ------


# get ID block and based ID path for transform generators
# private function
def get_transform_generators_base_info(data):
    # ID-block for the data
    id_block = data.id_data

    # get base path and grouping method/name
    if isinstance(data, bpy.types.ID):
        # no path in this case
        path = ""

        # transform data on ID-blocks directly should get grouped under a
        # hardcoded label ("Object Transforms") so that they get grouped
        # consistently when keyframed directly
        grouping = "Object Transforms"
    else:
        # get the path to the ID-block
        path = data.path_from_id()

        # try to use the name of the data element to group the F-Curve
        # else fallback on the KeyingSet name
        grouping = getattr(data, "name", None)

    # return the ID-block and the path
    return id_block, path, grouping


# Location
def RKS_GEN_location(_ksi, _context, ks, data):
    # get id-block and path info
    id_block, base_path, grouping = get_transform_generators_base_info(data)

    # add the property name to the base path
    path = path_add_property(base_path, "location")

    # add Keying Set entry for this...
    if grouping:
        ks.paths.add(id_block, path, group_method='NAMED', group_name=grouping)
    else:
        ks.paths.add(id_block, path)


# Rotation
def RKS_GEN_rotation(_ksi, _context, ks, data):
    # get id-block and path info
    id_block, base_path, grouping = get_transform_generators_base_info(data)

    # add the property name to the base path
    # rotation mode affects the property used
    if data.rotation_mode == 'QUATERNION':
        path = path_add_property(base_path, "rotation_quaternion")
    elif data.rotation_mode == 'AXIS_ANGLE':
        path = path_add_property(base_path, "rotation_axis_angle")
    else:
        path = path_add_property(base_path, "rotation_euler")

    # add Keying Set entry for this...
    if grouping:
        ks.paths.add(id_block, path, group_method='NAMED', group_name=grouping)
    else:
        ks.paths.add(id_block, path)


# Scaling
def RKS_GEN_scaling(_ksi, _context, ks, data):
    # get id-block and path info
    id_block, base_path, grouping = get_transform_generators_base_info(data)

    # add the property name to the base path
    path = path_add_property(base_path, "scale")

    # add Keying Set entry for this...
    if grouping:
        ks.paths.add(id_block, path, group_method='NAMED', group_name=grouping)
    else:
        ks.paths.add(id_block, path)


# Custom Properties
def RKS_GEN_custom_props(_ksi, _context, ks, data):
    # get id-block and path info
    id_block, base_path, grouping = get_transform_generators_base_info(data)

    # Only some RNA types can be animated.
    prop_type_compat = {bpy.types.BoolProperty,
                        bpy.types.IntProperty,
                        bpy.types.FloatProperty}

    # When working with a pose, 'id_block' is the armature object (which should
    # get the animation data), whereas 'data' is the bone being keyed.
    for cprop_name in data.keys():
        # ignore special "_RNA_UI" used for UI editing
        if cprop_name == "_RNA_UI":
            continue

        prop_path = '["%s"]' % bpy.utils.escape_identifier(cprop_name)

        try:
            rna_property = data.path_resolve(prop_path, False)
        except ValueError:
            # Can technically happen, but there is no known case.
            continue
        if rna_property is None:
            # In this case the property cannot be converted to an
            # FCurve-compatible value, so we can't keyframe it anyways.
            continue
        if rna_property.rna_type not in prop_type_compat:
            continue

        path = "%s%s" % (base_path, prop_path)
        if grouping:
            ks.paths.add(id_block, path, group_method='NAMED', group_name=grouping)
        else:
            ks.paths.add(id_block, path)

# ------


# Property identifiers for Bendy Bones
bbone_property_ids = (
    "bbone_curveinx",
    "bbone_curveiny",
    "bbone_curveoutx",
    "bbone_curveouty",

    "bbone_rollin",
    "bbone_rollout",

    "bbone_scalein",
    "bbone_scaleout",

    # NOTE: These are in the nested bone struct
    # Do it this way to force them to be included
    # in whatever actions are being keyed here
    "bone.bbone_in",
    "bone.bbone_out",
)


# Add Keying Set entries for bendy bones
def RKS_GEN_bendy_bones(_ksi, _context, ks, data):
    # get id-block and path info
    # NOTE: This assumes that we're dealing with a bone here...
    id_block, base_path, grouping = get_transform_generators_base_info(data)

    # for each of the bendy bone properties, add a Keying Set entry for it...
    for propname in bbone_property_ids:
        # add the property name to the base path
        path = path_add_property(base_path, propname)

        # add Keying Set entry for this...
        if grouping:
            ks.paths.add(id_block, path, group_method='NAMED', group_name=grouping)
        else:
            ks.paths.add(id_block, path)
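These callbacks are meant to be assigned onto a bpy.types.KeyingSetInfo subclass. A sketch of a custom keying set wired up from them (the class and its idname are hypothetical; the registration call is the standard bpy API):

import bpy
import keyingsets_utils

class HYPOTHETICAL_KSI_loc_scale(bpy.types.KeyingSetInfo):
    bl_idname = "hypothetical_loc_scale"
    bl_label = "Location & Scale (Example)"

    # active when there are selected objects or pose bones
    poll = keyingsets_utils.RKS_POLL_selected_items
    # call generate() once per selected object/bone
    iterator = keyingsets_utils.RKS_ITER_selected_item

    def generate(self, context, ks, data):
        keyingsets_utils.RKS_GEN_location(self, context, ks, data)
        keyingsets_utils.RKS_GEN_scaling(self, context, ks, data)

bpy.utils.register_class(HYPOTHETICAL_KSI_loc_scale)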
164 scripts/modules/nodeitems_utils.py Normal file
@@ -0,0 +1,164 @@
# SPDX-License-Identifier: GPL-2.0-or-later
import bpy


class NodeCategory:
    @classmethod
    def poll(cls, _context):
        return True

    def __init__(self, identifier, name, *, description="", items=None):
        self.identifier = identifier
        self.name = name
        self.description = description

        if items is None:
            self.items = lambda context: []
        elif callable(items):
            self.items = items
        else:
            def items_gen(context):
                for item in items:
                    if item.poll is None or context is None or item.poll(context):
                        yield item
            self.items = items_gen


class NodeItem:
    def __init__(self, nodetype, *, label=None, settings=None, poll=None):

        if settings is None:
            settings = {}

        self.nodetype = nodetype
        self._label = label
        self.settings = settings
        self.poll = poll

    @property
    def label(self):
        if self._label:
            return self._label
        else:
            # if no custom label is defined, fall back to the node type UI name
            bl_rna = bpy.types.Node.bl_rna_get_subclass(self.nodetype)
            if bl_rna is not None:
                return bl_rna.name
            else:
                return "Unknown"

    @property
    def translation_context(self):
        if self._label:
            return bpy.app.translations.contexts.default
        else:
            # if no custom label is defined, fall back to the node type UI name
            bl_rna = bpy.types.Node.bl_rna_get_subclass(self.nodetype)
            if bl_rna is not None:
                return bl_rna.translation_context
            else:
                return bpy.app.translations.contexts.default

    # NOTE: is a staticmethod because called with an explicit self argument
    # NodeItemCustom sets this as a variable attribute in __init__
    @staticmethod
    def draw(self, layout, _context):
        props = layout.operator("node.add_node", text=self.label, text_ctxt=self.translation_context)
        props.type = self.nodetype
        props.use_transform = True

        for setting in self.settings.items():
            ops = props.settings.add()
            ops.name = setting[0]
            ops.value = setting[1]


class NodeItemCustom:
    def __init__(self, *, poll=None, draw=None):
        self.poll = poll
        self.draw = draw


_node_categories = {}


def register_node_categories(identifier, cat_list):
    if identifier in _node_categories:
        raise KeyError("Node categories list '%s' already registered" % identifier)

    # works as draw function for menus
    def draw_node_item(self, context):
        layout = self.layout
        col = layout.column(align=True)
        for item in self.category.items(context):
            item.draw(item, col, context)

    menu_types = []
    for cat in cat_list:
        menu_type = type("NODE_MT_category_" + cat.identifier, (bpy.types.Menu,), {
            "bl_space_type": 'NODE_EDITOR',
            "bl_label": cat.name,
            "category": cat,
            "poll": cat.poll,
            "draw": draw_node_item,
        })

        menu_types.append(menu_type)

        bpy.utils.register_class(menu_type)

    def draw_add_menu(self, context):
        layout = self.layout

        for cat in cat_list:
            if cat.poll(context):
                layout.menu("NODE_MT_category_%s" % cat.identifier)

    # stores: (categories list, menu draw function, submenu types)
    _node_categories[identifier] = (cat_list, draw_add_menu, menu_types)


def node_categories_iter(context):
    for cat_type in _node_categories.values():
        for cat in cat_type[0]:
            if cat.poll and ((context is None) or cat.poll(context)):
                yield cat


def has_node_categories(context):
    for cat_type in _node_categories.values():
        for cat in cat_type[0]:
            if cat.poll and ((context is None) or cat.poll(context)):
                return True
    return False


def node_items_iter(context):
    for cat in node_categories_iter(context):
        for item in cat.items(context):
            yield item


def unregister_node_cat_types(cats):
    for mt in cats[2]:
        bpy.utils.unregister_class(mt)


def unregister_node_categories(identifier=None):
    # unregister existing UI classes
    if identifier:
        cat_types = _node_categories.get(identifier, None)
        if cat_types:
            unregister_node_cat_types(cat_types)
        del _node_categories[identifier]

    else:
        for cat_types in _node_categories.values():
            unregister_node_cat_types(cat_types)
        _node_categories.clear()


def draw_node_categories_menu(self, context):
    for cats in _node_categories.values():
        cats[1](self, context)
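End to end, an add-on uses the module like this (a sketch; 'CustomNodeType' is a hypothetical node idname, the rest is the module's own API):

import bpy
import nodeitems_utils
from nodeitems_utils import NodeCategory, NodeItem

class MyNodeCategory(NodeCategory):
    @classmethod
    def poll(cls, context):
        # only show these entries in the shader editor
        return context.space_data.tree_type == 'ShaderNodeTree'

node_categories = [
    MyNodeCategory('MY_NODES', "My Nodes", items=[
        NodeItem("CustomNodeType"),  # hypothetical custom node idname
    ]),
]

def register():
    nodeitems_utils.register_node_categories('MY_CUSTOM_NODES', node_categories)

def unregister():
    nodeitems_utils.unregister_node_categories('MY_CUSTOM_NODES')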
902 scripts/modules/rna_info.py Normal file
@@ -0,0 +1,902 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
# classes for extracting info from blenders internal classes
|
||||
|
||||
import bpy
|
||||
|
||||
# use to strip python paths
|
||||
script_paths = bpy.utils.script_paths()
|
||||
|
||||
_FAKE_STRUCT_SUBCLASS = True
|
||||
|
||||
|
||||
def _get_direct_attr(rna_type, attr):
|
||||
props = getattr(rna_type, attr)
|
||||
base = rna_type.base
|
||||
|
||||
if not base:
|
||||
return [prop for prop in props]
|
||||
else:
|
||||
props_base = getattr(base, attr).values()
|
||||
return [prop for prop in props if prop not in props_base]
|
||||
|
||||
|
||||
def get_direct_properties(rna_type):
|
||||
return _get_direct_attr(rna_type, "properties")
|
||||
|
||||
|
||||
def get_direct_functions(rna_type):
|
||||
return _get_direct_attr(rna_type, "functions")
|
||||
|
||||
|
||||
def rna_id_ignore(rna_id):
|
||||
if rna_id == "rna_type":
|
||||
return True
|
||||
|
||||
if "_OT_" in rna_id:
|
||||
return True
|
||||
if "_MT_" in rna_id:
|
||||
return True
|
||||
if "_PT_" in rna_id:
|
||||
return True
|
||||
if "_HT_" in rna_id:
|
||||
return True
|
||||
if "_KSI_" in rna_id:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def range_str(val):
|
||||
if val < -10000000:
|
||||
return "-inf"
|
||||
elif val > 10000000:
|
||||
return "inf"
|
||||
elif type(val) == float:
|
||||
return '%g' % val
|
||||
else:
|
||||
return str(val)
|
||||
|
||||
|
||||
def float_as_string(f):
|
||||
val_str = "%g" % f
|
||||
# Ensure a `.0` suffix for whole numbers, excluding scientific notation such as `1e-05` or `1e+5`.
|
||||
if '.' not in val_str and 'e' not in val_str:
|
||||
val_str += '.0'
|
||||
return val_str
|
||||
|
||||
|
||||
def get_py_class_from_rna(rna_type):
|
||||
""" Gets the Python type for a class which isn't necessarily added to ``bpy.types``.
|
||||
"""
|
||||
identifier = rna_type.identifier
|
||||
py_class = getattr(bpy.types, identifier, None)
|
||||
if py_class is not None:
|
||||
return py_class
|
||||
|
||||
def subclasses_recurse(cls):
|
||||
for c in cls.__subclasses__():
|
||||
# is_registered
|
||||
if "bl_rna" in cls.__dict__:
|
||||
yield c
|
||||
yield from subclasses_recurse(c)
|
||||
|
||||
while py_class is None:
|
||||
base = rna_type.base
|
||||
if base is None:
|
||||
raise Exception("can't find type")
|
||||
py_class_base = getattr(bpy.types, base.identifier, None)
|
||||
if py_class_base is not None:
|
||||
for cls in subclasses_recurse(py_class_base):
|
||||
if cls.bl_rna.identifier == identifier:
|
||||
return cls
|
||||
|
||||
|
||||
class InfoStructRNA:
|
||||
__slots__ = (
|
||||
"bl_rna",
|
||||
"identifier",
|
||||
"name",
|
||||
"description",
|
||||
"base",
|
||||
"nested",
|
||||
"full_path",
|
||||
"functions",
|
||||
"children",
|
||||
"references",
|
||||
"properties",
|
||||
"py_class",
|
||||
"module_name",
|
||||
)
|
||||
|
||||
global_lookup = {}
|
||||
|
||||
def __init__(self, rna_type):
|
||||
self.bl_rna = rna_type
|
||||
|
||||
self.identifier = rna_type.identifier
|
||||
self.name = rna_type.name
|
||||
self.description = rna_type.description.strip()
|
||||
|
||||
# set later
|
||||
self.base = None
|
||||
self.nested = None
|
||||
self.full_path = ""
|
||||
|
||||
self.functions = []
|
||||
self.children = []
|
||||
self.references = []
|
||||
self.properties = []
|
||||
|
||||
self.py_class = get_py_class_from_rna(self.bl_rna)
|
||||
self.module_name = (
|
||||
self.py_class.__module__
|
||||
if (self.py_class and not hasattr(bpy.types, self.identifier)) else
|
||||
"bpy.types"
|
||||
)
|
||||
if self.module_name == "bpy_types":
|
||||
self.module_name = "bpy.types"
|
||||
|
||||
def build(self):
|
||||
rna_type = self.bl_rna
|
||||
parent_id = self.identifier
|
||||
self.properties[:] = [GetInfoPropertyRNA(rna_prop, parent_id)
|
||||
for rna_prop in get_direct_properties(rna_type) if rna_prop.identifier != "rna_type"]
|
||||
self.functions[:] = [GetInfoFunctionRNA(rna_prop, parent_id)
|
||||
for rna_prop in get_direct_functions(rna_type)]
|
||||
|
||||
def get_bases(self):
|
||||
bases = []
|
||||
item = self
|
||||
|
||||
while item:
|
||||
item = item.base
|
||||
if item:
|
||||
bases.append(item)
|
||||
|
||||
return bases
|
||||
|
||||
def get_nested_properties(self, ls=None):
|
||||
if not ls:
|
||||
ls = self.properties[:]
|
||||
|
||||
if self.nested:
|
||||
self.nested.get_nested_properties(ls)
|
||||
|
||||
return ls
|
||||
|
||||
def _get_py_visible_attrs(self):
|
||||
attrs = []
|
||||
py_class = self.py_class
|
||||
|
||||
for attr_str in dir(py_class):
|
||||
if attr_str.startswith("_"):
|
||||
continue
|
||||
attrs.append((attr_str, getattr(py_class, attr_str)))
|
||||
return attrs
|
||||
|
||||
def get_py_properties(self):
|
||||
properties = []
|
||||
for identifier, attr in self._get_py_visible_attrs():
|
||||
if type(attr) is property:
|
||||
properties.append((identifier, attr))
|
||||
return properties
|
||||
|
||||
def get_py_functions(self):
|
||||
import types
|
||||
functions = []
|
||||
for identifier, attr in self._get_py_visible_attrs():
|
||||
# methods may be python wrappers to C functions
|
||||
attr_func = getattr(attr, "__func__", attr)
|
||||
if type(attr_func) in {types.FunctionType, types.MethodType}:
|
||||
functions.append((identifier, attr))
|
||||
return functions
|
||||
|
||||
def get_py_c_functions(self):
|
||||
import types
|
||||
functions = []
|
||||
for identifier, attr in self._get_py_visible_attrs():
|
||||
# methods may be python wrappers to C functions
|
||||
attr_func = getattr(attr, "__func__", attr)
|
||||
if (
|
||||
(type(attr_func) in {types.BuiltinMethodType, types.BuiltinFunctionType}) or
|
||||
# Without the `objclass` check, many inherited methods are included.
|
||||
(type(attr_func) == types.MethodDescriptorType and attr_func.__objclass__ == self.py_class)
|
||||
):
|
||||
functions.append((identifier, attr))
|
||||
return functions
|
||||
|
||||
def get_py_c_properties_getset(self):
|
||||
import types
|
||||
properties_getset = []
|
||||
for identifier, descr in self.py_class.__dict__.items():
|
||||
if type(descr) == types.GetSetDescriptorType:
|
||||
properties_getset.append((identifier, descr))
|
||||
return properties_getset
|
||||
|
||||
def __str__(self):
|
||||
|
||||
txt = ""
|
||||
txt += self.identifier
|
||||
if self.base:
|
||||
txt += "(%s)" % self.base.identifier
|
||||
txt += ": " + self.description + "\n"
|
||||
|
||||
for prop in self.properties:
|
||||
txt += prop.__repr__() + "\n"
|
||||
|
||||
for func in self.functions:
|
||||
txt += func.__repr__() + "\n"
|
||||
|
||||
return txt
|
||||
|
||||
|
||||
class InfoPropertyRNA:
|
||||
__slots__ = (
|
||||
"bl_prop",
|
||||
"srna",
|
||||
"identifier",
|
||||
"name",
|
||||
"description",
|
||||
"default_str",
|
||||
"default",
|
||||
"enum_items",
|
||||
"enum_pointer",
|
||||
"min",
|
||||
"max",
|
||||
"array_length",
|
||||
"array_dimensions",
|
||||
"collection_type",
|
||||
"type",
|
||||
"fixed_type",
|
||||
"subtype",
|
||||
"is_argument_optional",
|
||||
"is_enum_flag",
|
||||
"is_required",
|
||||
"is_readonly",
|
||||
"is_never_none",
|
||||
)
|
||||
global_lookup = {}
|
||||
|
||||
def __init__(self, rna_prop):
|
||||
self.bl_prop = rna_prop
|
||||
self.identifier = rna_prop.identifier
|
||||
self.name = rna_prop.name
|
||||
self.description = rna_prop.description.strip()
|
||||
self.default_str = "<UNKNOWN>"
|
||||
|
||||
def build(self):
|
||||
rna_prop = self.bl_prop
|
||||
|
||||
self.enum_items = []
|
||||
self.min = getattr(rna_prop, "hard_min", -1)
|
||||
self.max = getattr(rna_prop, "hard_max", -1)
|
||||
self.array_length = getattr(rna_prop, "array_length", 0)
|
||||
self.array_dimensions = getattr(rna_prop, "array_dimensions", ())[:]
|
||||
self.collection_type = GetInfoStructRNA(rna_prop.srna)
|
||||
self.subtype = getattr(rna_prop, "subtype", "")
|
||||
self.is_required = rna_prop.is_required
|
||||
self.is_readonly = rna_prop.is_readonly
|
||||
self.is_never_none = rna_prop.is_never_none
|
||||
self.is_argument_optional = rna_prop.is_argument_optional
|
||||
|
||||
self.type = rna_prop.type.lower()
|
||||
fixed_type = getattr(rna_prop, "fixed_type", "")
|
||||
if fixed_type:
|
||||
self.fixed_type = GetInfoStructRNA(fixed_type) # valid for pointer/collections
|
||||
else:
|
||||
self.fixed_type = None
|
||||
|
||||
self.enum_pointer = 0
|
||||
if self.type == "enum":
|
||||
# WARNING: don't convert to a tuple as this causes dynamically allocated enums to access freed memory
|
||||
# since freeing the iterator may free the memory used to store the internal `EnumPropertyItem` array.
|
||||
# To support this properly RNA would have to support owning the dynamically allocated memory.
|
||||
items = rna_prop.enum_items
|
||||
items_static = tuple(rna_prop.enum_items_static)
|
||||
self.enum_items[:] = [(item.identifier, item.name, item.description) for item in items]
|
||||
self.is_enum_flag = rna_prop.is_enum_flag
|
||||
# Prioritize static items as these are never dynamically allocated data and are
|
||||
# therefore a stable match to compare against.
|
||||
item = (items_static or items)
|
||||
if item:
|
||||
self.enum_pointer = item[0].as_pointer()
|
||||
del items, items_static, item
|
||||
else:
|
||||
self.is_enum_flag = False
|
||||
|
||||
self.default_str = "" # fallback
|
||||
|
||||
if self.array_length:
|
||||
self.default = tuple(getattr(rna_prop, "default_array", ()))
|
||||
if self.array_dimensions[1] != 0: # Multi-dimensional array, convert the flat default accordingly.
|
||||
self.default_str = tuple(float_as_string(v) if self.type == "float" else str(v) for v in self.default)
|
||||
for dim in self.array_dimensions[::-1]:
|
||||
if dim != 0:
|
||||
self.default = tuple(zip(*((iter(self.default),) * dim)))
|
||||
self.default_str = tuple(
|
||||
"(%s)" % ", ".join(s for s in b) for b in zip(*((iter(self.default_str),) * dim))
|
||||
)
|
||||
self.default_str = self.default_str[0]
|
||||
elif self.type == "enum" and self.is_enum_flag:
|
||||
self.default = getattr(rna_prop, "default_flag", set())
|
||||
else:
|
||||
self.default = getattr(rna_prop, "default", None)
|
||||
|
||||
if self.type == "pointer":
|
||||
# pointer has no default, just set as None
|
||||
self.default = None
|
||||
self.default_str = "None"
|
||||
elif self.type == "string":
|
||||
self.default_str = "\"%s\"" % self.default
|
||||
elif self.type == "enum":
|
||||
if self.is_enum_flag:
|
||||
# self.default_str = "%r" % self.default # repr or set()
|
||||
self.default_str = "{%s}" % repr(list(sorted(self.default)))[1:-1]
|
||||
else:
|
||||
self.default_str = "'%s'" % self.default
|
||||
elif self.array_length:
|
||||
if self.array_dimensions[1] == 0: # Single-dimension array, multi-dimensional ones were handled above.
|
||||
# special case for floats
|
||||
if self.type == "float" and len(self.default) > 0:
|
||||
self.default_str = "(%s)" % ", ".join(float_as_string(f) for f in self.default)
|
||||
else:
|
||||
self.default_str = str(self.default)
|
||||
else:
|
||||
if self.type == "float":
|
||||
self.default_str = float_as_string(self.default)
|
||||
else:
|
||||
self.default_str = str(self.default)
|
||||
|
||||
self.srna = GetInfoStructRNA(rna_prop.srna) # valid for pointer/collections
|
||||
|
||||
def get_arg_default(self, force=True):
|
||||
default = self.default_str
|
||||
if default and (force or self.is_required is False):
|
||||
return "%s=%s" % (self.identifier, default)
|
||||
return self.identifier
|
||||
|
||||
def get_type_description(
|
||||
self, *,
|
||||
as_ret=False,
|
||||
as_arg=False,
|
||||
class_fmt="%s",
|
||||
mathutils_fmt="%s",
|
||||
collection_id="Collection",
|
||||
enum_descr_override=None,
|
||||
):
|
||||
"""
|
||||
:arg enum_descr_override: Optionally override items for enum.
|
||||
Otherwise expand the literal items.
|
||||
:type enum_descr_override: string or None when unset.
|
||||
"""
|
||||
type_str = ""
|
||||
if self.fixed_type is None:
|
||||
type_str += self.type
|
||||
if self.array_length:
|
||||
if self.array_dimensions[1] != 0:
|
||||
dimension_str = " of %s items" % (
|
||||
" * ".join(str(d) for d in self.array_dimensions if d != 0)
|
||||
)
|
||||
type_str += " multi-dimensional array" + dimension_str
|
||||
else:
|
||||
dimension_str = " of %d items" % (self.array_length)
|
||||
type_str += " array" + dimension_str
|
||||
|
||||
# Describe mathutils types; logic mirrors pyrna_math_object_from_array
|
||||
if self.type == "float":
|
||||
if self.subtype == "MATRIX":
|
||||
if self.array_length in {9, 16}:
|
||||
type_str = (mathutils_fmt % "Matrix") + dimension_str
|
||||
elif self.subtype in {"COLOR", "COLOR_GAMMA"}:
|
||||
if self.array_length == 3:
|
||||
type_str = (mathutils_fmt % "Color") + dimension_str
|
||||
elif self.subtype in {"EULER", "QUATERNION"}:
|
||||
if self.array_length == 3:
|
||||
type_str = (mathutils_fmt % "Euler") + " rotation" + dimension_str
|
||||
elif self.array_length == 4:
|
||||
type_str = (mathutils_fmt % "Quaternion") + " rotation" + dimension_str
|
||||
elif self.subtype in {"COORDINATES", "TRANSLATION", "DIRECTION", "VELOCITY",
|
||||
"ACCELERATION", "XYZ", "XYZ_LENGTH"}:
|
||||
if 2 <= self.array_length <= 4:
|
||||
type_str = (mathutils_fmt % "Vector") + dimension_str
|
||||
|
||||
if self.type in {"float", "int"}:
|
||||
type_str += " in [%s, %s]" % (range_str(self.min), range_str(self.max))
|
||||
elif self.type == "enum":
|
||||
enum_descr = enum_descr_override
|
||||
if not enum_descr:
|
||||
if self.is_enum_flag:
|
||||
enum_descr = "{%s}" % ", ".join(("'%s'" % s[0]) for s in self.enum_items)
|
||||
else:
|
||||
enum_descr = "[%s]" % ", ".join(("'%s'" % s[0]) for s in self.enum_items)
|
||||
if self.is_enum_flag:
|
||||
type_str += " set in %s" % enum_descr
|
||||
else:
|
||||
type_str += " in %s" % enum_descr
|
||||
del enum_descr
|
||||
|
||||
if not (as_arg or as_ret):
|
||||
# write default property, ignore function args for this
|
||||
if self.type != "pointer":
|
||||
if self.default_str:
|
||||
type_str += ", default %s" % self.default_str
|
||||
|
||||
else:
|
||||
if self.type == "collection":
|
||||
if self.collection_type:
|
||||
collection_str = (class_fmt % self.collection_type.identifier) + (" %s of " % collection_id)
|
||||
else:
|
||||
collection_str = "%s of " % collection_id
|
||||
else:
|
||||
collection_str = ""
|
||||
|
||||
type_str += collection_str + (class_fmt % self.fixed_type.identifier)
|
||||
|
||||
# setup qualifiers for this value.
|
||||
type_info = []
|
||||
if as_ret:
|
||||
pass
|
||||
elif as_arg:
|
||||
if not self.is_required:
|
||||
type_info.append("optional")
|
||||
if self.is_argument_optional:
|
||||
type_info.append("optional argument")
|
||||
else: # readonly is only useful for self's, not args
|
||||
if self.is_readonly:
|
||||
type_info.append("readonly")
|
||||
|
||||
if self.is_never_none:
|
||||
type_info.append("never None")
|
||||
|
||||
if type_info:
|
||||
type_str += (", (%s)" % ", ".join(type_info))
|
||||
|
||||
return type_str
|
||||
|
||||
def __str__(self):
|
||||
txt = ""
|
||||
txt += " * " + self.identifier + ": " + self.description
|
||||
|
||||
return txt
|
||||
|
||||
|
||||
class InfoFunctionRNA:
|
||||
__slots__ = (
|
||||
"bl_func",
|
||||
"identifier",
|
||||
"description",
|
||||
"args",
|
||||
"return_values",
|
||||
"is_classmethod",
|
||||
)
|
||||
global_lookup = {}
|
||||
|
||||
def __init__(self, rna_func):
|
||||
self.bl_func = rna_func
|
||||
self.identifier = rna_func.identifier
|
||||
# self.name = rna_func.name # functions have no name!
|
||||
self.description = rna_func.description.strip()
|
||||
self.is_classmethod = not rna_func.use_self
|
||||
|
||||
self.args = []
|
||||
self.return_values = ()
|
||||
|
||||
def build(self):
|
||||
rna_func = self.bl_func
|
||||
parent_id = rna_func
|
||||
self.return_values = []
|
||||
|
||||
for rna_prop in rna_func.parameters.values():
|
||||
prop = GetInfoPropertyRNA(rna_prop, parent_id)
|
||||
if rna_prop.is_output:
|
||||
self.return_values.append(prop)
|
||||
else:
|
||||
self.args.append(prop)
|
||||
|
||||
self.return_values = tuple(self.return_values)
|
||||
|
||||
def __str__(self):
|
||||
txt = ''
|
||||
txt += ' * ' + self.identifier + '('
|
||||
|
||||
for arg in self.args:
|
||||
txt += arg.identifier + ', '
|
||||
txt += '): ' + self.description
|
||||
return txt
|
||||
|
||||
|
||||
class InfoOperatorRNA:
|
||||
__slots__ = (
|
||||
"bl_op",
|
||||
"identifier",
|
||||
"name",
|
||||
"module_name",
|
||||
"func_name",
|
||||
"description",
|
||||
"args",
|
||||
)
|
||||
global_lookup = {}
|
||||
|
||||
def __init__(self, rna_op):
|
||||
self.bl_op = rna_op
|
||||
self.identifier = rna_op.identifier
|
||||
|
||||
mod, name = self.identifier.split("_OT_", 1)
|
||||
self.module_name = mod.lower()
|
||||
self.func_name = name
|
||||
|
||||
# self.name = rna_func.name # functions have no name!
|
||||
self.description = rna_op.description.strip()
|
||||
|
||||
self.args = []
|
||||
|
||||
def build(self):
|
||||
rna_op = self.bl_op
|
||||
parent_id = self.identifier
|
||||
for rna_id, rna_prop in rna_op.properties.items():
|
||||
if rna_id == "rna_type":
|
||||
continue
|
||||
|
||||
prop = GetInfoPropertyRNA(rna_prop, parent_id)
|
||||
self.args.append(prop)
|
||||
|
||||
def get_location(self):
|
||||
try:
|
||||
op_class = getattr(bpy.types, self.identifier)
|
||||
except AttributeError:
|
||||
# defined in C.
|
||||
return None, None
|
||||
op_func = getattr(op_class, "execute", None)
|
||||
if op_func is None:
|
||||
op_func = getattr(op_class, "invoke", None)
|
||||
if op_func is None:
|
||||
op_func = getattr(op_class, "poll", None)
|
||||
|
||||
if op_func:
|
||||
op_code = op_func.__code__
|
||||
source_path = op_code.co_filename
|
||||
|
||||
# clear the prefix
|
||||
for p in script_paths:
|
||||
source_path = source_path.split(p)[-1]
|
||||
|
||||
if source_path[0] in "/\\":
|
||||
source_path = source_path[1:]
|
||||
|
||||
return source_path, op_code.co_firstlineno
|
||||
else:
|
||||
return None, None
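# Illustrative note (not part of the original commit): for a Python-defined
# operator this returns a script-relative path plus the first line number of
# its execute/invoke/poll function, e.g. ("startup/bl_operators/object.py", 42);
# the path and line number here are hypothetical.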
|
||||
|
||||
|
||||
def _GetInfoRNA(bl_rna, cls, parent_id=""):
|
||||
|
||||
if bl_rna is None:
|
||||
return None
|
||||
|
||||
key = parent_id, bl_rna.identifier
|
||||
try:
|
||||
return cls.global_lookup[key]
|
||||
except KeyError:
|
||||
instance = cls.global_lookup[key] = cls(bl_rna)
|
||||
return instance
|
||||
|
||||
|
||||
def GetInfoStructRNA(bl_rna):
|
||||
return _GetInfoRNA(bl_rna, InfoStructRNA)
|
||||
|
||||
|
||||
def GetInfoPropertyRNA(bl_rna, parent_id):
|
||||
return _GetInfoRNA(bl_rna, InfoPropertyRNA, parent_id)
|
||||
|
||||
|
||||
def GetInfoFunctionRNA(bl_rna, parent_id):
|
||||
return _GetInfoRNA(bl_rna, InfoFunctionRNA, parent_id)
|
||||
|
||||
|
||||
def GetInfoOperatorRNA(bl_rna):
|
||||
return _GetInfoRNA(bl_rna, InfoOperatorRNA)
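# Illustrative sketch (not part of the original commit): the Get* wrappers
# memoize one wrapper instance per (parent_id, identifier) key, so repeated
# lookups return the same object. Assumes running inside Blender.
def _example_struct_lookup_is_cached():
    rna = bpy.types.Object.bl_rna
    info_a = GetInfoStructRNA(rna)
    info_b = GetInfoStructRNA(rna)
    # Both calls are served from InfoStructRNA.global_lookup.
    assert info_a is info_b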
|
||||
|
||||
|
||||
def BuildRNAInfo():
|
||||
|
||||
# needed on successive calls to prevent stale data access
|
||||
for cls in (InfoStructRNA, InfoFunctionRNA, InfoOperatorRNA, InfoPropertyRNA):
|
||||
cls.global_lookup.clear()
|
||||
del cls
|
||||
|
||||
# Use for faster lookups
|
||||
# use rna_struct.identifier as the key for each dict
|
||||
rna_struct_dict = {} # store identifier:rna lookups
|
||||
rna_full_path_dict = {} # store the result of full_rna_struct_path(rna_struct)
|
||||
rna_children_dict = {} # store all rna_structs nested from here
|
||||
rna_references_dict = {} # store a list of rna path strings that reference this type
|
||||
# rna_functions_dict = {} # store all functions directly in this type (not inherited)
|
||||
|
||||
def full_rna_struct_path(rna_struct):
|
||||
"""
|
||||
Needed when referencing one struct from another
|
||||
"""
|
||||
nested = rna_struct.nested
|
||||
if nested:
|
||||
return "%s.%s" % (full_rna_struct_path(nested), rna_struct.identifier)
|
||||
else:
|
||||
return rna_struct.identifier
|
||||
|
||||
# def write_func(rna_func, ident):
|
||||
def base_id(rna_struct):
|
||||
try:
|
||||
return rna_struct.base.identifier
|
||||
except:
|
||||
return "" # invalid id
|
||||
|
||||
#structs = [(base_id(rna_struct), rna_struct.identifier, rna_struct) for rna_struct in bpy.doc.structs.values()]
|
||||
'''
|
||||
structs = []
|
||||
for rna_struct in bpy.doc.structs.values():
|
||||
structs.append( (base_id(rna_struct), rna_struct.identifier, rna_struct) )
|
||||
'''
|
||||
structs = []
|
||||
|
||||
def _bpy_types_iterator():
|
||||
# Don't report when these types are ignored.
|
||||
suppress_warning = {
|
||||
"bpy_func",
|
||||
"bpy_prop",
|
||||
"bpy_prop_array",
|
||||
"bpy_prop_collection",
|
||||
"bpy_struct",
|
||||
"bpy_struct_meta_idprop",
|
||||
}
|
||||
|
||||
names_unique = set()
|
||||
rna_type_list = []
|
||||
for rna_type_name in dir(bpy.types):
|
||||
names_unique.add(rna_type_name)
|
||||
rna_type = getattr(bpy.types, rna_type_name)
|
||||
rna_struct = getattr(rna_type, "bl_rna", None)
|
||||
if rna_struct is not None:
|
||||
rna_type_list.append(rna_type)
|
||||
yield (rna_type_name, rna_struct)
|
||||
elif rna_type_name.startswith("_"):
|
||||
# Ignore "__dir__", "__getattr__" .. etc.
|
||||
pass
|
||||
elif rna_type_name in suppress_warning:
|
||||
pass
|
||||
else:
|
||||
print("rna_info.BuildRNAInfo(..): ignoring type", repr(rna_type_name))
|
||||
|
||||
# Now, there are some sub-classes in add-ons we also want to include.
|
||||
# Cycles, for example: these are referenced from the Scene, but are not part of
|
||||
# the bpy.types module.
|
||||
# Include all sub-classes we didn't already get from 'bpy.types'.
|
||||
i = 0
|
||||
while i < len(rna_type_list):
|
||||
rna_type = rna_type_list[i]
|
||||
for rna_sub_type in rna_type.__subclasses__():
|
||||
rna_sub_struct = getattr(rna_sub_type, "bl_rna", None)
|
||||
if rna_sub_struct is not None:
|
||||
rna_sub_type_name = rna_sub_struct.identifier
|
||||
if rna_sub_type_name not in names_unique:
|
||||
names_unique.add(rna_sub_type_name)
|
||||
rna_type_list.append(rna_sub_type)
|
||||
# The bl_idname may not match the class name in the file.
|
||||
# Always use the 'bl_idname' because using the Python
|
||||
# class name would mean having two names for the same thing, which is
|
||||
# confusing and trickier to support
|
||||
# without any significant benefit.
|
||||
yield (rna_sub_type_name, rna_sub_struct)
|
||||
i += 1
|
||||
|
||||
for (_rna_type_name, rna_struct) in _bpy_types_iterator():
|
||||
# if not _rna_type_name.startswith('__'):
|
||||
|
||||
identifier = rna_struct.identifier
|
||||
|
||||
if not rna_id_ignore(identifier):
|
||||
structs.append((base_id(rna_struct), identifier, rna_struct))
|
||||
|
||||
# Simple lookup
|
||||
rna_struct_dict[identifier] = rna_struct
|
||||
|
||||
# Store full rna path 'GameObjectSettings' -> 'Object.GameObjectSettings'
|
||||
rna_full_path_dict[identifier] = full_rna_struct_path(rna_struct)
|
||||
|
||||
# Store a list of functions, remove inherited later
|
||||
# NOT USED YET
|
||||
## rna_functions_dict[identifier] = get_direct_functions(rna_struct)
|
||||
|
||||
# fill in these later
|
||||
rna_children_dict[identifier] = []
|
||||
rna_references_dict[identifier] = []
|
||||
|
||||
del _bpy_types_iterator
|
||||
|
||||
structs.sort() # not needed, but speeds up the sort below by placing items without inheritance first
|
||||
|
||||
# Arrange so classes are always defined in the correct order
|
||||
deps_ok = False
|
||||
while deps_ok is False:
|
||||
deps_ok = True
|
||||
rna_done = set()
|
||||
|
||||
for i, (rna_base, identifier, rna_struct) in enumerate(structs):
|
||||
|
||||
rna_done.add(identifier)
|
||||
|
||||
if rna_base and rna_base not in rna_done:
|
||||
deps_ok = False
|
||||
data = structs.pop(i)
|
||||
ok = False
|
||||
while i < len(structs):
|
||||
if structs[i][1] == rna_base:
|
||||
structs.insert(i + 1, data) # insert after the item we depend on.
|
||||
ok = True
|
||||
break
|
||||
i += 1
|
||||
|
||||
if not ok:
|
||||
print('Dependency "%s" could not be found for "%s"' % (rna_base, identifier))
|
||||
|
||||
break
|
||||
|
||||
# Done ordering structs
|
||||
|
||||
# precalculate vars to avoid a lot of looping
|
||||
for (rna_base, identifier, rna_struct) in structs:
|
||||
|
||||
# rna_struct_path = full_rna_struct_path(rna_struct)
|
||||
rna_struct_path = rna_full_path_dict[identifier]
|
||||
|
||||
for rna_prop in get_direct_properties(rna_struct):
|
||||
rna_prop_identifier = rna_prop.identifier
|
||||
|
||||
if rna_prop_identifier == 'RNA' or rna_id_ignore(rna_prop_identifier):
|
||||
continue
|
||||
|
||||
for rna_prop_ptr in (getattr(rna_prop, "fixed_type", None), getattr(rna_prop, "srna", None)):
|
||||
# Does this property point to me?
|
||||
if rna_prop_ptr and rna_prop_ptr.identifier in rna_references_dict:
|
||||
rna_references_dict[rna_prop_ptr.identifier].append(
|
||||
"%s.%s" % (rna_struct_path, rna_prop_identifier))
|
||||
|
||||
for rna_func in get_direct_functions(rna_struct):
|
||||
for rna_prop_identifier, rna_prop in rna_func.parameters.items():
|
||||
|
||||
if rna_prop_identifier == 'RNA' or rna_id_ignore(rna_prop_identifier):
|
||||
continue
|
||||
|
||||
rna_prop_ptr = getattr(rna_prop, "fixed_type", None)
|
||||
|
||||
# Does this property point to me?
|
||||
if rna_prop_ptr and rna_prop_ptr.identifier in rna_references_dict:
|
||||
rna_references_dict[rna_prop_ptr.identifier].append(
|
||||
"%s.%s" % (rna_struct_path, rna_func.identifier))
|
||||
|
||||
# Store nested children
|
||||
nested = rna_struct.nested
|
||||
if nested:
|
||||
rna_children_dict[nested.identifier].append(rna_struct)
|
||||
|
||||
# Sort the refs, just reads nicer
|
||||
for rna_refs in rna_references_dict.values():
|
||||
rna_refs.sort()
|
||||
|
||||
info_structs = []
|
||||
for (rna_base, identifier, rna_struct) in structs:
|
||||
# if rna_struct.nested:
|
||||
# continue
|
||||
|
||||
#write_struct(rna_struct, '')
|
||||
info_struct = GetInfoStructRNA(rna_struct)
|
||||
if rna_base:
|
||||
info_struct.base = GetInfoStructRNA(rna_struct_dict[rna_base])
|
||||
info_struct.nested = GetInfoStructRNA(rna_struct.nested)
|
||||
info_struct.children[:] = rna_children_dict[identifier]
|
||||
info_struct.references[:] = rna_references_dict[identifier]
|
||||
info_struct.full_path = rna_full_path_dict[identifier]
|
||||
|
||||
info_structs.append(info_struct)
|
||||
|
||||
for rna_info_prop in InfoPropertyRNA.global_lookup.values():
|
||||
rna_info_prop.build()
|
||||
|
||||
for rna_info_prop in InfoFunctionRNA.global_lookup.values():
|
||||
rna_info_prop.build()
|
||||
|
||||
done_keys = set()
|
||||
new_keys = set(InfoStructRNA.global_lookup.keys())
|
||||
while new_keys:
|
||||
for rna_key in new_keys:
|
||||
rna_info = InfoStructRNA.global_lookup[rna_key]
|
||||
rna_info.build()
|
||||
for prop in rna_info.properties:
|
||||
prop.build()
|
||||
for func in rna_info.functions:
|
||||
func.build()
|
||||
for prop in func.args:
|
||||
prop.build()
|
||||
for prop in func.return_values:
|
||||
prop.build()
|
||||
done_keys |= new_keys
|
||||
new_keys = set(InfoStructRNA.global_lookup.keys()) - done_keys
|
||||
|
||||
# there are too many invalid defaults; unless we intend to fix them, leave this off
|
||||
if 0:
|
||||
for rna_info in InfoStructRNA.global_lookup.values():
|
||||
for prop in rna_info.properties:
|
||||
# ERROR CHECK
|
||||
default = prop.default
|
||||
if type(default) in {float, int}:
|
||||
if default < prop.min or default > prop.max:
|
||||
print("\t %s.%s, %s not in [%s - %s]" %
|
||||
(rna_info.identifier, prop.identifier, default, prop.min, prop.max))
|
||||
|
||||
# now for operators
|
||||
op_mods = dir(bpy.ops)
|
||||
|
||||
for op_mod_name in sorted(op_mods):
|
||||
if op_mod_name.startswith('__'):
|
||||
continue
|
||||
|
||||
op_mod = getattr(bpy.ops, op_mod_name)
|
||||
operators = dir(op_mod)
|
||||
for op in sorted(operators):
|
||||
try:
|
||||
rna_prop = getattr(op_mod, op).get_rna_type()
|
||||
except AttributeError:
|
||||
rna_prop = None
|
||||
except TypeError:
|
||||
rna_prop = None
|
||||
|
||||
if rna_prop:
|
||||
GetInfoOperatorRNA(rna_prop)
|
||||
|
||||
for rna_info in InfoOperatorRNA.global_lookup.values():
|
||||
rna_info.build()
|
||||
for rna_prop in rna_info.args:
|
||||
rna_prop.build()
|
||||
|
||||
# for rna_info in InfoStructRNA.global_lookup.values():
|
||||
# print(rna_info)
|
||||
return (
|
||||
InfoStructRNA.global_lookup,
|
||||
InfoFunctionRNA.global_lookup,
|
||||
InfoOperatorRNA.global_lookup,
|
||||
InfoPropertyRNA.global_lookup,
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
struct = BuildRNAInfo()[0]
|
||||
data = []
|
||||
for _struct_id, v in sorted(struct.items()):
|
||||
struct_id_str = v.identifier # "".join(sid for sid in struct_id if struct_id)
|
||||
|
||||
for base in v.get_bases():
|
||||
struct_id_str = base.identifier + "|" + struct_id_str
|
||||
|
||||
props = [(prop.identifier, prop) for prop in v.properties]
|
||||
for _prop_id, prop in sorted(props):
|
||||
# if prop.type == "boolean":
|
||||
# continue
|
||||
prop_type = prop.type
|
||||
if prop.array_length > 0:
|
||||
prop_type += "[%d]" % prop.array_length
|
||||
|
||||
data.append(
|
||||
"%s.%s -> %s: %s%s %s" %
|
||||
(struct_id_str, prop.identifier, prop.identifier, prop_type,
|
||||
", (read-only)" if prop.is_readonly else "", prop.description))
|
||||
data.sort()
|
||||
|
||||
if bpy.app.background:
|
||||
import sys
|
||||
sys.stderr.write("\n".join(data))
|
||||
sys.stderr.write("\n\nEOF\n")
|
||||
else:
|
||||
text = bpy.data.texts.new(name="api.py")
|
||||
text.from_string("\n".join(data))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
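# Illustrative note (not part of the original commit): scripts typically unpack
# the four lookup tables in order; keys are (parent_id, identifier) tuples,
# with "" as the parent for top-level structs:
#   structs, funcs, ops, props = BuildRNAInfo()
#   structs[("", "Object")].identifier == "Object"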
|
||||
441
scripts/modules/rna_keymap_ui.py
Normal file
@@ -0,0 +1,441 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
__all__ = (
|
||||
"draw_entry",
|
||||
"draw_km",
|
||||
"draw_kmi",
|
||||
"draw_filtered",
|
||||
"draw_hierarchy",
|
||||
"draw_keymaps",
|
||||
)
|
||||
|
||||
|
||||
import bpy
|
||||
from bpy.app.translations import (
|
||||
contexts as i18n_contexts,
|
||||
pgettext_iface as iface_,
|
||||
)
|
||||
|
||||
|
||||
def _indented_layout(layout, level):
|
||||
indentpx = 16
|
||||
if level == 0:
|
||||
level = 0.0001 # Tweak so that a percentage of 0 won't split by half
|
||||
indent = level * indentpx / bpy.context.region.width
|
||||
|
||||
split = layout.split(factor=indent)
|
||||
col = split.column()
|
||||
col = split.column()
|
||||
return col
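# Illustrative sketch (not part of the original commit): callers pass any
# UILayout; the indent is 16px per level, expressed as a split factor
# relative to the current region width.
def _example_indented_label(layout):
    col = _indented_layout(layout, 2)  # indent by two levels (32px)
    col.label(text="Indented example")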
|
||||
|
||||
|
||||
def draw_entry(display_keymaps, entry, col, level=0):
|
||||
idname, spaceid, regionid, children = entry
|
||||
|
||||
for km, kc in display_keymaps:
|
||||
if km.name == idname and km.space_type == spaceid and km.region_type == regionid:
|
||||
draw_km(display_keymaps, kc, km, children, col, level)
|
||||
|
||||
'''
|
||||
km = kc.keymaps.find(idname, space_type=spaceid, region_type=regionid)
|
||||
if not km:
|
||||
kc = defkc
|
||||
km = kc.keymaps.find(idname, space_type=spaceid, region_type=regionid)
|
||||
|
||||
if km:
|
||||
draw_km(kc, km, children, col, level)
|
||||
'''
|
||||
|
||||
|
||||
def draw_km(display_keymaps, kc, km, children, layout, level):
|
||||
km = km.active()
|
||||
|
||||
layout.context_pointer_set("keymap", km)
|
||||
|
||||
col = _indented_layout(layout, level)
|
||||
|
||||
row = col.row(align=True)
|
||||
row.prop(km, "show_expanded_children", text="", emboss=False)
|
||||
row.label(text=km.name, text_ctxt=i18n_contexts.id_windowmanager)
|
||||
|
||||
if km.is_user_modified or km.is_modal:
|
||||
subrow = row.row()
|
||||
subrow.alignment = 'RIGHT'
|
||||
|
||||
if km.is_user_modified:
|
||||
subrow.operator("preferences.keymap_restore", text="Restore")
|
||||
if km.is_modal:
|
||||
subrow.label(text="", icon='LINKED')
|
||||
del subrow
|
||||
|
||||
if km.show_expanded_children:
|
||||
if children:
|
||||
# Put the Parent key map's entries in a 'global' sub-category
|
||||
# equal in hierarchy to the other child categories
|
||||
subcol = _indented_layout(col, level + 1)
|
||||
subrow = subcol.row(align=True)
|
||||
subrow.prop(km, "show_expanded_items", text="", emboss=False)
|
||||
subrow.label(text=iface_("%s (Global)") % iface_(km.name, i18n_contexts.id_windowmanager), translate=False)
|
||||
else:
|
||||
km.show_expanded_items = True
|
||||
|
||||
# Key Map items
|
||||
if km.show_expanded_items:
|
||||
kmi_level = level + 3 if children else level + 1
|
||||
for kmi in km.keymap_items:
|
||||
draw_kmi(display_keymaps, kc, km, kmi, col, kmi_level)
|
||||
|
||||
# "Add New" at end of keymap item list
|
||||
subcol = _indented_layout(col, kmi_level)
|
||||
subcol = subcol.split(factor=0.2).column()
|
||||
subcol.operator("preferences.keyitem_add", text="Add New", text_ctxt=i18n_contexts.id_windowmanager,
|
||||
icon='ADD')
|
||||
|
||||
col.separator()
|
||||
|
||||
# Child key maps
|
||||
if children:
|
||||
for entry in children:
|
||||
draw_entry(display_keymaps, entry, col, level + 1)
|
||||
|
||||
col.separator()
|
||||
|
||||
|
||||
def draw_kmi(display_keymaps, kc, km, kmi, layout, level):
|
||||
map_type = kmi.map_type
|
||||
|
||||
col = _indented_layout(layout, level)
|
||||
|
||||
if kmi.show_expanded:
|
||||
col = col.column(align=True)
|
||||
box = col.box()
|
||||
else:
|
||||
box = col.column()
|
||||
|
||||
split = box.split()
|
||||
|
||||
# header bar
|
||||
row = split.row(align=True)
|
||||
row.prop(kmi, "show_expanded", text="", emboss=False)
|
||||
row.prop(kmi, "active", text="", emboss=False)
|
||||
|
||||
if km.is_modal:
|
||||
row.separator()
|
||||
row.prop(kmi, "propvalue", text="")
|
||||
else:
|
||||
row.label(text=kmi.name)
|
||||
|
||||
row = split.row()
|
||||
row.prop(kmi, "map_type", text="")
|
||||
if map_type == 'KEYBOARD':
|
||||
row.prop(kmi, "type", text="", full_event=True)
|
||||
elif map_type == 'MOUSE':
|
||||
row.prop(kmi, "type", text="", full_event=True)
|
||||
elif map_type == 'NDOF':
|
||||
row.prop(kmi, "type", text="", full_event=True)
|
||||
elif map_type == 'TWEAK':
|
||||
subrow = row.row()
|
||||
subrow.prop(kmi, "type", text="")
|
||||
subrow.prop(kmi, "value", text="")
|
||||
elif map_type == 'TIMER':
|
||||
row.prop(kmi, "type", text="")
|
||||
else:
|
||||
row.label()
|
||||
|
||||
if (not kmi.is_user_defined) and kmi.is_user_modified:
|
||||
row.operator("preferences.keyitem_restore", text="", icon='BACK').item_id = kmi.id
|
||||
else:
|
||||
row.operator(
|
||||
"preferences.keyitem_remove",
|
||||
text="",
|
||||
# Abusing the tracking icon, but it works pretty well here.
|
||||
icon=('TRACKING_CLEAR_BACKWARDS' if kmi.is_user_defined else 'X')
|
||||
).item_id = kmi.id
|
||||
|
||||
# Expanded, additional event settings
|
||||
if kmi.show_expanded:
|
||||
box = col.box()
|
||||
|
||||
split = box.split(factor=0.4)
|
||||
sub = split.row()
|
||||
|
||||
if km.is_modal:
|
||||
sub.prop(kmi, "propvalue", text="")
|
||||
else:
|
||||
# One day...
|
||||
# sub.prop_search(kmi, "idname", bpy.context.window_manager, "operators_all", text="")
|
||||
sub.prop(kmi, "idname", text="")
|
||||
|
||||
if map_type not in {'TEXTINPUT', 'TIMER'}:
|
||||
sub = split.column()
|
||||
subrow = sub.row(align=True)
|
||||
|
||||
if map_type == 'KEYBOARD':
|
||||
subrow.prop(kmi, "type", text="", event=True)
|
||||
subrow.prop(kmi, "value", text="")
|
||||
subrow_repeat = subrow.row(align=True)
|
||||
subrow_repeat.active = kmi.value in {'ANY', 'PRESS'}
|
||||
subrow_repeat.prop(kmi, "repeat", text="Repeat")
|
||||
elif map_type in {'MOUSE', 'NDOF'}:
|
||||
subrow.prop(kmi, "type", text="")
|
||||
subrow.prop(kmi, "value", text="")
|
||||
|
||||
if map_type in {'KEYBOARD', 'MOUSE'} and kmi.value == 'CLICK_DRAG':
|
||||
subrow = sub.row()
|
||||
subrow.prop(kmi, "direction")
|
||||
|
||||
subrow = sub.row()
|
||||
subrow.scale_x = 0.75
|
||||
subrow.prop(kmi, "any", toggle=True)
|
||||
# Use `*_ui` properties as integers aren't practical.
|
||||
subrow.prop(kmi, "shift_ui", toggle=True)
|
||||
subrow.prop(kmi, "ctrl_ui", toggle=True)
|
||||
subrow.prop(kmi, "alt_ui", toggle=True)
|
||||
subrow.prop(kmi, "oskey_ui", text="Cmd", toggle=True)
|
||||
|
||||
subrow.prop(kmi, "key_modifier", text="", event=True)
|
||||
|
||||
# Operator properties
|
||||
box.template_keymap_item_properties(kmi)
|
||||
|
||||
# Modal key maps attached to this operator
|
||||
if not km.is_modal:
|
||||
kmm = kc.keymaps.find_modal(kmi.idname)
|
||||
if kmm:
|
||||
draw_km(display_keymaps, kc, kmm, None, layout, level + 1)
|
||||
layout.context_pointer_set("keymap", km)
|
||||
|
||||
|
||||
_EVENT_TYPES = set()
|
||||
_EVENT_TYPE_MAP = {}
|
||||
_EVENT_TYPE_MAP_EXTRA = {}
|
||||
|
||||
|
||||
def draw_filtered(display_keymaps, filter_type, filter_text, layout):
|
||||
|
||||
if filter_type == 'NAME':
|
||||
def filter_func(kmi):
|
||||
return (filter_text in kmi.idname.lower() or
|
||||
filter_text in kmi.name.lower())
|
||||
else:
|
||||
if not _EVENT_TYPES:
|
||||
enum = bpy.types.Event.bl_rna.properties["type"].enum_items
|
||||
_EVENT_TYPES.update(enum.keys())
|
||||
_EVENT_TYPE_MAP.update({item.name.replace(" ", "_").upper(): key
|
||||
for key, item in enum.items()})
|
||||
|
||||
del enum
|
||||
_EVENT_TYPE_MAP_EXTRA.update({
|
||||
"`": 'ACCENT_GRAVE',
|
||||
"*": 'NUMPAD_ASTERIX',
|
||||
"/": 'NUMPAD_SLASH',
|
||||
'+': 'NUMPAD_PLUS',
|
||||
"-": 'NUMPAD_MINUS',
|
||||
".": 'NUMPAD_PERIOD',
|
||||
"'": 'QUOTE',
|
||||
"RMB": 'RIGHTMOUSE',
|
||||
"LMB": 'LEFTMOUSE',
|
||||
"MMB": 'MIDDLEMOUSE',
|
||||
})
|
||||
_EVENT_TYPE_MAP_EXTRA.update({
|
||||
"%d" % i: "NUMPAD_%d" % i for i in range(10)
|
||||
})
|
||||
# done with one-off init
|
||||
|
||||
filter_text_split = filter_text.strip().split()
|
||||
|
||||
# Modifier {kmi.attribute: name} mapping
|
||||
key_mod = {
|
||||
"ctrl": "ctrl",
|
||||
"alt": "alt",
|
||||
"shift": "shift",
|
||||
"cmd": "oskey",
|
||||
"oskey": "oskey",
|
||||
"any": "any",
|
||||
}
|
||||
# KeyMapItem-like dict, used for comparing against
|
||||
# attr: {states, ...}
|
||||
kmi_test_dict = {}
|
||||
# Special handling of 'type' using a list of sets,
|
||||
# keymap items must match against all.
|
||||
kmi_test_type = []
|
||||
|
||||
# initialize? - so if a kmi has a MOD assigned it won't show up.
|
||||
# for kv in key_mod.values():
|
||||
# kmi_test_dict[kv] = {False}
|
||||
|
||||
# altname: attr
|
||||
for kk, kv in key_mod.items():
|
||||
if kk in filter_text_split:
|
||||
filter_text_split.remove(kk)
|
||||
kmi_test_dict[kv] = {True}
|
||||
|
||||
# what's left should be the event type
|
||||
def kmi_type_set_from_string(kmi_type):
|
||||
kmi_type = kmi_type.upper()
|
||||
kmi_type_set = set()
|
||||
|
||||
if kmi_type in _EVENT_TYPES:
|
||||
kmi_type_set.add(kmi_type)
|
||||
|
||||
if not kmi_type_set or len(kmi_type) > 1:
|
||||
# replacement table
|
||||
for event_type_map in (_EVENT_TYPE_MAP, _EVENT_TYPE_MAP_EXTRA):
|
||||
kmi_type_test = event_type_map.get(kmi_type)
|
||||
if kmi_type_test is not None:
|
||||
kmi_type_set.add(kmi_type_test)
|
||||
else:
|
||||
# print("Unknown Type:", kmi_type)
|
||||
|
||||
# Partial match
|
||||
for k, v in event_type_map.items():
|
||||
if (kmi_type in k) or (kmi_type in v):
|
||||
kmi_type_set.add(v)
|
||||
return kmi_type_set
|
||||
|
||||
for i, kmi_type in enumerate(filter_text_split):
|
||||
kmi_type_set = kmi_type_set_from_string(kmi_type)
|
||||
|
||||
if not kmi_type_set:
|
||||
return False
|
||||
|
||||
kmi_test_type.append(kmi_type_set)
|
||||
# tiny optimization, sort sets so the smallest is first
|
||||
# improve chances of failing early
|
||||
kmi_test_type.sort(key=lambda kmi_type_set: len(kmi_type_set))
|
||||
|
||||
# main filter func, runs many times
|
||||
def filter_func(kmi):
|
||||
for kk, ki in kmi_test_dict.items():
|
||||
val = getattr(kmi, kk)
|
||||
if val not in ki:
|
||||
return False
|
||||
|
||||
# special handling of 'type'
|
||||
for ki in kmi_test_type:
|
||||
val = kmi.type
|
||||
if val == 'NONE' or val not in ki:
|
||||
# exception for 'type'
|
||||
# also inspect 'key_modifier' as a fallback
|
||||
val = kmi.key_modifier
|
||||
if not (val == 'NONE' or val not in ki):
|
||||
continue
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
for km, kc in display_keymaps:
|
||||
km = km.active()
|
||||
layout.context_pointer_set("keymap", km)
|
||||
|
||||
filtered_items = [kmi for kmi in km.keymap_items if filter_func(kmi)]
|
||||
|
||||
if filtered_items:
|
||||
col = layout.column()
|
||||
|
||||
row = col.row()
|
||||
row.label(text=km.name, icon='DOT',
|
||||
text_ctxt=i18n_contexts.id_windowmanager)
|
||||
|
||||
row.label()
|
||||
row.label()
|
||||
|
||||
if km.is_user_modified:
|
||||
row.operator("preferences.keymap_restore", text="Restore")
|
||||
else:
|
||||
row.label()
|
||||
|
||||
for kmi in filtered_items:
|
||||
draw_kmi(display_keymaps, kc, km, kmi, col, 1)
|
||||
return True
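# Illustrative note (not part of the original commit): with filter_type other
# than 'NAME', the filter text is split on whitespace; modifier words map to
# KeyMapItem attributes and the remaining words resolve through the event-type
# tables, so e.g. "ctrl LMB" matches items with ctrl set and type 'LEFTMOUSE'.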
|
||||
|
||||
|
||||
def draw_hierarchy(display_keymaps, layout):
|
||||
from bl_keymap_utils import keymap_hierarchy
|
||||
for entry in keymap_hierarchy.generate():
|
||||
draw_entry(display_keymaps, entry, layout)
|
||||
|
||||
|
||||
def draw_keymaps(context, layout):
|
||||
from bl_keymap_utils.io import keyconfig_merge
|
||||
|
||||
wm = context.window_manager
|
||||
kc_user = wm.keyconfigs.user
|
||||
kc_active = wm.keyconfigs.active
|
||||
spref = context.space_data
|
||||
|
||||
# row.prop_search(wm.keyconfigs, "active", wm, "keyconfigs", text="Key Config")
|
||||
text = bpy.path.display_name(kc_active.name, has_ext=False)
|
||||
if not text:
|
||||
text = "Blender (default)"
|
||||
|
||||
split = layout.split(factor=0.6)
|
||||
|
||||
row = split.row()
|
||||
|
||||
rowsub = row.row(align=True)
|
||||
|
||||
rowsub.menu("USERPREF_MT_keyconfigs", text=text)
|
||||
rowsub.operator("wm.keyconfig_preset_add", text="", icon='ADD')
|
||||
rowsub.operator("wm.keyconfig_preset_add", text="", icon='REMOVE').remove_active = True
|
||||
|
||||
rowsub = split.row(align=True)
|
||||
rowsub.operator("preferences.keyconfig_import", text="Import...", icon='IMPORT')
|
||||
rowsub.operator("preferences.keyconfig_export", text="Export...", icon='EXPORT')
|
||||
|
||||
row = layout.row()
|
||||
col = layout.column()
|
||||
|
||||
# layout.context_pointer_set("keyconfig", wm.keyconfigs.active)
|
||||
# row.operator("preferences.keyconfig_remove", text="", icon='X')
|
||||
rowsub = row.split(factor=0.4, align=True)
|
||||
# postpone drawing into rowsub, so we can set alert!
|
||||
|
||||
layout.separator()
|
||||
display_keymaps = keyconfig_merge(kc_user, kc_user)
|
||||
filter_type = spref.filter_type
|
||||
filter_text = spref.filter_text.strip()
|
||||
if filter_text:
|
||||
filter_text = filter_text.lower()
|
||||
ok = draw_filtered(display_keymaps, filter_type, filter_text, layout)
|
||||
else:
|
||||
draw_hierarchy(display_keymaps, layout)
|
||||
ok = True
|
||||
|
||||
# go back and fill in rowsub
|
||||
rowsubsub = rowsub.row(align=True)
|
||||
rowsubsub.prop(spref, "filter_type", expand=True)
|
||||
rowsubsub = rowsub.row(align=True)
|
||||
if not ok:
|
||||
rowsubsub.alert = True
|
||||
rowsubsub.prop(spref, "filter_text", text="", icon='VIEWZOOM')
|
||||
|
||||
if not filter_text:
|
||||
# When the keyconfig defines its own preferences.
|
||||
kc_prefs = kc_active.preferences
|
||||
if kc_prefs is not None:
|
||||
box = col.box()
|
||||
row = box.row(align=True)
|
||||
|
||||
pref = context.preferences
|
||||
keymappref = pref.keymap
|
||||
show_ui_keyconfig = keymappref.show_ui_keyconfig
|
||||
row.prop(
|
||||
keymappref,
|
||||
"show_ui_keyconfig",
|
||||
text="",
|
||||
icon='DISCLOSURE_TRI_DOWN' if show_ui_keyconfig else 'DISCLOSURE_TRI_RIGHT',
|
||||
emboss=False,
|
||||
)
|
||||
row.label(text="Preferences")
|
||||
|
||||
if show_ui_keyconfig:
|
||||
# Defined by user preset, may contain mistakes out of our control.
|
||||
try:
|
||||
kc_prefs.draw(box)
|
||||
except Exception:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
del box
|
||||
del kc_prefs
|
||||
3397
scripts/modules/rna_manual_reference.py
Normal file
File diff suppressed because it is too large
231
scripts/modules/rna_prop_ui.py
Normal file
@@ -0,0 +1,231 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
import bpy
|
||||
|
||||
from mathutils import Vector
|
||||
from bpy.types import bpy_prop_array
|
||||
from idprop.types import IDPropertyArray, IDPropertyGroup
|
||||
|
||||
ARRAY_TYPES = (list, tuple, IDPropertyArray, Vector, bpy_prop_array)
|
||||
|
||||
# Maximum length of an array property for which a multi-line
|
||||
# edit field will be displayed in the Custom Properties panel.
|
||||
MAX_DISPLAY_ROWS = 8
|
||||
|
||||
|
||||
def rna_idprop_quote_path(prop):
|
||||
return "[\"%s\"]" % bpy.utils.escape_identifier(prop)
|
||||
|
||||
|
||||
def rna_idprop_ui_prop_update(item, prop):
|
||||
prop_path = rna_idprop_quote_path(prop)
|
||||
prop_rna = item.path_resolve(prop_path, False)
|
||||
if isinstance(prop_rna, bpy.types.bpy_prop):
|
||||
prop_rna.update()
|
||||
|
||||
|
||||
def rna_idprop_ui_prop_clear(item, prop):
|
||||
ui_data = item.id_properties_ui(prop)
|
||||
ui_data.clear()
|
||||
|
||||
|
||||
def rna_idprop_context_value(context, context_member, property_type):
|
||||
space = context.space_data
|
||||
|
||||
if space is None or isinstance(space, bpy.types.SpaceProperties):
|
||||
pin_id = space.pin_id
|
||||
else:
|
||||
pin_id = None
|
||||
|
||||
if pin_id and isinstance(pin_id, property_type):
|
||||
rna_item = pin_id
|
||||
context_member = "space_data.pin_id"
|
||||
else:
|
||||
rna_item = context.path_resolve(context_member)
|
||||
|
||||
return rna_item, context_member
|
||||
|
||||
|
||||
def rna_idprop_has_properties(rna_item):
|
||||
keys = rna_item.keys()
|
||||
return bool(keys)
|
||||
|
||||
|
||||
def rna_idprop_value_to_python(value):
|
||||
if isinstance(value, IDPropertyArray):
|
||||
return value.to_list()
|
||||
elif isinstance(value, IDPropertyGroup):
|
||||
return value.to_dict()
|
||||
else:
|
||||
return value
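# Illustrative sketch (not part of the original commit): converting stored ID
# properties back into plain Python containers; "my_vec" is a hypothetical key.
def _example_idprop_to_python(obj):
    obj["my_vec"] = [1.0, 2.0, 3.0]  # stored as an IDPropertyArray
    return rna_idprop_value_to_python(obj["my_vec"])  # -> [1.0, 2.0, 3.0]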
|
||||
|
||||
|
||||
def rna_idprop_value_item_type(value):
|
||||
is_array = isinstance(value, ARRAY_TYPES) and len(value) > 0
|
||||
item_value = value[0] if is_array else value
|
||||
return type(item_value), is_array
|
||||
|
||||
|
||||
def rna_idprop_ui_prop_default_set(item, prop, value):
|
||||
ui_data = item.id_properties_ui(prop)
|
||||
ui_data.update(default=value)
|
||||
|
||||
|
||||
def rna_idprop_ui_create(
|
||||
item, prop, *, default,
|
||||
min=0.0, max=1.0,
|
||||
soft_min=None, soft_max=None,
|
||||
description=None,
|
||||
overridable=False,
|
||||
subtype=None,
|
||||
):
|
||||
"""Create and initialize a custom property with limits, defaults and other settings."""
|
||||
|
||||
# Assign the value
|
||||
item[prop] = default
|
||||
|
||||
rna_idprop_ui_prop_update(item, prop)
|
||||
ui_data = item.id_properties_ui(prop)
|
||||
proptype, _ = rna_idprop_value_item_type(default)
|
||||
|
||||
if proptype is bool:
|
||||
ui_data = item.id_properties_ui(prop)
|
||||
ui_data.update(
|
||||
description=description,
|
||||
default=default,
|
||||
)
|
||||
return
|
||||
|
||||
if soft_min is None:
|
||||
soft_min = min
|
||||
if soft_max is None:
|
||||
soft_max = max
|
||||
|
||||
ui_data.update(
|
||||
subtype=subtype,
|
||||
min=min,
|
||||
max=max,
|
||||
soft_min=soft_min,
|
||||
soft_max=soft_max,
|
||||
description=description,
|
||||
default=default,
|
||||
)
|
||||
|
||||
prop_path = rna_idprop_quote_path(prop)
|
||||
|
||||
item.property_overridable_library_set(prop_path, overridable)
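# Illustrative sketch (not part of the original commit): creating a float
# custom property with limits and a tooltip; the property name and values
# are hypothetical.
def _example_ui_create(obj):
    rna_idprop_ui_create(
        obj, "my_factor", default=0.5,
        min=0.0, max=1.0,
        description="Illustrative factor",
        overridable=True,
    )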
|
||||
|
||||
|
||||
def draw(layout, context, context_member, property_type, *, use_edit=True):
|
||||
rna_item, context_member = rna_idprop_context_value(context, context_member, property_type)
|
||||
# poll should really get this...
|
||||
if not rna_item:
|
||||
return
|
||||
|
||||
from bpy.utils import escape_identifier
|
||||
|
||||
if rna_item.id_data.library is not None:
|
||||
use_edit = False
|
||||
is_lib_override = rna_item.id_data.override_library and rna_item.id_data.override_library.reference
|
||||
|
||||
assert isinstance(rna_item, property_type)
|
||||
|
||||
items = list(rna_item.items())
|
||||
items.sort()
|
||||
|
||||
# TODO: Allow/support adding new custom props to overrides.
|
||||
if use_edit and not is_lib_override:
|
||||
row = layout.row()
|
||||
props = row.operator("wm.properties_add", text="New", icon='ADD')
|
||||
props.data_path = context_member
|
||||
del row
|
||||
layout.separator()
|
||||
|
||||
show_developer_ui = context.preferences.view.show_developer_ui
|
||||
rna_properties = {prop.identifier for prop in rna_item.bl_rna.properties if prop.is_runtime} if items else None
|
||||
|
||||
layout.use_property_decorate = False
|
||||
|
||||
for key, value in items:
|
||||
is_rna = (key in rna_properties)
|
||||
|
||||
# Only show API defined properties to developers.
|
||||
if is_rna and not show_developer_ui:
|
||||
continue
|
||||
|
||||
to_dict = getattr(value, "to_dict", None)
|
||||
to_list = getattr(value, "to_list", None)
|
||||
|
||||
if to_dict:
|
||||
value = to_dict()
|
||||
elif to_list:
|
||||
value = to_list()
|
||||
|
||||
split = layout.split(factor=0.4, align=True)
|
||||
label_row = split.row()
|
||||
label_row.alignment = 'RIGHT'
|
||||
label_row.label(text=key, translate=False)
|
||||
|
||||
value_row = split.row(align=True)
|
||||
value_column = value_row.column(align=True)
|
||||
|
||||
is_long_array = to_list and len(value) >= MAX_DISPLAY_ROWS
|
||||
|
||||
if is_rna:
|
||||
value_column.prop(rna_item, key, text="")
|
||||
elif to_dict or is_long_array:
|
||||
props = value_column.operator("wm.properties_edit_value", text="Edit Value")
|
||||
props.data_path = context_member
|
||||
props.property_name = key
|
||||
else:
|
||||
value_column.prop(rna_item, '["%s"]' % escape_identifier(key), text="")
|
||||
|
||||
operator_row = value_row.row()
|
||||
operator_row.alignment = 'RIGHT'
|
||||
|
||||
# Do not allow editing of overridden properties (we cannot use a poll function
|
||||
# of the operators here since they'd have no access to the specific property).
|
||||
operator_row.enabled = not (is_lib_override and key in rna_item.id_data.override_library.reference)
|
||||
|
||||
if use_edit:
|
||||
if is_rna:
|
||||
operator_row.label(text="API Defined")
|
||||
elif is_lib_override:
|
||||
operator_row.active = False
|
||||
operator_row.label(text="", icon='DECORATE_LIBRARY_OVERRIDE')
|
||||
else:
|
||||
props = operator_row.operator("wm.properties_edit", text="", icon='PREFERENCES', emboss=False)
|
||||
props.data_path = context_member
|
||||
props.property_name = key
|
||||
props = operator_row.operator("wm.properties_remove", text="", icon='X', emboss=False)
|
||||
props.data_path = context_member
|
||||
props.property_name = key
|
||||
else:
|
||||
# Add some spacing, so the right side of the buttons line up with layouts with decorators.
|
||||
operator_row.label(text="", icon='BLANK1')
|
||||
|
||||
|
||||
class PropertyPanel:
|
||||
"""
|
||||
The subclass should have its own poll function
|
||||
and the variable '_context_path' MUST be set.
|
||||
"""
|
||||
bl_label = "Custom Properties"
|
||||
bl_options = {'DEFAULT_CLOSED'}
|
||||
bl_order = 1000 # Order panel after all others
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
rna_item, _context_member = rna_idprop_context_value(context, cls._context_path, cls._property_type)
|
||||
return bool(rna_item)
|
||||
|
||||
"""
|
||||
def draw_header(self, context):
|
||||
rna_item, context_member = rna_idprop_context_value(context, self._context_path, self._property_type)
|
||||
tot = len(rna_item.keys())
|
||||
if tot:
|
||||
self.layout().label(text="%d:" % tot)
|
||||
"""
|
||||
|
||||
def draw(self, context):
|
||||
draw(self.layout, context, self._context_path, self._property_type)
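# Illustrative sketch (not part of the original commit): a concrete panel only
# needs a context path and property type; names here are hypothetical, and the
# class would still need registering with bpy.utils.register_class to be used.
class _EXAMPLE_PT_object_custom_props(PropertyPanel, bpy.types.Panel):
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = "object"
    _context_path = "object"
    _property_type = bpy.types.Object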
|
||||
391
scripts/modules/rna_xml.py
Normal file
@@ -0,0 +1,391 @@
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
|
||||
import bpy
|
||||
|
||||
|
||||
def build_property_typemap(skip_classes, skip_typemap):
|
||||
|
||||
property_typemap = {}
|
||||
|
||||
for attr in dir(bpy.types):
|
||||
# Skip internal methods.
|
||||
if attr.startswith("_"):
|
||||
continue
|
||||
cls = getattr(bpy.types, attr)
|
||||
if issubclass(cls, skip_classes):
|
||||
continue
|
||||
bl_rna = getattr(cls, "bl_rna", None)
|
||||
# Needed to skip classes added to the modules `__dict__`.
|
||||
if bl_rna is None:
|
||||
continue
|
||||
|
||||
# # to support skip-save we can't get all props
|
||||
# properties = bl_rna.properties.keys()
|
||||
properties = []
|
||||
for prop_id, prop in bl_rna.properties.items():
|
||||
if not prop.is_skip_save:
|
||||
properties.append(prop_id)
|
||||
|
||||
properties.remove("rna_type")
|
||||
property_typemap[attr] = properties
|
||||
|
||||
if skip_typemap:
|
||||
for cls_name, properties_blacklist in skip_typemap.items():
|
||||
properties = property_typemap.get(cls_name)
|
||||
if properties is not None:
|
||||
for prop_id in properties_blacklist:
|
||||
try:
|
||||
properties.remove(prop_id)
|
||||
except:
|
||||
print("skip_typemap unknown prop_id '%s.%s'" % (cls_name, prop_id))
|
||||
else:
|
||||
print("skip_typemap unknown class '%s'" % cls_name)
|
||||
|
||||
return property_typemap
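# Illustrative note (not part of the original commit): the result maps an RNA
# type name to the property identifiers worth serializing, e.g.
#   typemap = build_property_typemap((bpy.types.Operator,), None)
#   "rna_type" not in typemap["Scene"]  # always stripped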
|
||||
|
||||
|
||||
def print_ln(data):
|
||||
print(data, end="")
|
||||
|
||||
|
||||
def rna2xml(
|
||||
fw=print_ln,
|
||||
root_node="",
|
||||
root_rna=None, # must be set
|
||||
root_rna_skip=set(),
|
||||
root_ident="",
|
||||
ident_val=" ",
|
||||
skip_classes=(
|
||||
bpy.types.Operator,
|
||||
bpy.types.Panel,
|
||||
bpy.types.KeyingSet,
|
||||
bpy.types.Header,
|
||||
bpy.types.PropertyGroup,
|
||||
),
|
||||
skip_typemap=None,
|
||||
pretty_format=True,
|
||||
method='DATA',
|
||||
):
|
||||
from xml.sax.saxutils import quoteattr
|
||||
property_typemap = build_property_typemap(skip_classes, skip_typemap)
|
||||
|
||||
# don't follow properties of this type, just reference them by name
|
||||
# they MUST have a unique 'name' property.
|
||||
# 'ID' covers most types
|
||||
referenced_classes = (
|
||||
bpy.types.ID,
|
||||
bpy.types.Bone,
|
||||
bpy.types.ActionGroup,
|
||||
bpy.types.PoseBone,
|
||||
bpy.types.Node,
|
||||
bpy.types.Sequence,
|
||||
)
|
||||
|
||||
def number_to_str(val, val_type):
|
||||
if val_type == int:
|
||||
return "%d" % val
|
||||
elif val_type == float:
|
||||
return "%.6g" % val
|
||||
elif val_type == bool:
|
||||
return "TRUE" if val else "FALSE"
|
||||
else:
|
||||
raise NotImplementedError("this type is not a number %s" % val_type)
|
||||
|
||||
def rna2xml_node(ident, value, parent):
|
||||
ident_next = ident + ident_val
|
||||
|
||||
# divide into attrs and nodes.
|
||||
node_attrs = []
|
||||
nodes_items = []
|
||||
nodes_lists = []
|
||||
|
||||
value_type = type(value)
|
||||
|
||||
if issubclass(value_type, skip_classes):
|
||||
return
|
||||
|
||||
# XXX, fixme, pointcache has eternal nested pointer to itself.
|
||||
if value == parent:
|
||||
return
|
||||
|
||||
value_type_name = value_type.__name__
|
||||
for prop in property_typemap[value_type_name]:
|
||||
|
||||
subvalue = getattr(value, prop)
|
||||
subvalue_type = type(subvalue)
|
||||
|
||||
if subvalue_type in {int, bool, float}:
|
||||
node_attrs.append("%s=\"%s\"" % (prop, number_to_str(subvalue, subvalue_type)))
|
||||
elif subvalue_type is str:
|
||||
node_attrs.append("%s=%s" % (prop, quoteattr(subvalue)))
|
||||
elif subvalue_type is set:
|
||||
node_attrs.append("%s=%s" % (prop, quoteattr("{" + ",".join(list(subvalue)) + "}")))
|
||||
elif subvalue is None:
|
||||
node_attrs.append("%s=\"NONE\"" % prop)
|
||||
elif issubclass(subvalue_type, referenced_classes):
|
||||
# special case, ID's are always referenced.
|
||||
node_attrs.append("%s=%s" % (prop, quoteattr(subvalue_type.__name__ + "::" + subvalue.name)))
|
||||
else:
|
||||
try:
|
||||
subvalue_ls = list(subvalue)
|
||||
except:
|
||||
subvalue_ls = None
|
||||
|
||||
if subvalue_ls is None:
|
||||
nodes_items.append((prop, subvalue, subvalue_type))
|
||||
else:
|
||||
# check if the list contains native types
|
||||
subvalue_rna = value.path_resolve(prop, False)
|
||||
if type(subvalue_rna).__name__ == "bpy_prop_array":
|
||||
# check if this is a 0-1 color (rgb, rgba)
|
||||
# in that case write as a hexadecimal
|
||||
prop_rna = value.bl_rna.properties[prop]
|
||||
if (prop_rna.subtype == 'COLOR_GAMMA' and
|
||||
prop_rna.hard_min == 0.0 and
|
||||
prop_rna.hard_max == 1.0 and
|
||||
prop_rna.array_length in {3, 4}):
|
||||
# -----
|
||||
# color
|
||||
array_value = "#" + "".join(("%.2x" % int(v * 255) for v in subvalue_rna))
|
||||
|
||||
else:
|
||||
# default
|
||||
def str_recursive(s):
|
||||
subsubvalue_type = type(s)
|
||||
if subsubvalue_type in {int, float, bool}:
|
||||
return number_to_str(s, subsubvalue_type)
|
||||
else:
|
||||
return " ".join([str_recursive(si) for si in s])
|
||||
|
||||
array_value = " ".join(str_recursive(v) for v in subvalue_rna)
|
||||
|
||||
node_attrs.append("%s=\"%s\"" % (prop, array_value))
|
||||
else:
|
||||
nodes_lists.append((prop, subvalue_ls, subvalue_type))
|
||||
|
||||
# declare + attributes
|
||||
if pretty_format:
|
||||
if node_attrs:
|
||||
fw("%s<%s\n" % (ident, value_type_name))
|
||||
for node_attr in node_attrs:
|
||||
fw("%s%s\n" % (ident_next, node_attr))
|
||||
fw("%s>\n" % (ident_next,))
|
||||
else:
|
||||
fw("%s<%s>\n" % (ident, value_type_name))
|
||||
else:
|
||||
fw("%s<%s %s>\n" % (ident, value_type_name, " ".join(node_attrs)))
|
||||
|
||||
# unique members
|
||||
for prop, subvalue, subvalue_type in nodes_items:
|
||||
fw("%s<%s>\n" % (ident_next, prop)) # XXX, this is awkward, how best to solve?
|
||||
rna2xml_node(ident_next + ident_val, subvalue, value)
|
||||
fw("%s</%s>\n" % (ident_next, prop)) # XXX, need to check on this.
|
||||
|
||||
# list members
|
||||
for prop, subvalue, subvalue_type in nodes_lists:
|
||||
fw("%s<%s>\n" % (ident_next, prop))
|
||||
for subvalue_item in subvalue:
|
||||
if subvalue_item is not None:
|
||||
rna2xml_node(ident_next + ident_val, subvalue_item, value)
|
||||
fw("%s</%s>\n" % (ident_next, prop))
|
||||
|
||||
fw("%s</%s>\n" % (ident, value_type_name))
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# needs re-working to be generic
|
||||
|
||||
if root_node:
|
||||
fw("%s<%s>\n" % (root_ident, root_node))
|
||||
|
||||
# bpy.data
|
||||
if method == 'DATA':
|
||||
ident = root_ident + ident_val
|
||||
for attr in dir(root_rna):
|
||||
|
||||
# exceptions
|
||||
if attr.startswith("_"):
|
||||
continue
|
||||
elif attr in root_rna_skip:
|
||||
continue
|
||||
|
||||
value = getattr(root_rna, attr)
|
||||
try:
|
||||
ls = value[:]
|
||||
except:
|
||||
ls = None
|
||||
|
||||
if type(ls) == list:
|
||||
fw("%s<%s>\n" % (ident, attr))
|
||||
for blend_id in ls:
|
||||
rna2xml_node(ident + ident_val, blend_id, None)
|
||||
fw("%s</%s>\n" % (ident_val, attr))
|
||||
# any attribute
|
||||
elif method == 'ATTR':
|
||||
rna2xml_node(root_ident, root_rna, None)
|
||||
|
||||
if root_node:
|
||||
fw("%s</%s>\n" % (root_ident, root_node))
|
||||
|
||||
|
||||
def xml2rna(
|
||||
root_xml, *,
|
||||
root_rna=None, # must be set
|
||||
):
|
||||
|
||||
def rna2xml_node(xml_node, value):
|
||||
# print("evaluating:", xml_node.nodeName)
|
||||
|
||||
# ---------------------------------------------------------------------
|
||||
# Simple attributes
|
||||
|
||||
for attr in xml_node.attributes.keys():
|
||||
# print(" ", attr)
|
||||
subvalue = getattr(value, attr, Ellipsis)
|
||||
|
||||
if subvalue is Ellipsis:
|
||||
print("%s.%s not found" % (type(value).__name__, attr))
|
||||
else:
|
||||
value_xml = xml_node.attributes[attr].value
|
||||
|
||||
subvalue_type = type(subvalue)
|
||||
# tp_name = 'UNKNOWN'
|
||||
if subvalue_type == float:
|
||||
value_xml_coerce = float(value_xml)
|
||||
# tp_name = 'FLOAT'
|
||||
elif subvalue_type == int:
|
||||
value_xml_coerce = int(value_xml)
|
||||
# tp_name = 'INT'
|
||||
elif subvalue_type == bool:
|
||||
value_xml_coerce = {'TRUE': True, 'FALSE': False}[value_xml]
|
||||
# tp_name = 'BOOL'
|
||||
elif subvalue_type == str:
|
||||
value_xml_coerce = value_xml
|
||||
# tp_name = 'STR'
|
||||
elif hasattr(subvalue, "__len__"):
|
||||
if value_xml.startswith("#"):
|
||||
# read hexadecimal value as float array
|
||||
value_xml_split = value_xml[1:]
|
||||
value_xml_coerce = [int(value_xml_split[i:i + 2], 16) /
|
||||
255 for i in range(0, len(value_xml_split), 2)]
|
||||
del value_xml_split
|
||||
else:
|
||||
value_xml_split = value_xml.split()
|
||||
try:
|
||||
value_xml_coerce = [int(v) for v in value_xml_split]
|
||||
except ValueError:
|
||||
try:
|
||||
value_xml_coerce = [float(v) for v in value_xml_split]
|
||||
except ValueError: # bool vector property
|
||||
value_xml_coerce = [{'TRUE': True, 'FALSE': False}[v] for v in value_xml_split]
|
||||
del value_xml_split
|
||||
# tp_name = 'ARRAY'
|
||||
|
||||
# print(" %s.%s (%s) --- %s" % (type(value).__name__, attr, tp_name, subvalue_type))
|
||||
try:
|
||||
setattr(value, attr, value_xml_coerce)
|
||||
except ValueError:
|
||||
# size mismatch
|
||||
val = getattr(value, attr)
|
||||
if len(val) < len(value_xml_coerce):
|
||||
setattr(value, attr, value_xml_coerce[:len(val)])
|
||||
else:
|
||||
setattr(value, attr, list(value_xml_coerce) + list(val)[len(value_xml_coerce):])
|
||||
|
||||
# ---------------------------------------------------------------------
|
||||
# Complex attributes
|
||||
for child_xml in xml_node.childNodes:
|
||||
if child_xml.nodeType == child_xml.ELEMENT_NODE:
|
||||
# print()
|
||||
# print(child_xml.nodeName)
|
||||
subvalue = getattr(value, child_xml.nodeName, None)
|
||||
if subvalue is not None:
|
||||
|
||||
elems = []
|
||||
for child_xml_real in child_xml.childNodes:
|
||||
if child_xml_real.nodeType == child_xml_real.ELEMENT_NODE:
|
||||
elems.append(child_xml_real)
|
||||
del child_xml_real
|
||||
|
||||
if hasattr(subvalue, "__len__"):
|
||||
# Collection
|
||||
if len(elems) != len(subvalue):
|
||||
print("Size Mismatch! collection:", child_xml.nodeName)
|
||||
else:
|
||||
for i in range(len(elems)):
|
||||
child_xml_real = elems[i]
|
||||
subsubvalue = subvalue[i]
|
||||
|
||||
if child_xml_real is None or subsubvalue is None:
|
||||
print("None found %s - %d collection:", (child_xml.nodeName, i))
|
||||
else:
|
||||
rna2xml_node(child_xml_real, subsubvalue)
|
||||
|
||||
else:
|
||||
# print(elems)
|
||||
if len(elems) == 1:
|
||||
# sub node named by its type
|
||||
child_xml_real, = elems
|
||||
|
||||
# print(child_xml_real, subvalue)
|
||||
rna2xml_node(child_xml_real, subvalue)
|
||||
else:
|
||||
# empty is valid too
|
||||
pass
|
||||
|
||||
rna2xml_node(root_xml, root_rna)
|
||||
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Utility function used by presets.
|
||||
# The idea is you can run a preset like a script with a few args.
|
||||
#
|
||||
# This roughly matches the operator 'bpy.ops.script.python_file_run'
|
||||
|
||||
|
||||
def _get_context_val(context, path):
|
||||
try:
|
||||
value = context.path_resolve(path)
|
||||
except Exception as ex:
|
||||
print("Error: %r, path %r not found" % (ex, path))
|
||||
value = Ellipsis
|
||||
|
||||
return value


def xml_file_run(context, filepath, rna_map):
    import xml.dom.minidom

    xml_nodes = xml.dom.minidom.parse(filepath)
    bpy_xml = xml_nodes.getElementsByTagName("bpy")[0]

    for rna_path, xml_tag in rna_map:

        # first get xml
        # TODO, error check
        xml_node = bpy_xml.getElementsByTagName(xml_tag)[0]

        value = _get_context_val(context, rna_path)

        if value is not Ellipsis and value is not None:
            # print("  loading XML: %r -> %r" % (filepath, rna_path))
            xml2rna(xml_node, root_rna=value)


def xml_file_write(context, filepath, rna_map, *, skip_typemap=None):
    with open(filepath, "w", encoding="utf-8") as file:
        fw = file.write
        fw("<bpy>\n")

        for rna_path, _xml_tag in rna_map:
            # xml_tag is ignored, we get this from the rna
            value = _get_context_val(context, rna_path)
            rna2xml(
                fw=fw,
                root_rna=value,
                method='ATTR',
                root_ident="  ",
                ident_val="  ",
                skip_typemap=skip_typemap,
            )

        fw("</bpy>\n")
232
scripts/modules/sys_info.py
Normal file
@@ -0,0 +1,232 @@
# SPDX-License-Identifier: GPL-2.0-or-later

# Utilities for extracting info from Blender's internal classes.


def write_sysinfo(filepath):
    import sys
    import platform

    import subprocess

    import bpy
    import gpu

    # pretty repr
    def prepr(v):
        r = repr(v)
        vt = type(v)
        if vt is bytes:
            r = r[2:-1]
        elif vt is list or vt is tuple:
            r = r[1:-1]
        return r
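
    # Behavior sketch (illustration only): `prepr` trims the delimiters that
    # `repr` adds around bytes and sequences, keeping report lines compact:
    #
    #   >>> prepr(b"v3.4.1")
    #   'v3.4.1'
    #   >>> prepr((2, 93, 1))
    #   '2, 93, 1'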

    with open(filepath, 'w', encoding="utf-8") as output:
        try:
            header = "= Blender %s System Information =\n" % bpy.app.version_string
            lilies = "%s\n\n" % ((len(header) - 1) * "=")
            output.write(lilies[:-1])
            output.write(header)
            output.write(lilies)

            def title(text):
                return "\n%s:\n%s" % (text, lilies)
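
            # Heading sketch (illustration only): title("Python") yields a
            # blank line, "Python:", then a rule of "=" the width of the
            # main header, followed by a blank line.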

            # build info
            output.write(title("Blender"))
            output.write(
                "version: %s, branch: %s, commit date: %s %s, hash: %s, type: %s\n" %
                (bpy.app.version_string,
                 prepr(bpy.app.build_branch),
                 prepr(bpy.app.build_commit_date),
                 prepr(bpy.app.build_commit_time),
                 prepr(bpy.app.build_hash),
                 prepr(bpy.app.build_type),
                 ))

            output.write("build date: %s, %s\n" % (prepr(bpy.app.build_date), prepr(bpy.app.build_time)))
            output.write("platform: %s\n" % prepr(platform.platform()))
            output.write("binary path: %s\n" % prepr(bpy.app.binary_path))
            output.write("build cflags: %s\n" % prepr(bpy.app.build_cflags))
            output.write("build cxxflags: %s\n" % prepr(bpy.app.build_cxxflags))
            output.write("build linkflags: %s\n" % prepr(bpy.app.build_linkflags))
            output.write("build system: %s\n" % prepr(bpy.app.build_system))

            # Windowing Environment (include when dynamically selectable).
            from _bpy import _ghost_backend
            ghost_backend = _ghost_backend()
            if ghost_backend not in {'NONE', 'DEFAULT'}:
                output.write("windowing environment: %s\n" % prepr(ghost_backend))
            del _ghost_backend, ghost_backend

            # Python info.
            output.write(title("Python"))
            output.write("version: %s\n" % (sys.version.replace("\n", " ")))
            output.write("file system encoding: %s:%s\n" % (
                sys.getfilesystemencoding(),
                sys.getfilesystemencodeerrors(),
            ))
            output.write("paths:\n")
            for p in sys.path:
                output.write("\t%r\n" % p)

            output.write(title("Python (External Binary)"))
            output.write("binary path: %s\n" % prepr(sys.executable))
            try:
                py_ver = prepr(subprocess.check_output([
                    sys.executable,
                    "--version",
                ]).strip())
            except Exception as e:
                py_ver = str(e)
            output.write("version: %s\n" % py_ver)
            del py_ver

            output.write(title("Directories"))
            output.write("scripts:\n")
            for p in bpy.utils.script_paths():
                output.write("\t%r\n" % p)
            output.write("user scripts: %r\n" % (bpy.utils.script_path_user()))
            output.write("pref scripts: %r\n" % (bpy.utils.script_path_pref()))
            output.write("datafiles: %r\n" % (bpy.utils.user_resource('DATAFILES')))
            output.write("config: %r\n" % (bpy.utils.user_resource('CONFIG')))
            output.write("scripts: %r\n" % (bpy.utils.user_resource('SCRIPTS')))
            output.write("autosave: %r\n" % (bpy.utils.user_resource('AUTOSAVE')))
            output.write("tempdir: %r\n" % (bpy.app.tempdir))

            output.write(title("FFmpeg"))
            ffmpeg = bpy.app.ffmpeg
            if ffmpeg.supported:
                for lib in ("avcodec", "avdevice", "avformat", "avutil", "swscale"):
                    output.write(
                        "%s:%s%r\n" % (lib, " " * (10 - len(lib)),
                                       getattr(ffmpeg, lib + "_version_string")))
            else:
                output.write("Blender was built without FFmpeg support\n")

            if bpy.app.build_options.sdl:
                output.write(title("SDL"))
                output.write("Version: %s\n" % bpy.app.sdl.version_string)
                output.write("Loading method: ")
                if bpy.app.build_options.sdl_dynload:
                    output.write("dynamically loaded by Blender (WITH_SDL_DYNLOAD=ON)\n")
                else:
                    output.write("linked (WITH_SDL_DYNLOAD=OFF)\n")
                if not bpy.app.sdl.available:
                    output.write("WARNING: Blender could not load SDL library\n")

            output.write(title("Other Libraries"))
            ocio = bpy.app.ocio
            output.write("OpenColorIO: ")
            if ocio.supported:
                if ocio.version_string == "fallback":
                    output.write("Blender was built with OpenColorIO, " +
                                 "but it currently uses fallback color management.\n")
                else:
                    output.write("%s\n" % (ocio.version_string))
            else:
                output.write("Blender was built without OpenColorIO support\n")

            oiio = bpy.app.oiio
            output.write("OpenImageIO: ")
            if oiio.supported:
                output.write("%s\n" % (oiio.version_string))
            else:
                output.write("Blender was built without OpenImageIO support\n")

            output.write("OpenShadingLanguage: ")
            if bpy.app.build_options.cycles:
                if bpy.app.build_options.cycles_osl:
                    from _cycles import osl_version_string
                    output.write("%s\n" % (osl_version_string))
                else:
                    output.write("Blender was built without OpenShadingLanguage support in Cycles\n")
            else:
                output.write("Blender was built without Cycles support\n")

            opensubdiv = bpy.app.opensubdiv
            output.write("OpenSubdiv: ")
            if opensubdiv.supported:
                output.write("%s\n" % opensubdiv.version_string)
            else:
                output.write("Blender was built without OpenSubdiv support\n")

            openvdb = bpy.app.openvdb
            output.write("OpenVDB: ")
            if openvdb.supported:
                output.write("%s\n" % openvdb.version_string)
            else:
                output.write("Blender was built without OpenVDB support\n")

            alembic = bpy.app.alembic
            output.write("Alembic: ")
            if alembic.supported:
                output.write("%s\n" % alembic.version_string)
            else:
                output.write("Blender was built without Alembic support\n")

            usd = bpy.app.usd
            output.write("USD: ")
            if usd.supported:
                output.write("%s\n" % usd.version_string)
            else:
                output.write("Blender was built without USD support\n")

            if not bpy.app.build_options.sdl:
                output.write("SDL: Blender was built without SDL support\n")

            if bpy.app.background:
                output.write("\nOpenGL: missing, background mode\n")
            else:
                output.write(title("GPU"))
                output.write("renderer:\t%r\n" % gpu.platform.renderer_get())
                output.write("vendor:\t\t%r\n" % gpu.platform.vendor_get())
                output.write("version:\t%r\n" % gpu.platform.version_get())
                output.write("device type:\t%r\n" % gpu.platform.device_type_get())
                output.write("backend type:\t%r\n" % gpu.platform.backend_type_get())
                output.write("extensions:\n")

                glext = sorted(gpu.capabilities.extensions_get())

                for l in glext:
                    output.write("\t%s\n" % l)

                output.write(title("Implementation Dependent GPU Limits"))
                output.write("Maximum Batch Vertices:\t%d\n" % gpu.capabilities.max_batch_vertices_get())
                output.write("Maximum Batch Indices:\t%d\n" % gpu.capabilities.max_batch_indices_get())

                output.write("\nGLSL:\n")
                output.write("Maximum Varying Floats:\t%d\n" % gpu.capabilities.max_varying_floats_get())
                output.write("Maximum Vertex Attributes:\t%d\n" % gpu.capabilities.max_vertex_attribs_get())
                output.write("Maximum Vertex Uniform Components:\t%d\n" % gpu.capabilities.max_uniforms_vert_get())
                output.write("Maximum Fragment Uniform Components:\t%d\n" % gpu.capabilities.max_uniforms_frag_get())
                output.write("Maximum Vertex Image Units:\t%d\n" % gpu.capabilities.max_textures_vert_get())
                output.write("Maximum Fragment Image Units:\t%d\n" % gpu.capabilities.max_textures_frag_get())
                output.write("Maximum Pipeline Image Units:\t%d\n" % gpu.capabilities.max_textures_get())

                output.write("\nFeatures:\n")
                output.write("Compute Shader Support: \t%d\n" %
                             gpu.capabilities.compute_shader_support_get())
                output.write("Shader Storage Buffer Objects Support:\t%d\n" %
                             gpu.capabilities.shader_storage_buffer_objects_support_get())
                output.write("Image Load/Store Support: \t%d\n" %
                             gpu.capabilities.shader_image_load_store_support_get())

            if bpy.app.build_options.cycles:
                import cycles
                output.write(title("Cycles"))
                output.write(cycles.engine.system_info())

            import addon_utils
            addon_utils.modules()
            output.write(title("Enabled add-ons"))
            for addon in bpy.context.preferences.addons.keys():
                addon_mod = addon_utils.addons_fake_modules.get(addon, None)
                if addon_mod is None:
                    output.write("%s (MISSING)\n" % (addon))
                else:
                    output.write("%s (version: %s, path: %s)\n" %
                                 (addon, addon_mod.bl_info.get('version', "UNKNOWN"), addon_mod.__file__))
        except Exception as e:
            output.write("ERROR: %s\n" % e)