Nodes: new interactive operator to slide nodes #121981

Open
Jacques Lucke wants to merge 24 commits from JacquesLucke/blender:slide-nodes into main

95 changed files with 6330 additions and 569 deletions
Showing only changes of commit 1f62c6562a

View File

@ -17,6 +17,7 @@ body:
* For feature requests, feedback, questions or build issues, see [communication channels](https://developer.blender.org/docs/handbook/communication/user_feedback/).
* Security vulnerabilities should be [reported privately](https://developer.blender.org/docs/handbook/bug_reports/vulnerability_reports/).
* If there are multiple bugs, make multiple bug reports.
+ * Any content you upload will be shared publicly. Ensure you have permission to share any files you upload, and avoid uploading age restricted content.
- type: textarea
id: body
@ -29,7 +30,7 @@ body:
Graphics card:
**Blender Version**
- Broken: (example: 2.80, edbf15d3c044, master, 2018-11-28, as found on the splash screen)
+ Broken: (example: 3.0.0, main, `f1cca3055776`, 2020-12-30)
Worked: (newest version of Blender that worked as expected)
**Short description of error**

View File

@ -762,7 +762,7 @@ PACKAGES_ALL = (
DISTRO_ID_ARCH: "imath",
},
),
Package(name="OpenEXR Library", is_mandatory=False, version="3.2.1", version_short="3.2", version_min="3.0", version_mex="4.0",
Package(name="OpenEXR Library", is_mandatory=False, version="3.2.4", version_short="3.2", version_min="3.0", version_mex="4.0",
sub_packages=(),
distro_package_names={DISTRO_ID_DEBIAN: "libopenexr-dev",
DISTRO_ID_FEDORA: "openexr-devel",
@ -770,7 +770,7 @@ PACKAGES_ALL = (
DISTRO_ID_ARCH: "openexr",
},
),
Package(name="OpenImageIO Library", is_mandatory=True, version="2.5.6.0", version_short="2.5", version_min="2.5.0", version_mex="2.6.0",
Package(name="OpenImageIO Library", is_mandatory=True, version="2.5.11.0", version_short="2.5", version_min="2.5.0", version_mex="2.6.0",
sub_packages=(
Package(name="OpenImageIO Tools", is_mandatory=False,
distro_package_names={DISTRO_ID_DEBIAN: "openimageio-tools",
@ -869,7 +869,7 @@ PACKAGES_ALL = (
DISTRO_ID_ARCH: "materialx-git",
},
),
Package(name="USD Library", is_mandatory=False, version="23.11", version_short="23.11", version_min="20.05", version_mex="24.00",
Package(name="USD Library", is_mandatory=False, version="24.05", version_short="24.05", version_min="22.05", version_mex="25.00",
sub_packages=(),
distro_package_names={DISTRO_ID_DEBIAN: None,
DISTRO_ID_FEDORA: "usd-devel",
@ -884,7 +884,7 @@ PACKAGES_ALL = (
DISTRO_ID_ARCH: "opencollada",
},
),
Package(name="Embree Library", is_mandatory=False, version="4.1.0", version_short="4.1", version_min="3.13", version_mex="5.0",
Package(name="Embree Library", is_mandatory=False, version="4.3.2", version_short="4.3", version_min="3.13", version_mex="5.0",
sub_packages=(),
distro_package_names={DISTRO_ID_DEBIAN: "libembree-dev",
DISTRO_ID_FEDORA: "embree-devel",
@ -892,7 +892,7 @@ PACKAGES_ALL = (
DISTRO_ID_ARCH: "embree",
},
),
Package(name="OpenImageDenoiser Library", is_mandatory=False, version="2.2.0", version_short="2.2", version_min="2.0.0", version_mex="3.0",
Package(name="OpenImageDenoiser Library", is_mandatory=False, version="2.3.0", version_short="2.3", version_min="2.0.0", version_mex="3.0",
sub_packages=(),
distro_package_names={DISTRO_ID_DEBIAN: None,
DISTRO_ID_FEDORA: "oidn-devel",
@ -900,7 +900,7 @@ PACKAGES_ALL = (
DISTRO_ID_ARCH: "openimagedenoise",
},
),
Package(name="Level Zero Library", is_mandatory=False, version="1.15.8", version_short="1.15", version_min="1.7", version_mex="2.0",
Package(name="Level Zero Library", is_mandatory=False, version="1.16.1", version_short="1.16", version_min="1.7", version_mex="2.0",
sub_packages=(),
distro_package_names={DISTRO_ID_DEBIAN: None,
DISTRO_ID_FEDORA: "oneapi-level-zero-devel",

View File

@ -111,7 +111,7 @@ def handle_args():
usage=__doc__
)
- # optional arguments
+ # Optional arguments.
parser.add_argument(
"-p", "--partial",
dest="partial",
@ -248,14 +248,14 @@ or
# Switch for quick testing so doc-builds don't take so long.
if not ARGS.partial:
- # full build
+ # Full build.
FILTER_BPY_OPS = None
FILTER_BPY_TYPES = None
EXCLUDE_INFO_DOCS = False
EXCLUDE_MODULES = []
else:
- # can manually edit this too:
+ # Can manually edit this too:
# FILTER_BPY_OPS = ("import.scene", ) # allow
# FILTER_BPY_TYPES = ("bpy_struct", "Operator", "ID") # allow
EXCLUDE_INFO_DOCS = True
@ -277,10 +277,10 @@ else:
"bpy.app.translations",
"bpy.context",
"bpy.data",
"bpy.ops", # supports filtering
"bpy.ops", # Supports filtering.
"bpy.path",
"bpy.props",
"bpy.types", # supports filtering
"bpy.types", # Supports filtering.
"bpy.utils",
"bpy.utils.previews",
"bpy.utils.units",
@ -319,7 +319,7 @@ else:
m = None
EXCLUDE_MODULES = [m for m in EXCLUDE_MODULES if not fnmatch.fnmatchcase(m, ARGS.partial)]
- # special support for bpy.types.XXX
+ # Special support for `bpy.types.*`.
FILTER_BPY_OPS = tuple([m[8:] for m in ARGS.partial.split(":") if m.startswith("bpy.ops.")])
if FILTER_BPY_OPS:
EXCLUDE_MODULES.remove("bpy.ops")
@ -340,7 +340,7 @@ else:
"\n ".join(sorted(EXCLUDE_MODULES)))
#
- # done filtering
+ # Done filtering
# --------------
try:
@ -380,7 +380,7 @@ EXTRA_SOURCE_FILES = (
)
- # examples
+ # Examples.
EXAMPLES_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, "examples"))
EXAMPLE_SET = set(os.path.splitext(f)[0] for f in os.listdir(EXAMPLES_DIR) if f.endswith(".py"))
EXAMPLE_SET_USED = set()
@ -418,7 +418,7 @@ INFO_DOCS_OTHER = (
# This is done so a short description can be included with each link.
USE_INFO_DOCS_FANCY_INDEX = True
- # only support for properties atm.
+ # Only support for properties at the moment.
RNA_BLACKLIST = {
# XXX messes up PDF!, really a bug but for now just workaround.
"PreferencesSystem": {"language", }
@ -501,7 +501,7 @@ SPHINX_IN = os.path.join(ARGS.output_dir, "sphinx-in")
SPHINX_IN_TMP = SPHINX_IN + "-tmp"
SPHINX_OUT = os.path.join(ARGS.output_dir, "sphinx-out")
- # html build
+ # HTML build.
if ARGS.sphinx_build:
SPHINX_BUILD = ["sphinx-build", SPHINX_IN, SPHINX_OUT]
@ -513,7 +513,7 @@ if ARGS.sphinx_build:
SPHINX_IN, SPHINX_OUT,
]
- # pdf build
+ # PDF build.
if ARGS.sphinx_build_pdf:
SPHINX_OUT_PDF = os.path.join(ARGS.output_dir, "sphinx-out_pdf")
SPHINX_BUILD_PDF = [
@ -665,7 +665,7 @@ def example_extract_docstring(filepath):
line = file.readline()
line_no = 0
text = []
- if line.startswith('"""'): # assume nothing here
+ if line.startswith('"""'): # Assume nothing here.
line_no += 1
else:
file.close()
@ -768,13 +768,38 @@ def write_indented_lines(ident, fn, text, strip=True):
fn(ident + l + "\n")
+ def pyfunc_is_inherited_method(py_func, identifier):
+     assert type(py_func) == MethodType
+     # Exclude Mix-in classes (after the first), because these don't get their own documentation.
+     cls = py_func.__self__
+     if (py_func_base := getattr(cls.__base__, identifier, None)) is not None:
+         if type(py_func_base) == MethodType:
+             if py_func.__func__ == py_func_base.__func__:
+                 return True
+         elif type(py_func_base) == bpy.types.bpy_func:
+             return True
+     return False
def pyfunc2sphinx(ident, fw, module_name, type_name, identifier, py_func, is_class=True):
"""
function or class method to sphinx
"""
-     if type(py_func) == MethodType:
-         return
+     # Including methods means every operators "poll" function for e.g.
+     # would be listed in documentation which isn't useful.
+     #
+     # However excluding all of them is also incorrect as it means class methods defined
+     # in `bpy_types.py` for e.g. are excluded, making some utility functions entirely hidden.
+     if (bl_rna := getattr(py_func.__self__, "bl_rna", None)) is not None:
+         if bl_rna.functions.get(identifier) is not None:
+             return
+     del bl_rna
+     # Only inline the method if it's not inherited from another class.
+     if pyfunc_is_inherited_method(py_func, identifier):
+         return
arg_str = str(inspect.signature(py_func))
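For reviewers unfamiliar with the descriptor mechanics this hunk relies on, here is a minimal self-contained sketch of the inherited-method check (plain Python, no `bpy`; the `bpy.types.bpy_func` branch is omitted and the class names are made up):

```python
from types import MethodType

def is_inherited_classmethod(py_func, identifier):
    # Mirrors pyfunc_is_inherited_method() above, minus the bpy-specific branch.
    assert type(py_func) == MethodType
    cls = py_func.__self__
    py_func_base = getattr(cls.__base__, identifier, None)
    return (
        type(py_func_base) == MethodType and
        py_func.__func__ == py_func_base.__func__
    )

class Base:
    @classmethod
    def poll(cls, context):
        return True

class Sub(Base):
    pass

print(is_inherited_classmethod(Sub.poll, "poll"))   # True: inherited, skip its docs.
print(is_inherited_classmethod(Base.poll, "poll"))  # False: defined here, document it.
```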
@ -789,7 +814,10 @@ def pyfunc2sphinx(ident, fw, module_name, type_name, identifier, py_func, is_cla
arg_str = "()" if (arg_str == "(cls)") else ("(" + arg_str[6:])
func_type = "classmethod"
else:
func_type = "staticmethod"
if type(py_func) == MethodType:
func_type = "classmethod"
else:
func_type = "staticmethod"
doc = py_func.__doc__
if (not doc) or (not doc.startswith(".. {:s}:: ".format(func_type))):
@ -822,7 +850,7 @@ def py_descr2sphinx(ident, fw, descr, module_name, type_name, identifier):
# NOTE: `RST_NOINDEX_ATTR` currently not supported (as it's not used).
write_indented_lines(ident + " ", fw, doc, False)
fw("\n")
- elif type(descr) == MemberDescriptorType: # same as above but use "data"
+ elif type(descr) == MemberDescriptorType: # Same as above but use "data".
fw(ident + ".. data:: {:s}\n\n".format(identifier))
# NOTE: `RST_NOINDEX_ATTR` currently not supported (as it's not used).
write_indented_lines(ident + " ", fw, doc, False)
@ -960,8 +988,8 @@ def pymodule2sphinx(basepath, module_name, module, title, module_all_extra):
write_example_ref("", fw, module_name)
- # write members of the module
- # only tested with PyStructs which are not exactly modules
+ # Write members of the module.
+ # Only tested with `PyStructs` which are not exactly modules.
for key, descr in sorted(type(module).__dict__.items()):
if key.startswith("__"):
continue
@ -1031,7 +1059,7 @@ def pymodule2sphinx(basepath, module_name, module, title, module_all_extra):
if attribute.startswith("n_"): # Annoying exception, needed for `bpy.app`.
continue
- # workaround for bpy.app documenting .index() and .count()
+ # Workaround for `bpy.app` documenting `.index()` and `.count()`.
if isinstance(module, tuple) and hasattr(tuple, attribute):
continue
@ -1039,8 +1067,7 @@ def pymodule2sphinx(basepath, module_name, module, title, module_all_extra):
module_dir_value_type.append((attribute, value, type(value)))
- # sort by str of each type
- # this way lists, functions etc are grouped.
+ # Sort by `str` of each type this way lists, functions etc are grouped.
module_dir_value_type.sort(key=lambda triple: str(triple[2]))
for attribute, value, value_type in module_dir_value_type:
@ -1319,7 +1346,7 @@ def pycontext2sphinx(basepath):
write_contex_cls()
del write_contex_cls
- # end
+ # End.
# Internal API call only intended to be used to extract context members.
from _bpy import context_members
@ -1479,7 +1506,7 @@ def pyrna2sphinx(basepath):
fw("\n")
write_indented_lines(ident + " ", fw, enum_text)
del enum_text
- # end enum exception
+ # End enum exception.
fw(ident + ":{:s}{:s}: {:s}\n".format(id_type, identifier, type_descr))
@ -1513,7 +1540,7 @@ def pyrna2sphinx(basepath):
fw(".. currentmodule:: {:s}\n\n".format(struct_module_name))
- # docs first?, ok
+ # Docs first? OK.
write_example_ref("", fw, "{:s}.{:s}".format(struct_module_name, struct_id))
base_ids = [base.identifier for base in struct.get_bases()]
@ -1636,8 +1663,8 @@ def pyrna2sphinx(basepath):
elif func.return_values: # Multiple return values.
fw(" :return ({:s}):\n".format(", ".join(prop.identifier for prop in func.return_values)))
for prop in func.return_values:
- # TODO: pyrna_enum2sphinx for multiple return values... actually don't
- # think we even use this but still!
+ # TODO: pyrna_enum2sphinx for multiple return values,
+ # actually don't think we even use this but still!
enum_descr_override = None
if USE_SHARED_RNA_ENUM_ITEMS_STATIC:
@ -1760,7 +1787,7 @@ def pyrna2sphinx(basepath):
fw(" * :class:`{:s}`\n".format(ref))
fw("\n")
- # docs last?, disable for now
+ # Docs last?, disable for now.
# write_example_ref("", fw, "bpy.types.{:s}".format(struct_id))
file.close()
@ -1811,7 +1838,7 @@ def pyrna2sphinx(basepath):
py_descr2sphinx(" ", fw, descr, "bpy.types", class_name, key)
file.close()
- # write fake classes
+ # Write fake classes.
if _BPY_STRUCT_FAKE:
class_value = bpy_struct
fake_bpy_type(
@ -1826,7 +1853,7 @@ def pyrna2sphinx(basepath):
"built-in class used for all collections.", use_subclasses=False,
)
- # operators
+ # Operators.
def write_ops():
API_BASEURL = "https://projects.blender.org/blender/blender/src/branch/main/scripts"
API_BASEURL_ADDON = "https://projects.blender.org/blender/blender-addons"
@ -1854,8 +1881,8 @@ def pyrna2sphinx(basepath):
args_str = ", ".join(prop.get_arg_default(force=True) for prop in op.args)
fw(".. function:: {:s}({:s})\n\n".format(op.func_name, args_str))
- # if the description isn't valid, we output the standard warning
- # with a link to the wiki so that people can help
+ # If the description isn't valid, we output the standard warning
+ # with a link to the wiki so that people can help.
if not op.description or op.description == "(undocumented operator)":
operator_description = undocumented_message("bpy.ops", op.module_name, op.func_name)
else:
@ -1927,18 +1954,18 @@ def write_rst_index(basepath):
fw(" :caption: Application Modules\n\n")
app_modules = (
"bpy.context", # note: not actually a module
"bpy.data", # note: not actually a module
"bpy.msgbus", # note: not actually a module
"bpy.context", # NOTE: not actually a module.
"bpy.data", # NOTE: not actually a module.
"bpy.msgbus", # NOTE: not actually a module.
"bpy.ops",
"bpy.types",
- # py modules
+ # Python modules.
"bpy.utils",
"bpy.path",
"bpy.app",
- # C modules
+ # C modules.
"bpy.props",
)
@ -1952,7 +1979,7 @@ def write_rst_index(basepath):
fw(" :caption: Standalone Modules\n\n")
standalone_modules = (
- # submodules are added in parent page
+ # Sub-modules are added in parent page.
"aud",
"bgl",
"bl_math",
@ -2179,13 +2206,13 @@ def write_rst_importable_modules(basepath):
Write the RST files of importable modules.
"""
importable_modules = {
- # Python_modules
+ # Python_modules.
"bpy.path": "Path Utilities",
"bpy.utils": "Utilities",
"bpy_extras": "Extra Utilities",
"gpu_extras": "GPU Utilities",
- # C_modules
+ # C_modules.
"aud": "Audio System",
"blf": "Font Drawing",
"imbuf": "Image Buffer",
@ -2266,9 +2293,9 @@ def copy_handwritten_rsts(basepath):
# TODO: put this docs in Blender's code and use import as per modules above.
handwritten_modules = [
"bgl", # "Blender OpenGl wrapper"
"bmesh.ops", # generated by rst_from_bmesh_opdefines.py
"bmesh.ops", # Generated by `rst_from_bmesh_opdefines.py`.
# includes...
# Includes.
"include__bmesh",
]
@ -2345,30 +2372,30 @@ def format_config(basepath):
def rna2sphinx(basepath):
- # main page
+ # Main page.
write_rst_index(basepath)
- # context
+ # Context.
if "bpy.context" not in EXCLUDE_MODULES:
pycontext2sphinx(basepath)
- # internal modules
- write_rst_bpy(basepath) # bpy, disabled by default
- write_rst_types_index(basepath) # bpy.types
- write_rst_ops_index(basepath) # bpy.ops
- write_rst_msgbus(basepath) # bpy.msgbus
- pyrna2sphinx(basepath) # bpy.types.* and bpy.ops.*
- write_rst_data(basepath) # bpy.data
+ # Internal modules.
+ write_rst_bpy(basepath) # `bpy`, disabled by default
+ write_rst_types_index(basepath) # `bpy.types`.
+ write_rst_ops_index(basepath) # `bpy.ops`.
+ write_rst_msgbus(basepath) # `bpy.msgbus`.
+ pyrna2sphinx(basepath) # `bpy.types.*` & `bpy.ops.*`.
+ write_rst_data(basepath) # `bpy.data`.
write_rst_importable_modules(basepath)
# `bpy_types_enum_items/*` (referenced from `bpy.types`).
if USE_SHARED_RNA_ENUM_ITEMS_STATIC:
write_rst_enum_items_and_index(basepath)
- # copy the other rsts
+ # Copy the other RST files.
copy_handwritten_rsts(basepath)
- # copy source files referenced
+ # Copy source files referenced.
copy_handwritten_extra(basepath)
@ -2394,7 +2421,7 @@ def align_sphinx_in_to_sphinx_in_tmp(dir_src, dir_dst):
else:
os.remove(f_dst)
- # freshen with new files.
+ # Freshen with new files.
for f in sorted(sphinx_src_files):
f_src = os.path.join(dir_src, f)
f_dst = os.path.join(dir_dst, f)
@ -2473,7 +2500,7 @@ def main():
bpy_logfilehandler.setLevel(logging.DEBUG)
BPY_LOGGER.addHandler(bpy_logfilehandler)
- # using a `FileHandler` seems to disable the `stdout`, so we add a `StreamHandler`.
+ # Using a `FileHandler` seems to disable the `stdout`, so we add a `StreamHandler`.
bpy_log_stdout_handler = logging.StreamHandler(stream=sys.stdout)
bpy_log_stdout_handler.setLevel(logging.DEBUG)
BPY_LOGGER.addHandler(bpy_log_stdout_handler)
@ -2491,7 +2518,7 @@ def main():
copy_function=shutil.copy,
)
- # start from a clean directory everytime
+ # Start from a clean directory every time.
if os.path.exists(SPHINX_IN_TMP):
shutil.rmtree(SPHINX_IN_TMP, True)
@ -2500,10 +2527,10 @@ def main():
except:
pass
- # copy extra files needed for theme
+ # Copy extra files needed for theme.
copy_sphinx_files(SPHINX_IN_TMP)
- # write infromation needed for 'conf.py'
+ # Write information needed for `conf.py`.
format_config(SPHINX_IN_TMP)
# Dump the API in RST files.

View File

@ -315,6 +315,10 @@ def update_render_engine(self, context):
scene.update_render_engine()
+ def update_pause(self, context):
+     context.area.tag_redraw()
class CyclesRenderSettings(bpy.types.PropertyGroup):
device: EnumProperty(
@ -339,6 +343,7 @@ class CyclesRenderSettings(bpy.types.PropertyGroup):
name="Pause Preview",
description="Pause all viewport preview renders",
default=False,
+ update=update_pause,
)
use_denoising: BoolProperty(

View File

@ -864,24 +864,24 @@ int RenderScheduler::get_num_samples_to_path_trace() const
num_samples_to_occupy = lround(state_.occupancy_num_samples * 0.7f / state_.occupancy);
}
- /* When time limit is used clamp the calculated number of samples to keep occupancy.
-  * This is because time limit causes the last render iteration to happen with less number of
-  * samples, which conflicts with the occupancy (lower number of samples causes lower
-  * occupancy, also the calculation is based on number of previously rendered samples).
-  *
-  * When time limit is not used the number of samples per render iteration is either increasing
-  * or stays the same, so there is no need to clamp number of samples calculated for occupancy.
-  */
+ /* The desired time within which the next render update is expected to happen.
+  *
+  * Normally limit it to the same interval as used to calculate the number of samples without
+  * taking occupancy into account. This avoids situations when occupancy is low, but the GPU is
+  * already taking a lot of time to path trace.
+  *
+  * When the time limit is enabled, do not render more samples than it is needed to reach the
+  * time limit. */
+ double desired_path_tracing_time = guess_display_update_interval_in_seconds();
  if (time_limit_ != 0.0 && state_.start_render_time != 0.0) {
    const double remaining_render_time = max(
        0.0, time_limit_ - (time_dt() - state_.start_render_time));
-   const double time_per_sample_average = path_trace_time_.get_average();
-   const double predicted_render_time = num_samples_to_occupy * time_per_sample_average;
-   if (predicted_render_time > remaining_render_time) {
-     num_samples_to_occupy = lround(num_samples_to_occupy *
-                                    (remaining_render_time / predicted_render_time));
-   }
+   desired_path_tracing_time = min(desired_path_tracing_time, remaining_render_time);
  }
+ const double predicted_render_time = num_samples_to_occupy * path_trace_time_.get_average();
+ if (predicted_render_time > desired_path_tracing_time) {
+   num_samples_to_occupy = lround(num_samples_to_occupy *
+                                  (desired_path_tracing_time / predicted_render_time));
+ }
num_samples_to_render = max(num_samples_to_render,
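The arithmetic in this hunk is easier to follow with numbers plugged in. A minimal Python sketch of the new clamping rule (the names are simplified stand-ins for the C++ members, not Cycles API):

```python
def clamp_samples(num_samples, time_per_sample, update_interval,
                  time_limit=0.0, elapsed=0.0):
    # Desired wall-clock time for the next render update.
    desired = update_interval
    if time_limit != 0.0:
        # With a time limit, never schedule more work than the remaining time allows.
        desired = min(desired, max(0.0, time_limit - elapsed))
    predicted = num_samples * time_per_sample
    if predicted > desired:
        # Scale the sample count down so the update lands near the desired time.
        num_samples = round(num_samples * (desired / predicted))
    return num_samples

# 128 samples at 10 ms each would take 1.28 s; with a ~0.1 s update
# interval the scheduler settles on roughly 10 samples.
print(clamp_samples(128, 0.01, 0.1))  # -> 10
```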

View File

@ -882,11 +882,17 @@ void ObjectManager::device_update_flags(
bool has_volume_objects = false;
foreach (Object *object, scene->objects) {
if (object->geometry->has_volume) {
+ /* If the bounds are not valid it is not always possible to calculate the volume step, and
+  * the step size is not needed for the displacement. So, delay calculation of the volume
+  * step size until the final bounds are known. */
+ if (bounds_valid) {
+   volume_objects.push_back(object);
+   object_volume_step[object->index] = object->compute_volume_step_size();
+ }
+ else {
+   object_volume_step[object->index] = FLT_MAX;
+ }
  has_volume_objects = true;
- object_volume_step[object->index] = object->compute_volume_step_size();
}
else {
object_volume_step[object->index] = FLT_MAX;
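The deferred call makes sense once you note that a step size derived from bounds is meaningless while the bounds are still unset. A toy illustration (the formula is an assumption for demonstration, not Cycles' actual heuristic):

```python
def compute_volume_step_size(bounds_min, bounds_max, steps=128):
    # Hypothetical: step proportional to the largest bounds dimension.
    extent = max(hi - lo for lo, hi in zip(bounds_min, bounds_max))
    return extent / steps

# With final bounds the step is meaningful:
print(compute_volume_step_size((0.0, 0.0, 0.0), (2.0, 1.0, 1.0)))  # 0.015625

# With uninitialized (infinite) bounds the result would be garbage,
# which is why the new code waits for `bounds_valid`:
print(compute_volume_step_size((float("-inf"),) * 3, (float("inf"),) * 3))  # inf
```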

View File

@ -124,15 +124,74 @@ def repo_active_or_none():
return active_repo
+ def repo_stats_calc_outdated_for_repo_directory(repo_directory):
+     pkg_manifest_local = repo_cache_store.refresh_local_from_directory(
+         directory=repo_directory,
+         error_fn=print,
+     )
+     if pkg_manifest_local is None:
+         return 0
+     if False:
+         # TODO: support this, currently creating this data involves a conversion which isn't free.
+         # This can probably be done once and cached, but for now use another function that provides this.
+         pkg_manifest_remote = repo_cache_store.refresh_remote_from_directory(
+             directory=repo_directory,
+             error_fn=print,
+         )
+     else:
+         pkg_manifest_remote = None
+         for pkg_manifest_remote_test in repo_cache_store.pkg_manifest_from_remote_ensure(
+                 error_fn=print,
+                 ignore_missing=True,
+                 directory_subset=[repo_directory],
+         ):
+             pkg_manifest_remote = pkg_manifest_remote_test
+             break
+     if pkg_manifest_remote is None:
+         return 0
+     package_count = 0
+     for pkg_id, item_local in pkg_manifest_local.items():
+         item_remote = pkg_manifest_remote.get(pkg_id)
+         # Local-only (unlikely but not impossible).
+         if item_remote is None:
+             continue
+         if item_remote["version"] != item_local["version"]:
+             package_count += 1
+     return package_count

+ def repo_stats_calc():
+     # NOTE: if repositories get very large, this could be optimized to only check repositories that have changed.
+     # Although this isn't called all that often - it's unlikely to be a bottleneck.
+     if bpy.app.background:
+         return
+     package_count = 0
+     for repo_item in bpy.context.preferences.extensions.repos:
+         if not repo_item.enabled:
+             continue
+         if not repo_item.use_remote_url:
+             continue
+         if not repo_item.remote_url:
+             continue
+         package_count += repo_stats_calc_outdated_for_repo_directory(repo_item.directory)
+     bpy.context.window_manager.extensions_updates = package_count

+ def print_debug(*args, **kw):
+     if not bpy.app.debug:
+         return
+     print(*args, **kw)
- use_repos_to_notify = False
def repos_to_notify():
import os
from .bl_extension_utils import (
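A toy run of the counting rule in `repo_stats_calc_outdated_for_repo_directory()` above (any local/remote version mismatch counts once; local-only packages are skipped):

```python
pkg_manifest_local = {"a": {"version": "1.0"}, "b": {"version": "2.0"}, "c": {"version": "1.0"}}
pkg_manifest_remote = {"a": {"version": "1.1"}, "b": {"version": "2.0"}}

package_count = 0
for pkg_id, item_local in pkg_manifest_local.items():
    item_remote = pkg_manifest_remote.get(pkg_id)
    if item_remote is None:  # Local-only, skip.
        continue
    if item_remote["version"] != item_local["version"]:
        package_count += 1

print(package_count)  # 1 -- only "a" is outdated; "c" is local-only.
```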
@ -144,9 +203,6 @@ def repos_to_notify():
repos_notify = []
do_online_sync = False
-     if bpy.app.background:
-         return repos_notify, do_online_sync
# To use notifications on startup requires:
# - The splash displayed.
# - The status bar displayed.
@ -156,11 +212,11 @@ def repos_to_notify():
prefs = bpy.context.preferences
extension_repos = prefs.extensions.repos
+     repos_remote = []
for repo_item in extension_repos:
if not repo_item.enabled:
continue
-         if not repo_item.use_sync_on_startup:
-             continue
if not repo_item.use_remote_url:
continue
remote_url = repo_item.remote_url
@ -191,8 +247,20 @@ def repos_to_notify():
if repo_is_empty:
continue
# NOTE: offline checks are handled by the notification (not here).
-         repos_notify.append(
+         repos_remote.append(repo_item)
+     # Update all repos together or none, to avoid bothering users
+     # multiple times in a day.
+     do_online_sync = False
+     for repo_item in repos_remote:
+         if not repo_item.use_sync_on_startup:
+             continue
+         if repo_index_outdated(repo_item.directory):
+             do_online_sync = True
+             break
+     for repo_item in repos_remote:
+         repos_notify.append((
bl_extension_ops.RepoItem(
name=repo_item.name,
directory=repo_directory,
@ -201,14 +269,10 @@ def repos_to_notify():
use_cache=repo_item.use_cache,
access_token=repo_item.access_token if repo_item.use_access_token else "",
),
-         )
+             repo_item.use_sync_on_startup and do_online_sync,
+         ))
-         # Update all repos together or none, to avoid bothering users
-         # multiple times in a day.
-         if repo_index_outdated(repo_item.directory):
-             do_online_sync = True
-     return repos_notify, do_online_sync
+     return repos_notify
# -----------------------------------------------------------------------------
@ -336,6 +400,9 @@ def monkeypatch_extenions_repos_update_post_impl():
_monkeypatch_extenions_repos_update_dirs.clear()
+     # Based on changes, the statistics may need to be re-calculated.
+     repo_stats_calc()
@bpy.app.handlers.persistent
def monkeypatch_extensions_repos_update_pre(*_):
@ -460,6 +527,8 @@ cli_commands = []
def register():
+     prefs = bpy.context.preferences
# pylint: disable-next=global-statement
global repo_cache_store
@ -528,17 +597,13 @@ def register():
cli_commands.append(bpy.utils.register_cli_command("extension", cli_extension))
-     global use_repos_to_notify
-     repos_notify, do_online_sync = repos_to_notify()
-     if repos_notify:
-         use_repos_to_notify = True
-         from . import bl_extension_notify
-         bl_extension_notify.register()
-         bl_extension_notify.update_non_blocking(repos=repos_notify, do_online_sync=do_online_sync)
-     del repos_notify
monkeypatch_install()
+     if not bpy.app.background:
+         if prefs.view.show_extensions_updates:
+             from . import bl_extension_notify
+             bl_extension_notify.update_non_blocking(repos_fn=repos_to_notify)
def unregister():
# pylint: disable-next=global-statement
@ -587,10 +652,4 @@ def unregister():
bpy.utils.unregister_cli_command(cmd)
cli_commands.clear()
-     global use_repos_to_notify
-     if use_repos_to_notify:
-         use_repos_to_notify = False
-         from . import bl_extension_notify
-         bl_extension_notify.unregister()
monkeypatch_uninstall()

View File

@ -10,9 +10,6 @@ __all__ = (
"update_non_blocking",
"update_in_progress",
"update_ui_text",
"register",
"unregister",
)
@ -31,45 +28,22 @@ USE_GRACEFUL_EXIT = False
# Special value to signal no packages can be updated because all repositories are blocked by being offline.
STATE_DATA_ALL_OFFLINE = object()
+ # `wmWindowManager.extensions_updates` from C++
+ WM_EXTENSIONS_UPDATE_UNSET = -2
+ WM_EXTENSIONS_UPDATE_CHECKING = -1
# -----------------------------------------------------------------------------
# Internal Utilities
def sync_status_count_outdated_extensions(repos_notify):
-     from . import repo_cache_store
-     repos_notify_directories = [repo_item.directory for repo_item in repos_notify]
+     from . import repo_stats_calc_outdated_for_repo_directory
package_count = 0
-     for (
-             pkg_manifest_remote,
-             pkg_manifest_local,
-     ) in zip(
-         repo_cache_store.pkg_manifest_from_remote_ensure(
-             error_fn=print,
-             directory_subset=repos_notify_directories,
-         ),
-         repo_cache_store.pkg_manifest_from_local_ensure(
-             error_fn=print,
-             directory_subset=repos_notify_directories,
-             # Needed as these have been updated.
-             check_files=True,
-         ),
-     ):
-         if pkg_manifest_remote is None:
-             continue
-         if pkg_manifest_local is None:
-             continue
+     for repo_item in repos_notify:
+         package_count += repo_stats_calc_outdated_for_repo_directory(repo_item.directory)
-         for pkg_id, item_remote in pkg_manifest_remote.items():
-             item_local = pkg_manifest_local.get(pkg_id)
-             if item_local is None:
-                 # Not installed.
-                 continue
-             if item_remote["version"] != item_local["version"]:
-                 package_count += 1
return package_count
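The `WM_EXTENSIONS_UPDATE_*` sentinels introduced above turn `wm.extensions_updates` into a small state machine; the transition summary below is inferred from the `_ui_refresh_timer()` changes later in this file:

```python
WM_EXTENSIONS_UPDATE_UNSET = -2     # No badge shown in the status bar.
WM_EXTENSIONS_UPDATE_CHECKING = -1  # An update check is in flight.

# Transitions (as wired up by _ui_refresh_timer() further down):
#   UNSET    -> CHECKING     once the sync generator reports activity
#   CHECKING -> UNSET        when the queue drains without results
#   anything -> count >= 0   once outdated packages have been counted
def describe(value):
    if value == WM_EXTENSIONS_UPDATE_UNSET:
        return "idle"
    if value == WM_EXTENSIONS_UPDATE_CHECKING:
        return "checking"
    return "{:d} update(s) available".format(value)

print(describe(-2), describe(-1), describe(3))  # idle checking 3 update(s) available
```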
@ -170,7 +144,7 @@ def sync_apply_locked(repos_notify, repos_notify_files, unique_ext):
return any_lock_errors, any_stale_errors
- def sync_status_generator(repos_notify, do_online_sync):
+ def sync_status_generator(repos_fn):
import atexit
# Generator results...
@ -181,18 +155,22 @@ def sync_status_generator(repos_notify, do_online_sync):
# Setup The Update #
# ################ #
-     repos_notify_orig = repos_notify
-     if not bpy.app.online_access:
-         repos_notify = [repo for repo in repos_notify if repo.remote_url.startswith("file://")]
-     if not repos_notify:
-         # Special case, early exit.
-         yield (STATE_DATA_ALL_OFFLINE, 0, ())
-         return
+     yield None
-     any_offline = len(repos_notify) != len(repos_notify_orig)
-     del repos_notify_orig
+     # Calculate the repositories.
+     # This may be an expensive so yield once before running.
+     repos_and_do_online = list(repos_fn())
+     assert isinstance(repos_and_do_online, list)
+     if not bpy.app.online_access:
+         # Allow file-system only sync.
+         repos_and_do_online = [
+             (repo, do_online_sync) for repo, do_online_sync in repos_and_do_online
+             if repo.remote_url.startswith("file://")
+         ]
+     if not repos_and_do_online:
+         return
# An extension unique to this session.
unique_ext = "@{:x}".format(os.getpid())
@ -200,7 +178,7 @@ def sync_status_generator(repos_notify, do_online_sync):
from functools import partial
cmd_batch_partial = []
-     for repo_item in repos_notify:
+     for repo_item, do_online_sync in repos_and_do_online:
# Local only repositories should still refresh, but not run the sync.
assert repo_item.remote_url
cmd_batch_partial.append(partial(
@ -259,7 +237,7 @@ def sync_status_generator(repos_notify, do_online_sync):
update_total = -1
any_lock_errors = False
-     repos_notify_files = [[] for _ in repos_notify]
+     repos_notify_files = [[] for _ in repos_and_do_online]
is_debug = bpy.app.debug
while True:
@ -302,15 +280,13 @@ def sync_status_generator(repos_notify, do_online_sync):
# ################### #
# Finalize The Update #
# ################### #
+         repos_notify = [repo for repo, _do_online_sync in repos_and_do_online]
any_lock_errors, any_stale_errors = sync_apply_locked(repos_notify, repos_notify_files, unique_ext)
update_total = sync_status_count_outdated_extensions(repos_notify)
if any_lock_errors:
extra_warnings.append(" Failed to acquire lock!")
if any_stale_errors:
extra_warnings.append(" Unexpected change in repository!")
-         if any_offline:
-             extra_warnings.append(" Skipping online repositories!")
yield (cmd_batch.calc_status_data(), update_total, extra_warnings)
else:
yield None
@ -337,25 +313,23 @@ class NotifyHandle:
"splash_region",
"sync_info",
"do_online_sync",
"_repos",
"_repos_fn",
"is_complete",
"_sync_generator",
)
-     def __init__(self, repos_notify, do_online_sync):
+     def __init__(self, repos_fn):
self.splash_region = None
-         self._repos = repos_notify
+         self._repos_fn = repos_fn
self._sync_generator = None
self.is_complete = False
# status_data, update_count, extra_warnings.
self.sync_info = None
-         self.do_online_sync = do_online_sync
def run(self):
assert self._sync_generator is None
-         self._sync_generator = iter(sync_status_generator(self._repos, self.do_online_sync))
+         self._sync_generator = iter(sync_status_generator(self._repos_fn))
def run_ensure(self):
if self.is_running():
@ -374,13 +348,18 @@ class NotifyHandle:
def is_running(self):
return self._sync_generator is not None
+     def updates_count(self):
+         if self.sync_info is None:
+             return WM_EXTENSIONS_UPDATE_CHECKING
+         _status_data, update_count, _extra_warnings = self.sync_info
+         return update_count
def ui_text(self):
if self.sync_info is None:
return "Checking for Extension Updates", 'NONE', 0
return "Checking for Extension Updates", 'NONE', WM_EXTENSIONS_UPDATE_CHECKING
status_data, update_count, extra_warnings = self.sync_info
-         do_online_sync = self.do_online_sync
text, icon = bl_extension_utils.CommandBatch.calc_status_text_icon_from_data(
-             status_data, update_count, do_online_sync,
+             status_data, update_count,
)
# Not more than 1-2 of these (failed to lock, some repositories offline .. etc).
for warning in extra_warnings:
@ -429,8 +408,11 @@ def _ui_refresh_apply(*, notify):
def _ui_refresh_timer():
+     wm = bpy.context.window_manager
      if not _notify_queue:
+         if wm.extensions_updates == WM_EXTENSIONS_UPDATE_CHECKING:
+             wm.extensions_updates = WM_EXTENSIONS_UPDATE_UNSET
          return None
-     wm = bpy.context.window_manager
notify = _notify_queue[0]
notify.run_ensure()
@ -444,11 +426,19 @@ def _ui_refresh_timer():
# Nothing changed, no action is needed (waiting for a response).
return default_wait
+     # Some content was found, set checking.
+     # Avoid doing this early because the icon flickers in cases when
+     # it's not needed and it gets turned off quickly.
+     if wm.extensions_updates == WM_EXTENSIONS_UPDATE_UNSET:
+         wm.extensions_updates = WM_EXTENSIONS_UPDATE_CHECKING
# If the generator exited, either step to the next action or early exit here.
if sync_info is ...:
_ui_refresh_apply(notify=notify)
if len(_notify_queue) <= 1:
-             # Keep the item because the text should remain displayed for the splash.
+             # Keep `_notify_queue[0]` because the text should remain displayed for the splash.
+             if wm.extensions_updates == WM_EXTENSIONS_UPDATE_CHECKING:
+                 wm.extensions_updates = WM_EXTENSIONS_UPDATE_UNSET
return None
# Move onto the next item.
del _notify_queue[0]
@ -457,48 +447,27 @@ def _ui_refresh_timer():
# TODO: redraw the status bar.
_ui_refresh_apply(notify=notify)
+     update_count = notify.updates_count()
+     if update_count != wm.extensions_updates:
+         wm.extensions_updates = update_count
return default_wait
def splash_draw_status_fn(self, context):
assert bool(_notify_queue), "Never empty"
notify = _notify_queue[0]
if notify.splash_region is None:
notify.splash_region = context.region_popup
if not bpy.app.online_access:
if bpy.app.online_access_override:
# Since there is nothing to do in this case, we show no operator.
+             # The splash screen shows text here.
pass
else:
text, icon, update_count = notify.ui_text()
row = self.layout.row(align=True)
if update_count > 0:
row.operator("extensions.userpref_show_for_update", text=text, icon=icon)
else:
row.label(text=text, icon=icon)
-     self.layout.separator()
+     self.layout.separator()
# -----------------------------------------------------------------------------
# Public API
- def update_non_blocking(*, repos, do_online_sync):
-     """
-     Perform a non-blocking update on ``repos``.
-     Updates are queued in case some are already running.
-     """
-     # TODO: it's possible this preferences requests updates just after check-for-updates on startup.
-     # The checks should be de-duplicated. For now just ensure the checks don't interfere with each other.
-     assert bool(repos), "Unexpected empty repository list passed in"
-     _notify_queue.append(NotifyHandle(repos, do_online_sync))
+ def update_non_blocking(*, repos_fn):
+     # Perform a non-blocking update on ``repos``.
+     # Updates are queued in case some are already running.
+     # `repos_fn` A generator or function that returns a list of ``(RepoItem, do_online_sync)`` pairs.
+     # Some repositories don't check for update on startup for e.g.
+     _notify_queue.append(NotifyHandle(repos_fn))
if not bpy.app.timers.is_registered(_ui_refresh_timer):
bpy.app.timers.register(_ui_refresh_timer, first_interval=TIME_WAIT_INIT, persistent=True)
return True
def update_in_progress():
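A hedged usage sketch of the new callback-based entry point (`my_enabled_remote_repos()` is a placeholder, not a real helper; the point is that the repository scan now runs lazily inside the timer rather than at registration time):

```python
def my_repos_fn():
    # Hypothetical caller: build (RepoItem, do_online_sync) pairs on demand.
    return [
        (repo, repo.use_sync_on_startup)
        for repo in my_enabled_remote_repos()  # Placeholder iterable.
    ]

bl_extension_notify.update_non_blocking(repos_fn=my_repos_fn)
```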
@ -515,11 +484,3 @@ def update_ui_text():
text = ""
icon = 'NONE'
return text, icon
- def register():
-     bpy.types.WM_MT_splash.append(splash_draw_status_fn)
- def unregister():
-     bpy.types.WM_MT_splash.remove(splash_draw_status_fn)

View File

@ -41,8 +41,9 @@ from . import (
) # noqa: E402
from . import (
-     repo_status_text,
      cookie_from_session,
+     repo_status_text,
+     repo_stats_calc,
)
from .bl_extension_utils import (
@ -1058,6 +1059,8 @@ class EXTENSIONS_OT_repo_sync(Operator, _ExtCmdMixIn):
lock_result_any_failed_with_report(self, self.repo_lock.release(), report_type='WARNING')
del self.repo_lock
+         repo_stats_calc()
_preferences_ui_redraw()
@ -1153,6 +1156,8 @@ class EXTENSIONS_OT_repo_sync_all(Operator, _ExtCmdMixIn):
lock_result_any_failed_with_report(self, self.repo_lock.release(), report_type='WARNING')
del self.repo_lock
+         repo_stats_calc()
_preferences_ui_redraw()
@ -1303,6 +1308,8 @@ class EXTENSIONS_OT_package_upgrade_all(Operator, _ExtCmdMixIn):
error_fn=self.error_fn_from_exception,
)
+             repo_stats_calc()
# TODO: it would be nice to include this message in the banner.
def handle_error(ex):
self.report({'ERROR'}, str(ex))
@ -1403,6 +1410,7 @@ class EXTENSIONS_OT_package_install_marked(Operator, _ExtCmdMixIn):
)
_extensions_repo_sync_wheels(repo_cache_store)
+             repo_stats_calc()
# TODO: it would be nice to include this message in the banner.
def handle_error(ex):
@ -1518,6 +1526,7 @@ class EXTENSIONS_OT_package_uninstall_marked(Operator, _ExtCmdMixIn):
)
_extensions_repo_sync_wheels(repo_cache_store)
+             repo_stats_calc()
_preferences_theme_state_restore(self._theme_restore)
@ -1535,8 +1544,9 @@ class EXTENSIONS_OT_package_install_files(Operator, _ExtCmdMixIn):
"pkg_id_sequence"
)
_drop_variables = None
+     _legacy_drop = None
-     filter_glob: StringProperty(default="*.zip", options={'HIDDEN'})
+     filter_glob: StringProperty(default="*.zip;*.py", options={'HIDDEN'})
directory: StringProperty(
name="Directory",
@ -1561,12 +1571,26 @@ class EXTENSIONS_OT_package_install_files(Operator, _ExtCmdMixIn):
enable_on_install: rna_prop_enable_on_install
+     # Properties matching the legacy operator, not used by extension packages.
+     target: EnumProperty(
+         name="Legacy Target Path",
+         items=bpy.types.PREFERENCES_OT_addon_install._target_path_items,
+         description="Path to install legacy add-on packages to",
+     )
+     overwrite: BoolProperty(
+         name="Legacy Overwrite",
+         description="Remove existing add-ons with the same ID",
+         default=True,
+     )
# Only used for code-path for dropping an extension.
url: rna_prop_url
def exec_command_iter(self, is_modal):
from .bl_extension_utils import (
pkg_manifest_dict_from_file_or_error,
pkg_is_legacy_addon,