diff --git a/.gitignore b/.gitignore
index d1f9c1af..a8eecaa3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -67,4 +67,4 @@ ENV/
 # Docs
 node_modules/
-docs/.vitepress/cache
+docs/.vitepress/cache
\ No newline at end of file
diff --git a/scripts-blender/addons/asset_pipeline/.gitignore b/scripts-blender/addons/asset_pipeline/.gitignore
deleted file mode 100644
index 245d94fc..00000000
--- a/scripts-blender/addons/asset_pipeline/.gitignore
+++ /dev/null
@@ -1,115 +0,0 @@
-# Byte-compiled / optimized / DLL files
-__pycache__/
-*.py[cod]
-*$py.class
-
-# C extensions
-*.so
-
-# Distribution / packaging
-.Python
-env/
-build/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
-wheels/
-*.egg-info/
-.installed.cfg
-*.egg
-
-# PyInstaller
-# Usually these files are written by a python script from a template
-# before PyInstaller builds the exe, so as to inject date/other infos into it.
-*.manifest
-*.spec
-
-# Installer logs
-pip-log.txt
-pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-htmlcov/
-.tox/
-.coverage
-.coverage.*
-.cache
-nosetests.xml
-coverage.xml
-*.cover
-.hypothesis/
-.pytest_cache/
-
-# Translations
-*.mo
-*.pot
-
-# Django stuff:
-*.log
-local_settings.py
-
-# Flask stuff:
-instance/
-.webassets-cache
-
-# Scrapy stuff:
-.scrapy
-
-# Sphinx documentation
-docs/_build/
-
-# PyBuilder
-target/
-
-# Jupyter Notebook
-.ipynb_checkpoints
-
-# pyenv
-.python-version
-
-# celery beat schedule file
-celerybeat-schedule
-
-# SageMath parsed files
-*.sage.py
-
-# dotenv
-.env
-
-# virtualenv
-.venv
-.venv*
-venv/
-ENV/
-
-# Spyder project settings
-.spyderproject
-.spyproject
-
-# Rope project settings
-.ropeproject
-
-# mkdocs documentation
-/site
-
-# mypy
-.mypy_cache/
-
-# IDE settings
-.vscode/
-
-# utility bat files:
-*jump_in_venv.bat
-
-#local tests
-tests/local*
-
-# Production Config Dir.
-production_config/*
diff --git a/scripts-blender/addons/asset_pipeline/CHANGELOG.md b/scripts-blender/addons/asset_pipeline/CHANGELOG.md
deleted file mode 100644
index 9ca5001b..00000000
--- a/scripts-blender/addons/asset_pipeline/CHANGELOG.md
+++ /dev/null
@@ -1,13 +0,0 @@
-## 0.1.2 - 2023-08-02
-
-### FIXED
-- Fix Changelog Rendering (#125)
-- Fix line ends from DOS to UNIX (#68)
-
-## 0.1.1 - 2023-06-02
-
-### FIXED
-- Fix Addon Install Instructions
-- Fix Addons Spelling and Links (#54)
-
-
diff --git a/scripts-blender/addons/asset_pipeline/README.md b/scripts-blender/addons/asset_pipeline/README.md
index eaf7e368..73ab0a97 100644
--- a/scripts-blender/addons/asset_pipeline/README.md
+++ b/scripts-blender/addons/asset_pipeline/README.md
@@ -1,555 +1,172 @@
 # Asset Pipeline
-asset-pipeline is a Blender Add-on that manages the Asset Pipeline of the Blender Studio. It includes an Asset Builder and an Asset Updater.
-[Asset Pipeline Presentation](https://youtu.be/IBTEBhAouKc?t=527)
+## Introduction
+This add-on was designed to allow multiple artists to collaborate on a common asset, working on it simultaneously. It does so by tracking which data each artist contributes to the asset and merging those contributions into a final "Published" asset. The published asset is marked so that it can be discovered by Blender's Asset Browser.
-## Table of Contents
-- [Installation](#installation)
-- [How to get started](#how-to-get-started)
-- [Configuration](#configuration)
-  - [Task Layers](#task_layers.py)
-  - [Hooks](#hooks.py)
-- [Getting Started as a Developer](#getting-started-as-a-developer)
-  - [Context](#context)
-  - [UI](#ui)
-  - [Asset Collection](#asset-collection)
-  - [Asset Files](#asset-files)
-  - [Metadata](#metadata)
-  - [Asset Importer](#asset-importer)
-  - [Asset Mapping](#asset-mapping)
-  - [Asset Builder](#asset-builder)
-  - [Asset Updater](#asset-updater)
+
+- [Asset Pipeline](#asset-pipeline)
+  - [Introduction](#introduction)
+  - [Table of Contents](#table-of-contents)
+  - [Installation](#installation)
+  - [Key Concepts](#key-concepts)
+  - [Creating New Assets](#creating-new-assets)
+    - [Current File Mode](#current-file-mode)
+    - [Blank File Mode](#blank-file-mode)
+  - [Push/Pull](#pushpull)
+    - [Updating Ownership](#updating-ownership)
+    - [Save File](#save-file)
+    - [Merge with Published File](#merge-with-published-file)
+  - [Surrendering Ownership](#surrendering-ownership)
+  - [Publish New Version](#publish-new-version)
+    - [Active](#active)
+    - [Staged](#staged)
+    - [Review](#review)
+  - [Creating Custom Task Layers](#creating-custom-task-layers)
+
+
 ## Installation
 1. Download [latest release](../addons/overview)
-2. Launch Blender, navigate to `Edit > Preferences` select `Addons` and then `Install`,
+2. Launch Blender, navigate to `Edit > Preferences`, select `Addons` and then `Install`
 3. Navigate to the downloaded add-on and select `Install Add-on`
-> **_NOTE:_** This add-on depends on other add-ons that are in the [Blender Studio Tools](https://projects.blender.org/studio/blender-studio-pipeline).
-Make sure to also install:
-- [**blender-kitsu**](/addons/blender_kitsu)
+## Key Concepts
+**Task Layers** Task Layers are defined in a JSON file that describes the layers used to manage the asset. Typically each task layer is given its own file, although artists can house multiple task layers inside the same file if required. Each task layer describes a step in the asset-making process (e.g. "Modeling", "Rigging", "Shading"). The number and content of the task layers are fully customizable by the artist.
+**Ownership** Each piece of data in the Asset Pipeline is owned by a task layer; this includes Objects, Task Layer Collections and Transferable Data. The owner of a piece of data is the only one who can contribute to it, including modifying or removing it. Objects implicitly own their meshes and other object data; multiple objects referencing the same mesh are not supported.
-## How to get started
-
-After installing you need to setup the addon preferences to fit your environment.
-
-The asset-pipeline add-on can be configured with some config files. The idea is that for each project you can have a custom configuration.
-
-In the add-on preferences you need to setup the `Production Config Directory`. In this folder the add-on expects to find a file called `task_layers.py`. What exactly you need to define in this file is something you will learn in the [Configuration](#configuration) section.
-
-To understand the underlying concepts of the Asset Pipeline it is recommended to read [this](https://studio.blender.org/blog/asset-pipeline-update-2022/) article.
-
-## Configuration
-The add-on can be configured on a per project basis, by pointing the the `Production Config Directory` property in the add-on preferences to a folder that contains the config files.
- -The config files need to be named a certain way and contain certain content. - - - -### task_layers.py -In this file you can define the Task Layers and TransferSettings for this project. -For an example config check out: `docs/production_config_example/task_layers.py` - - ---- -**Defining Task Layers** - -To define a Task Layer import: - -``` -import bpy - -from asset_pipeline.api import ( - AssetTransferMapping, - TaskLayer, -) -``` - -And declare a TaskLayer class that Inherits from TaskLayer: - -``` -class RiggingTaskLayer(TaskLayer): - name = "Rigging" - order = 0 - - @classmethod - def transfer_data( - cls, - context: bpy.types.Context, - build_context: BuildContext, - transfer_mapping: AssetTransferMapping, - transfer_settings: bpy.types.PropertyGroup, - ) -> None: - pass - -``` - -The `class name` ("RiggingTaskLayer") will be the Identifier for that TaskLayer in the code. The `name` attribute will be used for display purposes in the UI. -There can be no TaskLayers with the same class name. - -The `order` attribute will be used to determine in which order the TaskLayers are processed. Processing a TaskLayer means calling the `transfer_data()` class method. - -> **_NOTE:_** The TaskLayer with the lowest order is a special TaskLayer. In the code it will be considered as the **base** TaskLayer. - -The `transfer_data()` function of the base TaskLayer should be empty as it provides the base for other task layers to transfer their data to. But it will still be called as there are cases where Users might need that functionality. - -When Users push one or multiple TaskLayers from an Asset Task to an Asset Publish or pull vice versa, we need a base on which we can transfer the data. - -During the transfer process there will be 3 Asset Collections: -- The Asset Collection of the Asset Task -- The Asset Collection of the Asset Publish -- The Target Asset Collection - -The Target Asset Collection is a duplicate of either the Task or Publish Asset Collection and is the base on which we transfer data to. The decision to duplicate the Publish or Task Collection depends on if the **base** Task Layer (Task Layer with lowers order) was enabled or not before the push or the pull. - -If we push from an Asset Task to an Asset Publish and the base TaskLayer is among the selection we take the Asset Collection from the Asset Task as a base. If it is not selected we take the Asset Collection od the Asset Publish as a base. - -If we pull from an Asset Publish to an Asset Task and the base TaskLayer is among the selection we take the Asset Collection from the Asset Publish as base. If it is not selected we take the Asset Collection of the Asset Task as a base. - -The `transfer_data()` function contains 4 parameters that are useful when writing the transfer instructions. - -``` - @classmethod - def transfer_data( - cls, - context: bpy.types.Context, - build_context: BuildContext, - transfer_mapping: AssetTransferMapping, - transfer_settings: bpy.types.PropertyGroup, - ) -> None: - pass -``` - -- **context**: Regular bpy.context - -- **build_context**: Is an instance of type `asset_pipeline.builder.context.BuildContext`. It contains all information that the Asset Builder needs to process the transfer. You can for example query the selected TaskLayers with `build_context.asset_context.task_layer_assembly.get_used_task_layers()`. Or find out if the current operation was a `push` or a `pull` with `build_context.is_push`. - -- **transfer_mapping**: Will be an instance of type `AssetTransferMapping`. 
This is a mapping between source and target for: **objects**, **materials** and **collections**. The maps are just dictionaries where the key is the source and the value the target. Both key and target are actual Blender ID Datablocks. - -``` -transfer_mapping.object_map: Dict[bpy.types.Object, bpy.types.Object] - -transfer_mapping.collection_map: Dict[bpy.types.Collection, bpy.types.Collection] - -transfer_mapping.material_map: Dict[bpy.types.Material, bpy.types.Material] - -``` -This enables you to do things like this: -``` -for obj_source, obj_target in transfer_mapping.object_map.items(): - pass - -for mat_source, mat_target in transfer_mapping.material_map.items(): - pass - -... -``` -You can also access the root Asset source and Target Collection: -``` -transfer_mapping.source_coll: bpy.types.Collection -transfer_mapping.target_coll: bpy.types.Collection -``` - -Further than that you can access to objects which had no match. -``` -transfer_mapping.no_match_target_objs: Set[bpy.types.Object] (all Objects that exist in target but not in source) -transfer_mapping.no_match_source_objs: Set[bpy.types.Object] (vice versa) -``` -- **transfer_settings**: Is the `TransferSettings` PropertyGroup that was defined in the task_layer.py module. More to that in the next section. If the PropertyGroup was defined you can just query its values as you would regularly do it inside of Blender: `transfer_settings.my_value` - ---- -**Defining Transfer Settings** - -Transfer Settings are settings that Users can adjust inside of the Blender UI which can be queried in the `tranfer_data()` function and control certain aspects of the transfer. - -To define Transfer Setting you just have to add a class called `TranferSettings` that inherits from `bpy.props.PropertyGroup` in the task_layer.py file. - -``` -class TransferSettings(bpy.types.PropertyGroup): - transfer_mat: bpy.props.BoolProperty(name="Materials", default=True) - transfer_uvs: bpy.props.BoolProperty(name="UVs", default=True) - transfer_type: bpy.props.EnumProperty( - name="Transfer Type", - items=[("VERTEX_ORDER", "Vertex Order", ""), ("PROXIMITY", "Proximity", "")], - ) -``` -You can use every native Blender Property type. These properties are automatically exposed in the `Transfer Settings` tab UI in the Asset Pipeline Panel. - - -### hooks.py -The Asset Pipeline supports post transfer hooks that can be defined in the `hooks.py` file. Post Transfer hooks are simple Python functions that get executed **after** the successful transfer of all TaskLayers. - -> **_NOTE:_** Post Transfer Hooks are only executed on a push from Asset Task to Asset Publish. **Not** on a pull. - -These hooks could do anything but the the intent of a post merge hook is to bring the asset in the correct publish state. These are usually repetitive steps that the task artist has to do to prepare data for publishing (and often revert it again for working). - -For an example config check out: `docs/production_config_example/hooks.py` - -Start by importing these classes. - -``` -import bpy - -from asset_pipeline.api import hook, Wildcard, DoNotMatch - -``` - -An example definition of a hook can look like this: - -``` -@hook(match_asset="Generic Sprite") -def my_hook(asset_collection: bpy.types.Collection, **kwargs) -> None: - pass - -``` - -You define a regular python function and decorate it with the **hook()** decorator. -Note: The decorator needs to be executed. - -The hook decorator as well as the function itself can specify arguments. 
- -Let's first have a look at the hook decorator. -The Idea is that you can use the hook decorator to - -first: Let the asset-pipeline know that this is an actual hook it should register - -second: to filter under which conditions the hook gets executed. - -For filtering you can use these key word arguments inside of the hook decorator braces: -- `match_asset_type` -- `match_asset match_asset` -- `match_task_layers` - -For each of these keys you can supply these values: -* `str`: would perform an exact string match. -* `Iterator[str]`: would perform an exact string match with any of the given strings. -* `Type[Wildcard]`: would match any type for this parameter. This would be used so a hook - is called for any value. -* `Type[DoNotMatch]`: would ignore this hook when matching the hook parameter. This is the default - value for the matching criteria and would normally not be set directly in a - production configuration. - -With that in mind let's look at some more example hooks: - -``` -@hook() -def test_hook_A(**kwargs) -> None: - pass -``` -This hook has no filtering parameters so it is considered to be a **global** hook that always gets executed. - -``` -@hook(match_asset_type="Character") -def test_hook_B(**kwargs) -> None: - pass -``` - -This hook will only be executed if current Asset is of type "Character". - - -``` -@hook(match_task_layers="ShadingTaskLayer") -def test_hook_C(**kwargs) -> None: - pass -``` - -This hook will only be executed if the Task Layer: "ShadingTaskLayer" was amongst the Task Layers that were selected for this transfer operation. - -``` -@hook(match_asset="Ellie") -def test_hook_D(**kwargs) -> None: - pass -``` -This hook will only be executed if the asset "Ellie" is processed. - -``` -@hook( - match_asset="Generic Sprite", - match_task_layers=["RiggingTaskLayer", "ShadingTaskLayer], -) -def test_hook_E(**kwargs) -> None: - pass -``` -This hook will only be executed if the asset "Generic Sprite" is processed and the "RiggingTaskLayer" or -"ShadingTaskLayer" was amongst the Task Layers that were selected for this transfer operation. - - - -It is important to note that the asset-pipeline follows a certain order to execute the hooks. And that is exactly the one of the examples hook described above: - -1. Global hooks -2. Asset Type Hooks -3. Task Layer Hooks -4. Asset Hooks -5. Asset + TaskLayer specific Hooks - -The function itself should always have **\*\*kwargs** as a parameter. The asset-pipeline automatically passes a couple of useful keyword arguments to the function: -- `asset_collection`: bpy.types.Collection -- `context`: bpy.types.Context -- `asset_task`: asset_pipeline.asset_files.AssetTask -- `asset_dir`: asset_pipeline.asset_files.AssetDir - -By exposing these parameters in the hook function you can use them in your code: -``` -@hook() -def test_hook_F(context: bpy.types.Context, asset_collection: bpy.types.Collection, **kwargs) -> None: - print(asset_collection.name) -``` - - - -## Getting Started as a Developer - -The asset-pipeline contains two main packages. - -1. **builder**: The Asset Builder which contains most of the core definitions and logic of Task Layers, Asset publishing, pulling, the import process for that and metadata handling. - -2. **updater**: The Asset Updater is quite light weight. It handles detecting imported asset collections and fetching available asset publishes. It also handles the logic of the actual updating. - -Both packages share a couple of modules that you can find on the top level. 
- -Let's have a closer look at the **builder** package. - -The Pipeline of **publishing** an Asset looks roughly like the following: - -- Loading a .blend file -- Creating a Production Context -- Creating an Asset Context -- Selecting TaskLayers to publish -- Start publish: Create Build Context -- Fetch all asset publishes and their metadata -- Apply changes: Pushes the selected TaskLayer to the affected asset publishes, updates metadata (In separate Blender instances) -- Publish: Finalizes the publish process, commits changes to svn. - -The Pipeline of **pulling** TaskLayers from the latest asset publish goes down roughly like this: -- Loading a .blend file -- Creating a Production Context -- Creating an Asset Context -- Creating Build Context -- Selecting TaskLayers to pull -- Pull: Pulls the selected TaskLayers from the latest Asset Publish in to the current Asset Task and updates metadata. - ---- - -### Context - -The asset-pipeline strongly works with Context objects, that get populated with -information and are used by the AssetBuilder to perform the actual logic of -building an Asset. - -There are 3 types of contexts: - -- **ProductionContext**: Global production level context, gets loaded on startup, -processes all the config files. This context collects all the TaskLayers and -registers TransferSettings that are defined in the `task_layers.py` config file. -It also searches for the `hooks.py` file and collects all valid hooks. - -- **AssetContext**: Local Asset Context, gets loaded on each scene load. Stores -settings and information for active Asset. It holds all information that are -related to the current Asset. This includes the current Asset Collection, Asset -Task, available Asset Publishes, the Asset Directory, the configuration of Task -Layers (which ones are enabled and disabled) and the Transfer Settings values. - -- **BuildContext**: Gets loaded when starting a publish or a pull. Contains both the -ProductionContext and AssetContext as well as some other data. Is the actual -context that gets processed by the AssetBuilder. - -A key feature is that we need to be able to 'exchange' this information with -another blend file. As the 'push' or publish process requires to: - -Open another blend file -> load the build context there -> process it -> close it again. - -This can be achieved by using the -[pickle](https://docs.python.org/3/library/pickle.html) library and pickle the -Contexts. All the contexts are pickle-able. The **\_\_setstate\_\_**, -**\_\_getstate\_\_** functions ensure that. - - -### UI - -All of this information that hides in these Context Objects needs to be partially visible for -Users in the UI. In the `props.py` module there are a whole lot of PropertyGroups that can store this -information with native Blender Properties to display it in the UI. - -This requires some sort of sync process between the Context and the PropertyGroups. -This sync process happens in a couple of places: - -- On startup -- On scene load -- On start publish -- After push task layers -- After abort publish -- After pull task layers -- After publish -- After updating statuses (metadata) - -Which PropertyGroups get updated depends a little bit on the operations. In general the asset-pipeline -only tries to update the parts that were altered and are therefore outdated. - -Not only are PropertyGroups updated by the Context objects, sometimes it also goes the other way. -For example: The last selected TaskLayers are saved on Scene level. 
On load this selection is restored, -which also updates the AssetContext. - -### Asset Collection - -Per task file there is only **one** Asset Collection. The Asset Collection and all its children and -dependencies is the final data that is being worked with in the Asset Builder. - -An Asset Collection needs to be initialized which fills out a whole lot of properties that get fetched from Kitsu. - -The properties are saved on the Collection at: - -`collection.bsp_asset` - -as a PropertyGroup. Some properties you can access via Python Scripts are: - -``` -entity_parent_id: bpy.props.StringProperty(name="Asset Type ID") -entity_parent_name: bpy.props.StringProperty(name="Asset Type") -entity_name: bpy.props.StringProperty(name="Asset Name") -entity_id: bpy.props.StringProperty(name="Asset ID") -project_id: bpy.props.StringProperty(name="Project ID") -is_publish: bpy.props.BoolProperty( - name="Is Publish", - description="Controls if this Collection is an Asset Publish to distinguish it from a 'working' Collection", -) -version: bpy.props.StringProperty(name="Asset Version") -publish_path: bpy.props.StringProperty(name="Asset Publish") -rig: bpy.props.PointerProperty(type=bpy.types.Armature, name="Rig") -``` - -### Asset Files - -Often we have to interact with files on disk and do the same operations over and -over again. For this consider using the: **asset_file.py** module. It contains -the **AssetTask**, **AssetPublish** and -**AssetDir** classes that are very useful and an important part of the System. - -In fact every interaction with asset files happens via these classes as they automatically load -metadata, which is in integral part of the pipeline. - - -### Metadata - -An asset file is always paired with a metadata file. The metadata file contains various information -about that particular asset file. It saves all the TaskLayers that are contained in this file and where -they came from. It also holds all kinds of information that make the Asset clearly identifiable. - -The AssetFile Classes automatically load this metadata on creation. - -The file format of this metadata is `xmp`. For that the asset-pipeline uses the `xml.etree` library. -In the `metadata.py` file are Schemas that represent the different Metadata blocks. - -The idea here is to have Schemas in the form of Python `dataclasses` that can be converted to their equivalent as XML Element. That way we have a clear definition of what kind of field are expected and available. -Schemas can have nested Data Classes. The conversion from Data Class to XML Element happens in the `ElementMetadata` class and is automated. -Metadata Classes can also be generated from ElementClasses. This conversion is happening in the `from_element()` function. - -The code base should only work with Data Classes as they are much easier to handle. -That means it is forbidden to import `Element[]` classes from `metadata.py`. -The conversion from and to Data Classes is only handled in this module. - -That results in this logic: -A: Saving Metadata to file: - -> MetadataClass -> ElementClass -> XML File on Disk -B: Loading Metadata from file: - -> XML File on Disk -> ElementClass -> MetadataClass - -### Asset Importer - -The `AssetImporter` is responsible for importing the right collections from the right source file -so the data transfer can happen as expected. 
-The output is a `TransferCollectionTriplet` which holds a reference to the collection from the AssetTask, collection from the AssetPublish and the actual target Collection on which the data is transferred. - -The target Collection is either a duplicate of the the AssetTask Collection or the AssetPublish Collection. -Which it is depends on a number of factors. Is it pull or a push and which Task Layers are selected. The exact logic is described in the [configuration](#configuration) section. - -The important takeaway here is that during a transfer we always have these 3 Collections present and each TaskLayer is either transferred from the AssetTask or the AssetPublish Collection to the Target. - -The logic of figuring out what needs to be target is happening in the AssetImporter. To avoid naming collisions the AssetImporter uses a suffix system. Each of the collections, all their children (including materials and node trees) receive a suffix during import. - -### Asset Mapping - -To transfer data we need a source and a target. Users can describe what should happen during this transfer for each Task Layer in the: - -``` -@classmethod -def transfer_data( - cls, - context: bpy.types.Context, - build_context: BuildContext, - transfer_mapping: AssetTransferMapping, - transfer_settings: bpy.types.PropertyGroup, -) -> None: -``` - -method. Users can have access to this `source` and `target` via the `transfer_mapping`. The TransferMapping is a class that has a couple of properties, which hold dictionaries. - -In these dictionaries the key is the source and the value the target. -Both key and target are actual Blender ID Datablocks. -This makes it easy to write Merge Instructions. -With it you can do access things like: - -``` -transfer_mapping.object_map: Dict[bpy.types.Object, bpy.types.Object] -transfer_mapping.collection_map: Dict[bpy.types.Collection, bpy.types.Collection] -transfer_mapping.material_map: Dict[bpy.types.Material, bpy.types.Material] -``` - -This TransferMapping is created in the AssetBuilder in the `pull_from_task` and `pull_from_publish` functions. We always create **2** mappings: - -- asset_task -> target -- asset_publish -> target - -And when we finally loop through all the TaskLayers we decide for each TaskLayer which mapping to use (which will decide if we either transfer from the AssetTask Collection to the target Collection or AssetPublish Collection to target Collection). -And that is the mapping we pass to `TaskLayer.transfer_data()`. - -> **_NOTE:_** If Users are adjusting the Mapping in a `transfer_data()` function they have to be aware that they are working with **2** mappings. - -### Asset Builder - -The AssetBuilder class contains the actual logic that can process the BuildContext. - -That means that this ist the final place where we call the AssetImporter to import all the Collections and create the TransferCollectionTriplet. We also create the AssetTransferMappings here, we make sure that all objects are visible, we load the metadata, we loop through all the TaskLayers and call their `transfer_data()` functions and finally update the metadata. - - -The Asset Builder contains 3 functions: - -- `push` -- `pull_from_publish` -- `pull_from_task` - -You might wonder why we have one push function and two pulls? -This is because the push process requires us to start another Blender Instance that opens a .blend file. This Blender Instance then actually performs a pull, a `pull_from_task`. 
-
-The `push` function prepares everything so the `pull_from_task` can be called in the new Blender Instance.
-It does a couple of things:
-
-- pickles the `BuildContext` to disk
-- starts a new Blender Instance with a Python script as `-P` argument
-
-(It does this for all the affected publishes)
-
-This Python script is inside of the repository: `asset_pipeline/builder/scripts/push.py`
-
-The scripts basically just restores the BuildContext and calls the `pull_from_task` function.
-
-
-### Asset Updater
-
-The Asset Updater is very light weight compared to the Asset Builder.
-
-The main process how the Asset Updater collects its data goes like this:
-
-1. Scanning Scene for found Assets
-2. For each Asset check the Asset Publish Directory for all versions (Ignore Assets Publishes in Review State)
-
-The most trickiest part here is to save this information nicely in native Blender Properties. Checkout the `props.py` module if you want to have a look.
-
-The update process itself is very straightforward:
-Calling the `bpy.ops.wm.lib_relocate()` operator.
+**Asset Collection** The top-level collection for a given asset; all relevant objects and sub-collections for the asset are contained within this collection.
+**Task Layer Collection** Top-level child collections of the Asset Collection, each owned by a specific task layer. Children of a Task Layer collection are owned by that collection's owner.
+**Transferable Data** Data that is part of, or associated with, an object or mesh but can be explicitly owned and updated on its own. This is the key concept that allows multiple artists to contribute to an asset: during the Push/Pull process, Transferable Data is applied on top of each object, allowing, for example, artist A to own an object while artist B owns the vertex groups on that object.
+Transferable Data Types:
+ - Vertex Groups
+ - Modifiers
+ - Constraints
+ - Materials (including slots, indices and material IDs)
+ - ShapeKeys
+ - Attributes
+ - Parent Relationships
+
+**Shared IDs** Data-blocks that can be owned by many 'users'. This type is limited to Geometry Node Groups and Images. A Shared ID must be explicitly owned by one task layer; only that task layer may update the data-block, but other task layers may reference it.
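+
+To make the Transferable Data types above concrete, here is a minimal, illustrative `bpy` sketch. It is not part of the add-on's API; `summarize_transferable_data` is a hypothetical helper that simply lists the corresponding data on a single object:

```python
import bpy

def summarize_transferable_data(obj: bpy.types.Object) -> dict:
    """Illustrative only: gather the data on one object that corresponds
    to the add-on's Transferable Data types, using plain bpy properties."""
    # Empties have no object data, and only meshes carry shape keys.
    shape_keys = getattr(obj.data, "shape_keys", None)
    return {
        "GROUP_VERTEX": [vg.name for vg in obj.vertex_groups],
        "MODIFIER": [mod.name for mod in obj.modifiers],
        "CONSTRAINT": [con.name for con in obj.constraints],
        "MATERIAL": [slot.material.name for slot in obj.material_slots if slot.material],
        "SHAPE_KEY": [kb.name for kb in shape_keys.key_blocks] if shape_keys else [],
        "PARENT": obj.parent.name if obj.parent else None,
    }

# Example: run with an object active in the viewport.
print(summarize_transferable_data(bpy.context.active_object))
```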
+
+## Creating New Assets
+Once the add-on is installed, you will find a new sidebar tab in the 3D View titled 'Asset Pipeline'. Under the 'Asset Management' panel you will find a UI to set up a new Asset. The New Asset UI has two modes: "Current File" and "Blank File".
+
+### Current File Mode
+"Current File" mode retains your current .blend file's data and uses its current directory to set up a new Asset.
+To set up an asset using "Current File" mode, open the file you would like to turn into an asset, then select "Current File" mode in the Asset Pipeline side panel in the 3D View.
+
+ 1. Select the "Task Layer Preset" you would like to use.
+ 2. Select the collection to be the Asset Collection; this is the top-level collection for your asset.
+ 3. Select 'Create New Asset'.
+ 4. In the operator pop-up, select which task layers will be local to your file; typically artists select only one.
+ 5. Ensure 'Create Files for Unselected Task Layers' is enabled; otherwise the add-on will not automatically create files for the other task layers.
+ 6. Press OK to set up the current file/folder as an Asset. The add-on will automatically create a published file, which remains empty until you push to it.
+
+### Blank File Mode
+"Blank File" mode creates a new blank asset in a new directory named after your asset.
+
+ 1. Select the "Task Layer Preset" you would like to use.
+ 2. Enter the desired name and prefix for your asset.
+ 3. Select 'Create New Asset'.
+ 4. In the operator pop-up, select which task layers will be local to your file; typically artists select only one.
+ 5. Ensure 'Create Files for Unselected Task Layers' is enabled; otherwise the add-on will not automatically create files for the other task layers.
+ 6. Press OK to set up the new file/folder as an Asset. The add-on will automatically create a published file, which remains empty until you push to it.
+
+## Push/Pull
+The Push/Pull process happens in three steps.
+
+### Updating Ownership
+When you Push/Pull a file, you will be greeted by an operator dialogue listing any new data found in your file. Pressing OK assigns these new pieces of data to your local task layer; if you have multiple local task layers, you can select which one owns each piece of data. Once completed, this information is used to ensure your work is merged properly with the published file.
+
+### Save File
+The add-on will optionally save your current file, and any unsaved/unpacked images will be saved to a directory relative to your asset (configurable in the add-on preferences). It always creates a back-up of your current file; if the merge process fails, you will be prompted to revert your file to its pre-merge state.
+
+### Merge with Published File
+Push and Pull are merge operations done to/from the published file. When you want to share your updated work with the rest of the team, select "Push to Publish" to update the published file with your changes. Push updates any Transferable Data you edited and replaces any objects/collections you own with the versions in your current file. Transferable Data owned by other artists is re-applied to your objects.
+
+When another artist then pulls from the published file, the same process occurs: all objects, collections and Transferable Data local to their file are kept, and any data owned externally by other task layers is imported.
+
+## Surrendering Ownership
+In the ownership inspector, each Object/Transferable Data item has an option to "surrender" that piece of data. A surrendered piece of data is "up for grabs" to all other task layers. After surrendering, the artist needs to push this update to the published file. The surrendered item's ownership indicator is then replaced by an "Update Surrendered" operator, which is available to all task layers except the one that surrendered the data. When another task layer pulls from the publish, they can run the "Update Surrendered" operator to claim the data, assigning it to their task layer.
+
+## Publish New Version
+To publish a new version of an asset, select the "Publish New Version" operator. The operator dialogue asks which publish type to create. The publish types are as follows.
+
+### Active
+An active publish is a publish that can be referenced by the production in shot files. Multiple active versions can exist if some shots require an older version of the asset, but only a single publish is updated with changes as the push/pull target.
+
+### Staged
+A staged publish cannot be referenced by the production, and only one staged publish can exist at a time. If a staged publish exists, it replaces the active publish as the push/pull target. The staging area exists so artists can collaborate on a new version of an asset that is not yet ready to be used in production.
+
+### Review
+A review publish is simply a way to test the final published version of your asset. You can create as many review publishes as you want to check your work and ensure the merge process produces the expected results. A review publish is never used as a push/pull target and is for testing only.
+
+## Creating Custom Task Layers
+
+You can add your own custom Task Layers to the Asset Pipeline add-on. To create a custom task layer, find one of the template presets at `/asset_pipeline/task_layer_configs/` and copy it to your own custom directory. The layout of the JSON file is as follows:
+

```JSON
{
    // Task Layer Types are formatted as {"Name of Task Layer": "Prefix"}
    "TASK_LAYER_TYPES": {
        "Modeling": "MOD",
        "Rigging": "RIG",
        "Shading": "SHD"
    },

    // These are the default or preferred owners for each type of transfer data
    "TRANSFER_DATA_DEFAULTS": {
        "GROUP_VERTEX": { // Name of Transfer Data Type (not customizable)
            "default_owner": "Rigging", // Matching one of the Task Layer Types above
            "auto_surrender": false // Whether this data type is surrendered on initialization
        },
        "MODIFIER": {
            "default_owner": "Rigging",
            "auto_surrender": false
        },
        "CONSTRAINT": {
            "default_owner": "Rigging",
            "auto_surrender": false
        },
        "MATERIAL": {
            "default_owner": "Shading",
            "auto_surrender": true
        },
        "SHAPE_KEY": {
            "default_owner": "Modeling",
            "auto_surrender": false
        },
        "ATTRIBUTE": {
            "default_owner": "Rigging",
            "auto_surrender": false
        },
        "PARENT": {
            "default_owner": "Rigging",
            "auto_surrender": false
        }
    },

    // These are default attributes created by Blender
    "ATTRIBUTE_DEFAULTS": {
        "sharp_face": {
            "default_owner": "Modeling",
            "auto_surrender": true
        },
        "UVMap": {
            "default_owner": "Shading",
            "auto_surrender": true
        }
    }
}
```
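+
+As a usage illustration only: assuming a preset follows the layout above, it could be read and sanity-checked with a few lines of Python. `load_task_layer_preset` is a hypothetical helper, not part of the add-on, and it strips the `//` comments shown in the annotated example (real preset files may not contain them):

```python
import json
from pathlib import Path

def load_task_layer_preset(path: Path) -> dict:
    """Read a task layer preset JSON and check that every default owner
    refers to a task layer defined in TASK_LAYER_TYPES."""
    # Strip `//` comments so the annotated example above also parses.
    lines = [line.split("//")[0] for line in path.read_text().splitlines()]
    data = json.loads("\n".join(lines))

    task_layers = data["TASK_LAYER_TYPES"]
    defaults = {**data["TRANSFER_DATA_DEFAULTS"], **data.get("ATTRIBUTE_DEFAULTS", {})}
    for name, settings in defaults.items():
        owner = settings["default_owner"]
        if owner not in task_layers:
            raise ValueError(f"{name}: unknown default owner '{owner}'")
    return data

# Example (hypothetical path):
# preset = load_task_layer_preset(Path("my_task_layers.json"))
# print(preset["TASK_LAYER_TYPES"])  # {'Modeling': 'MOD', 'Rigging': 'RIG', 'Shading': 'SHD'}
```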
\ No newline at end of file
diff --git a/scripts-blender/addons/asset_pipeline/TODO.txt b/scripts-blender/addons/asset_pipeline/TODO.txt
deleted file mode 100644
index a6438bfc..00000000
--- a/scripts-blender/addons/asset_pipeline/TODO.txt
+++ /dev/null
@@ -1,84 +0,0 @@
-Here are some ideas, bugs, and TODOs for the Asset Pipeline.
-
-High prio bugs:
-    - Crashes when pulling in dog.modeling.blend
-        - Seems to nukes face sets when pulling into modeling.
-    - Pulling into rigging, SurfaceDeform modifiers fall asleep.
-    - Pulling into rigging, GeoNodes modifiers lose some of their inputs until the same nodetree is re-assigned.
-    - Pulling into rigging and I think also on pushing, the Copy Location constraint targetting the zipper helper mesh ends up targetting the rig instead. I tried investigating this already but I just don't get it.
-    - Pulling into rigging after a mesh update, material assignments seem to break until pulling a 2nd time.
-
-
-Low Prio:
-    Bugs:
-    - "Create production Context" (Refresh icon under Publish Manager panel) segfaults.
- - If reloading the file mid-publish, Apply Changes button throws "StructRNA has been removed". - - If trying to push from an unsaved file, the changes since the last save won't be pushed. This is fine, but there should be an indication of it. - - I think all of these would be fixed by the "Sync" button idea. - - TODOs: - - Setting an asset to Deprecated should set all task layers to Locked. - - Asset status list seems to not show all versions until refresh button is pressed? - - We should update asset statuses as an early stage of the Publish process, to avoid potentially pushing into deprecated versions (if somebody else deprecates a version, we SVN update, but don't manually refresh or reload). - - Asset Updater: - - Don't fully ignore versions when their status is Review. Allow them to be manually selected at least. - - Also display the asset status(Review/Deprecated/Approved) in the version number enum drop-down. - - Is there a missing Purge at the end of update_asset()? - - Make the UI prettier and less confusing. - - Code quality: - - Looks like the generate_mapping() call could be moved out of task layers and into generic. - - De-duplicating pull_from_task and pull_from_publish would probably be pretty great. - - -## Idea: "Sync" instead of "Push/Pull": - Instead of the "push/pull" mental model that we currently have, I propose a "Sync" mental model. The "Sync" button would: - - Save a backup of the current file in the user's Autosave folder. - - Pull from Publish. - - Save the current file. - - Delete all collections and objects beside the asset collection. - - "Save As" to overwrite the publish. - - Open the original file. - - Benefits: - - No more opening a Blender subprocess in the background, which makes issues hard to troubleshoot. - - Files are always forced to stay in sync, because you can't push without pulling. - - Half the time spent on pushing and pulling, since it's only done once for two files. - - What you see is what you get: You can be confident that whatever lands in your asset collection is exactly what's in the publish as well. - - Downsides: - - Any "cleanup" operations done on the asset will now be done on the working file, such as un-assigning actions from rigs. (This could probably be accounted for at the cost of sacrificing the "Shat you see is what you get" benefit.) - - If the Asset Pipeline is broken, now your working file will be broken as well, instead of just the publish. (Hence the back-up as the first step) - - Hopefully this idea is still compatible with syncing multiple versions and accounting for locked task layers. - - -## Idea: Object ownership by Task Layer - A feature that was added after Paul left, was the ability for Task Layers to affect collection assingments. Relevant code is `transfer_collection_objects()`. The current behaviour and code are both crazy confusing; Any Task Layer can add objects to its collection (eg. Rigging can add objects to einar.rigging), but they can't remove them unless there's a special suffix in the colleciton name, ".FULLY_OWNED". This was obviously implemented in a rush, we needed it working on the day of, or we couldn't get the job done. - - All this code and behaviour can be thrown away in favor of something better. - - My proposal: - - Approach the whole system with an "override" mental model. - - An object is "owned" by the lowest-index task layer that it's assigned to. 
(rigging==0) - - If the object is assigned to other task layers, those task layers are "overriding" the aspects of the object that correspond to that task layer. - - This means that most objects will be assigned to most sub-collections, and that's okay! - - - A task layer can add and remove objects from its influence, but not add or remove objects from other task layers' influence. - - If an object is only assigned to a single task layer, don't transfer any data to it. - - If an object is in two task layer collections, determine which one is source and target, and transfer data accordingly. - - For example, if an object is assigned to two task layers(eg. rigging+shading), take the object from the task layer with lower index (rigging==0) and transfer the data of the higher index task layer to it. - - Although, I'm not sure how this will work if a task layer is locked. - -## Idea: Sanity Check panel - Would be cool (even as a separate addon) to add a "sanity check" button & panel that can warn about: - - Datablock in file but not referenced by current view layer - - Mesh/Armature datablock not named same as container object - - Datablock has .00x name ending - - Datablock has .TASK/.TARGET/etc ending - - Display a list of all datablocks per type, and show what other datablocks are referencing that one. Clicking on those sets the list filter to their datablock type and makes their entry the active one. - - Draw the User Remap operator or a masked version of it (since Objects might need to be removed from the View Layer before being user remapped) - - This would be quite similar to CloudRig's "Generation Log" list, that gets filled with warnings by the Generate button, with information about potential issues with a generated rig. diff --git a/scripts-blender/addons/asset_pipeline/__init__.py b/scripts-blender/addons/asset_pipeline/__init__.py index c39860f4..4a898c4b 100644 --- a/scripts-blender/addons/asset_pipeline/__init__.py +++ b/scripts-blender/addons/asset_pipeline/__init__.py @@ -1,37 +1,13 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter - -import logging - -import bpy - import importlib -from . import prefs, util, props, api, builder, updater +from . 
import ui, ops, props, prefs bl_info = { "name": "Asset Pipeline", - "author": "Paul Golter", + "author": "Nick Alberelli", "description": "Blender Studio Asset Pipeline Add-on", - "blender": (3, 1, 0), - "version": (0, 1, 2), + "blender": (4, 0, 0), + "version": (0, 2, 0), "location": "View3D", "warning": "", "doc_url": "", @@ -39,27 +15,19 @@ bl_info = { "category": "Generic", } -logger = logging.getLogger("BSP") - def reload() -> None: - global util - global prefs + global ui + global ops global props - global api - global builder - global updater - - importlib.reload(util) - importlib.reload(prefs) + global prefs + importlib.reload(ui) + importlib.reload(ops) importlib.reload(props) - importlib.reload(api) - - builder.reload() - updater.reload() + importlib.reload(prefs) -_need_reload = "prefs" in locals() +_need_reload = "ui" in locals() if _need_reload: reload() @@ -67,14 +35,14 @@ if _need_reload: def register() -> None: - prefs.register() + ui.register() + ops.register() props.register() - builder.register() - updater.register() + prefs.register() def unregister() -> None: - builder.unregister() - updater.unregister() + ui.unregister() + ops.unregister() props.unregister() prefs.unregister() diff --git a/scripts-blender/addons/asset_pipeline/api/__init__.py b/scripts-blender/addons/asset_pipeline/api/__init__.py deleted file mode 100644 index 079e2991..00000000 --- a/scripts-blender/addons/asset_pipeline/api/__init__.py +++ /dev/null @@ -1,34 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter -from ..builder.context import BuildContext -from ..builder.task_layer import TaskLayer -from ..builder.asset_mapping import AssetTransferMapping -from ..builder.hook import hook, Wildcard, DoNotMatch -from ..builder.vis import EnsureObjectVisibility, EnsureCollectionVisibility - -__all__ = ["TaskLayer", - "BuildContext", - "AssetTransferMapping", - "hook", - "Wildcard", - "DoNotMatch", - "EnsureObjectVisibility", - "EnsureCollectionVisibility", - ] diff --git a/scripts-blender/addons/asset_pipeline/asset_files.py b/scripts-blender/addons/asset_pipeline/asset_files.py deleted file mode 100644 index 59a672fc..00000000 --- a/scripts-blender/addons/asset_pipeline/asset_files.py +++ /dev/null @@ -1,282 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter -import re -import shutil -import logging - -from typing import List, Dict, Union, Any, Set, Optional -from pathlib import Path - -import bpy - -from . import constants -from .builder import metadata -from .builder.metadata import MetadataTreeAsset -from .asset_status import AssetStatus - -logger = logging.getLogger("BSP") - - -class FailedToIncrementLatestPublish(Exception): - pass - - -class FailedToLoadMetadata(Exception): - pass - - -class AssetFile: - def __init__(self, asset_path: Path): - self._path = asset_path - self._metadata_path = ( - asset_path.parent / f"{asset_path.stem}{constants.METADATA_EXT}" - ) - self._metadata: Optional[MetadataTreeAsset] = None - self._load_metadata() - - @property - def path(self) -> Path: - return self._path - - @property - def metadata_path(self) -> Path: - return self._metadata_path - - @property - def metadata(self) -> MetadataTreeAsset: - return self._metadata - - def write_metadata(self) -> None: - metadata.write_asset_metadata_tree_to_file(self.metadata_path, self.metadata) - - def reload_metadata(self) -> None: - if not self.metadata_path.exists(): - raise FailedToLoadMetadata( - f"Metadata file does not exist: {self.metadata_path.as_posix()}" - ) - self._load_metadata() - - @property - def pickle_path(self) -> Path: - return self.path.parent / f"{self.path.stem}.pickle" - - def __repr__(self) -> str: - return self._path.name - - def _load_metadata(self) -> None: - # Make AssetPublish initializeable even tough - # metadata file does not exist. - # Its handy to use this class for in the 'future' - # existing files, to query paths etc. - if not self.metadata_path.exists(): - logger.warning( - f"Metadata file does not exist: {self.metadata_path.as_posix()}" - ) - return - - self._metadata = metadata.load_asset_metadata_tree_from_file(self.metadata_path) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, AssetFile): - raise NotImplementedError() - - return bool(self.path == other.path) - - def __hash__(self) -> int: - return hash(self.path) - - -class AssetTask(AssetFile): - """ - Represents a working file. - """ - - @property - def asset_dir(self) -> "AssetDir": - return AssetDir(self.path.parent) - - @property - def path_relative_to_asset_dir(self) -> Path: - return self._path.relative_to(self.asset_dir.path) - - -class AssetPublish(AssetFile): - """ - Represents a publish file. - """ - - def get_version(self, format: type = str) -> Optional[Union[str, int]]: - return get_file_version(self.path, format=format) - - def unlink(self) -> None: - """ - Caution: This will delete the file and the metadata file of this asset publish on disk. 
- """ - self.metadata_path.unlink() - self.path.unlink() - - @property - def asset_dir(self) -> "AssetDir": - return AssetDir(self.path.parent.parent) - - @property - def path_relative_to_asset_dir(self) -> Path: - return self._path.relative_to(self.asset_dir.path) - - -class AssetDir: - def __init__(self, path: Path): - self._path = path - # Directory name should match asset name - self._asset_disk_name = path.name - - @property - def path(self) -> Path: - return self._path - - @property - def asset_disk_name(self) -> str: - return self._asset_disk_name - - @property - def publish_dir(self) -> Path: - return self._path / "publish" - - def get_asset_publishes(self) -> List[AssetPublish]: - # Asset Naming Convention: {asset_name}.{asset_version}.{suffix} - # TODO: if asset_dir.name == asset.name we could use this logic here - if not self.publish_dir.exists(): - return [] - - blend_files = get_files_by_suffix(self.publish_dir, ".blend") - asset_publishes: List[AssetPublish] = [] - - for file in blend_files: - file_version = get_file_version(file) - if not file_version: - continue - - t = file.stem # Without suffix - t = t.replace(f".{file_version}", "") # Without version string - - # It it matches asset name now, it is an official publish. - if t != self._asset_disk_name: - continue - - asset_publishes.append(AssetPublish(file)) - - # Sort asset publishes after their 'version' ascending -> v001, v002, v003 - def get_publish_version(asset_publish: AssetPublish) -> int: - return asset_publish.get_version(format=int) - - asset_publishes.sort(key=get_publish_version) - return asset_publishes - - def increment_latest_publish(self) -> AssetPublish: - asset_publishes = self.get_asset_publishes() - if not asset_publishes: - raise FailedToIncrementLatestPublish( - f"No publishes available in: {self.publish_dir.as_posix()}" - ) - - latest_publish = asset_publishes[-1] - new_version = f"v{(latest_publish.get_version(format=int)+1):03}" - - # Duplicate blend and metadata file. - # Have metadata_path first so new_path is the one with .blend. - for path in [latest_publish.metadata_path, latest_publish.path]: - new_name = path.name.replace(latest_publish.get_version(), new_version) - new_path = latest_publish.path.parent / new_name - - if new_path.exists(): - raise FailedToIncrementLatestPublish( - f"Already exists: {new_path.as_posix()}" - ) - - shutil.copy(path, new_path) - logger.info(f"Copied: {path.name} to: {new_path.name}") - - new_publish = AssetPublish(new_path) - - # Update metadata. - new_publish.metadata.meta_asset.version = new_version - - # Set new status to review. - new_publish.metadata.meta_asset.status = AssetStatus.REVIEW.name - - # Set all task layers of new version to live. - for meta_tl in new_publish.metadata.meta_task_layers: - meta_tl.is_locked = False - - # Write metadata to disk. - new_publish.write_metadata() - - return new_publish - - def get_first_publish_path(self) -> Path: - filename = f"{self.asset_disk_name}.v001.blend" - return self.publish_dir / filename - - def __repr__(self) -> str: - publishes = ", ".join(str(a) for a in self.get_asset_publishes()) - return f"{self.asset_disk_name} (Publishes:{str(publishes)})" - - -def get_asset_disk_name(asset_name: str) -> str: - """ - Converts Asset Name that is stored on Kitsu to a - adequate name for the filesystem. Replaces spaces with underscore - and lowercases all. 
- """ - return asset_name.lower().replace(" ", "_") - - -def get_file_version(path: Path, format: type = str) -> Optional[Union[str, int]]: - """ - Detects if file has versioning pattern "v000" and returns that version. - Returns: - str: if file version exists - bool: False if no version was detected - """ - match = re.search("v(\d\d\d)", path.name) - if not match: - return None - - version = match.group(0) - - if format == str: - return version - - elif format == int: - return int(version.replace("v", "")) - - else: - raise ValueError(f"Unsupported format {format} expected: int, str.") - - -def get_files_by_suffix(dir_path: Path, suffix: str) -> List[Path]: - """ - Returns a list of paths that match the given ext in folder. - Args: - ext: String of file extensions eg. ".txt". - Returns: - List of Path() objects that match the ext. Returns empty list if no files were found. - """ - return [p for p in dir_path.iterdir() if p.is_file() and p.suffix == suffix] diff --git a/scripts-blender/addons/asset_pipeline/asset_status.py b/scripts-blender/addons/asset_pipeline/asset_status.py deleted file mode 100644 index 96dce91a..00000000 --- a/scripts-blender/addons/asset_pipeline/asset_status.py +++ /dev/null @@ -1,37 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter - -from typing import List, Dict, Union, Any, Set, Optional, Tuple -from pathlib import Path -from enum import Enum, auto - -import bpy - - -class AssetStatus(Enum): - REVIEW = 0 - APPROVED = 1 - DEPRECATED = 2 - - -def get_asset_status_as_bl_enum( - self: bpy.types.Operator, context: bpy.types.Context -) -> List[Tuple[str, str, str]]: - return [(str(item.value), item.name.capitalize(), "") for item in AssetStatus] diff --git a/scripts-blender/addons/asset_pipeline/builder/__init__.py b/scripts-blender/addons/asset_pipeline/builder/__init__.py deleted file mode 100644 index 6ec73503..00000000 --- a/scripts-blender/addons/asset_pipeline/builder/__init__.py +++ /dev/null @@ -1,53 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
-# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter -import importlib - -from typing import List, Dict, Union, Any, Set, Optional - -from . import ops, ui -from .context import ProductionContext, AssetContext, BuildContext, UndoContext -from .asset_builder import AssetBuilder - -# Initialize building variables. -PROD_CONTEXT: Optional[ProductionContext] = None -ASSET_CONTEXT: Optional[AssetContext] = None -BUILD_CONTEXT: Optional[BuildContext] = None -ASSET_BUILDER: Optional[AssetBuilder] = None -UNDO_CONTEXT: Optional[UndoContext] = None - -# ----------------REGISTER--------------. - - -def reload() -> None: - global ops - global ui - - importlib.reload(ops) - importlib.reload(ui) - - -def register() -> None: - ops.register() - ui.register() - - -def unregister() -> None: - ui.unregister() - ops.unregister() diff --git a/scripts-blender/addons/asset_pipeline/builder/asset_builder.py b/scripts-blender/addons/asset_pipeline/builder/asset_builder.py deleted file mode 100644 index 06413d87..00000000 --- a/scripts-blender/addons/asset_pipeline/builder/asset_builder.py +++ /dev/null @@ -1,612 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter -import pickle -import logging - -from typing import List, Dict, Union, Any, Set, Optional, Tuple, Callable -from pathlib import Path -from datetime import datetime - -import bpy - -from . import asset_suffix, metadata, meta_util -from .context import BuildContext -from .asset_importer import AssetImporter -from .asset_mapping import TransferCollectionTriplet, AssetTransferMapping -from .blstarter import BuilderBlenderStarter -from .metadata import MetadataTaskLayer, MetadataTreeAsset -from .hook import HookFunction - -from .. import constants, util -from ..asset_files import AssetPublish - -logger = logging.getLogger("BSP") - - -class AssetBuilderFailedToInitialize(Exception): - pass - - -class AssetBuilderFailedToPull(Exception): - pass - - -class AssetBuilderFailedToPublish(Exception): - pass - - -class AssetBuilder: - """ - The AssetBuilder contains the actual logic how to process the BuildContext. - It has 3 main functions: - - push: Starts process of opening a new Blender Instance and pickling the BuildContext. New Blender Instance - actually then loads the BuildContext and calls AssetBuilder.pull_from_task(). - - pull_from_publish: Pulls the selected TaskLayers from the AssetPublish in to the current AssetTask. - Does not require a new Blender Instance. - - pull_from_task: Pulls the selected TaskLayers from the AssetTask in to the current AssetPublish. - """ - - def __init__(self, build_context: BuildContext): - if not build_context: - raise AssetBuilderFailedToInitialize( - "Failed to initialize AssetBuilder. 
Build_context not valid." - ) - - self._build_context = build_context - self._asset_importer = AssetImporter(self._build_context) - self._transfer_settings = bpy.context.scene.bsp_asset_transfer_settings - - @property - def build_context(self) -> BuildContext: - return self._build_context - - @property - def asset_importer(self) -> AssetImporter: - return self._asset_importer - - @property - def transfer_settings(self) -> bpy.types.PropertyGroup: - return self._transfer_settings - - def push(self, context: bpy.types.Context) -> None: - """ - Starts process of opening a new Blender Instance and pickling the BuildContext. New Blender Instance - actually then loads the BuildContext and calls AssetBuilder.pull_from_task(). That means pickling the BuildContext - and restoring it in the other Blender Instance. - """ - - # No here it gets a little tricky. We cannot just simply - # perform a libraries.write() operation. The merge process - # requires additional operations to happen so we need to actually - # open the asset version blend file and perform them. - - # Now we already assembled this huge BuildContext, in which we have - # all the information we need for whatever needs to be done. - # The question is how can we share this info with the new Blender Instance - # that knows nothing about it. - - # A very effective and easy ways seems to be pickling the BuildContext - # and unpickling it in the new Blender Instance again. - # Some objects cannot be pickled (like the blender context or a collection) - # (We can add custom behavior to work around this please see: ./context.py) - - # Catch special case first version. - if not self.build_context.asset_publishes: - asset_publish = self._create_first_version() - - # Start pickling. - pickle_path = asset_publish.pickle_path - with open(pickle_path.as_posix(), "wb") as f: - pickle.dump(self.build_context, f) - - logger.info(f"Pickled to {pickle_path.as_posix()}") - - # Open new blender instance, with publish script. - # Publish script can detect a first version publish and performs - # a special set of operations. - BuilderBlenderStarter.start_publish( - asset_publish.path, - pickle_path, - ) - return - - # Normal publish process. - for process_pair in self.build_context.process_pairs: - - asset_publish = process_pair.asset_publish - - logger.info("Processing %s", asset_publish.path.as_posix()) - - # Start pickling. - pickle_path = ( - asset_publish.pickle_path - ) # TODO: Do we need a pickle for all of them? I think one would be enough. - with open(pickle_path.as_posix(), "wb") as f: - pickle.dump(self.build_context, f) - logger.info(f"Pickled to {pickle_path.as_posix()}") - - # Open new blender instance, with publish script. - popen = BuilderBlenderStarter.start_publish( - asset_publish.path, - pickle_path, - ) - return_code = popen.wait() - - # Update returncode property. This will be displayed - # as icon in the UI and shows Users if something went wrong - # during push. - asset_file = context.scene.bsp_asset.asset_publishes.get( - asset_publish.path.name - ) - asset_file.returncode_publish = return_code - print(f"Set {asset_file.path_str} to returncode {return_code}") - if return_code != 0: - logger.error( - "Push to %s exited with error code: %i", - asset_publish.path.name, - return_code, - ) - - def pull_from_publish( - self, - context: bpy.types.Context, - ) -> None: - - """ - Pulls the selected TaskLayers from the AssetPublish in to the current AssetTask. - """ - - # Here we don't need to open another blender instance. 
-        # We pull in the asset collection from the latest asset publish and
-        # perform the required data transfers depending on what was selected.
-
-        # Set is_push attribute.
-        self.build_context.is_push = False
-
-        # User does a pull. This code runs in the AssetTask file.
-        # Check if there are any publishes.
-        if not self.build_context.asset_publishes:
-            raise AssetBuilderFailedToPull(f"Failed to pull. Found no asset publishes.")
-
-        # We always want to pull from the latest asset publish.
-        asset_publish = self.build_context.asset_publishes[-1]
-
-        # Import Asset Collection from Asset Publish.
-        transfer_triplet: TransferCollectionTriplet = (
-            self.asset_importer.import_asset_publish()
-        )
-
-        # The target collection (base) was already decided by ASSET_IMPORTER.import_asset_publish()
-        # and is saved in transfer_triplet.target_coll.
-        mapping_task_target = AssetTransferMapping(
-            transfer_triplet.task_coll, transfer_triplet.target_coll
-        )
-        mapping_publish_target = AssetTransferMapping(
-            transfer_triplet.publish_coll, transfer_triplet.target_coll
-        )
-
-        # Process only the TaskLayers that were ticked as 'use'.
-        used_task_layers = (
-            self.build_context.asset_context.task_layer_assembly.get_used_task_layers()
-        )
-        # Should be ordered, just in case.
-        prod_task_layers = self.build_context.prod_context.task_layers
-        prod_task_layers.sort(key=lambda tl: tl.order)
-
-        transfer_triplet.reset_rigs()
-        # Apparently Blender does not evaluate objects or collections in the depsgraph
-        # in some cases if they are not visible. Ensure visibility here.
-        transfer_triplet.ensure_vis()
-
-        # Perform Task Layer merging.
-        # Note: We always want to apply all TaskLayers except for the Task Layer with the lowest order,
-        # aka the 'Base Task Layer'. This Task Layer gives us the starting point on which to apply all
-        # other Task Layers. The asset importer already handles this logic by supplying us with the
-        # right TARGET collection after import. That's why we could exclude the first task layer here
-        # in the loop. But people at the Studio pointed out it might still be useful to let this task
-        # layer run the transfer() functions, as there can be cases like:
-        # prefixing modifiers that are coming from a task layer with the task layer name.
-        logger.info(f"Using {prod_task_layers[0].name} as base.")
-
-        # If the metafile does not exist yet, create it.
-        metadata_path = self.build_context.asset_task.metadata_path
-        if not metadata_path.exists():
-            tree = self._create_asset_metadata_tree_from_collection()
-            metadata.write_asset_metadata_tree_to_file(metadata_path, tree)
-            logger.info("Created metadata file: %s", metadata_path.name)
-            del tree
-
-        # Otherwise load it from disk.
-        meta_asset_tree = metadata.load_asset_metadata_tree_from_file(metadata_path)
-
-        # Get time for later metadata update.
-        time = datetime.now()
-
-        for task_layer in prod_task_layers:
-
-            # Get metadata task layer for current task layer.
-            meta_tl = meta_asset_tree.get_metadata_task_layer(task_layer.get_id())
-
-            # Task Layer might not exist in metadata if it was added midway through production,
-            # if so add it here.
-            if not meta_tl:
-                logger.warning(
-                    "Detected TaskLayer that was not in metadata file yet: %s. Will be added.",
-                    task_layer.get_id(),
-                )
-                meta_tl = meta_util.init_meta_task_layer(task_layer, asset_publish)
-                meta_asset_tree.add_metadata_task_layer(meta_tl)
-
-            # Transfer selected task layers from Publish Coll -> Target Coll.
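-            # (Example: on a pull with only "Shading" ticked, shading data comes in
-            # from the publish collection below, while every other task layer is
-            # retained from the current task file in the else-branch.)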
-            if task_layer in used_task_layers:
-
-                logger.info(
-                    f"Transferring {task_layer.name} from {transfer_triplet.publish_coll.name} to {transfer_triplet.target_coll.name}."
-                )
-                task_layer.transfer(
-                    context, self.build_context, mapping_publish_target, self.transfer_settings
-                )
-
-                # Update source meta task layer source path.
-                # Save the path relative to the asset directory, otherwise we would have
-                # system paths at the start, which might differ on various systems.
-                meta_tl.source_path = (
-                    asset_publish.path_relative_to_asset_dir.as_posix()
-                )
-                meta_tl.updated_at = time.strftime(constants.TIME_FORMAT)
-
-            # Transfer unselected task layers from Task Coll -> Target Coll. Retain them.
-            else:
-                logger.info(
-                    f"Transferring {task_layer.name} from {transfer_triplet.task_coll.name} to {transfer_triplet.target_coll.name}."
-                )
-                task_layer.transfer(
-                    context, self.build_context, mapping_task_target, self.transfer_settings
-                )
-
-                # Here we don't want to update the source path, we keep it as is, as we are just 'retaining' here.
-
-        # Cleanup transfer.
-        self._clean_up_transfer(context, transfer_triplet)
-
-        # Save updated metadata.
-        metadata.write_asset_metadata_tree_to_file(metadata_path, meta_asset_tree)
-
-    def pull_from_task(
-        self,
-        context: bpy.types.Context,
-    ) -> None:
-        """
-        Pulls the selected TaskLayers from the AssetTask into the current AssetPublish.
-        """
-        # Set is_push attribute.
-        self.build_context.is_push = True
-
-        # User does a publish/push. This code runs in the AssetPublish file.
-        # Import Asset Collection from Asset Task.
-        transfer_triplet: TransferCollectionTriplet = (
-            self.asset_importer.import_asset_task()
-        )
-        asset_publish = AssetPublish(Path(bpy.data.filepath))
-        metadata_path = asset_publish.metadata_path
-        locked_task_layer_ids = asset_publish.metadata.get_locked_task_layer_ids()
-        meta_asset_tree = metadata.load_asset_metadata_tree_from_file(metadata_path)
-
-        transfer_triplet.reset_rigs()
-        # Ensure visibility for depsgraph evaluation.
-        transfer_triplet.ensure_vis()
-
-        # The target collection (base) was already decided by ASSET_IMPORTER.import_asset_task()
-        # and is saved in transfer_triplet.target_coll.
-        mapping_task_target = AssetTransferMapping(
-            transfer_triplet.task_coll, transfer_triplet.target_coll
-        )
-        mapping_publish_target = AssetTransferMapping(
-            transfer_triplet.publish_coll, transfer_triplet.target_coll
-        )
-
-        # Process only the TaskLayers that were ticked as 'use'.
-        used_task_layers = (
-            self.build_context.asset_context.task_layer_assembly.get_used_task_layers()
-        )
-        # Should be ordered, just in case.
-        prod_task_layers = self.build_context.prod_context.task_layers
-        prod_task_layers.sort(key=lambda tl: tl.order)
-
-        # Perform Task Layer merging.
-
-        # Note: We always want to apply all TaskLayers except for the Task Layer with the lowest order,
-        # aka the 'Base Task Layer'. This Task Layer gives us the starting point on which to apply all
-        # other Task Layers. The asset importer already handles this logic by supplying us with the
-        # right TARGET collection after import. That's why we could exclude the first task layer here
-        # in the loop. But people at the Studio pointed out it might still be useful to let this task
-        # layer run the transfer() functions, as there can be cases like:
-        # prefixing modifiers that are coming from a task layer with the task layer name.
-        logger.info(f"Using {prod_task_layers[0].name} as base.")
-
-        # Get time for later metadata update.
- time = datetime.now() - - for task_layer in prod_task_layers: - - # Get metadata task layer for current task layer. - meta_tl = meta_asset_tree.get_metadata_task_layer(task_layer.get_id()) - - # Task Layer might not exist in metadata if it was added midway production - # if so add it here. - if not meta_tl: - logger.warning( - "Detected TaskLayer that was not in metadata file yet: %s. Will be added.", - task_layer.get_id(), - ) - meta_tl = meta_util.init_meta_task_layer( - task_layer, self.build_context.asset_task - ) - meta_asset_tree.add_metadata_task_layer(meta_tl) - - # Transfer selected task layers from AssetTask Coll -> Target Coll. - # Skip any Task Layers that are locked in this AssetPublish. - # We have to do this check here because Users can push multiple Task Layer at - # the same time. Amongst the selected TaskLayers there could be some locked and some live - # in this asset publish. - if ( - task_layer in used_task_layers - and task_layer.get_id() not in locked_task_layer_ids - ): - logger.info( - f"Transferring {task_layer.name} from {transfer_triplet.task_coll.name} to {transfer_triplet.target_coll.name}." - ) - - task_layer.transfer( - context, self.build_context, mapping_task_target, self.transfer_settings - ) - - # Update source meta task layer source path. - # Save path relative to asset directory, otherwise we have system paths in the start - # which might differ on various systems. - meta_tl.source_path = ( - self.build_context.asset_task.path_relative_to_asset_dir.as_posix() - ) - meta_tl.updated_at = time.strftime(constants.TIME_FORMAT) - - else: - # Transfer unselected task layers from Publish Coll -> Target Coll. Retain them. - logger.info( - f"Transferring {task_layer.name} from {transfer_triplet.publish_coll.name} to {transfer_triplet.target_coll.name}." - ) - task_layer.transfer( - context, self.build_context, mapping_publish_target, self.transfer_settings - ) - - # Here we don't want to update source path, we keep it as is, as we are just 'retaining' here. - - # Cleanup transfer. - self._clean_up_transfer(context, transfer_triplet) - - # Save updated metadata. - metadata.write_asset_metadata_tree_to_file(metadata_path, meta_asset_tree) - - # Update asset collection properties. - context.scene.bsp_asset.asset_collection.bsp_asset.update_props_by_asset_publish( - asset_publish - ) - - # Run hook phase. - self._run_hooks(context) - - @staticmethod - def _remap_users(context): - """ - When objects inside the asset collection reference datablocks outside of - the asset collection or vice versa, some duplication can occur, as - outside objects end up with a .TASK suffix, and they end up referencing - objects that are no longer linked to the scene. - - Objects inside the asset collection correctly lose their suffix, but - also end up referencing outside objects without the suffix, which are - actually the wrong ones. - - So this function remaps references such that everything inside and outside - the asset collection reference each other once again, and removes - any leftover .TASK suffixes. - """ - - suf = constants.TASK_SUFFIX - for datablock in bpy.data.user_map(): - has_type = hasattr(datablock, 'type') - if has_type and datablock.type == 'OBJECT' \ - and datablock.name not in context.scene.objects: - # Objects that aren't in the scene have been replaced by the pull - # process, so we don't want to remap any references to them. 
- continue - storage = util.get_storage_of_id(datablock) - if not datablock.name.endswith(suf): - continue - - without_suffix = datablock.name.replace(suf, "") - other_db = storage.get(without_suffix) - if not other_db: - continue - - # print(f'REMAP USERS: "{other_db.name}" -> "{datablock.name}"') - other_db.user_remap(datablock) - # Rename the object to make its name available. - # This datablock should get purged soon, otherwise it's a bug. - other_db.name += "_Users_Remapped" - datablock.name = without_suffix - - # Since this process can leave unused datablocks behind, let's purge. - bpy.ops.outliner.orphans_purge(do_recursive=True) - - def _clean_up_transfer( - self, context: bpy.types.Context, transfer_triplet: TransferCollectionTriplet - ): - """ - Cleans up the transfer by removing the non target collection in the merge triplet, restoring - the visibilities as well as purging all orphan data. It also removes the suffixes from the target - collection and sets the asset collection. - """ - # Restore Visibility. - transfer_triplet.restore_vis() - - # Remove non TARGET collections. - for coll in [transfer_triplet.publish_coll, transfer_triplet.task_coll]: - util.del_collection(coll) - - # Purge orphan data. - # This is quite an important one, if this goes wrong we can end up with - # wrong data block names. - bpy.ops.outliner.orphans_purge(do_recursive=True) - - # Enable armature poses - for ob in transfer_triplet.target_coll.all_objects: - if ob.type != 'ARMATURE': - continue - ob.data.pose_position = 'POSE' - - # Remove suffix from TARGET Collection. - asset_suffix.remove_suffix_from_hierarchy(transfer_triplet.target_coll) - - self._remap_users(context) - - # Remove transfer suffix. - transfer_triplet.target_coll.bsp_asset.transfer_suffix = "" - - # Restore scenes asset collection. - context.scene.bsp_asset.asset_collection = transfer_triplet.target_coll - - def _run_hooks(self, context: bpy.types.Context) -> None: - - if not self.build_context.prod_context.hooks: - logger.info("No hooks to run") - return - - asset_coll = context.scene.bsp_asset.asset_collection - asset_data = asset_coll.bsp_asset - params = self.build_context.get_hook_kwargs(context) - hooks_to_run: Set[HookFunction] = set() - - # Collect global hooks first. - for hook in self.build_context.prod_context.hooks.filter(): - hooks_to_run.add(hook) - - # Collect asset type hooks. - for hook in self.build_context.prod_context.hooks.filter( - match_asset_type=asset_data.entity_parent_name, - ): - hooks_to_run.add(hook) - - # Collect Global Layer Hooks. - # We have to loop through each task layer here, can't give filter() function - # a list as one of the input parameters. - for ( - task_layer_id - ) in ( - self.build_context.asset_context.task_layer_assembly.get_used_task_layer_ids() - ): - for hook in self.build_context.prod_context.hooks.filter( - match_task_layers=task_layer_id, - ): - hooks_to_run.add(hook) - - # Collect asset hooks. - for hook in self.build_context.prod_context.hooks.filter( - match_asset=asset_data.entity_name, - ): - hooks_to_run.add(hook) - - # Collect asset + task layer specific hooks. - for ( - task_layer_id - ) in ( - self.build_context.asset_context.task_layer_assembly.get_used_task_layer_ids() - ): - for hook in self.build_context.prod_context.hooks.filter( - match_asset=asset_data.entity_name, - match_task_layers=task_layer_id, - ): - hooks_to_run.add(hook) - - # Run actual hooks. - for hook in hooks_to_run: - hook(**params) - - # Purge again. 
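-        # (Hook code may have freed or unlinked datablocks, so purge once more.)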
- bpy.ops.outliner.orphans_purge(do_recursive=True) - - def _create_first_version(self) -> AssetPublish: - first_publish = AssetPublish( - self._build_context.asset_dir.get_first_publish_path() - ) - asset_coll = self._build_context.asset_context.asset_collection - data_blocks = set((asset_coll,)) - - # Check if already exists. - if first_publish.path.exists(): - raise AssetBuilderFailedToPublish( - f"Failed to create first publish. Already exist: {first_publish.path.name}" - ) - - # Create asset meta tree. - asset_metadata_tree = self._create_asset_metadata_tree_from_collection() - - # Adjust version metadata. - asset_metadata_tree.meta_asset.version = first_publish.get_version() - - # Create directory if not exist. - first_publish.path.parent.mkdir(parents=True, exist_ok=True) - - # Save asset tree. - metadata.write_asset_metadata_tree_to_file( - first_publish.metadata_path, asset_metadata_tree - ) - - # Create blend file. - bpy.data.libraries.write( - first_publish.path.as_posix(), - data_blocks, - path_remap="RELATIVE_ALL", - fake_user=True, - ) - - logger.info("Created first asset version: %s", first_publish.path.as_posix()) - return first_publish - - def _create_asset_metadata_tree_from_collection(self) -> MetadataTreeAsset: - # Create asset meta tree. - meta_asset = ( - self.build_context.asset_context.asset_collection.bsp_asset.gen_metadata_class() - ) - meta_task_layers: List[MetadataTaskLayer] = [] - - for task_layer in self.build_context.prod_context.task_layers: - meta_tl = meta_util.init_meta_task_layer( - task_layer, self.build_context.asset_task - ) - meta_task_layers.append(meta_tl) - - meta_tree_asset = MetadataTreeAsset( - meta_asset=meta_asset, meta_task_layers=meta_task_layers - ) - return meta_tree_asset diff --git a/scripts-blender/addons/asset_pipeline/builder/asset_importer.py b/scripts-blender/addons/asset_pipeline/builder/asset_importer.py deleted file mode 100644 index ee21c9f0..00000000 --- a/scripts-blender/addons/asset_pipeline/builder/asset_importer.py +++ /dev/null @@ -1,317 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter -import logging -import uuid -from typing import List, Dict, Union, Any, Set, Optional, Tuple -from pathlib import Path - -import bpy - -from . import asset_suffix -from .context import BuildContext -from .asset_mapping import TransferCollectionTriplet - -from .. 
import constants -from ..asset_files import AssetPublish - -logger = logging.getLogger("BSP") - - -class FileExistsError(Exception): - pass - - -class ImportFailed(Exception): - pass - - -def import_data_from_lib( - libpath: Path, - data_category: str, - data_name: str, - link: bool = False, -) -> Any: - - noun = "Appended" - if link: - noun = "Linked" - - with bpy.data.libraries.load(libpath.as_posix(), relative=True, link=link) as ( - data_from, - data_to, - ): - - if data_name not in eval(f"data_from.{data_category}"): - raise ImportFailed( - f"Failed to import {data_category} {data_name} from {libpath.as_posix()}. Doesn't exist in file.", - ) - - # Check if datablock with same name already exists in blend file. - try: - eval(f"bpy.data.{data_category}['{data_name}']") - except KeyError: - pass - else: - raise ImportFailed( - f"{data_name} already in bpy.data.{data_category} of this blendfile.", - ) - - # Append data block. - eval(f"data_to.{data_category}.append('{data_name}')") - logger.info( - "%s: %s from library: %s", - noun, - data_name, - libpath.as_posix(), - ) - - if link: - return eval( - f"bpy.data.{data_category}['{data_name}', '{bpy.path.relpath(libpath.as_posix())}']" - ) - - return eval(f"bpy.data.{data_category}['{data_name}']") - - -class AssetImporter: - """ - Class that handles the creation of the TransferCollectionTriplet. - Depending on the operation (push/pull) and depending on the selected TaskLayers - we need to import and suffix the Asset Collections from the AssetTask and the AssetPublish - after a certain logic. - """ - - def __init__(self, build_context: BuildContext): - self._build_context = build_context - - @property - def build_context(self) -> BuildContext: - return self._build_context - - def _duplicate_tmp_blendfile(self) -> Path: - # Gen a UUID to minimize risk of overwriting an existing blend file. - id = uuid.uuid4() - filepath_tmp = Path(bpy.data.filepath) - filepath_tmp = filepath_tmp.parent / f"{filepath_tmp.stem}-{id}.blend" - - if filepath_tmp.exists(): - raise FileExistsError( - f"Failed to duplicate blend file. Path already exists: {filepath_tmp.as_posix()}" - ) - - # Duplicate blend file by saving it in filepath_tmp. - bpy.ops.wm.save_as_mainfile(filepath=filepath_tmp.as_posix(), copy=True) - - logger.debug("Created temporary duplicate: %s", filepath_tmp.name) - - return filepath_tmp - - def _import_coll_with_suffix( - self, libpath: Path, coll_name: str, coll_suffix: str - ) -> bpy.types.Collection: - - coll = import_data_from_lib(libpath, "collections", coll_name) - asset_suffix.add_suffix_to_hierarchy(coll, coll_suffix) - return coll - - def import_asset_task(self) -> TransferCollectionTriplet: - """ - Imports that asset task that is stored in BuildContext.asset_task. - Note: This function assumes it is run in an asset publish file. - """ - - # TODO: Add safety check to verify this function is not run in an - # asset task. Maybe built context could receive a flag that we can check here? - - asset_task = self.build_context.asset_task - asset_publish = AssetPublish(Path(bpy.data.filepath)) - - asset_coll_publish = self.build_context.asset_context.asset_collection - asset_coll_name = asset_coll_publish.name - - # We now need to either duplicate the asset task or publish collection - # depending on which one is going to be the base. 
To make this decision we should look - # at the enabled TaskLayers in the build context and then check the 'order' attribute of TaskLayers - # if the asset task collection contains a task layer with the lowest order we have to take that as - # a base. - orders_prod: List[int] = self.build_context.prod_context.get_task_layer_orders() - orders_asset_task: List[ - int - ] = self.build_context.asset_context.task_layer_assembly.get_task_layer_orders( - only_used=True - ) - - # If the smallest order of the asset task is equal the smallest order or prod orders - # We know that we need to take the collection of the asset task as a new base. - - # BASE --> ASSET_TASK COLLECTION - if min(orders_asset_task) == min(orders_prod): - - logger.info("Take Asset Task as Base: %s", asset_task.path.name) - - # Suffix asset_publish collection with .PUBLISH - asset_suffix.add_suffix_to_hierarchy( - asset_coll_publish, constants.PUBLISH_SUFFIX - ) - - # Import asset task collection with .TASK suffix. - asset_coll_task = self._import_coll_with_suffix( - asset_task.path, asset_coll_name, constants.TASK_SUFFIX - ) - - # Import asset_task collection again and suffix as .TARGET - asset_coll_target = self._import_coll_with_suffix( - asset_task.path, asset_coll_name, constants.TARGET_SUFFIX - ) - - # BASE --> ASSET_PUBLISH COLLECTION - else: - - logger.info("Take Asset Publish as Base: %s", asset_publish.path.name) - - # Make tmp blendfile. - # This is a little tricks that prevents us from having to duplicate the whole - # Collection hierarchy and deal with annoyin .001 suffixes. - # That way we can first suffix the asset publish collection and then import it again. - tmp_blendfile_path = self._duplicate_tmp_blendfile() - - # Suffix asset_publish collection with .PUBLISH. - asset_suffix.add_suffix_to_hierarchy( - asset_coll_publish, constants.PUBLISH_SUFFIX - ) - - # Import asset task collection with .TASK suffix. - asset_coll_task = self._import_coll_with_suffix( - asset_task.path, asset_coll_name, constants.TASK_SUFFIX - ) - - # Import asset_publish collection from tmp blend file and suffix as .TARGET - asset_coll_target = self._import_coll_with_suffix( - tmp_blendfile_path, asset_coll_name, constants.TARGET_SUFFIX - ) - - # Remove tmp blend file. - tmp_blendfile_path.unlink() - - # Link for debugging. - for coll in [asset_coll_publish, asset_coll_target, asset_coll_task]: - if coll in list(bpy.context.scene.collection.children): - continue - bpy.context.scene.collection.children.link(coll) - - # Set suffixes. - asset_coll_task.bsp_asset.transfer_suffix = constants.TASK_SUFFIX - asset_coll_publish.bsp_asset.transfer_suffix = constants.PUBLISH_SUFFIX - asset_coll_target.bsp_asset.transfer_suffix = constants.TARGET_SUFFIX - - return TransferCollectionTriplet( - asset_coll_task, asset_coll_publish, asset_coll_target - ) - - def import_asset_publish(self) -> TransferCollectionTriplet: - """ - Imports the latest asset publish. - """ - # TODO: shares a lot of the same code as import_asset_task(). Refactor it to make it DRY. - - # TODO: Add safety check to verify this function is not run in an - # asset publish. Maybe built context could receive a flag that we can check here? - # Get latest asset version. 
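-        # (get_asset_publishes() returns publishes sorted ascending by version,
-        # e.g. v001, v002, v003, so index [-1] is always the newest one.)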
- asset_publish = self.build_context.asset_publishes[-1] - asset_task = self.build_context.asset_task - asset_coll_task = self.build_context.asset_context.asset_collection - asset_coll_name = asset_coll_task.name - - # We now need to either duplicate the asset task or publish collection - # depending on which one is going to be the base. To make this decision we should look - # at the enabled TaskLayers in the build context and then check the 'order' attribute of TaskLayers - # if the asset task collection contains a task layer with the lowest order we have to take that as - # a base. - orders_prod: List[int] = self.build_context.prod_context.get_task_layer_orders() - orders_asset_publish: List[ - int - ] = self.build_context.asset_context.task_layer_assembly.get_task_layer_orders( - only_used=True - ) - - # Remember in this scenario the orders_asset_task might be a little misleading - # because we have to turn it around. In this case the user selects which TaskLayers they want - # to pull from the ASSET PUBLISH. But the base logic stays the same: - - # If the smallest order of the asset publish is equal the smallest order or prod orders - # We know that we need to take the collection of the asset publish as a new base. - - # BASE --> ASSET_PUBLISH COLLECTION - if min(orders_asset_publish) == min(orders_prod): - logger.info("Take Asset Publish as Base: %s", asset_publish.path.name) - - # Suffix asset_task collection with .TASK - asset_suffix.add_suffix_to_hierarchy(asset_coll_task, constants.TASK_SUFFIX) - - # Import asset_publish collection with .PUBLISH suffix. - asset_coll_publish = self._import_coll_with_suffix( - asset_publish.path, asset_coll_name, constants.PUBLISH_SUFFIX - ) - - # Import asset_publish collection again and suffix as .TARGET - asset_coll_target = self._import_coll_with_suffix( - asset_publish.path, asset_coll_name, constants.TARGET_SUFFIX - ) - - # BASE --> ASSET_TASK COLLECTION - else: - logger.info("Take Asset Task as Base: %s", asset_task.path.name) - - # Make tmp blendfile. - # This is a little tricks that prevents us from having to duplicate the whole - # Collection hierarchy and deal with annoyin .001 suffixes. - # That way we can first suffix the asset publish collection and then import it again. - tmp_blendfile_path = self._duplicate_tmp_blendfile() - - # Suffix asset_task collection with .TASK. - asset_suffix.add_suffix_to_hierarchy(asset_coll_task, constants.TASK_SUFFIX) - - # Import asset publish collection with .PUBLISH suffix. - asset_coll_publish = self._import_coll_with_suffix( - asset_publish.path, asset_coll_name, constants.PUBLISH_SUFFIX - ) - - # Import asset_task collection from tmp blend file and suffix as .TARGET - asset_coll_target = self._import_coll_with_suffix( - tmp_blendfile_path, asset_coll_name, constants.TARGET_SUFFIX - ) - - # Remove tmp blend file. - tmp_blendfile_path.unlink() - - # Link for debugging. - for coll in [asset_coll_publish, asset_coll_target, asset_coll_task]: - if coll in list(bpy.context.scene.collection.children): - continue - bpy.context.scene.collection.children.link(coll) - - # Set suffixes. 
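-        # (From here on the three collections are distinguishable purely by their
-        # transfer suffix, e.g. ".TASK", ".PUBLISH" and ".TARGET", which is what
-        # AssetTransferMapping later relies on to match datablocks up.)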
- asset_coll_task.bsp_asset.transfer_suffix = constants.TASK_SUFFIX - asset_coll_publish.bsp_asset.transfer_suffix = constants.PUBLISH_SUFFIX - asset_coll_target.bsp_asset.transfer_suffix = constants.TARGET_SUFFIX - - return TransferCollectionTriplet( - asset_coll_task, asset_coll_publish, asset_coll_target - ) diff --git a/scripts-blender/addons/asset_pipeline/builder/asset_mapping.py b/scripts-blender/addons/asset_pipeline/builder/asset_mapping.py deleted file mode 100644 index f14350ff..00000000 --- a/scripts-blender/addons/asset_pipeline/builder/asset_mapping.py +++ /dev/null @@ -1,356 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter - -import logging - -from typing import List, Dict, Union, Any, Set, Optional, Tuple - -from pathlib import Path - -import bpy - -from .vis import EnsureCollectionVisibility - -from .. import util - -logger = logging.getLogger("BSP") - - -class TransferCollectionTriplet: - """ - This class holds the 3 collections that are needed for the merge process. Publish, Task and Target Collection. - During the merge we have to dynamically decide which Task Layer we take from the Publish Collection - and which we take from the Task Collection to apply on the target. - That's why we save these 3 Collections in a dedicated class, as we require them. - """ - - def __init__( - self, - task_coll: bpy.types.Collection, - publish_coll: bpy.types.Collection, - target_coll: bpy.types.Collection, - ): - self.publish_coll = publish_coll - self.task_coll = task_coll - self.target_coll = target_coll - self._vis_colls: List[EnsureCollectionVisibility] = [] - - def get_collections(self) -> List[bpy.types.Collection]: - return [self.task_coll, self.publish_coll, self.target_coll] - - def reset_rigs(self) -> None: - """To ensure correct data transferring, make sure all rigs are in their - default positions.""" - for main_coll in self.get_collections(): - for ob in main_coll.all_objects: - if ob.type != "ARMATURE": - continue - util.reset_armature_pose( - ob, - reset_properties=True, - reset_transforms=True, - ) - ob.data.pose_position = "REST" - - def ensure_vis(self) -> None: - # Apparently Blender does not evaluate objects or collections in the depsgraph - # in some cases if they are not visible. This is something Users should not have to take - # care about when writing their transfer data instructions. So we will make sure here - # that everything is visible and after the transfer the original state will be restored. - - # Catch mistake if someone calls this twice without restoring before. 
-        if self._vis_colls:
-            self.restore_vis()
-        for main_coll in self.get_collections():
-            self.recursive_ensure_vis(main_coll)
-
-    def recursive_ensure_vis(self, coll):
-        self._vis_colls.append(EnsureCollectionVisibility(coll))
-        for subcoll in coll.children:
-            self.recursive_ensure_vis(subcoll)
-
-    def restore_vis(self) -> None:
-        for vis_coll in self._vis_colls:
-            vis_coll.restore()
-
-        self._vis_colls.clear()
-
-
-def rreplace(s: str, old: str, new: str, occurrence: int) -> str:
-    li = s.rsplit(old, occurrence)
-    return new.join(li)
-
-
-class AssetTransferMapping:
-    """
-    The AssetTransferMapping class represents a mapping between a source and a target.
-    It contains an object mapping which connects each source object with a target
-    object, as well as a collection mapping.
-    The mapping process relies heavily on suffixes, which is why we use
-    MergeCollections that store a suffix as input.
-
-    Instances of this class will be passed to the TaskLayer data transfer function so Users
-    can easily write their merge instructions.
-    """
-
-    def __init__(
-        self,
-        source_coll: bpy.types.Collection,
-        target_coll: bpy.types.Collection,
-    ):
-
-        self._source_coll = source_coll
-        self._target_coll = target_coll
-
-        self._no_match_source_objs: Set[bpy.types.Object] = set()
-        self._no_match_target_objs: Set[bpy.types.Object] = set()
-
-        self._no_match_source_colls: Set[bpy.types.Collection] = set()
-        self._no_match_target_colls: Set[bpy.types.Collection] = set()
-
-        # TODO: the gen_map functions almost have the same code,
-        # refactor them into one function with the right parameters.
-        self.generate_mapping()
-
-    @property
-    def source_coll(self) -> bpy.types.Collection:
-        return self._source_coll
-
-    @property
-    def target_coll(self) -> bpy.types.Collection:
-        return self._target_coll
-
-    @property
-    def no_match_source_objs(self) -> Set[bpy.types.Object]:
-        """
-        All objects that exist in the source but not in the target
-        """
-        return self._no_match_source_objs
-
-    @property
-    def no_match_target_objs(self) -> Set[bpy.types.Object]:
-        """
-        All objects that exist in the target but not in the source
-        """
-        return self._no_match_target_objs
-
-    @property
-    def no_match_source_colls(self) -> Set[bpy.types.Collection]:
-        """
-        All collections that exist in the source but not in the target
-        """
-        return self._no_match_source_colls
-
-    @property
-    def no_match_target_colls(self) -> Set[bpy.types.Collection]:
-        """
-        All collections that exist in the target but not in the source
-        """
-        return self._no_match_target_colls
-
-    def generate_mapping(self) -> None:
-        self._object_map = self._gen_object_map()
-        self._collection_map = self._gen_collection_map()
-        self._material_map = self._gen_material_map()
-
-    def _gen_object_map(self) -> Dict[bpy.types.Object, bpy.types.Object]:
-        """
-        Tries to link all objects in the source collection to an object in the
-        target collection. Uses suffixes to match them up.
-        """
-
-        object_map: Dict[bpy.types.Object, bpy.types.Object] = {}
-
-        for source_obj in self.source_coll.all_objects:
-
-            # assert source_obj.name.endswith(self._source_merge_coll.suffix)
-
-            # Replace the source object suffix with the target suffix to get the target object.
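-            # (e.g. "chair.TASK" -> "chair.TARGET"; rreplace() swaps only the
-            # right-most occurrence, so a suffix-like substring earlier in the
-            # name is left untouched.)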
- target_obj_name = rreplace( - source_obj.name, - self._source_coll.bsp_asset.transfer_suffix, - self._target_coll.bsp_asset.transfer_suffix, - 1, - ) - try: - target_obj = self._target_coll.all_objects[target_obj_name] - except KeyError: - logger.debug( - "Failed to find match obj %s for %s", - target_obj_name, - source_obj.name, - ) - self._no_match_source_objs.add(source_obj) - continue - else: - object_map[source_obj] = target_obj - # logger.debug( - # "Found match: source: %s target: %s", - # source_obj.name, - # target_obj.name, - # ) - - # Populate no match target set. - match_target_objs = set([obj for obj in object_map.values()]) - self._no_match_target_objs = ( - set(self.target_coll.all_objects) - match_target_objs - ) - - return object_map - - def _gen_collection_map(self) -> Dict[bpy.types.Collection, bpy.types.Collection]: - """ - Tries to link all source collections to a target collection. - Uses suffixes to match them up. - """ - coll_map: Dict[bpy.types.Collection, bpy.types.Collection] = {} - - # Link top most parents. - coll_map[self.source_coll] = self.target_coll - - # Link up all children. - for s_coll in util.traverse_collection_tree(self.source_coll): - - # assert source_obj.name.endswith(self._source_merge_coll.suffix) - - # Replace source object suffix with target suffix to get target object. - target_coll_name = rreplace( - s_coll.name, - self._source_coll.bsp_asset.transfer_suffix, - self._target_coll.bsp_asset.transfer_suffix, - 1, - ) - try: - t_coll = bpy.data.collections[target_coll_name] - except KeyError: - logger.debug( - "Failed to find match collection %s for %s", - s_coll.name, - target_coll_name, - ) - self._no_match_source_colls.add(s_coll) - continue - else: - coll_map[s_coll] = t_coll - # logger.debug( - # "Found match: source: %s target: %s", - # s_coll.name, - # t_coll.name, - # ) - - all_tgt_colls = set(self.target_coll.children_recursive) - all_tgt_colls.add(self.target_coll) - match_target_colls = set([coll for coll in coll_map.values()]) - self._no_match_target_colls = all_tgt_colls - match_target_colls - - return coll_map - - def _gen_material_map(self) -> Dict[bpy.types.Material, bpy.types.Material]: - material_map: Dict[bpy.types.Material, bpy.types.Material] = {} - - source_materials: List[bpy.types.Material] = self._get_all_materials_of_coll( - self.source_coll - ) - target_materials_dict: Dict[ - str, bpy.types.Material - ] = self._get_all_materials_of_coll(self.target_coll, as_dict=True) - - # Link up all children. - for s_mat in source_materials: - - # assert s_mat.name.endswith(self._source_merge_coll.suffix) - - # Replace source object suffix with target suffix to get target object. - target_mat_name = rreplace( - s_mat.name, - self._source_coll.bsp_asset.transfer_suffix, - self._target_coll.bsp_asset.transfer_suffix, - 1, - ) - try: - t_mat = target_materials_dict[target_mat_name] - except KeyError: - logger.debug( - "Failed to find match material %s for %s", - s_mat.name, - target_mat_name, - ) - continue - else: - material_map[s_mat] = t_mat - # logger.debug( - # "Found match: source: %s target: %s", - # s_mat.name, - # t_mat.name, - # ) - - return material_map - - def _get_all_materials_of_coll( - self, coll: bpy.types.Collection, as_dict: bool = False - ) -> Union[List[bpy.types.Material], Dict[str, bpy.types.Material]]: - materials: List[bpy.types.Material] = [] - for obj in coll.all_objects: - for ms in obj.material_slots: - m = ms.material - - # Material can be None. 
- if not m: - continue - - if m in materials: - continue - - materials.append(m) - - # Return list. - if not as_dict: - return materials - - # Return dict. - materials_dict = {} - for mat in materials: - materials_dict[mat.name] = mat - return materials_dict - - @property - def object_map(self) -> Dict[bpy.types.Object, bpy.types.Object]: - """ - Key: Source - Value: Target - """ - return self._object_map - - @property - def collection_map(self) -> Dict[bpy.types.Collection, bpy.types.Collection]: - """ - Key: Source - Value: Target - """ - return self._collection_map - - @property - def material_map(self) -> Dict[bpy.types.Material, bpy.types.Material]: - """ - Key: Source - Value: Target - """ - return self._material_map diff --git a/scripts-blender/addons/asset_pipeline/builder/asset_suffix.py b/scripts-blender/addons/asset_pipeline/builder/asset_suffix.py deleted file mode 100644 index c3afff4b..00000000 --- a/scripts-blender/addons/asset_pipeline/builder/asset_suffix.py +++ /dev/null @@ -1,69 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter -import logging - -from typing import List, Dict, Union, Any, Set, Optional, Tuple, Generator - -import bpy -from bpy_extras.id_map_utils import get_id_reference_map, get_all_referenced_ids - -from .. import constants -from ..util import get_storage_of_id - -logger = logging.getLogger("BSP") - - -def remove_suffix_from_hierarchy( - collection: bpy.types.Collection, delimiter: str = constants.DELIMITER -): - """Removes the suffix after a set delimiter from all datablocks - referenced by a collection, itself included""" - - ref_map = get_id_reference_map() - datablocks = get_all_referenced_ids(collection, ref_map) - datablocks.add(collection) - for db in datablocks: - if db.library: - # Don't rename linked datablocks. - continue - try: - db.name = delimiter.join(db.name.split(delimiter)[:-1]) - except: - pass - - -def add_suffix_to_hierarchy(collection: bpy.types.Collection, suffix: str): - """Add a suffix to the names of all datablocks referenced by a collection, - itself included.""" - - ref_map = get_id_reference_map() - datablocks = get_all_referenced_ids(collection, ref_map) - datablocks.add(collection) - for db in datablocks: - if db.library: - # Don't rename linked datablocks. 
- continue - collision_db = get_storage_of_id(db).get(db.name+suffix) - if collision_db: - collision_db.name += '.OLD' - try: - db.name += suffix - except: - pass diff --git a/scripts-blender/addons/asset_pipeline/builder/blstarter.py b/scripts-blender/addons/asset_pipeline/builder/blstarter.py deleted file mode 100644 index a37c179b..00000000 --- a/scripts-blender/addons/asset_pipeline/builder/blstarter.py +++ /dev/null @@ -1,50 +0,0 @@ -# ##### BEGIN GPL LICENSE BLOCK ##### -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. -# -# ##### END GPL LICENSE BLOCK ##### - -# - -# This file was made by Jeroen Bakker in the shot-builder repository: -# https://developer.blender.org/diffusion/BSTS/browse/master/shot-builder/shot_builder/sys_utils -import logging -import subprocess - -from pathlib import Path -from typing import List, Dict, Union, Any, Optional - -logger = logging.getLogger("BSP") - -import bpy - - -class BuilderBlenderStarter: - - path: Path = Path(bpy.app.binary_path) - publish_script: Path = Path(__file__).parent.joinpath("scripts/push.py") - - @classmethod - def start_publish(cls, filepath: Path, pickle_path: Path) -> subprocess.Popen: - cmd_str = ( - f'"{cls.path.as_posix()}" "{filepath.as_posix()}"' - ' -b' - # ' --factory-startup' - # f' --addons blender_kitsu,asset_pipeline' - f' -P "{cls.publish_script.as_posix()}"' - f' -- "{pickle_path.as_posix()}"' - ) - popen = subprocess.Popen(cmd_str, shell=True) - return popen diff --git a/scripts-blender/addons/asset_pipeline/builder/context.py b/scripts-blender/addons/asset_pipeline/builder/context.py deleted file mode 100644 index d3d086d1..00000000 --- a/scripts-blender/addons/asset_pipeline/builder/context.py +++ /dev/null @@ -1,682 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter - -""" -The asset-pipeline works heavily with the concept of Contexts. -There are 3 types of contexts: - -ProductionContext: Global production level context, gets loaded on startup, processes all the config files. - -AssetContext: Local Asset Context, gets loaded on each scene load. 
Stores settings and information for active Asset. - -BuildContext: Gets loaded when starting a publish or a pull. Contains both the ProductionContext and AssetContext -as well as some other data. Is the actual context that gets processed by the AssetBuilder. - -A key feature is that we need to be able to 'exchange' this information with another blend file. As the actual -transfer process requires to: -open another blend file -> load the build context there -> process it -> close it again. -This can be achieved by using the `pickle` library and pickle the Contexts. All the contexts are pickleable. -""" - -import importlib -import logging - -from typing import List, Dict, Union, Any, Set, Optional -from types import ModuleType, FunctionType - -from pathlib import Path - -import bpy - -from .task_layer import TaskLayer, TaskLayerAssembly -from .hook import Hooks - -from .. import constants, prop_utils -from ..sys_utils import SystemPathInclude -from ..asset_files import AssetDir, AssetPublish, AssetTask - -logger = logging.getLogger("BSP") - - -class ProdContextFailedToInitialize(Exception): - pass - - -class AssetContextFailedToInitialize(Exception): - pass - - -class BuildContextFailedToInitialize(Exception): - pass - - -class InvalidTaskLayerDefinition(Exception): - pass - - -class ProcessPair: - """ - Simple Class that stores a logically connected target and a pull from path. - """ - - def __init__(self, asset_task: AssetTask, asset_publish: AssetPublish) -> None: - self.asset_task = asset_task - self.asset_publish = asset_publish - - def __eq__(self, other: object) -> bool: - if not isinstance(other, ProcessPair): - raise NotImplementedError() - - return bool( - self.asset_task == other.asset_task - and self.asset_publish == other.asset_publish - ) - - def __hash__(self) -> int: - return hash((self.asset_task, self.asset_publish)) - - -class ProductionContext: - - """ - A context that represents configuration on a Production Level. - Independent from Blender, no bpy access. This context mostly holds - the defined TaskLayers in the config files, the transfer settings and the hooks. - """ - - def __init__(self, config_folder: Path): - - if not config_folder or not config_folder.exists(): - raise ProdContextFailedToInitialize( - f"Failed to init ProductionContext. Invalid config folder: {config_folder}" - ) - - self._task_layers: List[type[TaskLayer]] = [] - self._transfer_settings: Optional[type[bpy.types.PropertyGroup]] = None - self._config_folder: Path = config_folder - self._module_of_task_layers: Optional[ModuleType] = None - self._module_of_hooks: Optional[ModuleType] = None - self._hooks = Hooks() - - # Load configs from config_folder. - self._collect_configs() - logger.debug("Initialized Production Context") - - @property - def config_folder(self) -> Path: - return self._config_folder - - @property - def task_layers(self) -> List[type[TaskLayer]]: - return self._task_layers - - def get_task_layer_orders(self) -> List[int]: - """ - Returns a list of all TaskLayers.order values. - """ - return [t.order for t in self.task_layers] - - def _collect_configs(self) -> None: - - # Add config folder temporarily to sys.path for convenient - # import. - - with SystemPathInclude([self._config_folder]): - - # Load Task Layers. - # TODO: information duplicated in add-on preferences - # Make it DRY - - # Check if task layers module was already imported. - # TODO: does not work perfectly, if we remove a TaskLayer from - # config file and then reload, it's still there. 
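-            # (importlib.reload() re-executes the module on top of its existing
-            # namespace, so names deleted from the source file survive the reload.)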
- # https://stackoverflow.com/questions/2918898/prevent-python-from-caching-the-imported-modules - if self._module_of_task_layers: - # Reload it so Users won't have to restart Blender. - self._module_of_task_layers = importlib.reload( - self._module_of_task_layers - ) - else: - import task_layers as prod_task_layers - - self._module_of_task_layers = prod_task_layers - - # Crawl module for TaskLayers. - self._collect_prod_task_layers() - self._collect_prod_transfer_settings() - - try: - import hooks - - except ModuleNotFoundError: - logger.debug( - "Found no 'hooks' module in: %s", self._config_folder.as_posix() - ) - self._module_of_hooks = None - - else: - self._module_of_hooks = hooks - self._collect_prod_hooks() - - def _collect_prod_task_layers(self) -> None: - - # Clear task layer list, otherwise we will add new but don't - # remove old. - self._task_layers.clear() - module = self._module_of_task_layers - - # Find all valid TaskLayer Classes. - for module_item_str in dir(module): - module_item = getattr(module, module_item_str) - - # This checks that the module item is a class definition - # and not e.G and instance of that class. - if module_item.__class__ != type: - continue - - if not issubclass(module_item, TaskLayer): - continue - - # We don't want to collect to Root TaskLayer class. - # Only classes that inherit from it. - if module_item == TaskLayer: - continue - - # Checks e.G that 'name' class attribute is set. - if not module_item.is_valid(): - if module_item.order < 0: - raise InvalidTaskLayerDefinition( - f"Invalid TaskLayer {str(module_item)} Order attribute not set.", - ) - if not module_item.name: - raise InvalidTaskLayerDefinition( - f"Invalid Task Layer {str(module_item)} Name attribute not set.", - ) - continue - - self._task_layers.append(module_item) - - # Check if any TaskLayers have the same order. - self._validate_task_layer_orders() - - # Sort TaskLayers after order attribute. - self._task_layers.sort(key=lambda tl: tl.order) - - if self.task_layers: - logger.info(f"Detected Production TaskLayers: {self.task_layers}") - - def _collect_prod_hooks(self) -> None: - - module = self._module_of_hooks - self._hooks = Hooks() - - for module_item_str in dir(module): - module_item = getattr(module, module_item_str) - # Skip non functions. - if not isinstance(module_item, FunctionType): - continue - # Skip functions of other modules. - if module_item.__module__ != module.__name__: - continue - # @hook() decorator adds this attribute which make a hook - # distinguishable from a regular function. - # Note: @hook() needs to be called otherwise this check - # will fail. - if not hasattr(module_item, constants.HOOK_ATTR_NAME): - continue - - self._hooks.register(module_item) - - if self._hooks: - logger.info(f"Detected Production Hooks: {self._hooks.callables}") - - def _collect_prod_transfer_settings(self) -> None: - """ - Here we search the task_layers.py module for a class that is - named as defined in constants.TRANSFER_SETTINGS_NAME. This is supposed to be - a regular Blender PropertyGroup. In this PropertyGroup Users can define - regular blender Properties that represent a setting to customize the - transfer data process. This PropertyGroup will be registered on scene level - and can then be easily queried in the transfer data function of the TaskLayer. - That way Users can provide themselves options to use in their code. - This options are also displayed in the Blender AssetPipeline Panel automatically. 
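-        A minimal, illustrative example of such a PropertyGroup (the class name
-        must match constants.TRANSFER_SETTINGS_NAME)::
-
-            class TransferSettings(bpy.types.PropertyGroup):
-                transfer_materials: bpy.props.BoolProperty(
-                    name="Transfer Materials", default=True
-                )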
- """ - self._transfer_settings = None - module = self._module_of_task_layers - - try: - prop_group = getattr(module, constants.TRANSFER_SETTINGS_NAME) - except AttributeError: - logger.info( - "No Transfer Settings loaded. Failed to find %s variable.", - constants.TRANSFER_SETTINGS_NAME, - ) - else: - # Check if prop group is actually of type PropertyGroup. - if not issubclass(prop_group, bpy.types.PropertyGroup): - raise ProdContextFailedToInitialize( - f"{constants.TRANSFER_SETTINGS_NAME} must be subclass of bpy.types.PropertyGroup" - ) - self._transfer_settings = prop_group - try: - bpy.utils.unregister_class(prop_group) - except RuntimeError: - bpy.utils.register_class(prop_group) - # Scene Asset Pipeline Properties. - bpy.types.Scene.bsp_asset_transfer_settings = bpy.props.PointerProperty( - type=prop_group - ) - - logger.info(f"Detected Transfer Settings: {self._transfer_settings}") - logger.info( - f"Registered Transfer Settings: bpy.types.Scene.bsp_asset_transfer_settings" - ) - - def _validate_task_layer_orders(self) -> None: - for i in range(len(self._task_layers)): - tl = self._task_layers[i] - - for j in range(i + 1, len(self._task_layers)): - tl_comp = self._task_layers[j] - if tl.order == tl_comp.order: - raise InvalidTaskLayerDefinition( - f"Invalid Task Layer {str(tl)} has some 'order' as {str(tl_comp)}.", - ) - - @property - def hooks(self) -> Hooks: - return self._hooks - - def __repr__(self) -> str: - header = "\nPRODUCTION CONTEXT\n------------------------------------" - footer = "------------------------------------" - prod_task_layers = ( - f"Production Task Layers: {[t.name for t in self._task_layers]}" - ) - return "\n".join([header, prod_task_layers, footer]) - - def __getstate__(self) -> Dict[str, Any]: - # Pickle uses this function to generate a dictionary which it uses - # to pickle the instance. - # Here we can basically overwrite this dictionary, for example to - # delete some properties that pickle can't handle. - - # Pickle cannot store module objects. - state = self.__dict__.copy() - state["_module_of_task_layers"] = None - state["_module_of_hooks"] = None - return state - - def __setstate__(self, state: Dict[str, Any]) -> None: - # Pickle uses a state Dictionary to restore the instance attributes. - # In this function we can overwrite this behavior and restore - # data that pickle wasn't able to store - - self.__dict__.update(state) - - # Restore module object. - with SystemPathInclude([self.config_folder]): - import task_layers as prod_task_layers - - try: - import hooks - - except ModuleNotFoundError: - hooks = None - - self._module_of_task_layers = prod_task_layers - self._module_of_hooks = hooks - - -class AssetContext: - - """ - The Asset Context gets updated on each scene load. It holds all information that are related - to the current Asset. This includes the current Asset Collection, Asset Task, available Asset Publishes, - the Asset Directory, the configuration of Task Layers (which ones are enabled and disabled) - and the Transfer Settings. - """ - - def __init__(self, bl_context: bpy.types.Context, prod_context: ProductionContext): - - # Check if bl_context and config_folder are valid. - if not all([bl_context, bl_context.scene.bsp_asset.asset_collection]): - raise AssetContextFailedToInitialize( - "Failed to initialize AssetContext. Invalid blender_context or asset collection not set." - ) - # Check if file is saved. - if not bpy.data.filepath: - raise AssetContextFailedToInitialize( - "Failed to initialize AssetContext. 
File not saved" - ) - - self._bl_context: bpy.types.Context = bl_context - self._asset_collection: bpy.types.Collection = ( - bl_context.scene.bsp_asset.asset_collection - ) - self._task_layer_assembly = TaskLayerAssembly(prod_context._task_layers) - self._asset_dir = AssetDir(Path(bpy.data.filepath).parent) - self._asset_task = AssetTask(Path(bpy.data.filepath)) - self._asset_publishes: List[AssetPublish] = [] - - # Transfer settings are stored in a PropertyGroup on scene level. - # We cannot pickle those. So what we do is write them in a dictionary here - # before publish and restore the settings when we open the other blend file. - self._transfer_settings: Dict[str, Any] = {} - - # TODO: Load custom Task Layers. - self._custom_task_layers: List[Any] = [] - - self._collect_asset_publishes() - logger.debug("Initialized Asset Context") - - @property - def asset_collection(self) -> bpy.types.Collection: - return self._asset_collection - - @property - def asset_name(self) -> str: - return self.asset_collection.bsp_asset.entity_name - - @property - def asset_task(self) -> AssetTask: - return self._asset_task - - @property - def asset_dir(self) -> AssetDir: - return self._asset_dir - - @property - def asset_publishes(self) -> List[AssetPublish]: - return self._asset_publishes - - @property - def task_layer_assembly(self) -> TaskLayerAssembly: - return self._task_layer_assembly - - @property - def transfer_settings(self) -> Dict[str, Any]: - return self._transfer_settings - - def reload_asset_publishes(self) -> None: - self._collect_asset_publishes() - - def reload_asset_publishes_metadata(self) -> None: - for asset_publish in self.asset_publishes: - asset_publish.reload_metadata() - - def update_from_bl_context_pull(self, bl_context: bpy.types.Context) -> None: - self._bl_context = bl_context - self._asset_collection = bl_context.scene.bsp_asset.asset_collection - self._update_task_layer_assembly_from_context_pull(bl_context) - self._update_transfer_settings_from_context(bl_context) - - def update_from_bl_context_push(self, bl_context: bpy.types.Context) -> None: - self._bl_context = bl_context - self._asset_collection = bl_context.scene.bsp_asset.asset_collection - self._update_task_layer_assembly_from_context_push(bl_context) - self._update_transfer_settings_from_context(bl_context) - - def _collect_asset_publishes(self) -> None: - self._asset_publishes.clear() - self._asset_publishes.extend(self._asset_dir.get_asset_publishes()) - - def _update_task_layer_assembly_from_context_pull( - self, bl_context: bpy.types.Context - ) -> None: - # Update TaskLayerAssembly, to load the - # previously disabled and enabled TaskLayer States. - # They are stored in context.scene.bl_asset.task_layers - - # TODO: we should take in to account that in the meantime - # production TaskLayers could have been updated. 
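        # Illustrative note (names assumed): each item in bsp.task_layers_pull
        # roughly stores {"task_layer_id": "...", "use": bool}, so restoring the
        # selection is a lookup by id below. If the production config changed
        # since the state was saved, an item's task_layer_id may no longer
        # resolve to a TaskLayerConfig (see the TODO above).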
- bsp = bl_context.scene.bsp_asset - for item in bsp.task_layers_pull: - task_layer_config = self.task_layer_assembly.get_task_layer_config( - item.task_layer_id - ) - task_layer_config.use = item.use - - def _update_task_layer_assembly_from_context_push( - self, bl_context: bpy.types.Context - ) -> None: - bsp = bl_context.scene.bsp_asset - for item in bsp.task_layers_push: - task_layer_config = self.task_layer_assembly.get_task_layer_config( - item.task_layer_id - ) - task_layer_config.use = item.use - - def _update_transfer_settings_from_context( - self, bl_context: bpy.types.Context - ) -> None: - for prop_name, prop in prop_utils.get_property_group_items( - bl_context.scene.bsp_asset_transfer_settings - ): - self._transfer_settings[prop_name] = getattr( - bl_context.scene.bsp_asset_transfer_settings, prop_name - ) - - def __repr__(self) -> str: - header = "\nASSET CONTEXT\n------------------------------------" - footer = "------------------------------------" - asset_info = f"Asset: {self.asset_collection.bsp_asset.entity_name}({self.asset_collection})" - task_layer_assembly = str(self.task_layer_assembly) - - return "\n".join( - [ - header, - asset_info, - task_layer_assembly, - footer, - ] - ) - - def __getstate__(self) -> Dict[str, Any]: - - # Pickle cannot pickle blender context or collection. - state = self.__dict__.copy() - state["_bl_context"] = None - state["_restore_asset_collection_name"] = self.asset_collection.name - state["_asset_collection"] = None - return state - - def __setstate__(self, state: Dict[str, Any]) -> None: - self.__dict__.update(state) - asset_coll_name = state["_restore_asset_collection_name"] - asset_coll = bpy.data.collections[asset_coll_name] - self._asset_collection = asset_coll - self._bl_context = bpy.context - - del self._restore_asset_collection_name - logger.info( - "Restored Asset Collection: %s, Context: %s", - str(self._asset_collection), - str(self._bl_context), - ) - - -class BuildContext: - - """ - Class that should function as Context for the asset build. - Here we want to store everything that is relevant for the build. - The Builder will process this context. - Should be updated on start publish/pull and only be relevant for publish/pull. - """ - - def __init__( - self, - prod_context: ProductionContext, - asset_context: AssetContext, - ): - if not all([prod_context, asset_context]): - raise BuildContextFailedToInitialize( - "Failed to initialize Build Context. Production or Asset Context not initialized." - ) - - self._prod_context: ProductionContext = prod_context - self._asset_context: AssetContext = asset_context - self._process_pairs: List[ProcessPair] = [] - self.is_push: bool = False # Only for TaskLayer.transfer_data() to know if its push or pull. - - self._collect_process_pairs() - - def _collect_process_pairs(self) -> None: - # Here we want to loop through all asset publishes and - # create a list of process pairs out of it. - # This is the place where we perform the logic of checking - # which task layers the user selected in self._asset_context.task_layer_assembly - # and then reading the metadata of each asset publish and check where the corresponding - # task layers are live. 
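        # A small worked example (publish names assumed): with only "rigging"
        # enabled and publishes v001 (rigging locked) and v002 (rigging live),
        # v001 yields no pair because every enabled task layer is locked there,
        # while v002 yields (asset_task, v002).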
- # The result of this is a list of process pairs(target, pull_from) that - # the AssetBuilder needs to process - self._process_pairs.clear() - - process_pairs_set = set() - - tl_assembly = self._asset_context.task_layer_assembly - task_layers_enabled = tl_assembly.get_used_task_layers() - - for asset_publish in self.asset_publishes: - - # For this asset publish get all locked task layers IDs. - locked_task_layer_ids = asset_publish.metadata.get_locked_task_layer_ids() - - # Check if there is any enabled Task Layer ID that is not in the locked IDs. - for tl in task_layers_enabled: - if tl.get_id() not in locked_task_layer_ids: - process_pairs_set.add(ProcessPair(self.asset_task, asset_publish)) - - self._process_pairs.extend(list(process_pairs_set)) - self._process_pairs.sort(key=lambda x: x.asset_publish.path.name) - - @property - def prod_context(self) -> ProductionContext: - return self._prod_context - - @property - def asset_context(self) -> AssetContext: - return self._asset_context - - @property - def asset_task(self) -> AssetTask: - return self.asset_context.asset_task - - @property - def asset_dir(self) -> AssetDir: - return self.asset_context.asset_dir - - @property - def asset_publishes(self) -> List[AssetPublish]: - return self.asset_context.asset_publishes - - @property - def process_pairs(self) -> Set[ProcessPair]: - return self._process_pairs - - def __repr__(self) -> str: - header = "\nBUILD CONTEXT\n------------------------------------" - footer = "------------------------------------" - asset_task = f"Asset Task: {str(self.asset_task)}" - asset_disk_name = f"Asset Disk Name: {self.asset_dir.asset_disk_name}" - asset_dir = f"Asset Dir: {str(self.asset_dir)}" - return "\n".join( - [ - header, - asset_disk_name, - asset_task, - asset_dir, - str(self.prod_context), - str(self.asset_context), - footer, - ] - ) - - def get_hook_kwargs(self, context: bpy.types.Context) -> Dict[str, Any]: - return { - "asset_collection": context.scene.bsp_asset.asset_collection, - "context": context, - "asset_task": self.asset_task, - "asset_dir": self.asset_context.asset_dir, - } - - -class UndoContext: - """ - This should be a context that we can populate along the way of starting a publish and actually publishing. - The idea is that we can add 'undo' steps that we can then undo() if users aborts the publish. - The point of it is to mainly be able to revert the filesystem and other things that happen between starting - the publish and aborting it. - These steps will also be mirrored on the scene Property group so you can actually start a publish - open another scene and still abort it and it will undo the correct things. - """ - - def __init__(self): - self._asset_publishes: List[AssetPublish] = [] - - @property - def asset_publishes(self) -> List[AssetPublish]: - return self._asset_publishes - - def has_steps_files_create(self) -> bool: - return bool(self._asset_publishes) - - def add_step_publish_create( - self, bl_context: bpy.types.Context, asset_publish: AssetPublish - ) -> None: - # Add to self context. - self._asset_publishes.append(asset_publish) - - # Add to scene, to restore on load. - bl_context.scene.bsp_asset.undo_context.add_step_asset_publish_create( - asset_publish - ) - - logger.debug("Created file creation undo step: %s", asset_publish.path.name) - - def undo(self, bl_context: bpy.types.Context) -> None: - - # Delete files. - for asset_publish in self._asset_publishes: - if asset_publish.path.exists(): - logger.info( - "Undoing file creation. 
Delete: [%s, %s]", - asset_publish.path.name, - asset_publish.metadata_path.name, - ) - asset_publish.unlink() - - # Clear. - self.clear(bl_context) - - def update_from_bl_context(self, bl_context: bpy.types.Context) -> None: - - self._asset_publishes.clear() - - for item in bl_context.scene.bsp_asset.undo_context.files_created: - self._asset_publishes.append(AssetPublish(item.path)) - - def clear(self, bl_context: bpy.types.Context) -> None: - # Clear self steps. - self._asset_publishes.clear() - - # Clear scene. - bl_context.scene.bsp_asset.undo_context.clear() diff --git a/scripts-blender/addons/asset_pipeline/builder/hook.py b/scripts-blender/addons/asset_pipeline/builder/hook.py deleted file mode 100644 index 4de51c0e..00000000 --- a/scripts-blender/addons/asset_pipeline/builder/hook.py +++ /dev/null @@ -1,161 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter - - -# The Hook system for the Asset Builder is copied over from the shot-builder, developed by @Jeroen Bakker -# https://developer.blender.org/diffusion/BSTS/browse/master/shot-builder/ - -import logging -from typing import ( - Optional, - Any, - Set, - Tuple, - List, - Type, - Callable, - Dict, - cast, - Union, - Iterator, -) -from types import FunctionType, ModuleType -from pathlib import Path - -from .. import constants - -logger = logging.getLogger(name="BSP") - - -class Wildcard: - pass - - -class DoNotMatch: - pass - - -MatchCriteriaType = Union[str, List[str], Type[Wildcard], Type[DoNotMatch]] -""" -The MatchCriteriaType is a type definition for the parameters of the `hook` decorator. - -The matching parameters can use multiple types to detect how the matching criteria -would work. - -* `str`: would perform an exact string match. -* `Iterator[str]`: would perform an exact string match with any of the given strings. -* `Type[Wildcard]`: would match any type for this parameter. This would be used so a hook - is called for any value. -* `Type[DoNotMatch]`: would ignore this hook when matching the hook parameter. This is the default - value for the matching criteria and would normally not be set directly in a - production configuration. -""" - -MatchingRulesType = Dict[str, MatchCriteriaType] -""" -Hooks are stored as `constants.HOOK_ATTR_NAME' attribute on the function. -The MatchingRulesType is the type definition of the `constants.HOOK_ATTR_NAME` attribute. 
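For example (values assumed for illustration), a hook registered with
@hook(match_asset="Sprite", match_task_layers=["Rigging"]) would carry:

    {
        "match_asset_type": DoNotMatch,
        "match_asset": "Sprite",
        "match_task_layers": ["Rigging"],
    }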
-""" - -HookFunction = Callable[[Any], None] - - -def _match_hook_parameter( - hook_criteria: MatchCriteriaType, match_query: Optional[str] -) -> bool: - - # print(f"hook_criteria: {hook_criteria} | match_query: {match_query}") - - if hook_criteria == DoNotMatch: - return match_query is None - - if hook_criteria == Wildcard: - return True - - if isinstance(hook_criteria, str): - return match_query == hook_criteria - - if isinstance(hook_criteria, list): - return match_query in hook_criteria - - logger.error(f"Incorrect matching criteria {hook_criteria}, {match_query}") - return False - - -class Hooks: - def __init__(self): - self._hooks: List[HookFunction] = [] - - def register(self, func: HookFunction) -> None: - # logger.info(f"Registering hook '{func.__name__}'") - self._hooks.append(func) - - @property - def callables(self) -> List[HookFunction]: - return self._hooks - - def matches( - self, - hook: HookFunction, - match_asset_type: Optional[str] = None, - match_asset: Optional[str] = None, - match_task_layers: Optional[str] = None, # Could be List[str] - **kwargs: Optional[str], - ) -> bool: - assert not kwargs - rules = cast(MatchingRulesType, getattr(hook, constants.HOOK_ATTR_NAME)) - return all( - ( - _match_hook_parameter(rules["match_asset_type"], match_asset_type), - _match_hook_parameter(rules["match_asset"], match_asset), - _match_hook_parameter(rules["match_task_layers"], match_task_layers), - ) - ) - - def filter(self, **kwargs: Optional[str]) -> Iterator[HookFunction]: - for hook in self._hooks: - if self.matches(hook=hook, **kwargs): - yield hook - - def __bool__(self) -> bool: - return bool(self._hooks) - - -def hook( - match_asset_type: MatchCriteriaType = DoNotMatch, - match_asset: MatchCriteriaType = DoNotMatch, - match_task_layers: MatchCriteriaType = DoNotMatch, -) -> Callable[[FunctionType], FunctionType]: - """ - Decorator to add custom logic when building a shot. - - Hooks are used to extend the configuration that would be not part of the core logic of the shot builder tool. - """ - rules = { - "match_asset_type": match_asset_type, - "match_asset": match_asset, - "match_task_layers": match_task_layers, - } - - def wrapper(func: FunctionType) -> FunctionType: - setattr(func, constants.HOOK_ATTR_NAME, rules) - return func - - return wrapper diff --git a/scripts-blender/addons/asset_pipeline/builder/lock_plan.py b/scripts-blender/addons/asset_pipeline/builder/lock_plan.py deleted file mode 100644 index 22645817..00000000 --- a/scripts-blender/addons/asset_pipeline/builder/lock_plan.py +++ /dev/null @@ -1,71 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
-# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter - -import logging - -from typing import List, Dict, Union, Any, Set, Optional, Tuple - -from .task_layer import TaskLayer - -from ..asset_files import AssetPublish - -logger = logging.getLogger("BSP") - - -class TaskLayerLockPlan: - """ - When creating a new incrementation of an asset publish we need to somehow store - from which previous asset publishes which task layer will be locked. - This is automatically calculated, but this information should also be displayed in the UI. - This class helps with that. This class can also actually lock the task layers. - """ - - def __init__( - self, asset_publish: AssetPublish, task_layers_to_lock: List[TaskLayer] - ): - self._asset_publish = asset_publish - self._task_layers_to_lock = task_layers_to_lock - - @property - def asset_publish(self) -> AssetPublish: - return self._asset_publish - - @property - def task_layers_to_lock(self) -> List[TaskLayer]: - return self._task_layers_to_lock - - def get_task_layer_ids_to_lock(self) -> List[str]: - return [tl.get_id() for tl in self.task_layers_to_lock] - - def lock(self) -> None: - - """ - Sets the is_locked attribute of each TaskLayer to lock in writes - metadata to disk. - """ - for meta_task_layer in self.asset_publish.metadata.meta_task_layers: - - if ( - not meta_task_layer.is_locked - and meta_task_layer.id in self.get_task_layer_ids_to_lock() - ): - meta_task_layer.is_locked = True - - self.asset_publish.write_metadata() diff --git a/scripts-blender/addons/asset_pipeline/builder/meta_util.py b/scripts-blender/addons/asset_pipeline/builder/meta_util.py deleted file mode 100644 index d4e31ccd..00000000 --- a/scripts-blender/addons/asset_pipeline/builder/meta_util.py +++ /dev/null @@ -1,71 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter - -import logging - -import socket -from dataclasses import asdict -from datetime import datetime -from typing import List, Dict, Union, Any, Set, Optional, Tuple -from pathlib import Path - -import bpy - -from .task_layer import TaskLayer -from .metadata import MetadataTaskLayer, MetadataUser -from ..asset_files import AssetTask, AssetPublish - -from .. 
import constants
-
-try:
-    from .util import is_addon_active
-    import blender_kitsu.cache
-    kitsu_available = True
-except ImportError:
-    kitsu_available = False
-
-logger = logging.getLogger("BSP")
-
-
-def init_meta_task_layer(
-    task_layer: type[TaskLayer], source_asset_file: Union[AssetTask, AssetPublish]
-) -> MetadataTaskLayer:
-
-    d: Dict[str, Any] = {}
-    time = datetime.now()
-
-    d["id"] = task_layer.get_id()
-    d["name"] = task_layer.name
-
-    d["source_revision"] = ""  # TODO:
-    d["source_path"] = source_asset_file.path_relative_to_asset_dir.as_posix()
-    d["is_locked"] = False
-
-    d["created_at"] = time.strftime(constants.TIME_FORMAT)
-    d["updated_at"] = time.strftime(constants.TIME_FORMAT)
-    d["software_hash"] = bpy.app.build_hash.decode()
-    d["hostname"] = socket.gethostname()
-
-    user_dict = dict()
-    if kitsu_available and is_addon_active("blender_kitsu"):
-        user_dict = asdict(blender_kitsu.cache.user_active_get())
-    d["author"] = MetadataUser.from_dict(user_dict)
-
-    return MetadataTaskLayer.from_dict(d)
diff --git a/scripts-blender/addons/asset_pipeline/builder/metadata.py b/scripts-blender/addons/asset_pipeline/builder/metadata.py
deleted file mode 100644
index afddd65f..00000000
--- a/scripts-blender/addons/asset_pipeline/builder/metadata.py
+++ /dev/null
@@ -1,421 +0,0 @@
-# ***** BEGIN GPL LICENSE BLOCK *****
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-#
-# ***** END GPL LICENCE BLOCK *****
-#
-# (c) 2021, Blender Foundation - Paul Golter
-
-"""
-The idea here is to have Schemas in the form of Python `Dataclasses` that can be converted to their equivalent XML Elements. That way we have a clear definition of what kind of fields are expected and available.
-Schemas can have nested Dataclasses. The conversion from Dataclass to XML Element happens in the `ElementMetadata` class and is automated.
-Metadata Classes can also be generated from ElementClasses. This conversion happens in the `from_element()` function.
-
-The code base should only work with Dataclasses.
-That means it is forbidden to import Element[] classes; the conversion from and to Dataclasses is only handled in this module.
-
-That results in this logic:
-A: Saving Metadata to file:
-    -> MetadataClass -> ElementClass -> XML File on Disk
-B: Loading Metadata from file:
-    -> XML File on Disk -> ElementClass -> MetadataClass
-
-"""
-
-import inspect
-import logging
-
-from typing import List, Dict, Union, Any, Set, Optional, Tuple, TypeVar, Callable
-from dataclasses import dataclass, asdict, field, fields
-from pathlib import Path
-
-from xml.etree import ElementTree as ET
-from xml.etree.ElementTree import Element, ElementTree
-from xml.dom import minidom
-
-from ..asset_status import AssetStatus
-
-logger = logging.getLogger("BSP")
-
-M = TypeVar("M", bound="MetadataClass")
-E = TypeVar("E", bound="ElementMetadata")
-
-
-class FailedToInitAssetElementTree(Exception):
-    pass
-
-
-class FailedToInitMetadataTaskLayer(Exception):
-    pass
-
-
-def prettify(element: Element) -> str:
-    xmlstr = ET.tostring(element, "utf-8")
-    reparse: minidom.Document = minidom.parseString(xmlstr)
-    pretty_str: bytes = reparse.toprettyxml(indent="  ", encoding="utf-8")
-    return pretty_str.decode()
-
-
-def write_element_tree_to_file(filepath: Path, tree: ElementTree) -> None:
-    xmlstr = prettify(tree.getroot())
-    with open(filepath.as_posix(), "w") as f:
-        f.write(xmlstr)
-    # tree.write(filepath.as_posix())
-
-
-def write_asset_metadata_tree_to_file(
-    filepath: Path, asset_metadata_tree: "MetadataTreeAsset"
-) -> None:
-    e_tree = ElementTreeAsset.from_metadata_cls(asset_metadata_tree)
-    write_element_tree_to_file(filepath, e_tree)
-
-
-def load_from_file(filepath: Path) -> ElementTree:
-    return ET.parse(filepath.as_posix())
-
-
-def load_asset_metadata_tree_from_file(filepath: Path) -> "MetadataTreeAsset":
-    tree = load_from_file(filepath)
-    asset_tree = ElementTreeAsset(element=tree.getroot())
-    return MetadataTreeAsset.from_element(asset_tree)
-
-
-def convert_value_for_xml(value: Any) -> Any:
-    """
-    Takes as input a value and converts it so it can
-    be saved to the xml format.
-    """
-    if type(value) == bool:
-        return str(value).lower()
-
-    # TODO: XML does not support Lists, add some functionality to handle the conversion
-    # of lists in Metadata classes to elements.
-    elif type(value) == list:
-        return ""
-
-    elif type(value) == Path:
-        return value.as_posix()
-
-    elif type(value) == AssetStatus:
-        # If value is AssetStatus(Enum),
-        # save the name as str instead of the value(int), so it's
-        # more human readable.
-        return value.name
-
-    return value
-
-
-def convert_value_from_xml(element: Element) -> Any:
-    """
-    Takes as input an element and converts the element.text
-    to a value that works for the MetadataClasses.
-    """
-    value = element.text
-    if value == "false":
-        return False
-    elif value == "true":
-        return True
-    elif element.tag == "status":
-        return getattr(AssetStatus, value)
-    return value
-
-
-def convert_metadata_obj_to_elements(
-    root_element: Element, metadata_class: M
-) -> Element:
-    """
-    This function makes sure that the input MetadataClass
-    will be converted to an element tree. It also handles
-    nested MetadataClasses recursively. The resulting tree of elements
-    will be appended to the input root_element.
-    """
-    # asdict() recursively converts all dataclasses to dicts, even nested ones.
-    # https://docs.python.org/3/library/dataclasses.html#dataclasses.asdict
-    # That's why we need to do it this way, otherwise the issubclass() check for MetadataClass
-    # won't work.
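    # Illustrative example of the pitfall: asdict(meta_task_layer) would turn the
    # nested MetadataUser into a plain dict, so issubclass(type(value), MetadataClass)
    # below could never detect it; fields()/getattr() keeps the dataclass instances.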
-    d = dict(
-        (field.name, getattr(metadata_class, field.name))
-        for field in fields(metadata_class)
-    )
-    for key, value in d.items():
-
-        e = Element(key)
-        # print(f"Processing: {key}:{value}")
-        # print(type(value))
-        if issubclass(type(value), MetadataClass):
-            convert_metadata_obj_to_elements(e, value)
-        else:
-            e.text = convert_value_for_xml(value)
-
-        root_element.append(e)
-
-    return root_element
-
-
-# METADATA CLASSES
-# ----------------------------------------------
-
-
-class MetadataClass:
-    @classmethod
-    def from_dict(cls: type[M], env: Dict[str, Any]) -> M:
-        return cls(
-            **{k: v for k, v in env.items() if k in inspect.signature(cls).parameters}
-        )
-
-    @classmethod
-    def from_element(cls: type[M], element: Element) -> M:
-        d = {}
-        # Take care to only take the first layer with './', otherwise we would
-        # e.g. take the 'id' attribute of author and overwrite it.
-        # Cannot use e.iter().
-        for sub_e in element.findall("./"):
-            d[sub_e.tag] = convert_value_from_xml(sub_e)
-        return cls.from_dict(d)
-
-
-@dataclass
-class MetadataUser(MetadataClass):
-    """
-    Tries to mirror the Kitsu User data structure as much as possible.
-    """
-
-    id: str = "00000000-0000-0000-0000-000000000000"
-    first_name: str = "Unknown"
-    last_name: str = "Unknown"
-    full_name: str = "Unknown"
-
-
-@dataclass
-class MetadataTaskLayer(MetadataClass):
-    id: str
-    name: str
-
-    source_path: str
-    source_revision: str
-    is_locked: bool
-
-    created_at: str
-    updated_at: str
-    author: MetadataUser
-    software_hash: str
-    hostname: str
-
-    # Optional.
-    flags: List[str] = field(default_factory=list)
-
-    @classmethod
-    def from_element(cls: type[M], element: Element) -> M:
-        # For nested Metadata Classes we need to re-implement this.
-        d = {}
-        # Take care to only take the first layer with './', otherwise we would
-        # e.g. take the 'id' attribute of author and overwrite it.
-        # Cannot use e.iter().
-        for sub_e in element.findall("./"):
-            if sub_e.tag == "author":
-                continue
-            d[sub_e.tag] = convert_value_from_xml(sub_e)
-
-        # Convert Author element to MetadataUser.
-        author = element.find(".author")
-        if author is None:
-            raise FailedToInitMetadataTaskLayer(
-                "Expected to find 'author' element in input"
-            )
-        d["author"] = MetadataUser.from_element(author)
-        return cls.from_dict(d)
-
-
-@dataclass
-class MetadataAsset(MetadataClass):
-    """
-    Tries to mirror the Kitsu Asset data structure as much as possible.
-    """
-
-    name: str
-    parent_id: str
-    parent_name: str
-    project_id: str
-
-    version: str
-    status: AssetStatus
-
-    id: str = "00000000-0000-0000-0000-000000000000"
-
-    # Optional.
-    flags: List[str] = field(default_factory=list)
-
-    # This is only a placeholder and will be filled when creating
-    task_layers_production: List[MetadataTaskLayer] = field(default_factory=list)
-
-
-@dataclass
-class MetadataTreeAsset(MetadataClass):
-    meta_asset: MetadataAsset
-    meta_task_layers: List[MetadataTaskLayer]
-
-    @classmethod
-    def from_element(cls: type[M], element: "ElementTreeAsset") -> M:
-        # For nested Metadata Classes we need to re-implement this.
- d = {} - e_asset = element.asset_element - e_task_layers: List[ElementTaskLayer] = element.get_element_task_layers() - d["meta_asset"] = MetadataAsset.from_element(e_asset) - d["meta_task_layers"] = [] - for e_tl in e_task_layers: - m_tl = MetadataTaskLayer.from_element(e_tl) - d["meta_task_layers"].append(m_tl) - - return cls.from_dict(d) - - def get_metadata_task_layer(self, id: str) -> Optional[MetadataTaskLayer]: - """ - Id == TaskLayer.get_id() - """ - for tl in self.meta_task_layers: - if tl.id == id: - return tl - return None - - def get_locked_metadata_task_layer(self) -> List[MetadataTaskLayer]: - return [tl for tl in self.meta_task_layers if tl.is_locked] - - def get_locked_task_layer_ids(self) -> List[str]: - return [tl.id for tl in self.meta_task_layers if tl.is_locked] - - def get_task_layer_ids(self) -> List[str]: - return [tl.id for tl in self.meta_task_layers] - - def add_metadata_task_layer(self, meta_tl: MetadataTaskLayer) -> None: - if meta_tl.id in self.get_task_layer_ids(): - logger.warning("Will not add metadata task layer. %s already in list", meta_tl.id) - return - self.meta_task_layers.append(meta_tl) - -# ELEMENT CLASSES -# ---------------------------------------------- -class ElementMetadata(Element): - _tag: str = "" - - def __init__(self, element: Optional[Element] = None) -> None: - super().__init__(self._tag) - # If we initialize with an element, we basically want to - # copy the content of the element in to an instance of type - # ElementMetadata to benefit from additional functions. - if element: - for child in element: - self.append(child) - - @classmethod - def from_metadata_cls(cls, meta_class: M) -> E: - # If metaclass has an ID field - # Add a "id" attribute to the element for convenient - # querying. - instance = cls() - if hasattr(meta_class, "id") and meta_class.id: - instance.attrib.update({"id": meta_class.id}) - - # This function makes sure that the input MetadataClass - # will be converted to an element tree. It also handles - # nested MetadataClasses respectively. - convert_metadata_obj_to_elements(instance, meta_class) - return instance - - -class ElementUser(ElementMetadata): - _tag: str = "User" - - -class ElementAsset(ElementMetadata): - _tag: str = "Asset" - - @property - def task_layers_production(self) -> Element: - return self.find(".task_layers_production") - - -class ElementTaskLayer(ElementMetadata): - _tag: str = "TaskLayer" - - @classmethod - def from_metadata_cls(cls, meta_class: MetadataTaskLayer) -> "ElementTaskLayer": - - instance = super().from_metadata_cls(meta_class) - - # Update Author field. - e = instance.find(".author") - e.text = e.find(".full_name").text - return instance - - @property - def author(self) -> Optional[Element]: - return self.find(".author") - - -class ElementTreeAsset(ElementTree): - @classmethod - def from_metadata_cls( - cls, meta_tree_asset: MetadataTreeAsset - ) -> "ElementTreeAsset": - # Create Asset Element and append to root. - asset_element: ElementAsset = ElementAsset.from_metadata_cls( - meta_tree_asset.meta_asset - ) - - # Create ProductionTaskLayers Element - prod_task_layers = asset_element.task_layers_production - - # TODO: I DONT UNDERSTAND: - # For some reasons the task_layers_production entry will - # be duplicated if we just use - # prod_task_layers = asset_element.task_layers_production - # no idea why, we need to first delete it and add it again??? 
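        # A plausible explanation (untested assumption): convert_metadata_obj_to_elements()
        # already created a 'task_layers_production' child on asset_element, and that same
        # element object is what asset_element.task_layers_production returns; appending it
        # again at the end would register it under its parent a second time. Removing the
        # stale child and building a fresh element avoids the duplicate entry.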
- for i in asset_element: - if i.tag == "task_layers_production": - asset_element.remove(i) - - prod_task_layers = Element("task_layers_production") - - # Need to check for None, if element empty it is falsy. - if prod_task_layers == None: - raise FailedToInitAssetElementTree( - f"Failed to find task_layers_production child in ElementAsset Class." - ) - - # Append all meta task layers to it. - for meta_tl in meta_tree_asset.meta_task_layers: - tl_element = ElementTaskLayer.from_metadata_cls(meta_tl) - prod_task_layers.append(tl_element) - - asset_element.append(prod_task_layers) - - return cls(asset_element) - - def get_element_task_layers(self) -> List[ElementTaskLayer]: - l: List[ElementTaskLayer] = [] - for e in self.findall(".//TaskLayer"): - # We need to pass e as ElementTree otherwise we won't receive - # a full tree copy of all childrens recursively. - e_tl = ElementTaskLayer(element=e) - l.append(e_tl) - - return l - - def get_task_layer(self, id: str) -> Optional[Element]: - return self.find(f".//TaskLayer[@id='{id}']") - - @property - def asset_element(self) -> Element: - return self.getroot() diff --git a/scripts-blender/addons/asset_pipeline/builder/ops.py b/scripts-blender/addons/asset_pipeline/builder/ops.py deleted file mode 100644 index 644b4fa1..00000000 --- a/scripts-blender/addons/asset_pipeline/builder/ops.py +++ /dev/null @@ -1,811 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter - -import logging - -from typing import List, Dict, Union, Any, Set, Optional, Tuple -from pathlib import Path - -import bpy -from bpy.app.handlers import persistent - -from . import opsdata - -from .. import asset_status, util, builder, constants -from ..asset_status import AssetStatus - -logger = logging.getLogger("BSP") - - -class BSP_ASSET_initial_publish(bpy.types.Operator): - bl_idname = "bsp_asset.initial_publish" - bl_label = "Create First Publish" - bl_description = "Creates the first publish by exporting the asset collection" - - @classmethod - def poll(cls, context: bpy.types.Context) -> bool: - asset_coll = context.scene.bsp_asset.asset_collection - return bool( - util.is_file_saved() - and asset_coll - and not context.scene.bsp_asset.is_publish_in_progress - and builder.PROD_CONTEXT - and builder.ASSET_CONTEXT - and not builder.ASSET_CONTEXT.asset_publishes - ) - - def execute(self, context: bpy.types.Context) -> Set[str]: - - # Update Asset Context from context so BUILD_CONTEXT works with up to date data. - builder.ASSET_CONTEXT.update_from_bl_context_push(context) - - # Create Build Context. - builder.BUILD_CONTEXT = builder.BuildContext( - builder.PROD_CONTEXT, builder.ASSET_CONTEXT - ) - - # Create Asset Builder. 
- builder.ASSET_BUILDER = builder.AssetBuilder(builder.BUILD_CONTEXT) - - # Publish - builder.ASSET_BUILDER.push(context) - - # Update Asset Context publish files. - builder.ASSET_CONTEXT.reload_asset_publishes() - opsdata.populate_asset_publishes_by_asset_context( - context, builder.ASSET_CONTEXT - ) - - # Redraw UI. - util.redraw_ui() - - return {"FINISHED"} - - -class BSP_ASSET_start_publish(bpy.types.Operator): - bl_idname = "bsp_asset.start_publish" - bl_label = "Start Publish" - bl_description = "Saves .blend file and starts publish of the Asset Collection" - - @classmethod - def poll(cls, context: bpy.types.Context) -> bool: - asset_coll = context.scene.bsp_asset.asset_collection - return bool( - util.is_file_saved() - and asset_coll - and not context.scene.bsp_asset.is_publish_in_progress - and builder.PROD_CONTEXT - and builder.ASSET_CONTEXT - and builder.ASSET_CONTEXT.asset_publishes - and opsdata.are_any_task_layers_enabled_push(context) - ) - - def execute(self, context: bpy.types.Context) -> Set[str]: - # Save blend file. - bpy.ops.wm.save_mainfile() - - # Update Asset Context from context so BUILD_CONTEXT works with up to date data. - builder.ASSET_CONTEXT.update_from_bl_context_push(context) - - # Update the asset publishes again. - builder.ASSET_CONTEXT.reload_asset_publishes() - - # Create Build Context. - builder.BUILD_CONTEXT = builder.BuildContext( - builder.PROD_CONTEXT, builder.ASSET_CONTEXT - ) - - # That means that the selected TaskLayers were locked in all versions. - if not builder.BUILD_CONTEXT.process_pairs: - enabled_tl_ids = [ - tl.get_id() - for tl in builder.BUILD_CONTEXT.asset_context.task_layer_assembly.get_used_task_layers() - ] - self.report( - {"WARNING"}, - f"Task Layers: {','.join(enabled_tl_ids)} are locked in all asset publishes.", - ) - builder.BUILD_CONTEXT = None - return {"CANCELLED"} - - # Make sure that the blender property group gets updated as well. - opsdata.populate_asset_publishes_by_build_context( - context, builder.BUILD_CONTEXT - ) - - # Update properties. - context.scene.bsp_asset.is_publish_in_progress = True - - # print(builder.BUILD_CONTEXT) - - # Redraw UI. - util.redraw_ui() - - return {"FINISHED"} - - -class BSP_ASSET_start_publish_new_version(bpy.types.Operator): - bl_idname = "bsp_asset.start_publish_new_version" - bl_label = "Start Publish New Version" - bl_description = ( - "Saves .blend file and starts publish of the Asset Collection as a new Version" - ) - - @classmethod - def poll(cls, context: bpy.types.Context) -> bool: - asset_coll = context.scene.bsp_asset.asset_collection - return bool( - util.is_file_saved() - and asset_coll - and not context.scene.bsp_asset.is_publish_in_progress - and builder.PROD_CONTEXT - and builder.ASSET_CONTEXT - and builder.ASSET_CONTEXT.asset_publishes - and context.window_manager.bsp_asset.new_asset_version - and opsdata.are_any_task_layers_enabled_push(context) - ) - - def execute(self, context: bpy.types.Context) -> Set[str]: - # Save blend file. - bpy.ops.wm.save_mainfile() - - # Update Asset Context from context so BUILD_CONTEXT works with up to date data. - builder.ASSET_CONTEXT.update_from_bl_context_push(context) - - # Copy latest asset publish and increment. - asset_publish = builder.ASSET_CONTEXT.asset_dir.increment_latest_publish() - - # Add file create step of new asset publish. - builder.UNDO_CONTEXT.add_step_publish_create(context, asset_publish) - - # Update the asset publishes again. 
- builder.ASSET_CONTEXT.reload_asset_publishes() - - # Get task layers that need be locked resulting of the creation of the new - # asset publish with the currently enabled task layers. - lock_plans = opsdata.get_task_layer_lock_plans(builder.ASSET_CONTEXT) - opsdata.populate_context_with_lock_plans(context, lock_plans) - - # Lock task layers. - for task_layer_lock_plan in lock_plans: - task_layer_lock_plan.lock() - logger.info( - "TaskLayers locked(%s): %s", - task_layer_lock_plan.asset_publish.path.name, - ",".join(task_layer_lock_plan.get_task_layer_ids_to_lock()), - ) - - # TODO: Create Undo Step for metadata adjustment. - - # Create Build Context. - builder.BUILD_CONTEXT = builder.BuildContext( - builder.PROD_CONTEXT, builder.ASSET_CONTEXT - ) - # print(builder.BUILD_CONTEXT) - - # Make sure that the blender property group gets updated as well. - # Note: By Build context as we only want to show the relevant - # asset publishes. - opsdata.populate_asset_publishes_by_build_context( - context, builder.BUILD_CONTEXT - ) - - # Update properties. - context.scene.bsp_asset.is_publish_in_progress = True - - # Redraw UI. - util.redraw_ui() - - return {"FINISHED"} - - -class BSP_ASSET_abort_publish(bpy.types.Operator): - bl_idname = "bsp_asset.abort_publish" - bl_label = "Abort Publish" - bl_description = "Aborts publish of the Asset Collection" - - new_files_handeling: bpy.props.EnumProperty( - items=[ - ("DELETE", "Delete", "This will delete newly created files on abort"), - ("KEEP", "Keep", "This will keep newly created files on abort"), - ] - ) - - @classmethod - def poll(cls, context: bpy.types.Context) -> bool: - return bool(context.scene.bsp_asset.is_publish_in_progress) - - def execute(self, context: bpy.types.Context) -> Set[str]: - - # Undo. - # This will undo all steps that were done between start publish and the call of this function. - if self.new_files_handeling == "DELETE": - builder.UNDO_CONTEXT.undo(context) - else: - builder.UNDO_CONTEXT.clear(context) - - # Update Asset context after undo. - builder.ASSET_CONTEXT.reload_asset_publishes() - - # Reset asset publishes to global list. - opsdata.populate_asset_publishes_by_asset_context( - context, builder.ASSET_CONTEXT - ) - - # Uninitialize Build Context. - builder.BUILD_CONTEXT = None - - # Update properties. - context.scene.bsp_asset.is_publish_in_progress = False - - opsdata.clear_task_layer_lock_plans(context) - - # Redraw UI. - util.redraw_ui() - - return {"FINISHED"} - - def invoke(self, context: bpy.types.Context, event: bpy.types.Event) -> Set[str]: - if builder.UNDO_CONTEXT.has_steps_files_create(): - return context.window_manager.invoke_props_dialog(self, width=400) - return self.execute(context) - - def draw(self, context: bpy.types.Context) -> None: - layout: bpy.types.UILayout = self.layout - - # Target. 
- layout.row(align=True).label( - text="This Operation can delete files on disk", icon="ERROR" - ) - layout.row(align=True).separator() - - for asset_publish in builder.UNDO_CONTEXT.asset_publishes: - layout.row(align=True).label(text=f"- {asset_publish.path.name}") - - layout.row(align=True).separator() - layout.row(align=True).label(text="How do you want to proceed?") - - layout.row(align=True).prop(self, "new_files_handeling", expand=True) - - -class BSP_ASSET_push_task_layers(bpy.types.Operator): - bl_idname = "bsp_asset.push_task_layers" - bl_label = "Apply Changes" - bl_description = ( - "Calls the push function of the Asset Builder with the current Build Context" - ) - - @classmethod - def poll(cls, context: bpy.types.Context) -> bool: - return bool( - context.scene.bsp_asset.is_publish_in_progress - and util.is_file_saved() - and builder.PROD_CONTEXT - and builder.ASSET_CONTEXT, - ) - - def execute(self, context: bpy.types.Context) -> Set[str]: - - # Create Asset Builder. - builder.ASSET_BUILDER = builder.AssetBuilder(builder.BUILD_CONTEXT) - - # That means that the selected TaskLayers were locked in all versions. - # This code shouldn't be running if all previous logic goes well. - # Just in case Users might change metadata manually, lets leave it here. - if not builder.BUILD_CONTEXT.process_pairs: - enabled_tl_ids = [ - tl.get_id() - for tl in builder.BUILD_CONTEXT.asset_context.task_layer_assembly.get_used_task_layers() - ] - self.report( - {"WARNING"}, - f"Task Layers: {','.join(enabled_tl_ids)} are locked in all asset publishes.", - ) - # Abort the publish. - bpy.ops.bsp_asset.abort_publish() - return {"CANCELLED"} - - # Publish. - builder.ASSET_BUILDER.push(context) - - # There can be a case where new task layers are added during production - # While the pushing will add the new task layer to the metadata file - # the task layer list for each asset publish does not update that change. - # This fixes that. - builder.BUILD_CONTEXT.asset_context.reload_asset_publishes_metadata() - opsdata.update_asset_publishes_by_build_context(context, builder.BUILD_CONTEXT) - - # TODO: Add undo step for metadata adjustment - # and task layer push to make it undoable on abort. - - # Update properties. - context.scene.bsp_asset.are_task_layers_pushed = True - - # Redraw UI. - util.redraw_ui() - - return {"FINISHED"} - - -def draw_task_layers_list( - layout: bpy.types.UILayout, - context: bpy.types.Context, - prop_name: str, - disable: bool = False, -) -> bpy.types.UILayout: - """ - Draws context.bsp_asset.task_layers_owner. - `prop_name`: str has to be either: 'task_layer_pull' or 'task_layer_push' - """ - - row = layout.row(align=True) - - # Ui-list. - row.template_list( - "BSP_UL_task_layers", - f"{prop_name}_list", - context.scene.bsp_asset, - prop_name, - context.scene.bsp_asset, - f"{prop_name}_index", - rows=constants.DEFAULT_ROWS, - type="DEFAULT", - ) - if disable: - row.enabled = False - - return row - - -class BSP_ASSET_pull(bpy.types.Operator): - bl_idname = "bsp_asset.pull" - bl_label = "Pull Task Layers" - bl_description = ( - "Pull in data from a set of Task Layers. 
The initial set is those task layers which are not owned by this file" - ) - - @classmethod - def poll(cls, context: bpy.types.Context) -> bool: - return bool( - not context.scene.bsp_asset.is_publish_in_progress - and util.is_file_saved() - and builder.PROD_CONTEXT - and builder.ASSET_CONTEXT - ) - - def invoke(self, context, event): - bsp = context.scene.bsp_asset - - for tl_owned, tl_pull in zip(bsp.task_layers_push, bsp.task_layers_pull): - tl_pull.use = not tl_owned.use - - return context.window_manager.invoke_props_dialog(self, width=400) - - def draw(self, context): - draw_task_layers_list(self.layout, context, "task_layers_pull") - - def execute(self, context: bpy.types.Context) -> Set[str]: - - # Update Asset Context from context so BUILD_CONTEXT works with up to date data. - builder.ASSET_CONTEXT.update_from_bl_context_pull(context) - - # Update the asset publishes again. - # builder.ASSET_CONTEXT.update_asset_publishes() - - # Create Build Context. - builder.BUILD_CONTEXT = builder.BuildContext( - builder.PROD_CONTEXT, builder.ASSET_CONTEXT - ) - - # Create Asset Builder. - builder.ASSET_BUILDER = builder.AssetBuilder(builder.BUILD_CONTEXT) - - # Pull. - builder.ASSET_BUILDER.pull_from_publish(context) - - return {"FINISHED"} - - -class BSP_ASSET_publish(bpy.types.Operator): - bl_idname = "bsp_asset.publish" - bl_label = "Publish" - bl_description = "Publishes the pushed changes on SVN" - - @classmethod - def poll(cls, context: bpy.types.Context) -> bool: - - return bool( - context.scene.bsp_asset.is_publish_in_progress - and util.is_file_saved() - and builder.PROD_CONTEXT - and builder.ASSET_CONTEXT - and builder.ASSET_CONTEXT.asset_publishes - and context.scene.bsp_asset.are_task_layers_pushed - ) - - def execute(self, context: bpy.types.Context) -> Set[str]: - - # Placeholder - - # Commit to SVN. - - # Reset asset publishes to global list. - opsdata.populate_asset_publishes_by_asset_context( - context, builder.ASSET_CONTEXT - ) - opsdata.clear_task_layer_lock_plans(context) - - # Uninitialize Build Context. - builder.BUILD_CONTEXT = None - - # Update properties. - context.scene.bsp_asset.is_publish_in_progress = False - context.scene.bsp_asset.are_task_layers_pushed = False - - # Clear undo context. - builder.UNDO_CONTEXT.clear(context) - - # Redraw UI. - util.redraw_ui() - - return {"FINISHED"} - - -class BSP_ASSET_create_prod_context(bpy.types.Operator): - bl_idname = "bsp_asset.create_prod_context" - bl_label = "Create Production Context" - bl_description = ( - "Process config files in production config folder. Loads all task layers." - ) - - @classmethod - def poll(cls, context: bpy.types.Context) -> bool: - addon_prefs = util.get_addon_prefs() - return bool(addon_prefs.is_prod_task_layers_module_path_valid()) - - def execute(self, context: bpy.types.Context) -> Set[str]: - - # Initialize Asset Context. - addon_prefs = util.get_addon_prefs() - config_folder = Path(addon_prefs.prod_config_dir) - builder.PROD_CONTEXT = builder.ProductionContext(config_folder) - - # print(builder.PROD_CONTEXT) - - # When we run this operator to update the production context - # We also want the asset context to be updated. - if bpy.ops.bsp_asset.create_asset_context.poll(): - bpy.ops.bsp_asset.create_asset_context() - - return {"FINISHED"} - - -class BSP_ASSET_create_asset_context(bpy.types.Operator): - bl_idname = "bsp_asset.create_asset_context" - bl_label = "Create Asset Context" - bl_description = ( - "Initialize Asset Context from Production Context. 
" - "Try to restore Task Layer Settings for this Asset" - ) - - @classmethod - def poll(cls, context: bpy.types.Context) -> bool: - asset_coll: bpy.types.Collection = context.scene.bsp_asset.asset_collection - return bool(builder.PROD_CONTEXT and asset_coll and bpy.data.filepath) - - def execute(self, context: bpy.types.Context) -> Set[str]: - - # Initialize Asset Context. - builder.ASSET_CONTEXT = builder.AssetContext(context, builder.PROD_CONTEXT) - - # Populate collection property with loaded task layers. - opsdata.populate_task_layers(context, builder.ASSET_CONTEXT) - - # Populate collection property with found asset publishes. - opsdata.populate_asset_publishes_by_asset_context( - context, builder.ASSET_CONTEXT - ) - - # Update Asset Context from bl context again, as populate - # task layers tries to restore previous task layer selection states. - builder.ASSET_CONTEXT.update_from_bl_context_push(context) - - # print(builder.ASSET_CONTEXT) - return {"FINISHED"} - - -class BSP_ASSET_set_task_layer_status(bpy.types.Operator): - bl_idname = "bsp_asset.set_task_layer_status" - bl_label = "Set Task Layer Status" - bl_description = "Sets the Status of a Task Layer of a specific Asset Publish, which controls the is_locked attribute" - - @staticmethod - def get_current_state(self: bpy.types.Operator) -> str: - # Get Metadata Task Layer. - asset_publish = opsdata.get_active_asset_publish(bpy.context) - m_tl = asset_publish.metadata.get_metadata_task_layer(self.task_layer) - return "locked" if m_tl.is_locked else "live" - - target: bpy.props.StringProperty(name="Target") # type: ignore - task_layer: bpy.props.EnumProperty( # type: ignore - items=opsdata.get_task_layers_for_bl_enum, - name="Task Layer", - description="Task Layer for which to change the Status", - ) - current_status: bpy.props.StringProperty( # type: ignore - name="Current Status", - description="Current State of selected Task Layer", - get=get_current_state.__func__, - ) - new_status: bpy.props.EnumProperty( # type: ignore - items=[("locked", "locked", ""), ("live", "live", "")], - name="New Status", - ) - - @classmethod - def poll(cls, context: bpy.types.Context) -> bool: - asset_coll = context.scene.bsp_asset.asset_collection - return bool( - util.is_file_saved() - and asset_coll - and not context.scene.bsp_asset.is_publish_in_progress - and builder.PROD_CONTEXT - and builder.ASSET_CONTEXT - and builder.ASSET_CONTEXT.asset_publishes - ) - - def invoke(self, context: bpy.types.Context, event: bpy.types.Event) -> Set[str]: - # Get selected asset publish. - self.asset_publish = opsdata.get_active_asset_publish(context) - - # Update target attribute. - self.target = self.asset_publish.path.name - - return context.window_manager.invoke_props_dialog(self, width=400) - - def execute(self, context: bpy.types.Context) -> Set[str]: - - # Exit if no status change. - if self.new_status == self.current_status: - return {"CANCELLED"} - - # Update locked state. - is_locked = True if self.new_status == "locked" else False - self.asset_publish.metadata.get_metadata_task_layer( - self.task_layer - ).is_locked = is_locked - - # Write metadata to file. - self.asset_publish.write_metadata() - - # Log. - logger.info( - f"Set {self.asset_publish.path.name} {self.task_layer} Task Layer Status: {self.new_status}" - ) - - # Reset attributes. - del self.asset_publish - - # Reload asset publishes. 
- builder.ASSET_CONTEXT.reload_asset_publishes_metadata() - opsdata.populate_asset_publishes_by_asset_context( - context, builder.ASSET_CONTEXT - ) - - # Redraw UI. - util.redraw_ui() - - return {"FINISHED"} - - def draw(self, context: bpy.types.Context) -> None: - layout: bpy.types.UILayout = self.layout - - # Target. - row = layout.row(align=True) - row.prop(self, "target") - row.enabled = False - - # Task Layer. - row = layout.row(align=True) - row.prop(self, "task_layer") - - # Current State. - row = layout.row(align=True) - row.prop(self, "current_status") - - layout.separator() - layout.separator() - - # New State. - row = layout.row(align=True) - row.prop(self, "new_status") - - -class BSP_ASSET_set_asset_status(bpy.types.Operator): - bl_idname = "bsp_asset.set_asset_status" - bl_label = "Set Asset Status" - bl_description = "Sets the Status of a specific Asset Publish" - - @staticmethod - def get_current_status(self: bpy.types.Operator) -> str: - # Get Metadata Task Layer. - asset_publish = opsdata.get_active_asset_publish(bpy.context) - return asset_publish.metadata.meta_asset.status.name.capitalize() - - target: bpy.props.StringProperty(name="Target") # type: ignore - - current_status: bpy.props.StringProperty( # type: ignore - name="Current Status", - description="Current State of selected Task Layer", - get=get_current_status.__func__, - ) - new_status: bpy.props.EnumProperty( # type: ignore - items=asset_status.get_asset_status_as_bl_enum, - name="New Status", - ) - - @classmethod - def poll(cls, context: bpy.types.Context) -> bool: - asset_coll = context.scene.bsp_asset.asset_collection - return bool( - util.is_file_saved() - and asset_coll - and not context.scene.bsp_asset.is_publish_in_progress - and builder.PROD_CONTEXT - and builder.ASSET_CONTEXT - and builder.ASSET_CONTEXT.asset_publishes - ) - - def invoke(self, context: bpy.types.Context, event: bpy.types.Event) -> Set[str]: - # Get selected asset publish. - self.asset_publish = opsdata.get_active_asset_publish(context) - - # Update target attribute. - self.target = self.asset_publish.path.name - - return context.window_manager.invoke_props_dialog(self, width=400) - - def execute(self, context: bpy.types.Context) -> Set[str]: - - status = AssetStatus(int(self.new_status)) - - # Current status is in in int, convert new status to it so - # we can compare. - # Exit if no status change. - if status.name == self.current_status.upper(): - return {"CANCELLED"} - - # Update Assset Status. - self.asset_publish.metadata.meta_asset.status = status - - # Write metadata to file. - self.asset_publish.write_metadata() - - # Log. - logger.info(f"Set {self.asset_publish.path.name} Asset Status: {status.name}") - - # Reset attributes. - del self.asset_publish - - # Reload asset publishes. - builder.ASSET_CONTEXT.reload_asset_publishes_metadata() - opsdata.populate_asset_publishes_by_asset_context( - context, builder.ASSET_CONTEXT - ) - - # Redraw UI. - util.redraw_ui() - - return {"FINISHED"} - - def draw(self, context: bpy.types.Context) -> None: - layout: bpy.types.UILayout = self.layout - - # Target. - row = layout.row(align=True) - row.prop(self, "target") - row.enabled = False - - # Current State. - row = layout.row(align=True) - row.prop(self, "current_status") - - layout.separator() - layout.separator() - - # New State. 
- row = layout.row(align=True) - row.prop(self, "new_status") - - -@persistent -def create_undo_context(_): - builder.UNDO_CONTEXT = builder.UndoContext() - builder.UNDO_CONTEXT.update_from_bl_context(bpy.context) - - -@persistent -def create_asset_context(_): - # We want this to run on every scene load. - # As active assets might change after scene load. - if bpy.ops.bsp_asset.create_asset_context.poll(): - bpy.ops.bsp_asset.create_asset_context() - else: - # That means we load a scene with no asset collection - # assigned. Previous ASSET_CONTEXT should therefore - # be uninitialized. - logger.error( - "Failed to initialize Asset Context. bpy.ops.bsp_asset.create_asset_context.poll() failed." - ) - builder.ASSET_CONTEXT = None - opsdata.clear_asset_publishes(bpy.context) - opsdata.clear_task_layers(bpy.context) - - -@persistent -def create_prod_context(_): - - # Should only run once on startup. - if not builder.PROD_CONTEXT: - if bpy.ops.bsp_asset.create_prod_context.poll(): - bpy.ops.bsp_asset.create_prod_context() - else: - logger.error( - "Failed to initialize Production Context. bpy.ops.bsp_asset.create_prod_context.poll() failed." - ) - builder.PROD_CONTEXT = None - - -# ----------------REGISTER--------------. - -classes = [ - BSP_ASSET_create_prod_context, - BSP_ASSET_create_asset_context, - BSP_ASSET_initial_publish, - BSP_ASSET_start_publish, - BSP_ASSET_start_publish_new_version, - BSP_ASSET_abort_publish, - BSP_ASSET_push_task_layers, - BSP_ASSET_pull, - BSP_ASSET_publish, - BSP_ASSET_set_task_layer_status, - BSP_ASSET_set_asset_status, -] - - -def register() -> None: - for cls in classes: - bpy.utils.register_class(cls) - - # Handlers. - bpy.app.handlers.load_post.append(create_prod_context) - bpy.app.handlers.load_post.append(create_asset_context) - bpy.app.handlers.load_post.append(create_undo_context) - - -def unregister() -> None: - - # Handlers. - bpy.app.handlers.load_post.remove(create_undo_context) - bpy.app.handlers.load_post.remove(create_asset_context) - bpy.app.handlers.load_post.remove(create_prod_context) - - for cls in reversed(classes): - bpy.utils.unregister_class(cls) diff --git a/scripts-blender/addons/asset_pipeline/builder/opsdata.py b/scripts-blender/addons/asset_pipeline/builder/opsdata.py deleted file mode 100644 index 9957067f..00000000 --- a/scripts-blender/addons/asset_pipeline/builder/opsdata.py +++ /dev/null @@ -1,235 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter - -import logging - -from typing import List, Dict, Union, Any, Set, Optional, Tuple -from pathlib import Path - -import bpy - -from .context import AssetContext, BuildContext -from .task_layer import TaskLayer -from .lock_plan import TaskLayerLockPlan - -from .. 
import builder -from ..asset_files import AssetPublish - -logger = logging.getLogger("BSP") - - -def populate_task_layers( - context: bpy.types.Context, asset_context: AssetContext -) -> None: - - for prop_group in [ - context.scene.bsp_asset.task_layers_push, - context.scene.bsp_asset.task_layers_pull, - ]: - # Make a backup to restore task layer settings as good as possible. - tmp_backup: Dict[str, Dict[str, Any]] = {} - for ( - task_layer_id, - task_layer_prop_group, - ) in prop_group.items(): - tmp_backup[task_layer_id] = task_layer_prop_group.as_dict() - - # Clear task layer collection property. - prop_group.clear() - - # Load Task Layers from Production Context, try to restore - # previous task layer settings - for ( - key, - task_layer_config, - ) in asset_context.task_layer_assembly.task_layer_config_dict.items(): - item = prop_group.add() - item.name = key - item.task_layer_id = key - item.task_layer_name = task_layer_config.task_layer.name - - # Restore previous settings. - if key in tmp_backup: - bkp = tmp_backup.get(key) - if not bkp: - continue - item.use = bkp["use"] - - # Update actual ASSET_CONTEXT, which will transfer the task layer settings, - # which we restored from scene level. - task_layer_config.use = bkp["use"] - - -def add_asset_publish_to_context( - context: bpy.types.Context, asset_publish: AssetPublish -) -> None: - - item = context.scene.bsp_asset.asset_publishes.add() - item.update_props_by_asset_publish(asset_publish) - - -def update_asset_publishes_by_build_context( - context: bpy.types.Context, build_context: BuildContext -) -> None: - - for asset_publish in build_context.asset_publishes: - item = context.scene.bsp_asset.asset_publishes.get(asset_publish.path.name) - if item: - item.update_props_by_asset_publish(asset_publish) - - -def populate_asset_publishes_by_asset_context( - context: bpy.types.Context, asset_context: AssetContext -) -> None: - - """ - This populates the context with asset publishes based on the asset context. - Meaning it will take all found asset publishes (asset_context.asset_publishes). - """ - - # Clear asset_publishes collection property. - clear_asset_publishes(context) - - # Load Asset Publishes from Asset Context. - for asset_publish in asset_context.asset_publishes: - add_asset_publish_to_context(context, asset_publish) - - -def populate_asset_publishes_by_build_context( - context: bpy.types.Context, build_context: BuildContext -) -> None: - """ - This populates the context with asset publishes based on the build context. - Meaning it will only take the asset publishes it will find in - build_context.process_pairs. - """ - - # Clear asset_publishes collection property. - clear_asset_publishes(context) - - # Load Asset Publishes from Asset Context. 
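# Unlike the asset-context variant above, which lists every publish found on
# disk, only the publishes carried by build_context.process_pairs show up
# here, i.e. the ones the current build will actually write to.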
- for process_pair in build_context.process_pairs: - asset_publish = process_pair.asset_publish - add_asset_publish_to_context(context, asset_publish) - - -def clear_task_layers(context: bpy.types.Context) -> None: - context.scene.bsp_asset.task_layers_push.clear() - context.scene.bsp_asset.task_layers_pull.clear() - - -def clear_task_layer_lock_plans(context: bpy.types.Context) -> None: - context.scene.bsp_asset.task_layer_lock_plans.clear() - - -def clear_asset_publishes(context: bpy.types.Context) -> None: - context.scene.bsp_asset.asset_publishes.clear() - - -def get_active_asset_publish(context: bpy.types.Context) -> AssetPublish: - index = context.scene.bsp_asset.asset_publishes_index - asset_file = context.scene.bsp_asset.asset_publishes[index] - return AssetPublish(asset_file.path) - - -def get_task_layers_for_bl_enum( - self: bpy.types.Operator, context: bpy.types.Context -) -> List[Tuple[str, str, str]]: - if not builder.ASSET_CONTEXT: - return [] - return builder.ASSET_CONTEXT.task_layer_assembly.get_task_layers_for_bl_enum() - - -def get_task_layer_lock_plans(asset_context: AssetContext) -> List[TaskLayerLockPlan]: - - """ - This function should be called when you want to know which task layers of which asset publishes - need to be locked after creating a new asset publish with a selection of task layers. - This information will be returned in the form of a List of TaskLayerLockPlan classes. - """ - - task_layer_lock_plans: List[TaskLayerLockPlan] = [] - task_layers_to_push = asset_context.task_layer_assembly.get_used_task_layers() - - for asset_publish in asset_context.asset_publishes[:-1]: - - task_layers_to_lock: List[TaskLayer] = [] - - for task_layer in task_layers_to_push: - - # This is an interesting case, that means the task layer is not even in the assset publish - # metadata file. Could happen if there was a new production task layer added midway production. - if task_layer.get_id() not in asset_publish.metadata.get_task_layer_ids(): - # TODO: How to handle this case? - logger.warning( - "TaskLayer: %s does not exist in %s. Maybe added during production?", - task_layer.get_id(), - asset_publish.metadata_path.name, - ) - continue - - # Task Layer is already locked. - if ( - task_layer.get_id() - in asset_publish.metadata.get_locked_task_layer_ids() - ): - continue - - # Otherwise this Task Layer should be locked. - task_layers_to_lock.append(task_layer) - - # If task layers need to be locked - # Store that in TaskLayerLockPlan. - if task_layers_to_lock: - task_layer_lock_plans.append( - TaskLayerLockPlan(asset_publish, task_layers_to_lock) - ) - - return task_layer_lock_plans - - -def populate_context_with_lock_plans( - context: bpy.types.Context, lock_plan_list: List[TaskLayerLockPlan] -) -> None: - - context.scene.bsp_asset.task_layer_lock_plans.clear() - - # Add asset publishes. - for lock_plan in lock_plan_list: - item = context.scene.bsp_asset.task_layer_lock_plans.add() - item.path_str = lock_plan.asset_publish.path.as_posix() - - # Add task layers to lock for that asset publish. - for tl_to_lock in lock_plan.task_layers_to_lock: - tl_item = item.task_layers.add() - tl_item.name = tl_to_lock.get_id() - tl_item.task_layer_id = tl_to_lock.get_id() - tl_item.task_layer_name = tl_to_lock.name - - -def are_any_task_layers_enabled_push(context: bpy.types.Context) -> bool: - """ - Returns true if any task layers are selected in the task layer push list. 
- """ - bsp = context.scene.bsp_asset - enabled_task_layers = [ - tlg for tlg in bsp.task_layers_push.values() if tlg.use - ] - return bool(enabled_task_layers) \ No newline at end of file diff --git a/scripts-blender/addons/asset_pipeline/builder/scripts/push.py b/scripts-blender/addons/asset_pipeline/builder/scripts/push.py deleted file mode 100644 index f5c749ed..00000000 --- a/scripts-blender/addons/asset_pipeline/builder/scripts/push.py +++ /dev/null @@ -1,157 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter -""" -As the publish process requires a number of more complex operations, we need to actually have a Blender Instance -opening that file and then executing the operations. -This script can be passed as -P option when starting a blender exe. -It needs a pickle_path after -- . The pickle path contains a pickled BuildContext from the AssetTask. -This BuildContext will be unpickled in this script and processed, which means performing -the publish of the selected TaskLayers in the AssetTask. -""" -import traceback -import logging -import pickle -import sys - -from typing import List, Dict, Union, Any, Set, Optional -from pathlib import Path - -from asset_pipeline import prop_utils -from asset_pipeline.builder import AssetBuilder -from asset_pipeline.builder.context import BuildContext -from asset_pipeline.asset_files import AssetPublish - -import bpy - -logger = logging.getLogger("BSP") - -# Get cli input. -argv = sys.argv -# logger.info(argv) -argv = argv[argv.index("--") + 1 :] - -logger.info("\n" * 2) -logger.info(f"STARTING NEW BLENDER: {bpy.data.filepath}") -logger.info("RUNNING PUSH SCRIPT") -logger.info("------------------------------------") - -try: - argv[0] -except IndexError: - logger.error("Supply pickle path as first argument after '--'.") - sys.exit(1) - -# Check if pickle path is valid. 
-pickle_path: str = argv[0] - -if not pickle_path: - logger.error("Supply valid pickle path as first argument after '--'.") - sys.exit(1) - -pickle_path: Path = Path(pickle_path) - -if not pickle_path.exists(): - logger.error(f"Pickle path does not exist: {pickle_path.as_posix()}") - sys.exit(1) - - -def exception_handler(func): - def func_wrapper(*args, **kwargs): - try: - return func(*args, **kwargs) - - except Exception as error: - print("\n" *2) - print(f"------------------------EXCEPTION({Path(bpy.data.filepath).name})------------------------") - exc_info = sys.exc_info() - traceback.print_exception(*exc_info) - del exc_info - print("\n" *2) - sys.exit(1) # Enables us to show warning in UI - - return func_wrapper - - -@exception_handler -def run(): - # Load pickle - logger.info(f"LOADING PICKLE: %s", pickle_path.as_posix()) - with open(pickle_path.as_posix(), "rb") as f: - BUILD_CONTEXT: BuildContext = pickle.load(f) - - # If first publish, only link in asset collection and update properties. - if not BUILD_CONTEXT.asset_publishes: - asset_publish = AssetPublish(Path(bpy.data.filepath)) - asset_coll = BUILD_CONTEXT.asset_context.asset_collection - # Update scene asset collection. - bpy.context.scene.bsp_asset.asset_collection = asset_coll - - # Update asset collection properties. - asset_coll.bsp_asset.update_props_by_asset_publish(asset_publish) - - # Link in asset collection in scene. - bpy.context.scene.collection.children.link(asset_coll) - bpy.context.scene.bsp_asset.asset_collection = asset_coll - - # Delete pickle. - pickle_path.unlink() - logger.info("Deleted pickle: %s", pickle_path.name) - - # Shutdown Blender. - bpy.ops.wm.save_mainfile() - bpy.ops.wm.quit_blender() - sys.exit(0) - - logger.info("LOAD TRANSFER SETTINGS") - - # Fetch transfer settings from AssetContext and update scene settings - # as they are the ones that are used by the pull() process. - TRANSFER_SETTINGS = bpy.context.scene.bsp_asset_transfer_settings - for prop_name, prop in prop_utils.get_property_group_items(TRANSFER_SETTINGS): - try: - value = BUILD_CONTEXT.asset_context.transfer_settings[prop_name] - except KeyError: - continue - else: - setattr(TRANSFER_SETTINGS, prop_name, value) - logger.info("Loaded setting(%s: %s)", prop_name, str(value)) - - logger.info(BUILD_CONTEXT) - - logger.info( - f"IMPORTING ASSET COLLECTION FROM TASK: %s", - BUILD_CONTEXT.asset_task.path.as_posix(), - ) - - ASSET_BUILDER = AssetBuilder(BUILD_CONTEXT) - - ASSET_BUILDER.pull_from_task(bpy.context) - - # Delete pickle. - pickle_path.unlink() - logger.info("Deleted pickle: %s", pickle_path.name) - - # Shutdown Blender. - bpy.ops.wm.save_mainfile() - bpy.ops.wm.quit_blender() - sys.exit(0) - - -run() diff --git a/scripts-blender/addons/asset_pipeline/builder/task_layer.py b/scripts-blender/addons/asset_pipeline/builder/task_layer.py deleted file mode 100644 index 6dd6c629..00000000 --- a/scripts-blender/addons/asset_pipeline/builder/task_layer.py +++ /dev/null @@ -1,380 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-#
-# ***** END GPL LICENCE BLOCK *****
-#
-# (c) 2021, Blender Foundation - Paul Golter
-
-import logging
-
-from typing import List, Dict, Union, Any, Set, Optional, Tuple
-from types import ModuleType
-
-from pathlib import Path
-
-import bpy
-from bpy_extras.id_map_utils import get_id_reference_map
-
-from .asset_mapping import AssetTransferMapping
-from ..util import unlink_collections_recursive
-from ..constants import FULLY_OWNED_SUFFIX
-
-logger = logging.getLogger("BSP")
-
-
-# TODO: Do we maybe need a BaseTaskLayer that has a default order = 0
-# and no transfer_data function?
-# The base Task Layer gives us the base data on which we apply all other
-# TaskLayers. Merging this layer just means taking it as a starting point.
-# Note: Right now the Asset Importer already handles this logic by checking if the
-# asset task source has the TaskLayer with the lowest order enabled and creates a TARGET collection.
-
-
-class TaskLayer:
- """
- This class is more or less boilerplate so users can easily write their TaskLayer
- in the production config file. Users need to implement the transfer_data function
- and fill out the class attributes.
- """
- name: str = ""
- description: str = ""
- order: int = -1
- task_suffix: str = ""
-
- @classmethod
- def get_id(cls) -> str:
- """
- Used to uniquely identify a TaskLayer, as we expect that no 2 TaskLayer classes
- have the same name.
- """
- return cls.__name__
-
- @classmethod
- def is_valid(cls) -> bool:
- return bool(cls.name and cls.order >= 0)
-
- @classmethod
- def transfer(
- cls,
- context: bpy.types.Context,
- build_context: "BuildContext", # Quoted, otherwise we get circular import errors.
- transfer_mapping: AssetTransferMapping,
- transfer_settings: bpy.types.PropertyGroup,
- ) -> None:
- cls.transfer_collections(transfer_mapping)
- cls.transfer_data(context, build_context, transfer_mapping, transfer_settings)
- cls.assign_objects(transfer_mapping)
- cls.fix_geonode_modifiers()
-
- @classmethod
- def transfer_data(
- cls,
- context: bpy.types.Context,
- build_context: "BuildContext", # Quoted, otherwise we get circular import errors.
- transfer_mapping: AssetTransferMapping,
- transfer_settings: bpy.types.PropertyGroup,
- ) -> None:
-
- # TODO: transfer_settings can be None if users didn't provide a
- # TransferSettings class in the task layer module. We should update this.
- """
- The AssetTransferMapping class represents a mapping between a source and a target.
- It contains an object mapping which connects each source object with a target.
- The maps are just dictionaries where the key is the source and the value the target.
- Both key and target are actual Blender ID Datablocks.
- This makes it easy to write Merge Instructions.
- With it you can access things like:
-
- transfer_mapping.object_map: Dict[bpy.types.Object, bpy.types.Object]
- transfer_mapping.collection_map: Dict[bpy.types.Collection, bpy.types.Collection]
- transfer_mapping.material_map: Dict[bpy.types.Material, bpy.types.Material]
-
- For all mappings:
- Key: Source
- Value: Target
-
- You can also access the root Asset source and Target Collection:
- transfer_mapping.source_coll: bpy.types.Collection
- transfer_mapping.target_coll: bpy.types.Collection
-
- Beyond that, you can access objects which had no match.
- transfer_mapping.no_match_target_objs: Set[bpy.types.Object] (all objs that exist in target but not in source) - transfer_mapping.no_match_source_objs: Set[bpy.types.Object] (vice versa) - - - Further then that Users can define custom transfer settings by defining a TransferSettings - Class which inherits from a PropertyGroup in the task_layer module. Users can query these settings - by checking the transfer_settings argument. - - transfer_settings.custom_option - """ - raise NotImplementedError - - @classmethod - def get_task_collections(cls, root_coll: bpy.types.Collection) -> Set[bpy.types.Collection]: - """Return direct children of an Asset Collection who have the suffix of this Task Layer (eg. "einar.rig"). - The root_coll that is the Asset Collection can be either the .TASK or .PUBLISH or .TARGET collection. - """ - transfer_suffix = root_coll.bsp_asset.transfer_suffix - task_collections = set() - - for c in root_coll.children: - if cls.task_suffix and c.name.replace(transfer_suffix, "").endswith(cls.task_suffix): - task_collections.add(c) - - return task_collections - - - @classmethod - def transfer_collections(cls, transfer_mapping: AssetTransferMapping): - source_root_coll = transfer_mapping.source_coll - - # Empty target collections that end in ".FULLY_OWNED". - fully_owned_colls = {c for c in transfer_mapping.target_coll.children_recursive if FULLY_OWNED_SUFFIX in c.name} - for fully_owned_coll in fully_owned_colls: - cls.recursive_clear_fully_owned_target_coll(fully_owned_coll, transfer_mapping) - - for src_coll in cls.get_task_collections(source_root_coll): - cls.transfer_collection_objects(transfer_mapping, src_coll, source_root_coll) - - # Unlink target collections that no longer exist in source. - for target_coll in transfer_mapping.target_coll.children: - if cls.task_suffix and cls.task_suffix in target_coll.name: - unlink_collections_recursive(target_coll, transfer_mapping.no_match_target_colls) - - @classmethod - def recursive_clear_fully_owned_target_coll(cls, coll, transfer_mapping): - if not cls.task_suffix or cls.task_suffix not in coll.name: - return - - for ob in coll.objects[:]: - coll.objects.unlink(ob) - # The object mapping also needs to be removed (this should be more effective than purging, I think) - for key in list(transfer_mapping.object_map.keys()): - if transfer_mapping.object_map[key] == ob: - del transfer_mapping._object_map[key] - - for subcoll in coll.children[:]: - coll.children.unlink(subcoll) - # # The collection mapping also needs to be removed. - for key in list(transfer_mapping.collection_map.keys()): - if transfer_mapping.collection_map[key] == subcoll: - del transfer_mapping._collection_map[key] - cls.recursive_clear_fully_owned_target_coll(subcoll, transfer_mapping) - bpy.data.collections.remove(subcoll) # Just to free up the name, so we avoid a .001 suffix when the target collection is (re-)created later. - - - @classmethod - def transfer_collection_objects(cls, - transfer_mapping: AssetTransferMapping, - src_coll: bpy.types.Collection, - src_parent_coll: bpy.types.Collection): - """ - Recursively transfer object assignments from source to target collections. - If an object ends up being un-assigned, it may get purged. - New collections will be created as necessary. - """ - - # Create target collection if necessary. 
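# Naming follows the transfer suffixes, e.g. (hypothetical names) a source
# collection "einar.rig.TASK" yields a target collection "einar.rig.PUBLISH"
# when pushing into a publish file; only the suffix is swapped.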
- tgt_coll = transfer_mapping.collection_map.get(src_coll) - if not tgt_coll: - src_suffix = transfer_mapping.source_coll.bsp_asset.transfer_suffix - tgt_suffix = transfer_mapping.target_coll.bsp_asset.transfer_suffix - tgt_coll = bpy.data.collections.new(src_coll.name.replace(src_suffix, tgt_suffix)) - tgt_coll.hide_viewport = src_coll.hide_viewport - tgt_coll.hide_render = src_coll.hide_render - transfer_mapping._collection_map[src_coll] = tgt_coll - tgt_parent = transfer_mapping.collection_map.get(src_parent_coll) - assert tgt_parent, "The corresponding target parent collection should've been created in the previous recursion: " + src_coll.name - tgt_parent.children.link(tgt_coll) - - # Un-assigning everything from the target coll. - for o in tgt_coll.objects: - tgt_coll.objects.unlink(o) - - # Re-assign those objects which correspond to the ones in source coll. - for src_ob in src_coll.objects: - tgt_ob = transfer_mapping.object_map.get(src_ob) - if not tgt_ob: - # Allow task layers to add objects that didn't previously exist in target coll. - tgt_ob = src_ob - ref_map = get_id_reference_map() - if src_ob in ref_map: - for dependency in ref_map[src_ob]: - if type(dependency) == bpy.types.Object: - tgt_dependency = transfer_mapping.object_map.get(dependency) - if tgt_dependency: - dependency.user_remap(tgt_dependency) - tgt_coll.objects.link(tgt_ob) - - # Do the same recursively for child collections. - for child_coll in src_coll.children: - cls.transfer_collection_objects(transfer_mapping, child_coll, src_coll) - - @classmethod - def assign_objects(cls, - transfer_mapping: AssetTransferMapping): - """ - Unassign remaining source collections/objects and replace them with - target collections/objects for the whole file. - """ - # iterate through all collections in the file - for coll in list(bpy.data.collections): - collection_map = transfer_mapping.collection_map - transfer_collections = set().union(*[{k, v} for k, v in collection_map.items()]) - if coll in transfer_collections: - continue - for child_coll in coll.children: - if child_coll not in collection_map: - continue - if child_coll in {transfer_mapping.source_coll, transfer_mapping.target_coll}: - continue - tgt_coll = collection_map.get(child_coll) - if not tgt_coll: - continue - coll.children.unlink(child_coll) - coll.children.link(tgt_coll) - for ob in coll.objects: - if ob not in transfer_mapping.object_map: - continue - tgt_ob = transfer_mapping.object_map.get(ob) - if not tgt_ob: - continue - coll.objects.unlink(ob) - coll.objects.link(tgt_ob) - ob.user_remap(tgt_ob) - - @classmethod - def fix_geonode_modifiers(cls): - """Workaround to a weird issue where some GeoNode modifier inputs disappear...""" - for o in bpy.data.objects: - if o.type != 'MESH': - continue - for m in o.modifiers: - if m.type == 'NODES': - m.node_group = m.node_group - - def __repr__(self) -> str: - return f"TaskLayer{self.name}" - - -class TaskLayerConfig: - """ - This Class holds a TaskLayer and additional Information that - determine how this TaskLayer is handeled during build. - For example .use controls if TaskLayer should be used for build. 
- """ - - def __init__(self, task_layer: type[TaskLayer]): - self._task_layer = task_layer - self._use: bool = False - - @property - def task_layer(self) -> type[TaskLayer]: - return self._task_layer - - @property - def use(self) -> bool: - return self._use - - @use.setter - def use(self, value: bool) -> None: - self._use = value - - def reset(self) -> None: - self._use = False - - def __repr__(self) -> str: - return f"{self.task_layer.name}(use: {self.use})" - - -class TaskLayerAssembly: - - """ - This Class holds all TaskLayers relevant for the build. - Each TaskLayer is stored as a TaskLayerConfig object which provides - the additional information. - """ - - def __init__(self, task_layers: List[type[TaskLayer]]): - # Create a dictionary data structure here, so we can easily control - # from within Blender by string which TaskLayers to enable and disable for built. - # As key we will use the class.get_id() attribute of each TaskLayer. (Should be unique) - self._task_layer_config_dict: Dict[str, TaskLayerConfig] = {} - self._task_layers = task_layers - self._task_layer_configs: List[TaskLayerConfig] = [] - # For each TaskLayer create a TaskLayerConfig and add entry in - # dictionary. - for task_layer in task_layers: - - # Make sure that for whatever reason there are no 2 identical TaskLayer. - if task_layer.get_id() in self._task_layer_config_dict: - - self._task_layer_config_dict.clear() - raise Exception( - f"Detected 2 TaskLayers with the same Class name. [{task_layer.get_id()}]" - ) - tc = TaskLayerConfig(task_layer) - self._task_layer_configs.append(tc) - self._task_layer_config_dict[task_layer.get_id()] = tc - - # Sort lists. - self._task_layer_configs.sort(key=lambda tc: tc.task_layer.order) - self._task_layers.sort(key=lambda tl: tl.order) - - def get_task_layer_config(self, key: str) -> TaskLayerConfig: - return self._task_layer_config_dict[key] - - def get_used_task_layers(self) -> List[type[TaskLayer]]: - return [tc.task_layer for tc in self.task_layer_configs if tc.use] - - def get_used_task_layer_ids(self) -> List[str]: - return [t.get_id() for t in self.get_used_task_layers()] - - def get_task_layers_for_bl_enum(self) -> List[Tuple[str, str, str]]: - return [(tl.get_id(), tl.name, tl.description) for tl in self.task_layers] - - @property - def task_layer_config_dict(self) -> Dict[str, TaskLayerConfig]: - return self._task_layer_config_dict - - @property - def task_layer_configs(self) -> List[TaskLayerConfig]: - return self._task_layer_configs - - @property - def task_layers(self) -> List[type[TaskLayer]]: - return self._task_layers - - @property - def task_layer_names(self) -> List[str]: - return [l.name for l in self.task_layers] - - def get_task_layer_orders(self, only_used: bool = False) -> List[int]: - """ - Returns a list of all TaskLayers.order values. 
- """ - if not only_used: - return [t.order for t in self.task_layers] - else: - return [tc.task_layer.order for tc in self.task_layer_configs if tc.use] - - def __repr__(self) -> str: - body = f"{', '.join([str(t) for t in self.task_layer_configs])}" - return f"TaskLayerAssembly: ({body})" - - def __bool__(self) -> bool: - return bool(self._task_layer_config_dict) diff --git a/scripts-blender/addons/asset_pipeline/builder/ui.py b/scripts-blender/addons/asset_pipeline/builder/ui.py deleted file mode 100644 index 8e9316ea..00000000 --- a/scripts-blender/addons/asset_pipeline/builder/ui.py +++ /dev/null @@ -1,534 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter -from pathlib import Path -from typing import List, Dict, Union, Any, Set, Optional - -import bpy - -from .ops import ( - draw_task_layers_list, - BSP_ASSET_initial_publish, - BSP_ASSET_start_publish, - BSP_ASSET_start_publish_new_version, - BSP_ASSET_abort_publish, - BSP_ASSET_create_prod_context, - BSP_ASSET_create_asset_context, - BSP_ASSET_push_task_layers, - BSP_ASSET_pull, - BSP_ASSET_publish, - BSP_ASSET_set_task_layer_status, - BSP_ASSET_set_asset_status, -) -from .. 
import builder, constants, prop_utils, util - -try: - from .util import is_addon_active - import blender_kitsu.cache - kitsu_available = True -except: - kitsu_available = False - - -def poll_asset_collection_not_init(context: bpy.types.Context) -> bool: - return not bool(context.scene.bsp_asset.asset_collection) - - -def poll_error_invalid_task_layer_module_path() -> bool: - addon_prefs = util.get_addon_prefs() - return bool(not addon_prefs.is_prod_task_layers_module_path_valid()) - - -def poll_error_file_not_saved() -> bool: - return not bool(bpy.data.filepath) - - -def poll_error(context: bpy.types.Context) -> bool: - return ( - poll_asset_collection_not_init(context) - or poll_error_file_not_saved() - or poll_error_invalid_task_layer_module_path() - ) - - -def draw_error_invalid_task_layer_module_path( - box: bpy.types.UILayout, -) -> bpy.types.UILayout: - row = box.row(align=True) - row.label(text="Invalid Task Layer Module") - row.operator( - "preferences.addon_show", text="Open Addon Preferences" - ).module = "asset_pipeline" - - -def draw_error_file_not_saved( - box: bpy.types.UILayout, -) -> bpy.types.UILayout: - row = box.row(align=True) - row.label(text="File needs to be saved") - - -def draw_error_asset_collection_not_init( - box: bpy.types.UILayout, -) -> bpy.types.UILayout: - box.row().label(text="Initialize Asset Collection") - - -def draw_affected_asset_publishes_list( - self: bpy.types.Panel, - context: bpy.types.Context, - disable: bool = False, -) -> bpy.types.UILayout: - layout: bpy.types.UILayout = self.layout - - box = layout.box() - row = box.row(align=True) - row.label(text="Asset Publishes") - row.operator(BSP_ASSET_create_asset_context.bl_idname, icon="FILE_REFRESH", text="") - - # Ui-list. - row = box.row() - row.template_list( - "BSP_UL_affected_asset_publishes", - "affected_asset_publishes_list", - context.scene.bsp_asset, - "asset_publishes", - context.scene.bsp_asset, - "asset_publishes_index", - rows=constants.DEFAULT_ROWS, - type="DEFAULT", - ) - if disable: - row.enabled = False - - return box - - -def draw_task_layer_lock_plans_on_new_publish( - self: bpy.types.Panel, - context: bpy.types.Context, - disable: bool = False, -) -> bpy.types.UILayout: - layout: bpy.types.UILayout = self.layout - - box = layout.box() - row = box.row(align=True) - row.label(text="Locked Task Layers") - - # Ui-list. - row = box.row() - row.template_list( - "BSP_UL_task_layer_lock_plans", - "task_layer_lock_plans", - context.scene.bsp_asset, - "task_layer_lock_plans", - context.scene.bsp_asset, - "task_layer_lock_plans_index", - rows=constants.DEFAULT_ROWS, - type="DEFAULT", - ) - if disable: - row.enabled = False - - return box - - -# ----------------PANELS--------------. - - -class BSP_ASSET_main_panel: - bl_category = "Asset Pipeline" - bl_label = "Asset Pipeline" - bl_space_type = "VIEW_3D" - bl_region_type = "UI" - - -class BSP_ASSET_PT_vi3d_asset_pipeline(BSP_ASSET_main_panel, bpy.types.Panel): - def draw(self, context: bpy.types.Context) -> None: - - layout: bpy.types.UILayout = self.layout - bsp = context.scene.bsp_asset - - # Warning box. 
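# Each poll_* helper above checks exactly one failure condition; poll_error()
# is simply their disjunction, so the warning box below can stack every
# applicable message at once.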
- if poll_error(context):
- box = layout.box()
- box.label(text="Warning", icon="ERROR")
-
- if poll_error_file_not_saved():
- draw_error_file_not_saved(box)
-
- if poll_error_invalid_task_layer_module_path():
- draw_error_invalid_task_layer_module_path(box)
-
- if poll_asset_collection_not_init(context):
- draw_error_asset_collection_not_init(box)
-
-
-class BSP_ASSET_PT_vi3d_configure(BSP_ASSET_main_panel, bpy.types.Panel):
- bl_label = "Configure"
- bl_parent_id = "BSP_ASSET_PT_vi3d_asset_pipeline"
- bl_options = {"DEFAULT_CLOSED"}
-
- def draw(self, context: bpy.types.Context) -> None:
- layout: bpy.types.UILayout = self.layout
-
- if not context.scene.bsp_asset.asset_collection:
- if kitsu_available and is_addon_active("blender_kitsu", context) and not blender_kitsu.cache.asset_active_get():
- box = layout.box()
- box.label(text="Warning", icon="ERROR")
- box.row(align=True).label(text="Select Asset in Kitsu Context Browser")
-
- layout.row().prop_search(context.scene.bsp_asset, "asset_collection_name", bpy.data, 'collections')
- layout.separator()
-
- # Draw Task Layer List.
- row = layout.row()
- row.label(text="Owned Task Layers")
- row = row.row()
- row.enabled = False # TODO: This operator is crashing Blender!
- row.operator(BSP_ASSET_create_prod_context.bl_idname, icon="FILE_REFRESH", text="")
- draw_task_layers_list(layout, context, "task_layers_push")
-
-
-class BSP_ASSET_PT_vi3d_publish(BSP_ASSET_main_panel, bpy.types.Panel):
-
- bl_label = "Publish"
- bl_parent_id = "BSP_ASSET_PT_vi3d_asset_pipeline"
- bl_options = {"DEFAULT_CLOSED"}
-
- @classmethod
- def poll(cls, context):
- return bool(builder.ASSET_CONTEXT and context.scene.bsp_asset.asset_collection)
-
- def draw(self, context: bpy.types.Context) -> None:
- layout: bpy.types.UILayout = self.layout
- bsp = context.scene.bsp_asset
-
- # Show warning if blend file not saved.
- if not bpy.data.filepath:
- layout.row().label(text="Blend file needs to be saved", icon="ERROR")
- return
-
- # Initial publish.
- if not builder.ASSET_CONTEXT.asset_publishes:
- layout.row().operator(BSP_ASSET_initial_publish.bl_idname, icon="ADD")
- return
-
- # Publish is in progress.
- # ---------------------------------
- if bsp.is_publish_in_progress:
-
- # Draw abort button.
- layout.row().operator(BSP_ASSET_abort_publish.bl_idname, icon='X')
-
- # Draw Task Layer List.
- layout.label(text="Pushing Task Layers:")
- draw_task_layers_list(layout, context, "task_layers_push", disable=True)
-
- # If new publish, draw task layer lock list.
- if len(bsp.task_layer_lock_plans.items()) > 0:
- draw_task_layer_lock_plans_on_new_publish(self, context)
-
- # Draw affected publishes list.
- box = draw_affected_asset_publishes_list(self, context)
-
- # Draw push task layers operator inside of box.
- row = box.row()
- row.operator(BSP_ASSET_push_task_layers.bl_idname)
-
- # Draw publish operator.
- row = layout.operator(BSP_ASSET_publish.bl_idname)
-
- return
-
- # No publish in progress.
- # ---------------------------------
-
- # Production Context not loaded.
- if not builder.PROD_CONTEXT:
- layout.row().operator(
- BSP_ASSET_create_prod_context.bl_idname, icon="FILE_REFRESH"
- )
- return
-
- # Production Context is initialized.
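# With both contexts initialized and no publish in progress, a single row
# offers either BSP_ASSET_start_publish or BSP_ASSET_start_publish_new_version;
# the new_asset_version checkbox drawn next to it toggles between the two.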
- row = layout.row(align=True) - if context.window_manager.bsp_asset.new_asset_version: - row.operator(BSP_ASSET_start_publish_new_version.bl_idname) - else: - row.operator(BSP_ASSET_start_publish.bl_idname) - - row.prop( - context.window_manager.bsp_asset, - "new_asset_version", - text="", - icon="ADD", - ) - return - - -class BSP_ASSET_PT_vi3d_pull(BSP_ASSET_main_panel, bpy.types.Panel): - - bl_label = "Pull" - bl_parent_id = "BSP_ASSET_PT_vi3d_asset_pipeline" - bl_options = {"DEFAULT_CLOSED"} - - @classmethod - def poll(cls, context): - return bool( - context.scene.bsp_asset.asset_collection - and builder.ASSET_CONTEXT - and builder.ASSET_CONTEXT.asset_publishes - ) - - def draw(self, context: bpy.types.Context) -> None: - layout: bpy.types.UILayout = self.layout - - # Show warning if blend file not saved. - if not bpy.data.filepath: - layout.row().label(text="Blend files needs to be saved", icon="ERROR") - return - - box = layout.box() - box.label(text="Pull") - - row = box.row(align=True) - row.prop(context.window_manager.bsp_asset, "asset_publish_source_path") - - row = box.row(align=True) - row.operator(BSP_ASSET_pull.bl_idname) - - -class BSP_ASSET_PT_vi3d_status(BSP_ASSET_main_panel, bpy.types.Panel): - - bl_label = "Status" - bl_parent_id = "BSP_ASSET_PT_vi3d_asset_pipeline" - bl_options = {"DEFAULT_CLOSED"} - - @classmethod - def poll(cls, context): - return bool( - context.scene.bsp_asset.asset_collection - and builder.ASSET_CONTEXT - and builder.ASSET_CONTEXT.asset_publishes - ) - - def draw(self, context: bpy.types.Context) -> None: - layout: bpy.types.UILayout = self.layout - - box = draw_affected_asset_publishes_list(self, context, disable=False) - - # Task Layer Status. - box = layout.box() - box.label(text="Task Layer Status") - row = box.row(align=True) - row.operator(BSP_ASSET_set_task_layer_status.bl_idname) - - # Asset Status. - box = layout.box() - box.label(text="Asset Status") - row = box.row(align=True) - row.operator(BSP_ASSET_set_asset_status.bl_idname) - - -class BSP_ASSET_PT_vi3d_transfer_settings(BSP_ASSET_main_panel, bpy.types.Panel): - - bl_label = "Transfer Settings" - bl_parent_id = "BSP_ASSET_PT_vi3d_asset_pipeline" - bl_options = {"DEFAULT_CLOSED"} - - @classmethod - def poll(cls, context): - return bool( - hasattr(context.scene, "bsp_asset_transfer_settings") - and context.scene.bsp_asset.asset_collection - and builder.ASSET_CONTEXT - and builder.ASSET_CONTEXT.asset_publishes - ) - - def draw(self, context: bpy.types.Context) -> None: - layout: bpy.types.UILayout = self.layout - - for (pname, prop,) in prop_utils.get_property_group_items( - context.scene.bsp_asset_transfer_settings - ): - layout.row().prop(context.scene.bsp_asset_transfer_settings, pname) - - -class BSP_ASSET_PT_collection_asset_properties(bpy.types.Panel): - bl_label = "Asset Properties" - bl_space_type = "PROPERTIES" - bl_region_type = "WINDOW" - bl_context = "collection" - - @classmethod - def poll(cls, context): - coll = context.collection - return ( - context.collection != context.scene.collection and coll.bsp_asset.is_asset - ) - - def draw(self, context: bpy.types.Context) -> None: - layout: bpy.types.UILayout = self.layout - coll = context.collection - - layout.row().prop(coll.bsp_asset, "displ_entity_name") - layout.row().prop(coll.bsp_asset, "displ_entity_id") - - layout.row().prop(coll.bsp_asset, "rig") - - # Display publish properties. 
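# The displ_* rows below are display properties, presumably mirrored from the
# publish metadata; they are only drawn for collections flagged as a publish.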
- if coll.bsp_asset.is_publish: - box = layout.box() - box.row().label(text="Publish Properties") - box.row().prop(coll.bsp_asset, "displ_version") - box.row().prop(coll.bsp_asset, "displ_publish_path") - - -# ----------------UI-LISTS--------------. - - -class BSP_UL_task_layers(bpy.types.UIList): - def draw_item( - self, context, layout, data, item, icon, active_data, active_propname, index - ): - layout: bpy.types.UILayout = layout - - if self.layout_type in {"DEFAULT", "COMPACT"}: - layout.label(text=item.task_layer_name) - layout.prop(item, "use", text="") - - elif self.layout_type in {"GRID"}: - layout.alignment = "CENTER" - layout.label(text=item.task_layer_name) - - -class BSP_UL_affected_asset_publishes(bpy.types.UIList): - def draw_item( - self, context, layout, data, item, icon, active_data, active_propname, index - ): - layout: bpy.types.UILayout = layout - - if self.layout_type in {"DEFAULT", "COMPACT"}: - - # Di split for filename spacing. - row = layout.row(align=True) - row.alignment = "LEFT" - - # Draw filename with status in brackets. - base_split = row.split(factor=0.4) - - label_text = item.path.name - label_text += f"({item.status[:1]})".upper() - - # Calculate icon depending on the subprocess return code. - # This is a nice way to indicate User if something went wrong - # during push through UI. - icon = "NONE" - if context.scene.bsp_asset.is_publish_in_progress: - if item.returncode_publish == 0: - icon = "CHECKMARK" - elif item.returncode_publish == -1: - icon = "NONE" - else: - icon = "ERROR" - - base_split.label(text=label_text, icon=icon) - - # Draw each task layer. - for tl_item in item.task_layers: - - # Get locked state. - icon = "MESH_CIRCLE" - if tl_item.is_locked: - icon = "LOCKED" - - # Draw label that represents task layer with locked state as icon. - base_split.label(text=f"{tl_item.task_layer_id[:2]}".upper(), icon=icon) - - elif self.layout_type in {"GRID"}: - layout.alignment = "CENTER" - layout.label(text=item.path.name) - - -class BSP_UL_task_layer_lock_plans(bpy.types.UIList): - def draw_item( - self, context, layout, data, item, icon, active_data, active_propname, index - ): - layout: bpy.types.UILayout = layout - - if self.layout_type in {"DEFAULT", "COMPACT"}: - - # Di split for filename spacing. - row = layout.row(align=True) - row.alignment = "LEFT" - - # Draw filename with status in brackets. - base_split = row.split(factor=0.4) - - label_text = item.path.name - base_split.label(text=label_text) - - for tl_item in context.scene.bsp_asset.task_layers_push: - - # Draw label for each task layer to align spacing. - if tl_item.task_layer_id in [ - tl.task_layer_id for tl in item.task_layers - ]: - # Get locked state. - icon = "LOCKED" - - # Draw label that represents task layer with locked state as icon. - base_split.label( - text=f"{tl_item.task_layer_id[:2]}".upper(), icon=icon - ) - # If task layer was not locked just draw empty string but still draw it for - # alignment. - else: - base_split.label(text=f" ") - - elif self.layout_type in {"GRID"}: - layout.alignment = "CENTER" - layout.label(text=item.path.name) - - -# ----------------REGISTER--------------. 
- -classes = [ - BSP_ASSET_PT_collection_asset_properties, - BSP_UL_task_layers, - BSP_UL_affected_asset_publishes, - BSP_ASSET_PT_vi3d_asset_pipeline, - BSP_ASSET_PT_vi3d_configure, - BSP_ASSET_PT_vi3d_publish, - BSP_ASSET_PT_vi3d_pull, - BSP_ASSET_PT_vi3d_status, - BSP_ASSET_PT_vi3d_transfer_settings, - BSP_UL_task_layer_lock_plans, -] - - -def register() -> None: - for cls in classes: - bpy.utils.register_class(cls) - - -def unregister() -> None: - for cls in reversed(classes): - bpy.utils.unregister_class(cls) diff --git a/scripts-blender/addons/asset_pipeline/builder/vis.py b/scripts-blender/addons/asset_pipeline/builder/vis.py deleted file mode 100644 index ea91a378..00000000 --- a/scripts-blender/addons/asset_pipeline/builder/vis.py +++ /dev/null @@ -1,132 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter - -# This code is copied over from character-pipeline-assistant/utils.py file. -# https://gitlab.com/blender/character-pipeline-assistant -# Original Author of this code is: Unknown. - -import logging - -from typing import List, Dict, Union, Any, Set, Optional, Tuple - -from pathlib import Path - -import bpy - -from .. import util - -logger = logging.getLogger("BSP") - - -def get_layer_coll_from_coll( - collection: bpy.types.Collection, -) -> Optional[bpy.types.LayerCollection]: - - lcolls = util.traverse_collection_tree(bpy.context.view_layer.layer_collection) - for lcoll in lcolls: - if lcoll.name == collection.name: - return lcoll - - return None - - -def set_active_collection(collection: bpy.types.Collection) -> None: - layer_collection = get_layer_coll_from_coll(collection) - bpy.context.view_layer.active_layer_collection = layer_collection - - -class EnsureObjectVisibility: - def get_visibility_driver(self) -> Optional[bpy.types.FCurve]: - obj = bpy.data.objects.get(self.obj_name) - assert obj, "Object was renamed while its visibility was being ensured?" - if hasattr(obj, "animation_data") and obj.animation_data: - return obj.animation_data.drivers.find("hide_viewport") - - - def __init__(self, obj: bpy.types.Object): - self.obj_name = obj.name - - # Eye icon - self.hide = obj.hide_get() - obj.hide_set(False) - - # Screen icon driver - self.drv_mute = None - drv = self.get_visibility_driver() - if drv: - self.drv_mute = drv.mute - drv.mute = True - - # Screen icon - self.hide_viewport = obj.hide_viewport - obj.hide_viewport = False - - # Temporarily assign the object to the scene root collection, and - # take note of whether it was already assigned previously, or not. 
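# Linking into the scene's master collection keeps the object reachable even
# when every collection that contains it is hidden or excluded from the view
# layer; restore() undoes the link again if (and only if) it was added here.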
- self.assigned_to_scene_root = False - if obj.name not in bpy.context.scene.collection.objects: - self.assigned_to_scene_root = True - bpy.context.scene.collection.objects.link(obj) - - - def restore(self): - obj = bpy.data.objects.get(self.obj_name) - assert obj, f"Error: Object {self.obj_name} was renamed or removed before its visibility was restored!" - obj.hide_set(self.hide) - - if self.drv_mute != None: # We want to catch both True and False here. - drv = self.get_visibility_driver() - drv.mute = self.drv_mute - - obj.hide_viewport = self.hide_viewport - - if self.assigned_to_scene_root and obj.name in bpy.context.scene.collection.objects: - bpy.context.scene.collection.objects.unlink(obj) - - -class EnsureCollectionVisibility: - """Ensure all objects in a collection are visible. - The original visibility states can be restored using .restore(). - NOTE: Collection and Object names must not change until restore() is called!!! - """ - - def __init__(self, coll: bpy.types.Collection, do_objects=True): - self.coll_name = coll.name - - # Assign object to scene root to make sure it doesn't get hidden by collection - # settings. - # NOTE: Previously, we just messed with and then reset collection settings, - # but that stopped working in the background Blender process since D15885. - - # Objects - self.object_visibilities = [] - if do_objects: - for ob in coll.objects: - self.object_visibilities.append(EnsureObjectVisibility(ob)) - - def restore(self) -> None: - """Restore visibility settings to their original state.""" - coll = bpy.data.collections.get(self.coll_name) - if not coll: - return - - # Objects - for ob_vis in self.object_visibilities: - ob_vis.restore() diff --git a/scripts-blender/addons/asset_pipeline/config.py b/scripts-blender/addons/asset_pipeline/config.py new file mode 100644 index 00000000..82d7ca9f --- /dev/null +++ b/scripts-blender/addons/asset_pipeline/config.py @@ -0,0 +1,49 @@ +import bpy +from pathlib import Path +import json +from . 
import constants + +TASK_LAYER_TYPES = {} +TRANSFER_DATA_DEFAULTS = {} +ATTRIBUTE_DEFAULTS = {} + + +def get_json_file(): + directory = Path(bpy.data.filepath).parent + json_file_path = directory.joinpath(constants.TASK_LAYER_CONFIG_NAME) + if json_file_path.exists(): + return json_file_path + return + + +def get_task_layer_presets_path(): + return Path(__file__).parent.joinpath(constants.TASK_LAYER_CONFIG_DIR_NAME) + + +def verify_json_data(json_file_path=""): + global TASK_LAYER_TYPES + global TRANSFER_DATA_DEFAULTS + global ATTRIBUTE_DEFAULTS + directory = Path(bpy.data.filepath).parent + if json_file_path == "": + json_file_path = directory.joinpath(constants.TASK_LAYER_CONFIG_NAME) + if not json_file_path.exists(): + return + json_file = open(json_file_path) + json_content = json.load(json_file) + try: + TASK_LAYER_TYPES = json_content["TASK_LAYER_TYPES"] + TRANSFER_DATA_DEFAULTS = json_content["TRANSFER_DATA_DEFAULTS"] + ATTRIBUTE_DEFAULTS = json_content["ATTRIBUTE_DEFAULTS"] + return True + except KeyError: + return + + +def write_json_file(asset_path: Path, source_file_path: Path): + json_file_path = asset_path.joinpath(constants.TASK_LAYER_CONFIG_NAME) + json_file = open(source_file_path) + json_content = json.load(json_file) + json_dump = json.dumps(json_content, indent=4) + with open(json_file_path, "w") as config_output: + config_output.write(json_dump) diff --git a/scripts-blender/addons/asset_pipeline/constants.py b/scripts-blender/addons/asset_pipeline/constants.py index f5833e49..bd94954c 100644 --- a/scripts-blender/addons/asset_pipeline/constants.py +++ b/scripts-blender/addons/asset_pipeline/constants.py @@ -1,32 +1,110 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter - -METADATA_EXT = ".xmp" -VERSION_PATTERN = r"v(\d\d\d)" -DELIMITER = "." -TARGET_SUFFIX = ".TARGET" -TASK_SUFFIX = ".TASK" -PUBLISH_SUFFIX = ".PUBLISH" -FULLY_OWNED_SUFFIX = ".FULLY_OWNED" -TRANSFER_SETTINGS_NAME = "TransferSettings" -DEFAULT_ROWS = 3 -TIME_FORMAT = r"%Y-%m-%dT%H:%M:%S" -DEFAULT_ASSET_STATUS = "REVIEW" -HOOK_ATTR_NAME = "_asset_builder_rules" +ADDON_NAME = "asset_pipeline" + +# Delimiter used for naming data within Blender +NAME_DELIMITER = "-" + + +################### +# MERGE +################### + +# Delimiter used by suffixes in the merge process +MERGE_DELIMITER = "." 
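# For example, with MERGE_DELIMITER = "." an object named "Cube" (illustrative
# name) would be tracked during a merge as "Cube.LOCAL" or "Cube.EXTERNAL",
# using the suffixes defined below.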
+
+# Suffixes used when naming items to merge
+LOCAL_SUFFIX = "LOCAL"
+EXTERNAL_SUFFIX = "EXTERNAL"
+
+
+###################
+# Task Layers
+###################
+
+# Name of the directory containing task layer presets internal to the add-on
+TASK_LAYER_CONFIG_DIR_NAME = "task_layer_configs"
+
+# Name of the task layer file found at the root of an asset
+TASK_LAYER_CONFIG_NAME = "task_layers.json"
+
+
+###################
+# Transferable Data
+###################
+
+# Keys for transferable data
+NONE_KEY = "NONE"
+VERTEX_GROUP_KEY = "GROUP_VERTEX"
+MODIFIER_KEY = "MODIFIER"
+CONSTRAINT_KEY = "CONSTRAINT"
+MATERIAL_SLOT_KEY = "MATERIAL"
+SHAPE_KEY_KEY = "SHAPE_KEY"
+ATTRIBUTE_KEY = "ATTRIBUTE"
+PARENT_KEY = "PARENT"
+
+# Information about supported transferable data.
+# The UI bools are defined in the props.py file.
+# {Key string : ("UI Name", 'ICON', "UI_BOOL_KEY")}
+TRANSFER_DATA_TYPES = {
+ NONE_KEY: ("None", "BLANK1", 'none'),
+ VERTEX_GROUP_KEY: ("Vertex Groups", 'GROUP_VERTEX', 'group_vertex_ui_bool'),
+ MODIFIER_KEY: ("Modifiers", 'MODIFIER', 'modifier_ui_bool'),
+ CONSTRAINT_KEY: ("Constraints", 'CONSTRAINT', 'constraint_ui_bool'),
+ MATERIAL_SLOT_KEY: ("Materials", 'MATERIAL', 'material_ui_bool'),
+ SHAPE_KEY_KEY: ("Shape Keys", 'SHAPEKEY_DATA', 'shapekey_ui_bool'),
+ ATTRIBUTE_KEY: ("Attributes", 'EVENT_A', 'attribute_ui_bool'),
+ PARENT_KEY: ("Parent", 'FILE_PARENT', 'file_parent_ui_bool'),
+}
+
+# Convert it to the format that EnumProperty.items wants:
+# a list of 5-tuples; re-use the name as the description in the 3rd element, add an index as the 5th.
+TRANSFER_DATA_TYPES_ENUM_ITEMS = [
+ (tup[0], tup[1][0], tup[1][0], tup[1][1], i)
+ for i, tup in enumerate(TRANSFER_DATA_TYPES.items())
+]
+
+
+# Name used in all material transferable data
+MATERIAL_TRANSFER_DATA_ITEM_NAME = "All Materials"
+
+# Name used in parent transferable data
+PARENT_TRANSFER_DATA_ITEM_NAME = "Parent Relationship"
+
+MATERIAL_ATTRIBUTE_NAME = "material_index"
+
+
+###################
+# SHARED IDs
+###################
+
+# SHARED ID Icons
+GEO_NODE = "GEOMETRY_NODES"
+IMAGE = "IMAGE_DATA"
+BLANK = "BLANK1"
+
+
+###################
+# Publish
+###################
+
+# List of the different states used when publishing a final asset
+PUBLISH_TYPES = [
+ (
+ "publish",
+ "Active",
+ "Publish a new active version that will become the latest published version",
+ ),
+ (
+ "staged",
+ "Staged",
+ """Publish a staged version that will replace the last active version as the Push/Pull target.
+ For internal asset pipeline use only""",
+ ),
+ (
+ "review",
+ "Review",
+ "Test the results that will be published in the review area; will not be used as the Push/Pull target",
+ ),
+]
+PUBLISH_KEYS = [pub_type[0] for pub_type in PUBLISH_TYPES]
+ACTIVE_PUBLISH_KEY = PUBLISH_KEYS[0]
+STAGED_PUBLISH_KEY = PUBLISH_KEYS[1]
diff --git a/scripts-blender/addons/asset_pipeline/docs/production_config_example/hooks.py b/scripts-blender/addons/asset_pipeline/docs/production_config_example/hooks.py
deleted file mode 100644
index a1ee3528..00000000
--- a/scripts-blender/addons/asset_pipeline/docs/production_config_example/hooks.py
+++ /dev/null
@@ -1,51 +0,0 @@
-from typing import Any, Dict, List, Set, Union, Optional
-import bpy
-
-from asset_pipeline.api import hook, Wildcard, DoNotMatch
-
-"""
-Hooks can be matched with the following parameters.
-As match input you can use str, list, Wildcard, DoNotMatch
-
-Examples:
-- Global Hooks (no match filter): @hook()
-- Hooks for an asset type only: @hook(match_asset_type="Character")
-- Hooks for a specific asset: @hook(match_asset="Sprite")
-- Hooks for a task layer only: @hook(match_task_layers=["ShadingTaskLayer", "RiggingTaskLayer"])
-- Hooks for an asset and task layer combination: @hook(match_asset="Sprite", match_task_layers="ShadingTaskLayer")
-Note: the decorator always needs to be called, even when it takes no arguments.
-
-The asset-pipeline executes hooks in a fixed order, which is exactly the order of the examples above:
-
-1. Global hooks
-2. Asset Type Hooks
-3. Task Layer Hooks
-4. Asset Hooks
-5. Asset + TaskLayer specific Hooks
-
-
-The function itself should always take **kwargs as a parameter. The asset-pipeline automatically passes a couple of useful keyword arguments to the function:
-- `asset_collection`: bpy.types.Collection
-- `context`: bpy.types.Context
-- `asset_task`: asset_pipeline.asset_files.AssetTask
-- `asset_dir`: asset_pipeline.asset_files.AssetDir
-
-By exposing these parameters in the hook function you can use them in your code.
-"""
-
-@hook()
-def test_hook_A(asset_collection: bpy.types.Collection, **kwargs) -> None:
- print("Test Hook A running!")
-
-
-@hook(match_asset="Test")
-def test_hook_B(**kwargs) -> None:
- print("Test Hook B running!")
-
-
-@hook(
- match_asset="Generic Sprite",
- match_task_layers="ShadingTaskLayer",
-)
-def test_hook_sprite(asset_collection: bpy.types.Collection, **kwargs) -> None:
- print(f"Test Hook Sprite {asset_collection} is running!")
diff --git a/scripts-blender/addons/asset_pipeline/docs/production_config_example/task_layers.py b/scripts-blender/addons/asset_pipeline/docs/production_config_example/task_layers.py
deleted file mode 100644
index d7a16260..00000000
--- a/scripts-blender/addons/asset_pipeline/docs/production_config_example/task_layers.py
+++ /dev/null
@@ -1,468 +0,0 @@
-from typing import Any, Dict, List, Set, Union, Optional
-
-import bpy
-from asset_pipeline.api import (
- AssetTransferMapping,
- TaskLayer,
- BuildContext
-)
-
-"""
-The AssetTransferMapping class represents a mapping between a source and a target.
-It contains an object mapping which connects each source object with a target.
-The maps are just dictionaries where the key is the source and the value the target.
-Both key and target are actual Blender ID Datablocks.
-This makes it easy to write Merge Instructions.
-With it you can access things like:
-
-transfer_mapping.object_map: Dict[bpy.types.Object, bpy.types.Object]
-transfer_mapping.collection_map: Dict[bpy.types.Collection, bpy.types.Collection]
-transfer_mapping.material_map: Dict[bpy.types.Material, bpy.types.Material]
-
-For all mappings:
-Key: Source
-Value: Target
-
-You can also access the root Asset source and Target Collection:
-transfer_mapping.source_coll: bpy.types.Collection
-transfer_mapping.target_coll: bpy.types.Collection
-
-Beyond that, you can access objects which had no match.
-transfer_mapping.no_match_target_objs: Set[bpy.types.Object] (all objs that exist in target but not in source)
-transfer_mapping.no_match_source_objs: Set[bpy.types.Object] (vice versa)
-
-
-Furthermore, users can define custom transfer settings by defining a TransferSettings
-class which inherits from a PropertyGroup in the task_layer module. Users can query these settings
-by checking the transfer_settings argument.
- -transfer_settings.custom_option -""" - -class TransferSettings(bpy.types.PropertyGroup): - imp_mat: bpy.props.BoolProperty(name="Materials", default=True) # type: ignore - imp_uv: bpy.props.BoolProperty(name="UVs", default=True) # type: ignore - imp_vcol: bpy.props.BoolProperty(name="Vertex Colors", default=True) # type: ignore - transfer_type: bpy.props.EnumProperty( # type: ignore - name="Transfer Type", - items=[("VERTEX_ORDER", "Vertex Order", ""), ("PROXIMITY", "Proximity", "")], - ) - -class RiggingTaskLayer(TaskLayer): - name = "Rigging" - order = 0 - - @classmethod - def transfer_data( - cls, - context: bpy.types.Context, - build_context: BuildContext, - transfer_mapping: AssetTransferMapping, - transfer_settings: bpy.types.PropertyGroup, - ) -> None: - print(f"Processing data from TaskLayer {cls.__name__}") - -# Not allowed: 2 TaskLayer Classes with the same ClassName (Note: note 'name' attribute) -class ShadingTaskLayer(TaskLayer): - name = "Shading" - order = 2 - - @classmethod - def transfer_data( - cls, - context: bpy.types.Context, - build_context: BuildContext, - transfer_mapping: AssetTransferMapping, - transfer_settings: bpy.types.PropertyGroup, - ) -> None: - print(f"Processing data from TaskLayer {cls.__name__}") - - settings = transfer_settings - - for obj_source, obj_target in transfer_mapping.object_map.items(): - - if not obj_target.type in ["MESH", "CURVE"]: - continue - - if obj_target.name.startswith("WGT-"): - while obj_target.material_slots: - obj_target.active_material_index = 0 - bpy.ops.object.material_slot_remove({"object": obj_target}) - continue - - if obj_target.type != obj_source.type: - print(f"Warning: {obj_target.name} of incompatible object type") - continue - - # Transfer material slot assignments. - # Delete all material slots of target object. - while len(obj_target.material_slots) > len(obj_source.material_slots): - obj_target.active_material_index = len(obj_source.material_slots) - bpy.ops.object.material_slot_remove({"object": obj_target}) - - # Transfer material slots - for idx in range(len(obj_source.material_slots)): - if idx >= len(obj_target.material_slots): - bpy.ops.object.material_slot_add({"object": obj_target}) - obj_target.material_slots[idx].link = obj_source.material_slots[ - idx - ].link - obj_target.material_slots[idx].material = obj_source.material_slots[ - idx - ].material - - # Transfer active material slot index - obj_target.active_material_index = obj_source.active_material_index - - # Transfer material slot assignments for curve - if obj_target.type == "CURVE": - for spl_to, spl_from in zip( - obj_target.data.splines, obj_source.data.splines - ): - spl_to.material_index = spl_from.material_index - - # Rest of the loop applies only to meshes. 
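# A hedged aside on the pattern used above: the dict-style context override,
# e.g. bpy.ops.object.material_slot_remove({"object": obj_target}), was
# deprecated in Blender 3.2. Assuming a recent Blender API, the modern
# equivalent would be:
#
#     with bpy.context.temp_override(object=obj_target):
#         bpy.ops.object.material_slot_remove()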
- if obj_target.type != "MESH": - continue - - # Transfer material slot assignments for mesh - for pol_to, pol_from in zip( - obj_target.data.polygons, obj_source.data.polygons - ): - pol_to.material_index = pol_from.material_index - pol_to.use_smooth = pol_from.use_smooth - - # Transfer UV Seams - if settings.imp_uv: - if settings.transfer_type == "VERTEX_ORDER" and len( - obj_source.data.edges - ) == len(obj_target.data.edges): - for edge_from, edge_to in zip( - obj_source.data.edges, obj_target.data.edges - ): - edge_to.use_seam = edge_from.use_seam - else: - bpy.ops.object.data_transfer( - { - "object": obj_source, - "selected_editable_objects": [obj_target], - }, - data_type="SEAM", - edge_mapping="NEAREST", - mix_mode="REPLACE", - ) - - # Transfer UV layers - if settings.imp_uv: - while len(obj_target.data.uv_layers) > 0: - rem = obj_target.data.uv_layers[0] - obj_target.data.uv_layers.remove(rem) - if settings.transfer_type == "VERTEX_ORDER": - for uv_from in obj_source.data.uv_layers: - uv_to = obj_target.data.uv_layers.new( - name=uv_from.name, do_init=False - ) - for loop in obj_target.data.loops: - try: - uv_to.data[loop.index].uv = uv_from.data[loop.index].uv - except: - print( - f"no UVs transferred for {obj_target.name}. Probably mismatching vertex count: {len(obj_source.data.vertices)} vs {len(obj_target.data.vertices)}" - ) - break - elif settings.transfer_type == "PROXIMITY": - bpy.ops.object.data_transfer( - { - "object": obj_source, - "selected_editable_objects": [obj_target], - }, - data_type="UV", - use_create=True, - loop_mapping="NEAREST_POLYNOR", - poly_mapping="NEAREST", - layers_select_src="ALL", - layers_select_dst="NAME", - mix_mode="REPLACE", - ) - # Make sure correct layer is set to active - for uv_l in obj_source.data.uv_layers: - if uv_l.active_render: - obj_target.data.uv_layers[uv_l.name].active_render = True - break - - # Transfer Vertex Colors - if settings.imp_vcol: - while len(obj_target.data.vertex_colors) > 0: - rem = obj_target.data.vertex_colors[0] - obj_target.data.vertex_colors.remove(rem) - if settings.transfer_type == "VERTEX_ORDER": - for vcol_from in obj_source.data.vertex_colors: - vcol_to = obj_target.data.vertex_colors.new( - name=vcol_from.name, do_init=False - ) - for loop in obj_target.data.loops: - try: - vcol_to.data[loop.index].color = vcol_from.data[ - loop.index - ].color - except: - print( - f"no Vertex Colors transferred for {obj_target.name}. 
Probably mismatching vertex count: {len(obj_source.data.vertices)} vs {len(obj_target.data.vertices)}" - ) - elif settings.transfer_type == "PROXIMITY": - bpy.ops.object.data_transfer( - { - "object": obj_source, - "selected_editable_objects": [obj_target], - }, - data_type="VCOL", - use_create=True, - loop_mapping="NEAREST_POLYNOR", - layers_select_src="ALL", - layers_select_dst="NAME", - mix_mode="REPLACE", - ) - - # Set 'PREVIEW' vertex color layer as active - for idx, vcol in enumerate(obj_target.data.vertex_colors): - if vcol.name == "PREVIEW": - obj_target.data.vertex_colors.active_index = idx - break - - # Set 'Baking' or 'UVMap' uv layer as active - for idx, uvlayer in enumerate(obj_target.data.uv_layers): - if uvlayer.name == "Baking": - obj_target.data.uv_layers.active_index = idx - break - if uvlayer.name == "UVMap": - obj_target.data.uv_layers.active_index = idx - - # Select preview texture as active if found - for mslot in obj_target.material_slots: - if not mslot.material or not mslot.material.node_tree: - continue - for node in mslot.material.node_tree.nodes: - if not node.type == "TEX_IMAGE": - continue - if not node.image: - continue - if "preview" in node.image.name: - mslot.material.node_tree.nodes.active = node - break - - -### Object utilities -def copy_parenting(source_ob: bpy.types.Object, target_ob: bpy.types.Object) -> None: - """Copy parenting data from one object to another.""" - target_ob.parent = source_ob.parent - target_ob.parent_type = source_ob.parent_type - target_ob.parent_bone = source_ob.parent_bone - target_ob.matrix_parent_inverse = source_ob.matrix_parent_inverse.copy() - - -def copy_attributes(a: Any, b: Any) -> None: - keys = dir(a) - for key in keys: - if ( - not key.startswith("_") - and not key.startswith("error_") - and key != "group" - and key != "is_valid" - and key != "rna_type" - and key != "bl_rna" - ): - try: - setattr(b, key, getattr(a, key)) - except AttributeError: - pass - - -def copy_driver( - source_fcurve: bpy.types.FCurve, - target_obj: bpy.types.Object, - data_path: Optional[str] = None, - index: Optional[int] = None, -) -> bpy.types.FCurve: - if not data_path: - data_path = source_fcurve.data_path - - new_fc = None - try: - if index: - new_fc = target_obj.driver_add(data_path, index) - else: - new_fc = target_obj.driver_add(data_path) - except: - print(f"Couldn't copy driver {source_fcurve.data_path} to {target_obj.name}") - return - - copy_attributes(source_fcurve, new_fc) - copy_attributes(source_fcurve.driver, new_fc.driver) - - # Remove default modifiers, variables, etc. 
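# (Added note: driver_add() appears to return an F-Curve pre-populated with a
#  default modifier, hence the cleanup below; clearing modifiers and variables
#  first ensures the copy loops further down produce an exact mirror of the
#  source driver setup.)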
- for m in new_fc.modifiers: - new_fc.modifiers.remove(m) - for v in new_fc.driver.variables: - new_fc.driver.variables.remove(v) - - # Copy modifiers - for m1 in source_fcurve.modifiers: - m2 = new_fc.modifiers.new(type=m1.type) - copy_attributes(m1, m2) - - # Copy variables - for v1 in source_fcurve.driver.variables: - v2 = new_fc.driver.variables.new() - copy_attributes(v1, v2) - for i in range(len(v1.targets)): - copy_attributes(v1.targets[i], v2.targets[i]) - - return new_fc - - -def copy_drivers(source_ob: bpy.types.Object, target_ob: bpy.types.Object) -> None: - """Copy all drivers from one object to another.""" - if not hasattr(source_ob, "animation_data") or not source_ob.animation_data: - return - - for fc in source_ob.animation_data.drivers: - copy_driver(fc, target_ob) - - -def copy_rigging_object_data( - source_ob: bpy.types.Object, target_ob: bpy.types.Object -) -> None: - """Copy all object data that could be relevant to rigging.""" - # TODO: Object constraints, if needed. - copy_drivers(source_ob, target_ob) - copy_parenting(source_ob, target_ob) - # HACK: For some reason Armature constraints on grooming objects lose their target when updating? Very strange... - for c in target_ob.constraints: - if c.type == "ARMATURE": - for t in c.targets: - if t.target == None: - t.target = target_ob.parent - - -class GroomingTaskLayer(TaskLayer): - name = "Grooming" - order = 1 - - @classmethod - def transfer_data( - cls, - context: bpy.types.Context, - build_context: BuildContext, - transfer_mapping: AssetTransferMapping, - transfer_settings: bpy.types.PropertyGroup, - ) -> None: - - print(f"Processing data from TaskLayer {cls.__name__}") - coll_source = transfer_mapping.source_coll - coll_target = transfer_mapping.target_coll - for obj_source, obj_target in transfer_mapping.object_map.items(): - if not "PARTICLE_SYSTEM" in [mod.type for mod in obj_source.modifiers]: - continue - l = [] - for mod in obj_source.modifiers: - if not mod.type == "PARTICLE_SYSTEM": - l += [mod.show_viewport] - mod.show_viewport = False - - bpy.ops.particle.copy_particle_systems( - {"object": obj_source, "selected_editable_objects": [obj_target]} - ) - - c = 0 - for mod in obj_source.modifiers: - if mod.type == "PARTICLE_SYSTEM": - continue - mod.show_viewport = l[c] - c += 1 - - # TODO: handle cases where collections with exact naming convention cannot be found - try: - coll_from_hair = next( - c for name, c in coll_source.children.items() if ".hair" in name - ) - coll_from_part = next( - c - for name, c in coll_from_hair.children.items() - if ".hair.particles" in name - ) - coll_from_part_proxy = next( - c - for name, c in coll_from_part.children.items() - if ".hair.particles.proxy" in name - ) - except: - print( - "Warning: Could not find existing particle hair collection. Make sure you are following the exact naming and structuring convention!" 
- ) - return - - # link 'from' hair.particles collection in 'to' - try: - coll_to_hair = next( - c for name, c in coll_target.children.items() if ".hair" in name - ) - except: - coll_target.children.link(coll_from_hair) - return - - coll_to_hair.children.link(coll_from_part) - try: - coll_to_part = next( - c - for name, c in coll_to_hair.children.items() - if ".hair.particles" in name - ) - except: - print( - "Warning: Failed to find particle hair collections in target collection" - ) - coll_to_part.user_clear() - bpy.data.collections.remove(coll_to_part) - return - - # transfer shading - # transfer_dict = map_objects_by_name(coll_to_part, coll_from_part) - # transfer_shading_data(context, transfer_dict) - ShadingTaskLayer.transfer_data(context, transfer_mapping, transfer_settings) - - # transfer modifers - for obj_source, obj_target in transfer_mapping.object_map.items(): - if not "PARTICLE_SYSTEM" in [m.type for m in obj_target.modifiers]: - bpy.ops.object.make_links_data( - {"object": obj_source, "selected_editable_objects": [obj_target]}, - type="MODIFIERS", - ) - - # We want to rig the hair base mesh with an Armature modifier, so transfer vertex groups by proximity. - bpy.ops.object.data_transfer( - {"object": obj_source, "selected_editable_objects": [obj_target]}, - data_type="VGROUP_WEIGHTS", - use_create=True, - vert_mapping="NEAREST", - layers_select_src="ALL", - layers_select_dst="NAME", - mix_mode="REPLACE", - ) - - # We used to want to rig the auto-generated hair particle proxy meshes with Surface Deform, so re-bind those. - # NOTE: Surface Deform probably won't be used for final rigging - for mod in obj_target.modifiers: - if mod.type == "SURFACE_DEFORM" and mod.is_bound: - for i in range(2): - bpy.ops.object.surfacedeform_bind( - {"object": obj_target}, modifier=mod.name - ) - - copy_rigging_object_data(obj_source, obj_target) - # remove 'to' hair.particles collection - coll_to_part.user_clear() - bpy.data.collections.remove(coll_to_part) - - return - - diff --git a/scripts-blender/addons/asset_pipeline/docs/production_config_heist/hooks.py b/scripts-blender/addons/asset_pipeline/docs/production_config_heist/hooks.py deleted file mode 100644 index a8e0ccd0..00000000 --- a/scripts-blender/addons/asset_pipeline/docs/production_config_heist/hooks.py +++ /dev/null @@ -1,110 +0,0 @@ -from typing import Any, Dict, List, Set, Union, Optional -import bpy - -from asset_pipeline.api import hook, Wildcard, DoNotMatch - -""" -Hooks can be matched with the following parameters. -As match input you can use str, list, WildCard, DoNotMatch - -Examples: -- Global Hooks (No match filter): @hook() -- Hooks for an asset type only: @hook(match_asset_type="Character") -- Hooks for a specific asset: @hook(match_asset: "Sprite") -- Hooks for a task layer only @hook(match_task_layers: ["ShadingTaskLayer", "RiggingTaskLayer"] -- Hooks for an asset and a task layer combination: @hook(macth_asset: "Sprite", match_task_layers: "ShadingTaskLayer") -Note: the decorator needs to be executed. - -It is important to note that the asset-pipeline follows a certain order to execute the hooks. And that is exactly the one of the examples hook described above: - -1. Global hooks -2. Asset Type Hooks -3. Task Layer Hooks -4. Asset Hooks -5. Asset + TaskLayer specific Hooks - - -The function itself should always have **\*\*kwargs** as a parameter. 
The asset-pipeline automatically passes a couple of useful keyword arguments to the function: -- `asset_collection`: bpy.types.Collection -- `context`: bpy.types.Context -- `asset_task`: asset_pipeline.asset_files.AssetTask -- `asset_dir`: asset_pipeline.asset_files.AssetDir - -By exposing these parameters in the hook function you can use them in your code. -""" - -@hook( - match_task_layers="ModelingTaskLayer", -) -def geometry_cleanup(context: bpy.types.Context, asset_collection: bpy.types.Collection, **kwargs) -> None: - for ob in asset_collection.all_objects: - if not ob.data: - continue - if not ob.type == 'MESH': # TODO: Support other object types - continue - # make meshes single user - if ob.data.users > 1: - ob.data = ob.data.copy() - - # check for modifiers to apply - if not [mod for mod in ob.modifiers if mod.name.split('-')[0]=='APL']: - continue - - # remember modifier visibility - mod_vis = [] - for i, mod in enumerate(ob.modifiers): - if mod.name.split('-')[0] != 'APL': - if mod.show_viewport: - mod_vis += [i] - mod.show_viewport = False - - # apply modifiers - depsgraph = context.evaluated_depsgraph_get() - old_mesh = ob.data - ob.data = bpy.data.meshes.new_from_object(ob.evaluated_get(depsgraph)) - ob.data.name = old_mesh.name - bpy.data.meshes.remove(old_mesh) - - for i in mod_vis[::-1]: - ob.modifiers[i].show_viewport = True - for mod in ob.modifiers: - if mod.name.split('-')[0] == 'APL': - ob.modifiers.remove(mod) - - -@hook( - match_task_layers="ShadingTaskLayer", -) -def set_preview_shading(context: bpy.types.Context, asset_collection: bpy.types.Collection, **kwargs) -> None: - for ob in asset_collection.all_objects: - if not ob.data: - continue - if not ob.type == 'MESH': - continue - - # Set 'PREVIEW' vertex color layer as active - for idx, vcol in enumerate(ob.data.vertex_colors): - if vcol.name == "PREVIEW": - ob.data.vertex_colors.active_index = idx - break - - # Set 'Baking' or 'UVMap' uv layer as active - for idx, uvlayer in enumerate(ob.data.uv_layers): - if uvlayer.name == "Baking": - ob.data.uv_layers.active_index = idx - break - if uvlayer.name == "UVMap": - ob.data.uv_layers.active_index = idx - - # Select preview texture as active if found - for mslot in ob.material_slots: - if not mslot.material or not mslot.material.node_tree: - continue - for node in mslot.material.node_tree.nodes: - if not node.type == "TEX_IMAGE": - continue - if not node.image: - continue - if "preview" in node.image.name: - mslot.material.node_tree.nodes.active = node - break diff --git a/scripts-blender/addons/asset_pipeline/docs/production_config_heist/task_layers.py b/scripts-blender/addons/asset_pipeline/docs/production_config_heist/task_layers.py deleted file mode 100644 index cba71bd6..00000000 --- a/scripts-blender/addons/asset_pipeline/docs/production_config_heist/task_layers.py +++ /dev/null @@ -1,838 +0,0 @@ -from typing import Any, Dict, List, Set, Union, Optional - -import bpy -import mathutils -import bmesh -import numpy as np -from asset_pipeline.api import ( - AssetTransferMapping, - TaskLayer, - BuildContext, -) - -class TransferSettings(bpy.types.PropertyGroup): - pass - #imp_mat: bpy.props.BoolProperty(name="Materials", default=True) # type: ignore - #imp_uv: bpy.props.BoolProperty(name="UVs", default=True) # type: ignore - #imp_vcol: bpy.props.BoolProperty(name="Vertex Colors", default=True) # type: ignore - #transfer_type: bpy.props.EnumProperty( # type: ignore - # name="Transfer Type", - # items=[("VERTEX_ORDER", "Vertex Order", ""), ("PROXIMITY", "Proximity", 
"")], - #) - -class RiggingTaskLayer(TaskLayer): - name = "Rigging" - order = 0 - - @classmethod - def transfer_data( - cls, - context: bpy.types.Context, - build_context: BuildContext, - transfer_mapping: AssetTransferMapping, - transfer_settings: bpy.types.PropertyGroup, - ) -> None: - print(f"\n\033[1mProcessing data from {cls.__name__}...\033[0m") - - settings = transfer_settings - - depsgraph = context.evaluated_depsgraph_get() - transfer_mapping.generate_mapping() - - # add prefixes to existing modifiers - for obj_source, obj_target in transfer_mapping.object_map.items(): - prefix_modifiers(obj_target, 0) - - -class ModelingTaskLayer(TaskLayer): - name = "Modeling" - order = 1 - ''' - Only affects objects of the target inside collections ending with '.geometry'. New objects can be created anywhere. - New modifiers are automatically prefixed with 'GEO-'. Any modifier that is given the prefix 'APL-' will be automatically applied after push. - The order of the modifier stack is generally owned by the rigging task layer. Newly created modifiers in the modeling task layer are an exception. - ''' - - @classmethod - def transfer_data( - cls, - context: bpy.types.Context, - build_context: BuildContext, - transfer_mapping: AssetTransferMapping, - transfer_settings: bpy.types.PropertyGroup, - - ) -> None: - print(f"\n\033[1mProcessing data from {cls.__name__}...\033[0m") - - settings = transfer_settings - - depsgraph = context.evaluated_depsgraph_get() - transfer_mapping.generate_mapping() - - # identify geometry collections in source and target - geometry_colls_source = [] - for coll in transfer_mapping.collection_map.keys(): - if coll.name.split('.')[-2] == 'geometry': - geometry_colls_source += [coll] - geometry_objs_source = {ob for coll in geometry_colls_source for ob in list(coll.all_objects)} - - geometry_colls_target = [] - for coll in transfer_mapping.collection_map.keys(): - if coll.name.split('.')[-2] == 'geometry': - geometry_colls_target += [transfer_mapping.collection_map[coll]] - geometry_objs_target = {ob for coll in geometry_colls_target for ob in list(coll.all_objects)} - - # handle new objects - for ob in transfer_mapping.no_match_source_objs: - # link new object to target parent collection - for coll_source in transfer_mapping.collection_map.keys(): - if ob in set(coll_source.objects): - transfer_mapping.collection_map[coll_source].objects.link(ob) - - # (replace object dependencies) - pass - - # handle removed objects - for ob in transfer_mapping.no_match_target_objs: - # delete objects inside the target .geometry collections - if ob in geometry_objs_target: - print(info_text(f"DELETING {ob.name}")) - bpy.data.objects.remove(ob) - - # transfer data between object geometries - for obj_source, obj_target in transfer_mapping.object_map.items(): - if obj_source not in geometry_objs_source: - continue - - # transfer object transformation (world space) - con_vis = [] - for con in obj_target.constraints: - con_vis += [con.enabled] - con.enabled = False - for con in obj_source.constraints: - con.enabled = False - depsgraph = context.evaluated_depsgraph_get() - - obj_target.matrix_world = obj_source.matrix_world - for con, vis in zip(obj_target.constraints, con_vis): - con.enabled = vis - - # TODO: support object type change - if obj_source.type != obj_target.type: - print(warning_text(f"Mismatching object type. 
Skipping {obj_target.name}.")) - continue - - # check for topology match (vertex, edge, loop count) (mesh, curve separately) - topo_match = match_topology(obj_source, obj_target) - if topo_match is None: # TODO: support geometry types other than mesh or curve - continue - - # if topology matches: transfer position attribute (keeping shapekeys intact) - if topo_match: - if obj_target.type == 'MESH': - if len(obj_target.data.vertices)==0: - print(warning_text(f"Mesh object '{obj_target.name}' has empty object data")) - continue - offset = [obj_source.data.vertices[i].co - obj_target.data.vertices[i].co for i in range(len(obj_source.data.vertices))] - - offset_sum = 0 - for x in offset: - offset_sum += x.length - offset_avg = offset_sum/len(offset) - if offset_avg>0.1: - print(warning_text(f"Average Vertex offset is {offset_avg} for {obj_target.name}")) - - for i, vec in enumerate(offset): - obj_target.data.vertices[i].co += vec - - # update shapekeys - if obj_target.data.shape_keys: - for key in obj_target.data.shape_keys.key_blocks: - for i, point in enumerate([dat.co for dat in key.data]): - key.data[i].co = point + offset[i] - elif obj_target.type == 'CURVE': # TODO: proper geometry transfer for curves - obj_target.data = obj_source.data - else: - pass - - # if topology does not match replace geometry (throw warning) -> TODO: handle data transfer onto mesh for simple cases (trivial topological changes: e.g. added separate mesh island, added span) - else: - # replace the object data and do proximity transfer of all rigging data - - # generate new transfer source object from mesh data - obj_target_original = bpy.data.objects.new(f"{obj_target.name}.original", obj_target.data) - if obj_target.data.shape_keys: - sk_original = obj_target.data.shape_keys.copy() - else: sk_original = None - context.scene.collection.objects.link(obj_target_original) - - print(warning_text(f"Topology Mismatch! 
Replacing object data and transferring with potential data loss on '{obj_target.name}'")) - obj_target.data = obj_source.data - - # transfer weights - bpy.ops.object.data_transfer( - { - "object": obj_target_original, - "active_object": obj_target_original, - "selected_editable_objects": [obj_target], - }, - data_type="VGROUP_WEIGHTS", - use_create=True, - vert_mapping='POLYINTERP_NEAREST', - layers_select_src="ALL", - layers_select_dst="NAME", - mix_mode="REPLACE", - ) - - # transfer shapekeys - transfer_shapekeys_proximity(obj_target_original, obj_target) - - # transfer drivers - copy_drivers(sk_original, obj_target.data.shape_keys) - - del sk_original - bpy.data.objects.remove(obj_target_original) - - # sync modifier stack (those without prefix on the source are added and prefixed, those with matching/other prefix are synced/ignored based on their prefix) - # add prefix to existing modifiers - prefix_modifiers(obj_source, 1) - # remove old and sync existing modifiers TODO: Stack position and parameters - for mod in obj_target.modifiers: - if mod.name.split('-')[0] not in ['GEO', 'APL']: - continue - if mod.name not in [m.name for m in obj_source.modifiers]: - print(info_text(f"Removing modifier {mod.name}")) - obj_target.modifiers.remove(mod) - - # transfer new modifiers - for i, mod in enumerate(obj_source.modifiers): - if mod.name.split('-')[0] not in ['GEO', 'APL']: - continue - if mod.name in [m.name for m in obj_target.modifiers]: - continue - mod_new = obj_target.modifiers.new(mod.name, mod.type) - # sort new modifier at correct index (default to beginning of the stack) - idx = 0 - if i>0: - name_prev = obj_source.modifiers[i-1].name - for target_mod_i, target_mod in enumerate(obj_target.modifiers): - if target_mod.name == name_prev: - idx = target_mod_i+1 - bpy.ops.object.modifier_move_to_index({'object': obj_target}, modifier=mod_new.name, index=idx) - - # sync modifier settings - for i, mod_source in enumerate(obj_source.modifiers): - mod_target = obj_target.modifiers.get(mod_source.name) - if not mod_target: - continue - if mod_source.name.split('-')[0] not in ['GEO', 'APL']: - continue - for prop in [p.identifier for p in mod_source.bl_rna.properties if not p.is_readonly]: - value = getattr(mod_source, prop) - if type(value) == bpy.types.Object and value in transfer_mapping.object_map: - # If a modifier is referencing a .TASK object, - # remap that reference to a .TARGET object. - # (Eg. modeling Mirror modifier with a mirror object) - value = transfer_mapping.object_map[value] - setattr(mod_target, prop, value) - - # rebind modifiers (corr. smooth, surf. 
deform, mesh deform) - for mod in obj_target.modifiers: - if mod.type == 'SURFACE_DEFORM': - if not mod.is_bound: - continue - for i in range(2): - bpy.ops.object.surfacedeform_bind({"object": obj_target,"active_object": obj_target}, modifier=mod.name) - elif mod.type == 'MESH_DEFORM': - if not mod.is_bound: - continue - for i in range(2): - bpy.ops.object.meshdeform_bind({"object": obj_target,"active_object": obj_target}, modifier=mod.name) - elif mod.type == 'CORRECTIVE_SMOOTH': - if not mod.is_bind: - continue - for i in range(2): - bpy.ops.object.correctivesmooth_bind({"object": obj_target,"active_object": obj_target}, modifier=mod.name) - - - # restore multiusers - if not (build_context.is_push or type(cls) in build_context.asset_context.task_layer_assembly.get_used_task_layers()): - meshes_dict = dict() - for ob in transfer_mapping.object_map.keys(): - if not ob.data: - continue - if ob.type not in ['MESH', 'CURVE']: - continue - if ob.data not in meshes_dict.keys(): - meshes_dict[ob.data] = [ob] - else: - meshes_dict[ob.data] += [ob] - for mesh, objects in meshes_dict.items(): - main_mesh = transfer_mapping.object_map[objects[0]].data - for ob in objects: - transfer_mapping.object_map[ob].data = main_mesh - -def prefix_modifiers(obj: bpy.types.Object, idx: int, delimiter = '-') -> None: - prefixes = ['RIG', 'GEO', 'APL'] - for mod in obj.modifiers: - if not mod.name.split(delimiter)[0] in prefixes: - mod.name = f'{prefixes[idx]}{delimiter}{mod.name}' - -# Not allowed: 2 TaskLayer Classes with the same ClassName (Note: note 'name' attribute) -class ShadingTaskLayer(TaskLayer): - name = "Shading" - order = 3 - - @classmethod - def transfer_data( - cls, - context: bpy.types.Context, - build_context: BuildContext, - transfer_mapping: AssetTransferMapping, - transfer_settings: bpy.types.PropertyGroup, - ) -> None: - print(f"\n\033[1mProcessing data from {cls.__name__}...\033[0m") - - settings = transfer_settings - - depsgraph = context.evaluated_depsgraph_get() - transfer_mapping.generate_mapping() - - for obj_source, obj_target in transfer_mapping.object_map.items(): - - if not obj_target.type in ["MESH", "CURVE"]: - continue - - if obj_target.name.startswith("WGT-"): - while obj_target.material_slots: - obj_target.active_material_index = 0 - bpy.ops.object.material_slot_remove({"object": obj_target}) - continue - - # TODO: support object type change - if obj_source.type != obj_target.type: - print(warning_text(f"Mismatching object type. Skipping {obj_target.name}.")) - continue - - # Transfer material slot assignments. - # Delete all material slots of target object. 
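# (Added note: this is a two-pass slot sync. The while-loop below trims
#  surplus slots on the target, then the following loop adds any missing
#  slots and copies each slot's link mode and material, so the target ends
#  up with exactly the source's slot layout.)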
- while len(obj_target.material_slots) > len(obj_source.material_slots): - obj_target.active_material_index = len(obj_source.material_slots) - bpy.ops.object.material_slot_remove({"object": obj_target}) - - # Transfer material slots - for idx in range(len(obj_source.material_slots)): - if idx >= len(obj_target.material_slots): - bpy.ops.object.material_slot_add({"object": obj_target}) - obj_target.material_slots[idx].link = obj_source.material_slots[idx].link - obj_target.material_slots[idx].material = obj_source.material_slots[idx].material - - # Transfer active material slot index - obj_target.active_material_index = obj_source.active_material_index - - # Transfer material slot assignments for curve - if obj_target.type == "CURVE": - if len(obj_target.data.splines)==0: - print(warning_text(f"Curve object '{obj_target.name}' has empty object data")) - continue - for spl_to, spl_from in zip(obj_target.data.splines, obj_source.data.splines): - spl_to.material_index = spl_from.material_index - - # Rest of the loop applies only to meshes. - if obj_target.type != "MESH": - continue - - if len(obj_target.data.vertices)==0: - print(warning_text(f"Mesh object '{obj_target.name}' has empty object data")) - continue - - topo_match = match_topology(obj_source, obj_target) - if not topo_match: # TODO: Support trivial topology changes in more solid way than proximity transfer - print(warning_text(f"Mismatch in topology, falling back to proximity transfer. (Object '{obj_target.name}')")) - - # generate new transfer source object from mesh data - obj_source_original = bpy.data.objects.new(f"{obj_source.name}.original", obj_source.data) - context.scene.collection.objects.link(obj_source_original) - - # Transfer face data - if topo_match: - for pol_to, pol_from in zip(obj_target.data.polygons, obj_source.data.polygons): - pol_to.material_index = pol_from.material_index - pol_to.use_smooth = pol_from.use_smooth - else: - obj_source_eval = obj_source.evaluated_get(depsgraph) - for pol_target in obj_target.data.polygons: - (hit, loc, norm, face_index) = obj_source_eval.closest_point_on_mesh(pol_target.center) - pol_source = obj_source_eval.data.polygons[face_index] - pol_target.material_index = pol_source.material_index - pol_target.use_smooth = pol_source.use_smooth - - # Transfer UV Seams - if topo_match: - for edge_from, edge_to in zip(obj_source.data.edges, obj_target.data.edges): - edge_to.use_seam = edge_from.use_seam - else: - bpy.ops.object.data_transfer( - { - "object": obj_source_original, - "active_object": obj_source_original, - "selected_editable_objects": [obj_target], - }, - data_type="SEAM", - edge_mapping="NEAREST", - mix_mode="REPLACE", - ) - - # Transfer UV layers - while len(obj_target.data.uv_layers) > 0: - rem = obj_target.data.uv_layers[0] - obj_target.data.uv_layers.remove(rem) - if topo_match: - for uv_from in obj_source.data.uv_layers: - uv_to = obj_target.data.uv_layers.new(name=uv_from.name, do_init=False) - for loop in obj_target.data.loops: - uv_to.data[loop.index].uv = uv_from.data[loop.index].uv - else: - for uv_from in obj_source.data.uv_layers: - uv_to = obj_target.data.uv_layers.new(name=uv_from.name, do_init=False) - transfer_corner_data(obj_source, obj_target, uv_from.data, uv_to.data, data_suffix = 'uv') - - # Make sure correct layer is set to active - for uv_l in obj_source.data.uv_layers: - if uv_l.active_render: - obj_target.data.uv_layers[uv_l.name].active_render = True - break - - # Transfer Vertex Colors - while len(obj_target.data.vertex_colors) > 0: - rem = 
obj_target.data.vertex_colors[0] - obj_target.data.vertex_colors.remove(rem) - if topo_match: - for vcol_from in obj_source.data.vertex_colors: - vcol_to = obj_target.data.vertex_colors.new(name=vcol_from.name, do_init=False) - for loop in obj_target.data.loops: - vcol_to.data[loop.index].color = vcol_from.data[loop.index].color - else: - for vcol_from in obj_source.data.vertex_colors: - vcol_to = obj_target.data.vertex_colors.new(name=vcol_from.name, do_init=False) - transfer_corner_data(obj_source, obj_target, vcol_from.data, vcol_to.data, data_suffix = 'color') - bpy.data.objects.remove(obj_source_original) - - -### Utilities - -def info_text(text: str) -> str: - return f"\t\033[1mInfo\033[0m\t: "+text - -def warning_text(text: str) -> str: - return f"\t\033[1m\033[93mWarning\033[0m\t: "+text - -def error_text(text: str) -> str: - return f"\t\033[1m\033[91mError\033[0m\t: "+text - -def match_topology(a: bpy.types.Object, b: bpy.types.Object) -> bool: - """Checks if two objects have matching topology (efficiency over exactness)""" - if a.type != b.type: - return False - if a.type == 'MESH': - if len(a.data.vertices) != len(b.data.vertices): - return False - if len(a.data.edges) != len(b.data.edges): - return False - if len(a.data.polygons) != len(b.data.polygons): - return False - for e1, e2 in zip(a.data.edges, b.data.edges): - for v1, v2 in zip(e1.vertices, e2.vertices): - if v1 != v2: - return False - return True - elif a.type == 'CURVE': - if len(a.data.splines) != len(b.data.splines): - return False - for spline1, spline2 in zip(a.data.splines, b.data.splines): - if len(spline1.points) != len(spline2.points): - return False - return True - return None - -def copy_parenting(source_ob: bpy.types.Object, target_ob: bpy.types.Object) -> None: - """Copy parenting data from one object to another.""" - target_ob.parent = source_ob.parent - target_ob.parent_type = source_ob.parent_type - target_ob.parent_bone = source_ob.parent_bone - target_ob.matrix_parent_inverse = source_ob.matrix_parent_inverse.copy() - - -def copy_attributes(a: Any, b: Any) -> None: - keys = dir(a) - for key in keys: - if ( - not key.startswith("_") - and not key.startswith("error_") - and key != "group" - and key != "is_valid" - and key != "rna_type" - and key != "bl_rna" - ): - try: - setattr(b, key, getattr(a, key)) - except AttributeError: - pass - - -def copy_driver( - source_fcurve: bpy.types.FCurve, - target_obj: bpy.types.Object, - data_path: Optional[str] = None, - index: Optional[int] = None, -) -> bpy.types.FCurve: - if not data_path: - data_path = source_fcurve.data_path - - new_fc = None - try: - if index: - new_fc = target_obj.driver_add(data_path, index) - else: - new_fc = target_obj.driver_add(data_path) - except: - print(warning_text(f"Couldn't copy driver {source_fcurve.data_path} to {target_obj.name}")) - return - - copy_attributes(source_fcurve, new_fc) - copy_attributes(source_fcurve.driver, new_fc.driver) - - # Remove default modifiers, variables, etc. 
- for m in new_fc.modifiers: - new_fc.modifiers.remove(m) - for v in new_fc.driver.variables: - new_fc.driver.variables.remove(v) - - # Copy modifiers - for m1 in source_fcurve.modifiers: - m2 = new_fc.modifiers.new(type=m1.type) - copy_attributes(m1, m2) - - # Copy variables - for v1 in source_fcurve.driver.variables: - v2 = new_fc.driver.variables.new() - copy_attributes(v1, v2) - for i in range(len(v1.targets)): - copy_attributes(v1.targets[i], v2.targets[i]) - - return new_fc - - -def copy_drivers(source_ob: bpy.types.Object, target_ob: bpy.types.Object) -> None: - """Copy all drivers from one object to another.""" - if not hasattr(source_ob, "animation_data") or not source_ob.animation_data: - return - - for fc in source_ob.animation_data.drivers: - copy_driver(fc, target_ob) - - -def copy_rigging_object_data( - source_ob: bpy.types.Object, target_ob: bpy.types.Object -) -> None: - """Copy all object data that could be relevant to rigging.""" - # TODO: Object constraints, if needed. - copy_drivers(source_ob, target_ob) - copy_parenting(source_ob, target_ob) - # HACK: For some reason Armature constraints on grooming objects lose their target when updating? Very strange... - for c in target_ob.constraints: - if c.type == "ARMATURE": - for t in c.targets: - if t.target == None: - t.target = target_ob.parent - -# mesh interpolation utilities -def edge_data_split(edge, data_layer, data_suffix: str): - for vert in edge.verts: - vals = [] - for loop in vert.link_loops: - loops_edge_vert = set([loop for f in edge.link_faces for loop in f.loops]) - if loop not in loops_edge_vert: - continue - dat = data_layer[loop.index] - element = list(getattr(dat,data_suffix)) - if not vals: - vals.append(element) - elif not vals[0] == element: - vals.append(element) - if len(vals) > 1: - return True - return False - -def closest_edge_on_face_to_line(face, p1, p2, skip_edges=None): - ''' Returns edge of a face which is closest to line.''' - for edge in face.edges: - if skip_edges: - if edge in skip_edges: - continue - res = mathutils.geometry.intersect_line_line(p1, p2, *[edge.verts[i].co for i in range(2)]) - if not res: - continue - (p_traversal, p_edge) = res - frac_1 = (edge.verts[1].co-edge.verts[0].co).dot(p_edge-edge.verts[0].co)/(edge.verts[1].co-edge.verts[0].co).length**2. - frac_2 = (p2-p1).dot(p_traversal-p1)/(p2-p1).length**2. 
- if (frac_1 >= 0 and frac_1 <= 1) and (frac_2 >= 0 and frac_2 <= 1): - return edge - return None - -def interpolate_data_from_face(bm_source, tris_dict, face, p, data_layer_source, data_suffix = ''): - ''' Returns interpolated value of a data layer within a face closest to a point.''' - - (tri, point) = closest_tri_on_face(tris_dict, face, p) - if not tri: - return None - weights = mathutils.interpolate.poly_3d_calc([tri[i].vert.co for i in range(3)], point) - - if not data_suffix: - cols_weighted = [weights[i]*np.array(data_layer_source[tri[i].index]) for i in range(3)] - col = sum(np.array(cols_weighted)) - else: - cols_weighted = [weights[i]*np.array(getattr(data_layer_source[tri[i].index], data_suffix)) for i in range(3)] - col = sum(np.array(cols_weighted)) - return col - -def closest_face_to_point(bm_source, p_target, bvh_tree = None): - if not bvh_tree: - bvh_tree = mathutils.bvhtree.BVHTree.FromBMesh(bm_source) - (loc, norm, index, distance) = bvh_tree.find_nearest(p_target) - return bm_source.faces[index] - -def tris_per_face(bm_source): - tris_source = bm_source.calc_loop_triangles() - tris_dict = dict() - for face in bm_source.faces: - tris_face = [] - for i in range(len(tris_source))[::-1]: - if tris_source[i][0] in face.loops: - tris_face.append(tris_source.pop(i)) - tris_dict[face] = tris_face - return tris_dict - -def closest_tri_on_face(tris_dict, face, p): - points = [] - dist = [] - tris = [] - for tri in tris_dict[face]: - point = mathutils.geometry.closest_point_on_tri(p, *[tri[i].vert.co for i in range(3)]) - tris.append(tri) - points.append(point) - dist.append((point-p).length) - min_idx = np.argmin(np.array(dist)) - point = points[min_idx] - tri = tris[min_idx] - return (tri, point) - -def transfer_corner_data(obj_source, obj_target, data_layer_source, data_layer_target, data_suffix = ''): - ''' - Transfers interpolated face corner data from data layer of a source object to data layer of a - target object, while approximately preserving data seams (e.g. necessary for UV Maps). - The transfer is face interpolated per target corner within the source face that is closest - to the target corner point and does not have any data seams on the way back to the - source face that is closest to the target face's center. 
- ''' - bm_source = bmesh.new() - bm_source.from_mesh(obj_source.data) - bm_source.faces.ensure_lookup_table() - bm_target = bmesh.new() - bm_target.from_mesh(obj_target.data) - bm_target.faces.ensure_lookup_table() - - bvh_tree = mathutils.bvhtree.BVHTree.FromBMesh(bm_source) - - tris_dict = tris_per_face(bm_source) - - for face_target in bm_target.faces: - face_target_center = face_target.calc_center_median() - - face_source = closest_face_to_point(bm_source, face_target_center, bvh_tree) - - for corner_target in face_target.loops: - #find nearest face on target compared to face that loop belongs to - p = corner_target.vert.co - - face_source_closest = closest_face_to_point(bm_source, p, bvh_tree) - enclosed = face_source_closest is face_source - face_source_int = face_source - if not enclosed: - # traverse faces between point and face center - traversed_faces = set() - traversed_edges = set() - while(face_source_int is not face_source_closest): - traversed_faces.add(face_source_int) - edge = closest_edge_on_face_to_line(face_source_int, face_target_center, p, skip_edges = traversed_edges) - if edge == None: - break - if len(edge.link_faces)!=2: - break - traversed_edges.add(edge) - - split = edge_data_split(edge, data_layer_source, data_suffix) - if split: - break - - # set new source face to other face belonging to edge - face_source_int = edge.link_faces[1] if edge.link_faces[1] is not face_source_int else edge.link_faces[0] - - # avoid looping behaviour - if face_source_int in traversed_faces: - face_source_int = face_source - break - - # interpolate data from selected face - col = interpolate_data_from_face(bm_source, tris_dict, face_source_int, p, data_layer_source, data_suffix) - if col is None: - continue - if not data_suffix: - data_layer_target.data[corner_target.index] = col - else: - setattr(data_layer_target[corner_target.index], data_suffix, list(col)) - return - -def transfer_shapekeys_proximity(obj_source, obj_target) -> None: - ''' - Transfers shapekeys from one object to another - based on the mesh proximity with face interpolation. 
- ''' - # copy shapekey layout - if not obj_source.data.shape_keys: - return - for sk_source in obj_source.data.shape_keys.key_blocks: - if obj_target.data.shape_keys: - sk_target = obj_target.data.shape_keys.key_blocks.get(sk_source.name) - if sk_target: - continue - sk_target = obj_target.shape_key_add() - sk_target.name = sk_source.name - for sk_target in obj_target.data.shape_keys.key_blocks: - sk_source = obj_source.data.shape_keys.key_blocks[sk_target.name] - sk_target.vertex_group = sk_source.vertex_group - sk_target.relative_key = obj_target.data.shape_keys.key_blocks[sk_source.relative_key.name] - - bm_source = bmesh.new() - bm_source.from_mesh(obj_source.data) - bm_source.faces.ensure_lookup_table() - - bvh_tree = mathutils.bvhtree.BVHTree.FromBMesh(bm_source) - - tris_dict = tris_per_face(bm_source) - - for i, vert in enumerate(obj_target.data.vertices): - p = vert.co - face = closest_face_to_point(bm_source, p, bvh_tree) - - (tri, point) = closest_tri_on_face(tris_dict, face, p) - if not tri: - continue - weights = mathutils.interpolate.poly_3d_calc([tri[i].vert.co for i in range(3)], point) - - for sk_target in obj_target.data.shape_keys.key_blocks: - sk_source = obj_source.data.shape_keys.key_blocks.get(sk_target.name) - - vals_weighted = [weights[i]*(sk_source.data[tri[i].vert.index].co-obj_source.data.vertices[tri[i].vert.index].co) for i in range(3)] - val = mathutils.Vector(sum(np.array(vals_weighted))) - sk_target.data[i].co = vert.co+val - -class GroomingTaskLayer(TaskLayer): - name = "Grooming" - order = 2 - - @classmethod - def transfer_data( - cls, - context: bpy.types.Context, - build_context: BuildContext, - transfer_mapping: AssetTransferMapping, - transfer_settings: bpy.types.PropertyGroup, - ) -> None: - - print(f"\n\033[1mProcessing data from {cls.__name__}...\033[0m") - coll_source = transfer_mapping.source_coll - coll_target = transfer_mapping.target_coll - for obj_source, obj_target in transfer_mapping.object_map.items(): - if not "PARTICLE_SYSTEM" in [mod.type for mod in obj_source.modifiers]: - continue - l = [] - for mod in obj_source.modifiers: - if not mod.type == "PARTICLE_SYSTEM": - l += [mod.show_viewport] - mod.show_viewport = False - - bpy.ops.particle.copy_particle_systems( - {"object": obj_source, "selected_editable_objects": [obj_target]} - ) - - c = 0 - for mod in obj_source.modifiers: - if mod.type == "PARTICLE_SYSTEM": - continue - mod.show_viewport = l[c] - c += 1 - - # TODO: handle cases where collections with exact naming convention cannot be found - try: - coll_from_hair = next(c for name, c in coll_source.children.items() if ".hair" in name) - coll_from_part = next(c for name, c in coll_from_hair.children.items() if ".hair.particles" in name) - coll_from_part_proxy = next(c for name, c in coll_from_part.children.items() if ".hair.particles.proxy" in name) - except: - print(warning_text(f"Could not find existing particle hair collection. 
Make sure you are following the exact naming and structuring convention!")) - return - - # link 'from' hair.particles collection in 'to' - try: - coll_to_hair = next(c for name, c in coll_target.children.items() if ".hair" in name) - except: - coll_target.children.link(coll_from_hair) - return - - coll_to_hair.children.link(coll_from_part) - try: - coll_to_part = next(c for name, c in coll_to_hair.children.items() if ".hair.particles" in name) - except: - print(warning_text(f"Failed to find particle hair collections in target collection")) - coll_to_part.user_clear() - bpy.data.collections.remove(coll_to_part) - return - - # transfer shading - # transfer_dict = map_objects_by_name(coll_to_part, coll_from_part) - # transfer_shading_data(context, transfer_dict) - ShadingTaskLayer.transfer_data(context, transfer_mapping, transfer_settings) - - # transfer modifers - for obj_source, obj_target in transfer_mapping.object_map.items(): - if not "PARTICLE_SYSTEM" in [m.type for m in obj_target.modifiers]: - bpy.ops.object.make_links_data( - {"object": obj_source, "selected_editable_objects": [obj_target]}, - type="MODIFIERS", - ) - - # We want to rig the hair base mesh with an Armature modifier, so transfer vertex groups by proximity. - bpy.ops.object.data_transfer( - {"object": obj_source, "selected_editable_objects": [obj_target]}, - data_type="VGROUP_WEIGHTS", - use_create=True, - vert_mapping="NEAREST", - layers_select_src="ALL", - layers_select_dst="NAME", - mix_mode="REPLACE", - ) - - # We used to want to rig the auto-generated hair particle proxy meshes with Surface Deform, so re-bind those. - # NOTE: Surface Deform probably won't be used for final rigging - for mod in obj_target.modifiers: - if mod.type == "SURFACE_DEFORM" and mod.is_bound: - for i in range(2): - bpy.ops.object.surfacedeform_bind( - {"object": obj_target}, modifier=mod.name - ) - - copy_rigging_object_data(obj_source, obj_target) - # remove 'to' hair.particles collection - coll_to_part.user_clear() - bpy.data.collections.remove(coll_to_part) - return diff --git a/scripts-blender/addons/asset_pipeline/images.py b/scripts-blender/addons/asset_pipeline/images.py new file mode 100644 index 00000000..2b9b1c6c --- /dev/null +++ b/scripts-blender/addons/asset_pipeline/images.py @@ -0,0 +1,14 @@ +import bpy +from pathlib import Path +from .prefs import get_addon_prefs + + +def save_images(): + prefs = get_addon_prefs() + user_path = Path(prefs.save_images_path) + default_path = Path(bpy.data.filepath).parent.joinpath("images") + save_path = default_path if prefs.save_images_path == "" else user_path + for img in bpy.data.images: + if img.is_dirty: + filepath = save_path.joinpath(img.name).__str__() + ".png" + img.save(filepath=filepath) diff --git a/scripts-blender/addons/asset_pipeline/lib_util.py b/scripts-blender/addons/asset_pipeline/lib_util.py deleted file mode 100644 index 87e3e1f8..00000000 --- a/scripts-blender/addons/asset_pipeline/lib_util.py +++ /dev/null @@ -1,74 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-#
-# ***** END GPL LICENCE BLOCK *****
-#
-# (c) 2021, Blender Foundation - Paul Golter
-import os
-import logging
-from typing import Optional, Any, Set, Tuple, List, Union
-from pathlib import Path
-
-import bpy
-
-
-class ItemIsLocal(Exception):
-    pass
-
-
-def is_item_local(
-    item: Union[bpy.types.Collection, bpy.types.Object, bpy.types.Camera]
-) -> bool:
-    # Local collection of blend file.
-    if not item.override_library and not item.library:
-        return True
-    return False
-
-
-def is_item_lib_override(
-    item: Union[bpy.types.Collection, bpy.types.Object, bpy.types.Camera]
-) -> bool:
-    # Collection from libfile and overwritten.
-    if item.override_library and not item.library:
-        return True
-    return False
-
-
-def is_item_lib_source(
-    item: Union[bpy.types.Collection, bpy.types.Object, bpy.types.Camera]
-) -> bool:
-    # Source collection from libfile not overwritten.
-    if not item.override_library and item.library:
-        return True
-    return False
-
-
-def get_item_lib(
-    item: Union[bpy.types.Collection, bpy.types.Object, bpy.types.Camera]
-) -> bpy.types.Library:
-    if is_item_local(item):
-        # Local collection
-        raise ItemIsLocal(f"{item} is local to this blend file. Cannot get lib.")
-
-    if is_item_lib_source(item):
-        # Source collection not overwritten.
-        return item.library
-
-    if is_item_lib_override(item):
-        # Overwritten collection.
-        return item.override_library.reference.library
-
-    raise RuntimeError(f"Failed to get libfile for {item}")
diff --git a/scripts-blender/addons/asset_pipeline/merge/asset_mapping.py b/scripts-blender/addons/asset_pipeline/merge/asset_mapping.py
new file mode 100644
index 00000000..d794554e
--- /dev/null
+++ b/scripts-blender/addons/asset_pipeline/merge/asset_mapping.py
@@ -0,0 +1,291 @@
+import bpy
+from typing import Dict, Set
+from .naming import (
+    merge_get_target_name,
+    task_layer_prefix_basename_get,
+)
+from .util import get_storage_of_id
+from .transfer_data.transfer_util import transfer_data_add_entry
+from .shared_ids import get_shared_ids
+from .. import constants
+
+
+class AssetTransferMapping:
+    """
+    The AssetTransferMapping class represents a mapping between a source and a target.
+    It contains an object mapping which connects each source object with a target
+    object, as well as a collection mapping.
+    The mapping process relies heavily on suffixes, which is why we use
+    MergeCollections as input, each of which stores a suffix.
+
+    Instances of this class will be passed to the TaskLayer data transfer function
+    so users can easily write their merge instructions.
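+
+    To make the suffix convention concrete, an illustration only (assuming,
+    since the real helper lives in merge/naming.py, that merge_get_target_name()
+    simply swaps the suffixes defined in constants.py):
+
+        merge_get_target_name("CH-rex.LOCAL")    -> "CH-rex.EXTERNAL"
+        merge_get_target_name("CH-rex.EXTERNAL") -> "CH-rex.LOCAL"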
+ """ + + def __init__( + self, + local_coll: bpy.types.Collection, + external_coll: bpy.types.Collection, + local_tls: Set[str], + ): + self._local_top_col = local_coll + self._external_col = external_coll + self._local_tls = local_tls + + self.external_col_to_remove: Set[bpy.types.Object] = set() + self.external_col_to_add: Set[bpy.types.Object] = set() + self.external_obj_to_add: Set[bpy.types.Object] = set() + self.surrendered_obj_to_remove: Set[bpy.types.Object] = set() + self._no_match_source_objs: Set[bpy.types.Object] = set() + + self._no_match_source_colls: Set[bpy.types.Object] = set() + self._no_match_target_colls: Set[bpy.types.Object] = set() + + self.conflict_ids: list[bpy.types.ID] = [] + self.conflict_transfer_data = [] # Item of bpy.types.CollectionProperty + + self.generate_mapping() + + def generate_mapping(self) -> None: + self.object_map = self._gen_object_map() + self.collection_map = self._gen_collection_map() + self.transfer_data_map = self._gen_transfer_data_map() + self.shared_id_map = self._gen_shared_id_map() + + def _get_external_object(self, local_obj): + external_obj_name = merge_get_target_name( + local_obj.name, + ) + external_obj = self._external_col.all_objects.get(external_obj_name) + if not external_obj: + print(f"Failed to find match obj {external_obj_name} for {local_obj.name}") + self._no_match_source_objs.add(local_obj) + return + return external_obj + + def _check_id_conflict(self, external_id, local_id): + if external_id.asset_id_owner != local_id.asset_id_owner and ( + local_id.asset_id_surrender == external_id.asset_id_owner + ): + self.conflict_ids.append(local_id) + + def _gen_object_map(self) -> Dict[bpy.types.Object, bpy.types.Object]: + """ + Tries to link all objects in source collection to an object in + target collection. Uses suffixes to match them up. + """ + object_map: Dict[bpy.types.Object, bpy.types.Object] = {} + for local_obj in self._local_top_col.all_objects: + # Skip items with no owner + if local_obj.asset_id_owner == "NONE": + continue + external_obj = self._get_external_object(local_obj) + if not external_obj: + print(f"Couldn't find external obj for {local_obj}") + continue + self._check_id_conflict(external_obj, local_obj) + # IF ITEM IS OWNED BY LOCAL TASK LAYERS + + if ( + external_obj.asset_id_surrender + and not local_obj.asset_id_surrender + and local_obj.asset_id_owner != external_obj.asset_id_owner + ): + print(f"Skipping {external_obj} is surrendered") + object_map[external_obj] = local_obj + continue + + if ( + local_obj.asset_id_surrender + and not external_obj.asset_id_surrender + and local_obj.asset_id_owner != external_obj.asset_id_owner + ): + print(f"Skipping {local_obj} is surrendered") + object_map[local_obj] = external_obj + continue + + if local_obj.asset_id_owner in self._local_tls: + object_map[external_obj] = local_obj + # IF ITEM IS NOT OWNED BY LOCAL TASK LAYERS + else: + object_map[local_obj] = external_obj + + # Find new objects to add to local_col + for external_obj in self._external_col.all_objects: + local_col_objs = self._local_top_col.all_objects + obj = local_col_objs.get(merge_get_target_name(external_obj.name)) + if not obj and external_obj.asset_id_owner not in self._local_tls: + self.external_obj_to_add.add(external_obj) + return object_map + + def _gen_collection_map(self) -> Dict[bpy.types.Collection, bpy.types.Collection]: + """ + Tries to link all source collections to a target collection. + Uses suffixes to match them up. 
+ """ + coll_map: Dict[bpy.types.Collection, bpy.types.Collection] = {} + + for local_task_layer_col in self._local_top_col.children: + if local_task_layer_col.asset_id_owner not in self._local_tls: + # Replace source object suffix with target suffix to get target object. + external_col_name = merge_get_target_name(local_task_layer_col.name) + local_col = bpy.data.collections.get(external_col_name) + if local_col: + coll_map[local_task_layer_col] = local_col + else: + print( + f"Failed to find match collection {local_task_layer_col.name} for {external_col_name}" + ) + self._no_match_source_colls.add(local_task_layer_col) + + external_top_col_name = merge_get_target_name(self._local_top_col.name) + external_top_col = bpy.data.collections.get(external_top_col_name) + + # TODO Refactor + for external_col in external_top_col.children: + local_col_name = merge_get_target_name(external_col.name) + local_col = bpy.data.collections.get(local_col_name) + if not local_col and external_col.asset_id_owner not in self._local_tls: + self.external_col_to_add.add(external_col) + + for local_col in self._local_top_col.children: + external_col_name = merge_get_target_name(local_col.name) + external_col = bpy.data.collections.get(external_col_name) + if not external_col and local_col.asset_id_owner not in self._local_tls: + self.external_col_to_remove.add(local_col) + + all_tgt_colls = set(self._external_col.children_recursive) + all_tgt_colls.add(self._external_col) + match_target_colls = set([coll for coll in coll_map.values()]) + self._no_match_target_colls = all_tgt_colls - match_target_colls + + return coll_map + + def _get_transfer_data_map_item(self, obj, target_obj, transfer_data_item): + temp_transfer_data = bpy.context.scene.asset_pipeline.temp_transfer_data + temp_transfer_data_item_index = len(temp_transfer_data) + temp_transfer_data_item = transfer_data_add_entry( + transfer_data=temp_transfer_data, + name=transfer_data_item.name, + td_type_key=transfer_data_item.type, + task_layer_name=transfer_data_item.owner, + surrender=transfer_data_item.surrender, + ) + + map_item = { + 'transfer_data_item_index': temp_transfer_data_item_index, + 'source_obj': obj, + 'target_obj': target_obj, + } + # Names of each map item need to be unique + # below name avoids name conflicts between different types + name = transfer_data_item.name + '_' + transfer_data_item.type + obj.name + return name, map_item + + def _check_transfer_data_conflict(self, obj, transfer_data_item): + other_obj = bpy.data.objects.get(merge_get_target_name(obj.name)) + check_transfer_data_item = None + if not other_obj: + return + for other_transfer_data_item in other_obj.transfer_data_ownership: + if other_transfer_data_item.type == transfer_data_item.type and ( + task_layer_prefix_basename_get(other_transfer_data_item.name) + == task_layer_prefix_basename_get(transfer_data_item.name) + ): + check_transfer_data_item = other_transfer_data_item + if check_transfer_data_item is None: + return + if check_transfer_data_item.owner != transfer_data_item.owner and not ( + check_transfer_data_item.surrender or transfer_data_item.surrender + ): + # In the case where we have a modifier where I changed my local task layer + # this should not return as a conflict + # Do this only if both task layers are local to the current file + if ( + check_transfer_data_item.owner in self._local_tls + and transfer_data_item.owner in self._local_tls + ): + return + self.conflict_transfer_data.append(transfer_data_item) + print("CONFLICT FOUND") + return True + + 
+    def transfer_data_get_other(self, transfer_data_item):
+        # Used when surrendered data has been replaced by a new owner.
+        obj = transfer_data_item.id_data
+        other_obj = bpy.data.objects.get(merge_get_target_name(obj.name))
+        # Find the related Transferable Data item on the target/source object.
+        for other_obj_transfer_data_item in other_obj.transfer_data_ownership:
+            if task_layer_prefix_basename_get(
+                other_obj_transfer_data_item.name
+            ) == task_layer_prefix_basename_get(transfer_data_item.name):
+                return other_obj_transfer_data_item
+        return None
+
+    def _gen_transfer_data_map(self):
+        context = bpy.context
+        transfer_data_map: Dict[str, dict] = {}
+        temp_transfer_data = context.scene.asset_pipeline.temp_transfer_data
+        temp_transfer_data.clear()
+        for source_obj, target_obj in self.object_map.items():
+            for obj in (source_obj, target_obj):
+                for transfer_data_item in obj.transfer_data_ownership:
+                    self._check_transfer_data_conflict(obj, transfer_data_item)
+                    if (
+                        transfer_data_item.owner in self._local_tls
+                        and obj.name.endswith(constants.LOCAL_SUFFIX)
+                    ):
+                        other_td = self.transfer_data_get_other(transfer_data_item)
+                        if other_td:
+                            if (
+                                transfer_data_item.surrender
+                                and not other_td.surrender
+                                and transfer_data_item.owner != other_td.owner
+                            ):
+                                continue
+                        name, map_item = self._get_transfer_data_map_item(
+                            obj, target_obj, transfer_data_item
+                        )
+                        transfer_data_map[name] = map_item
+
+                    if (
+                        transfer_data_item.owner not in self._local_tls
+                        and transfer_data_item.owner != "NONE"
+                        and obj.name.endswith(constants.EXTERNAL_SUFFIX)
+                    ):
+                        other_td = self.transfer_data_get_other(transfer_data_item)
+                        if other_td:
+                            if (
+                                transfer_data_item.surrender
+                                and not other_td.surrender
+                                and transfer_data_item.owner != other_td.owner
+                            ):
+                                continue
+                        name, map_item = self._get_transfer_data_map_item(
+                            obj, target_obj, transfer_data_item
+                        )
+                        transfer_data_map[name] = map_item
+        return transfer_data_map
+
+    def _gen_shared_id_map(self):
+        shared_id_map: Dict[bpy.types.ID, bpy.types.ID] = {}
+        for local_id in get_shared_ids(self._local_top_col):
+            external_id_name = merge_get_target_name(local_id.name)
+            id_storage = get_storage_of_id(local_id)
+            external_id = id_storage.get(external_id_name)
+            if not external_id:
+                continue
+            self._check_id_conflict(external_id, local_id)
+            if (
+                local_id.asset_id_owner in self._local_tls
+                and local_id.asset_id_owner != "NONE"
+            ):
+                shared_id_map[external_id] = local_id
+            else:
+                shared_id_map[local_id] = external_id
+
+        return shared_id_map
diff --git a/scripts-blender/addons/asset_pipeline/merge/core.py b/scripts-blender/addons/asset_pipeline/merge/core.py
new file mode 100644
index 00000000..fe1325d6
--- /dev/null
+++ b/scripts-blender/addons/asset_pipeline/merge/core.py
@@ -0,0 +1,296 @@
+import bpy
+from ..merge.naming import task_layer_prefix_transfer_data_update
+from .asset_mapping import AssetTransferMapping
+from .transfer_data.transfer_core import (
+    init_transfer_data,
+    transfer_data_is_missing,
+    apply_transfer_data,
+    transfer_data_clean,
+)
+from .transfer_data.transfer_util import transfer_data_add_entry
+
+from .naming import (
+    merge_add_suffix_to_hierarchy,
+    merge_remove_suffix_from_hierarchy,
+    asset_prefix_name_get,
+    get_id_type_name,
+)
+
+from pathlib import Path
+from typing import Dict
+from .. import constants
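+
+# A typical sync pass (hypothetical caller code; the real operators live
+# elsewhere in the add-on) uses this module roughly as follows:
+#
+#   ownership_get(asset_pipe.asset_collection, context.scene)
+#   ownership_set(asset_pipe.temp_transfer_data)
+#   error_msg = merge_task_layer(context, local_tls, external_file)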
+
+
+def ownership_transfer_data_cleanup(
+    asset_pipe: 'bpy.types.AssetPipeline',
+    obj: bpy.types.Object,
+) -> None:
+    """Remove Transferable Data ownership items if the corresponding data is missing
+
+    Args:
+        asset_pipe ('bpy.types.AssetPipeline'): Asset Pipeline properties of the current scene
+        obj (bpy.types.Object): Object that contains the Transferable Data
+    """
+    local_task_layer_keys = asset_pipe.get_local_task_layers()
+    transfer_data = obj.transfer_data_ownership
+    to_remove = []
+    for transfer_data_item in transfer_data:
+        if transfer_data_item.owner in local_task_layer_keys:
+            if transfer_data_is_missing(transfer_data_item):
+                to_remove.append(transfer_data_item.name)
+
+    for name in to_remove:
+        transfer_data.remove(transfer_data.keys().index(name))
+
+
+def ownership_get(
+    local_col: bpy.types.Collection,
+    scene: bpy.types.Scene,
+) -> None:
+    """Find new Transferable Data owned by the local task layers.
+    Marks items as owned by the local task layer if they are in the
+    corresponding task layer collection and have no owner, and records
+    newly found data in the scene's temp_transfer_data collection property.
+
+    Args:
+        local_col (bpy.types.Collection): The top level asset collection that is local to the file
+        scene (bpy.types.Scene): Scene that contains the file's asset
+    """
+    asset_pipe = scene.asset_pipeline
+    asset_pipe.temp_transfer_data.clear()
+
+    default_task_layer = asset_pipe.get_local_task_layers()[0]
+
+    for col in asset_pipe.asset_collection.children:
+        if col.asset_id_owner == "NONE":
+            col.asset_id_owner = default_task_layer
+
+    task_layer_objs = get_task_layer_objects(asset_pipe)
+
+    for obj in local_col.all_objects:
+        # TODO REPLACE This is expensive to loop over everything again
+        for transfer_data_item in obj.transfer_data_ownership:
+            task_layer_prefix_transfer_data_update(transfer_data_item)
+
+        # Mark the Asset ID Owner for objects in the current task layer's collection
+        if obj.asset_id_owner == "NONE" and obj in task_layer_objs:
+            obj.asset_id_owner = default_task_layer
+            # obj.name = asset_prefix_name_get(obj.name)
+        # Skip items that have no owner
+        if obj.asset_id_owner == "NONE":
+            continue
+        ownership_transfer_data_cleanup(asset_pipe, obj)
+        init_transfer_data(scene, obj)
+
+
+def ownership_set(temp_transfer_data: bpy.types.CollectionProperty) -> None:
+    """Add new Transferable Data items on each object found in the
+    temp Transferable Data collection property
+
+    Args:
+        temp_transfer_data (bpy.types.CollectionProperty): Collection property containing newly found
+        data and the object that contains this data.
+    """
+    for transfer_data_item in temp_transfer_data:
+        transfer_data = transfer_data_item.obj.transfer_data_ownership
+        transfer_data_add_entry(
+            transfer_data,
+            transfer_data_item.name,
+            transfer_data_item.type,
+            transfer_data_item.owner,
+            transfer_data_item.surrender,
+        )
+
+
+def get_invalid_objects(
+    asset_pipe: 'bpy.types.AssetPipeline',
+    local_col: bpy.types.Collection,
+) -> list[bpy.types.Object]:
+    """Returns a list of objects that are not used in the merge process,
+    which are considered invalid. These objects will be excluded from
+    the merge process.
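+
+    Example (hypothetical usage from an operator)::
+
+        invalid_objs = get_invalid_objects(asset_pipe, local_col)
+        if invalid_objs:
+            self.report({'WARNING'}, f"{len(invalid_objs)} object(s) skipped")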
+
+    Args:
+        asset_pipe ('bpy.types.AssetPipeline'): Asset Pipeline properties of the current scene
+        local_col (bpy.types.Collection): The top level asset collection that is local to the file
+
+    Returns:
+        list[bpy.types.Object]: List of invalid objects
+    """
+    local_task_layer_keys = asset_pipe.get_local_task_layers()
+    task_layer_objs = get_task_layer_objects(asset_pipe)
+
+    invalid_obj = []
+    for obj in local_col.all_objects:
+        if obj.asset_id_owner == "NONE":
+            invalid_obj.append(obj)
+        if obj not in task_layer_objs and obj.asset_id_owner in local_task_layer_keys:
+            invalid_obj.append(obj)
+    return invalid_obj
+
+
+def remap_user(source_datablock: bpy.types.ID, target_datablock: bpy.types.ID) -> None:
+    """Remap all users of the source datablock to the target datablock, and
+    rename the source so it is clearly marked as remapped
+
+    Args:
+        source_datablock (bpy.types.ID): datablock that will be replaced by the target
+        target_datablock (bpy.types.ID): datablock that will replace the source
+    """
+    print(f"REMAPPING {source_datablock.name} to {target_datablock.name}")
+    source_datablock.user_remap(target_datablock)
+    source_datablock.name += "_Users_Remapped"
+
+
+def merge_task_layer(
+    context: bpy.types.Context,
+    local_tls: list[str],
+    external_file: Path,
+) -> str | None:
+    """Combines data from an external task layer collection into the local
+    task layer collection. It finds the owner of each collection, object and
+    Transferable Data item, and keeps each layer of data by copying it from
+    its respective owner.
+
+    This ensures that objects owned by an external task layer will always be kept
+    linked into the scene, and any local Transferable Data like a modifier will be applied
+    on top of that external object, or vice versa. Ownership is stored in an object's properties,
+    and a map is created to match each object to its respective owner. Returns an
+    error message string if the merge cannot be completed, otherwise None.
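+
+    Example (hypothetical usage; the file path is a placeholder)::
+
+        error_msg = merge_task_layer(
+            bpy.context,
+            local_tls=["MODELING"],
+            external_file=Path("/project/chr_rex/publish/chr_rex.v001.blend"),
+        )
+        if error_msg:
+            self.report({'ERROR'}, error_msg)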
+ + Args: + context: (bpy.types.Context): context of current .blend + local_tls: (list[str]): list of task layers that are local to the current file + external_file (Path): external file to pull data into the current file from + """ + local_col = context.scene.asset_pipeline.asset_collection + if not local_col: + return "Unable to find Asset Collection" + col_base_name = local_col.name + local_suffix = constants.LOCAL_SUFFIX + external_suffix = constants.EXTERNAL_SUFFIX + merge_add_suffix_to_hierarchy(local_col, local_suffix) + + appended_col = import_data_from_lib(external_file, "collections", col_base_name) + merge_add_suffix_to_hierarchy(appended_col, external_suffix) + + local_col = bpy.data.collections[f"{col_base_name}.{local_suffix}"] + external_col = bpy.data.collections[f"{col_base_name}.{external_suffix}"] + + # External col may come from publish, ensure it is not marked as asset so it purges correctly + external_col.asset_clear() + + map = AssetTransferMapping(local_col, external_col, local_tls) + error_msg = '' + if len(map.conflict_transfer_data) != 0: + for conflict in map.conflict_transfer_data: + error_msg += f"Transferable Data conflict found for '{conflict.name}' on obj '{conflict.id_data.name}'\n" + return error_msg + + if len(map.conflict_ids) != 0: + for conflict_obj in map.conflict_ids: + type_name = get_id_type_name(type(conflict_obj)) + error_msg += ( + f"Ownership conflict found for {type_name}: '{conflict_obj.name}'\n" + ) + return error_msg + + # Remove all Transferable Data from target objects + for source_obj in map.object_map: + if source_obj.data and source_obj.data.users > 1: + error_msg += f"Object {source_obj.name} contains multi-user datablock'\n" + return error_msg + target_obj = map.object_map[source_obj] + target_obj.transfer_data_ownership.clear() + + apply_transfer_data(context, map.transfer_data_map) + + for source_obj in map.object_map: + target_obj = map.object_map[source_obj] + if target_obj.data and target_obj.data.users > 1: + error_msg += f"Object {target_obj.name} contains multi-user datablock'\n" + return error_msg + remap_user(source_obj, target_obj) + transfer_data_clean(target_obj) + + for col in map.collection_map: + remap_user(col, map.collection_map[col]) + + for col in map.external_col_to_add: + local_col.children.link(col) + + for col in map.external_col_to_remove: + local_col.children.unlink(col) + + for id in map.shared_id_map: + remap_user(id, map.shared_id_map[id]) + + bpy.ops.outliner.orphans_purge( + do_local_ids=True, do_linked_ids=False, do_recursive=True + ) + merge_remove_suffix_from_hierarchy(local_col) + + +def import_data_from_lib( + libpath: Path, + data_category: str, + data_name: str, + link: bool = False, +) -> bpy.data: + """Appends/Links data from an external file into the current file. + + Args: + libpath (Path): path to .blend file that contains library + data_category (str): bpy.types, like object or collection + data_name (str): name of datablock to link/append + link (bool, optional): Set to link library otherwise append. Defaults to False. + + Returns: + bpy.data: returns whichever data_category/type that was linked/appended + """ + + noun = "Appended" + if link: + noun = "Linked" + + with bpy.data.libraries.load(libpath.as_posix(), relative=True, link=link) as ( + data_from, + data_to, + ): + if data_name not in eval(f"data_from.{data_category}"): + print( + f"Failed to import {data_category} {data_name} from {libpath.as_posix()}. 
Doesn't exist in file.", + ) + + # Check if datablock with same name already exists in blend file. + try: + eval(f"bpy.data.{data_category}['{data_name}']") + except KeyError: + pass + else: + print( + f"{data_name} already in bpy.data.{data_category} of this blendfile.", + ) + + # Append data block. + eval(f"data_to.{data_category}.append('{data_name}')") + print(f"{noun}:{data_name} from library: {libpath.as_posix()}") + + if link: + return eval( + f"bpy.data.{data_category}['{data_name}', '{bpy.path.relpath(libpath.as_posix())}']" + ) + + return eval(f"bpy.data.{data_category}['{data_name}']") + + +def get_task_layer_objects(asset_pipe): + local_task_layer_keys = asset_pipe.get_local_task_layers() + local_col = asset_pipe.asset_collection + task_layer_objs = [] + for col in local_col.children: + if col.asset_id_owner in local_task_layer_keys: + task_layer_objs = task_layer_objs + list(col.all_objects) + return task_layer_objs diff --git a/scripts-blender/addons/asset_pipeline/merge/naming.py b/scripts-blender/addons/asset_pipeline/merge/naming.py new file mode 100644 index 00000000..d3e08f75 --- /dev/null +++ b/scripts-blender/addons/asset_pipeline/merge/naming.py @@ -0,0 +1,221 @@ +# ***** BEGIN GPL LICENSE BLOCK ***** +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# +# ***** END GPL LICENCE BLOCK ***** +# +# (c) 2021, Blender Foundation - Paul Golter + +import bpy +from bpy_extras.id_map_utils import get_id_reference_map, get_all_referenced_ids +from .util import get_storage_of_id +from .. import constants, config +from .util import data_type_from_transfer_data_key + + +def merge_get_target_suffix(suffix: str) -> str: + """Get the corrisponding suffix for a given suffix + + Args: + suffix (str): Suffix for External or Local Datablock + + Returns: + str: Returns External Suffix if given Local suffix for vice-versa + """ + if suffix.endswith(constants.EXTERNAL_SUFFIX): + return constants.LOCAL_SUFFIX + if suffix.endswith(constants.LOCAL_SUFFIX): + return constants.EXTERNAL_SUFFIX + + +def merge_get_target_name(name: str) -> str: + """Get the corrisponding target name for a given datablock's suffix. + Suffixes are set by the add_suffix_to_hierarchy() function prior to + calling this function. 
+
+    Args:
+        name (str): Name of a given datablock including its suffix
+
+    Returns:
+        str: Returns the datablock name with the opposite suffix
+    """
+    old = name.split(constants.MERGE_DELIMITER)[-1]
+    new = merge_get_target_suffix(old)
+    li = name.rsplit(old, 1)
+    return new.join(li)
+
+
+def merge_get_basename(name: str) -> str:
+    """Returns the name of an asset without its suffix"""
+    if name.endswith(constants.LOCAL_SUFFIX) or name.endswith(
+        constants.EXTERNAL_SUFFIX
+    ):
+        return constants.MERGE_DELIMITER.join(
+            name.split(constants.MERGE_DELIMITER)[:-1]
+        )
+    return name
+
+
+def merge_remove_suffix_from_hierarchy(collection: bpy.types.Collection) -> None:
+    """Removes the suffix after a set delimiter from all datablocks
+    referenced by a collection, itself included
+
+    Args:
+        collection (bpy.types.Collection): Collection that has been suffixed
+    """
+
+    ref_map = get_id_reference_map()
+    datablocks = get_all_referenced_ids(collection, ref_map)
+    datablocks.add(collection)
+    for db in datablocks:
+        if db.library:
+            # Don't rename linked datablocks.
+            continue
+        try:
+            db.name = merge_get_basename(db.name)
+        except Exception:
+            # Some IDs (e.g. embedded data) don't allow renaming; skip them.
+            pass
+
+
+def merge_add_suffix_to_hierarchy(
+    collection: bpy.types.Collection, suffix_base: str
+) -> None:
+    """Add a suffix to the names of all datablocks referenced by a collection,
+    itself included.
+
+    Args:
+        collection (bpy.types.Collection): Collection that needs to be suffixed
+        suffix_base (str): Suffix to append to collection and items linked to collection
+    """
+
+    suffix = f"{constants.MERGE_DELIMITER}{suffix_base}"
+
+    ref_map = get_id_reference_map()
+    datablocks = get_all_referenced_ids(collection, ref_map)
+    datablocks.add(collection)
+    for db in datablocks:
+        if db.library:
+            # Don't rename linked datablocks.
+            continue
+        collision_db = get_storage_of_id(db).get(db.name + suffix)
+        if collision_db:
+            collision_db.name += f'{constants.MERGE_DELIMITER}OLD'
+        try:
+            db.name += suffix
+        except Exception:
+            # Some IDs (e.g. embedded data) don't allow renaming; skip them.
+            pass
+
+
+def asset_prefix_name_get(name: str) -> str:
+    """Returns the given name with the asset prefix prepended, unless the
+    prefix is already present. Users can specify a prefix to live on all
+    objects during the asset creation process. This prefix is stored in the scene.
+
+    Args:
+        name (str): Name to add prefix to
+
+    Returns:
+        str: Returns name with prefix
+    """
+    asset_pipe = bpy.context.scene.asset_pipeline
+    if name.startswith(asset_pipe.prefix + constants.NAME_DELIMITER):
+        return name
+    prefix = (
+        asset_pipe.prefix + constants.NAME_DELIMITER if asset_pipe.prefix != "" else ""
+    )
+    return prefix + name
+
+
+def task_layer_prefix_name_get(name: str, task_layer_owner: str) -> str:
+    """Returns the given name with a task layer prefix, unless one is already set.
+    Prefixes for assets are defined in the task_layer.json file within TASK_LAYER_TYPES.
+    Will return early if any prefix is found; it cannot replace existing prefixes.
+
+    Args:
+        name (str): Name to add prefix to
+        task_layer_owner (str): Task layer that owns the data
+
+    Returns:
+        str: Returns name with prefix
+    """
+    for task_layer_key in config.TASK_LAYER_TYPES:
+        if name.startswith(
+            config.TASK_LAYER_TYPES[task_layer_key] + constants.NAME_DELIMITER
+        ):
+            return name
+    prefix = config.TASK_LAYER_TYPES[task_layer_owner]
+    return prefix + constants.NAME_DELIMITER + name
+
+
+def task_layer_prefix_basename_get(name: str) -> str:
+    """Get the base of a name if it contains a task layer prefix.
+ This prefix is set on some Transferable Data items, this functions + removes the prefixes and returns the basename + + Args: + name (str): Original name including prefix + + Returns: + str: Returns name without task layer prefix + """ + for task_layer_key in config.TASK_LAYER_TYPES: + if name.startswith( + config.TASK_LAYER_TYPES[task_layer_key] + constants.NAME_DELIMITER + ): + return name.replace(name.split(constants.NAME_DELIMITER)[0], "")[1:] + return name + + +def task_layer_prefix_transfer_data_update( + transfer_data_item: bpy.types.CollectionProperty, +) -> bool: + """Task Layer Prefix can become out of date with the actual owner of the task layer. + This will update the existing prefixes on transfer_data_item so it can match the + owner of that transfer_data_item. Will update both the transfer_data_item.name and the + name of the actual data the transfer_data_item is referring to. + + Args: + transfer_data_item (bpy.types.CollectionProperty): Transferable Data Item that is named with prefix + + Returns: + bool: Returns True if a change to the name was completed + """ + prefix_types = [constants.MODIFIER_KEY, constants.CONSTRAINT_KEY] + if transfer_data_item.type not in prefix_types: + return + + obj = transfer_data_item.id_data + td_data = data_type_from_transfer_data_key(obj, transfer_data_item.type) + base_name = task_layer_prefix_basename_get(transfer_data_item.name) + prefix = config.TASK_LAYER_TYPES[transfer_data_item.owner] + new_name = prefix + constants.NAME_DELIMITER + base_name + if new_name == transfer_data_item.name or not td_data.get(transfer_data_item.name): + return + + td_data[transfer_data_item.name].name = new_name + transfer_data_item.name = new_name + return True + + +def get_id_type_name(id_type: bpy.types) -> str: + """Return the cosmetic name of a given ID type + + Args: + id_type (bpy.types): An ID type e.g. bpy.types.Object + + Returns: + str: Name of an ID type e.g. bpy.types.Object will return 'Object' + """ + return str(id_type).split("'bpy_types.")[1].replace("'>", "") diff --git a/scripts-blender/addons/asset_pipeline/merge/publish.py b/scripts-blender/addons/asset_pipeline/merge/publish.py new file mode 100644 index 00000000..24165892 --- /dev/null +++ b/scripts-blender/addons/asset_pipeline/merge/publish.py @@ -0,0 +1,95 @@ +from pathlib import Path +from .. import constants +import bpy + + +def find_file_version(published_file: Path) -> int: + """Returns the version number from a published file's name + + Args: + file (Path): Path to a publish file, naming convention is + asset_name.v{3-digit_version}.blend` + + Returns: + int: returns current version in filename as integer + """ + return int(published_file.name.split(".")[1].replace("v", "")) + + +def get_next_published_file( + current_file: Path, publish_type=constants.ACTIVE_PUBLISH_KEY +) -> Path: + """Returns the path where the next published file version should be saved to + + Args: + current_file (Path): Current file, which must be a task file at root of asset directory + publish_type (_type_, optional): Publish type, 'publish', 'staged', 'review'. Defaults to 'publish'. 
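+
+    Example (illustrative; assumes chr_rex.v001.blend and chr_rex.v002.blend
+    already exist in the publish folder)::
+
+        get_next_published_file(Path("/project/chr_rex/chr_rex.blend"))
+        # -> /project/chr_rex/publish/chr_rex.v003.blend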
+
+    Returns:
+        Path: Path where the next published file should be saved to; this path doesn't exist yet
+    """
+    last_publish = find_latest_publish(current_file, publish_type)
+    base_name = bpy.context.scene.asset_pipeline.name
+    publish_dir = current_file.parent.joinpath(publish_type)
+    if not last_publish:
+        new_version_number = 1
+    else:
+        new_version_number = find_file_version(last_publish) + 1
+    new_version = f"{new_version_number:03d}"
+    return publish_dir.joinpath(base_name + ".v" + new_version + ".blend")
+
+
+def find_all_published(current_file: Path, publish_type: str) -> list[Path]:
+    """Returns a list of published files of a given type.
+    Each publish type is separated into its own folder at the
+    root of the asset's directory.
+
+    Args:
+        current_file (Path): Current file, which must be a task file at root of asset directory
+        publish_type (str): Publish type, 'publish', 'staged', 'review'.
+
+    Returns:
+        list[Path]: list of published files of a given publish type
+    """
+    publish_dir = current_file.parent.joinpath(publish_type)
+    if not publish_dir.exists():
+        return []
+    published_files = list(publish_dir.glob('*.blend'))
+    published_files.sort(key=find_file_version)
+    return published_files
+
+
+def find_latest_publish(
+    current_file: Path, publish_type=constants.ACTIVE_PUBLISH_KEY
+) -> Path:
+    """Returns the path to the latest published file in a given folder
+
+    Args:
+        current_file (Path): Current file, which must be a task file at root of asset directory
+        publish_type (str, optional): Publish type, 'publish', 'staged', 'review'. Defaults to 'publish'.
+
+    Returns:
+        Path: Path to latest publish file of a given publish type
+    """
+    published_files = find_all_published(current_file, publish_type)
+    if published_files:
+        return published_files[-1]
+
+
+def find_sync_target(current_file: Path) -> Path:
+    """Returns the latest published file to use as the push/pull (a.k.a. sync) target.
+    This will either be the latest active publish, or the latest staged publish if
+    any asset is staged.
+
+    Args:
+        current_file (Path): Current file, which must be a task file at root of asset directory
+
+    Returns:
+        Path: Path to latest active or staged publish file
+    """
+    latest_staged = find_latest_publish(
+        current_file, publish_type=constants.STAGED_PUBLISH_KEY
+    )
+    if latest_staged:
+        return latest_staged
+    return find_latest_publish(current_file, publish_type=constants.ACTIVE_PUBLISH_KEY)
diff --git a/scripts-blender/addons/asset_pipeline/merge/shared_ids.py b/scripts-blender/addons/asset_pipeline/merge/shared_ids.py
new file mode 100644
index 00000000..7feae62b
--- /dev/null
+++ b/scripts-blender/addons/asset_pipeline/merge/shared_ids.py
@@ -0,0 +1,51 @@
+import bpy
+from bpy_extras.id_map_utils import get_id_reference_map, get_all_referenced_ids
+from .util import get_fundamental_id_type
+from .. import constants
+
+
+def get_shared_ids(collection: bpy.types.Collection) -> list[bpy.types.ID]:
+    """Returns a list of any ID that is not covered by the merge process
+
+    Args:
+        collection (bpy.types.Collection): Collection that contains data that references 'shared_ids'
+
+    Returns:
+        list[bpy.types.ID]: List of 'shared_ids'
+    """
+    ref_map = get_id_reference_map()
+    all_ids_of_coll = get_all_referenced_ids(collection, ref_map)
+    return [
+        id
+        for id in all_ids_of_coll
+        if isinstance(id, (bpy.types.NodeTree, bpy.types.Image))
+    ]
+
+
+def init_shared_ids(scene: bpy.types.Scene) -> list[bpy.types.ID]:
+    """Initializes any ID not covered by the transfer process as a 'shared_id'
+    and marks all 'shared_ids' without an owner as owned by the current task layer
+
+    Args:
+        scene (bpy.types.Scene): Scene that contains the file's asset
+
+    Returns:
+        list[bpy.types.ID]: A list of new 'shared_ids' owned by the file's task layer
+    """
+    asset_pipe = scene.asset_pipeline
+    task_layer_key = asset_pipe.get_local_task_layers()[0]
+    shared_ids = []
+    local_col = asset_pipe.asset_collection
+    for id in get_shared_ids(local_col):
+        if id.asset_id_owner == 'NONE':
+            id.asset_id_owner = task_layer_key
+            shared_ids.append(id)
+    return shared_ids
+
+
+def get_shared_id_icon(id: bpy.types.ID) -> str:
+    if bpy.types.NodeTree == get_fundamental_id_type(id):
+        return constants.GEO_NODE
+    if bpy.types.Image == get_fundamental_id_type(id):
+        return constants.IMAGE
+    return constants.BLANK
diff --git a/scripts-blender/addons/asset_pipeline/merge/task_layer.py b/scripts-blender/addons/asset_pipeline/merge/task_layer.py
new file mode 100644
index 00000000..4082f490
--- /dev/null
+++ b/scripts-blender/addons/asset_pipeline/merge/task_layer.py
@@ -0,0 +1,106 @@
+import bpy
+import contextlib
+from .. import constants
+from .. import config
+
+
+def get_default_task_layer_owner(td_type: str, name="") -> tuple[str, bool]:
+    if td_type == constants.ATTRIBUTE_KEY:
+        if name in config.ATTRIBUTE_DEFAULTS:
+            return (
+                config.ATTRIBUTE_DEFAULTS[name]['default_owner'],
+                config.ATTRIBUTE_DEFAULTS[name]['auto_surrender'],
+            )
+    return (
+        config.TRANSFER_DATA_DEFAULTS[td_type]['default_owner'],
+        config.TRANSFER_DATA_DEFAULTS[td_type]['auto_surrender'],
+    )
+
+
+def get_transfer_data_owner(
+    asset_pipe: 'bpy.types.AssetPipeline',
+    td_type_key: str,
+    name="",
+):
+    default_tl, auto_surrender = get_default_task_layer_owner(td_type_key, name)
+    if default_tl in asset_pipe.get_local_task_layers():
+        # If the default owner is local to the file, don't use auto_surrender
+        return default_tl, False
+    else:
+        # If the default owner is not local, pass the auto_surrender value
+        return asset_pipe.get_local_task_layers()[0], auto_surrender
+
+
+def draw_task_layer_selection(
+    layout: bpy.types.UILayout,
+    data: "bpy.types.CollectionProperty | bpy.types.ID",
+    show_all_task_layers=False,
+    show_local_task_layers=False,
+    text="",
+    data_owner_name="",
+    current_data_owner=None,
+) -> None:
+    """Draw a prop search UI for ownership of either OBJ/COL or Task Layer.
+    It has three modes: "Show All Task Layers", "Show All Task Layers Greyed Out" and
+    "Only Show Local Task Layers".
+
+    - When the property is already set to a local task layer: "Only Show Local Task Layers"
+    - When a property is owned by an external task layer: "Show All Task Layers Greyed Out", so the user cannot edit it
+    - When a user is overriding or the object is new (using default ownership): "Show All Task Layers"
+
+    Args:
+        layout (bpy.types.UILayout): Any UI Layout element like self.layout or row
+        data (bpy.types.CollectionProperty or bpy.types.ID): Object, Collection or Transferable Data Item
+        show_all_task_layers (bool, optional): Used when user is overriding or default ownership is set. Defaults to False.
+        show_local_task_layers (bool, optional): Force showing local task layers only. Defaults to False.
+        text (str, optional): Title of prop search. Defaults to "".
+        data_owner_name (str, optional): Name of the ownership property on data, if it needs to be specified
+        current_data_owner (str, optional): Current value of the property named by data_owner_name; should be a string
+    """
+
+    # Set data_owner_name based on type if it hasn't been passed
+    if data_owner_name == "":
+        # These rna_type.names are defined by class names in props.py
+        if data.rna_type.name in ["AssetTransferData", 'AssetTransferDataTemp']:
+            data_owner_name = "owner"
+        else:
+            data_owner_name = "asset_id_owner"
+
+    # Get the current data owner from OBJ/COL or Transferable Data Item if it hasn't been passed
+    if current_data_owner is None:
+        current_data_owner = data.get(data_owner_name)
+
+    asset_pipe = bpy.context.scene.asset_pipeline
+
+    if show_all_task_layers:
+        # Show All Task Layers
+        layout.prop_search(
+            data,
+            data_owner_name,
+            asset_pipe,
+            'all_task_layers',
+            text=text,
+        )
+        return
+    if (
+        current_data_owner not in [tl.name for tl in asset_pipe.local_task_layers]
+        and not show_local_task_layers
+    ):
+        # Show All Task Layers Greyed Out
+        layout.enabled = False
+        layout.prop_search(
+            data,
+            data_owner_name,
+            asset_pipe,
+            'all_task_layers',
+            text=text,
+        )
+        return
+    else:
+        # Only Show Local Task Layers
+        layout.prop_search(
+            data,
+            data_owner_name,
+            asset_pipe,
+            'local_task_layers',
+            text=text,
+        )
diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py
new file mode 100644
index 00000000..c8786de8
--- /dev/null
+++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_core.py
@@ -0,0 +1,169 @@
+import bpy
+
+from .transfer_functions import (
+    attributes,
+    constraints,
+    modifers,
+    parent,
+    shape_keys,
+    vertex_groups,
+    materials,
+)
+
+from ...
import constants + +from .transfer_util import ( + transfer_data_add_entry, + check_transfer_data_entry, +) + + +def copy_transfer_data_ownership( + transfer_data_item, target_obj: bpy.types.Object +) -> None: + """Copy Transferable Data item to object if non entry exists + + Args: + transfer_data_item: Item of bpy.types.CollectionProperty from source object + target_obj (bpy.types.Object): Object to add Transferable Data item to + """ + transfer_data = target_obj.transfer_data_ownership + matches = check_transfer_data_entry( + transfer_data, + transfer_data_item.name, + transfer_data_item.type, + ) + if len(matches) == 0: + transfer_data_add_entry( + transfer_data, + transfer_data_item.name, + transfer_data_item.type, + transfer_data_item.owner, + transfer_data_item.surrender, + ) + + +def transfer_data_clean(obj): + vertex_groups.vertex_groups_clean(obj) + modifers.modifiers_clean(obj) + constraints.constraints_clean(obj) + shape_keys.shape_keys_clean(obj) + attributes.attribute_clean(obj) + parent.parent_clean(obj) + + +def transfer_data_is_missing(transfer_data_item) -> bool: + """Check if Transferable Data item is missing + + Args: + transfer_data_item: Item of class ASSET_TRANSFER_DATA + + Returns: + bool: bool if item is missing + """ + return bool( + vertex_groups.vertex_group_is_missing(transfer_data_item) + or modifers.modifier_is_missing(transfer_data_item) + or constraints.constraint_is_missing(transfer_data_item) + or shape_keys.shape_key_is_missing(transfer_data_item) + or attributes.attribute_is_missing(transfer_data_item) + or parent.parent_is_missing(transfer_data_item) + ) + + +def init_transfer_data( + scene: bpy.types.Scene, + obj: bpy.types.Object, +): + """Collect Transferable Data Items on a given object + + Args: + obj (bpy.types.Object): Target object for Transferable Data + task_layer_name (str): Name of task layer + temp_transfer_data: Item of class ASSET_TRANSFER_DATA_TEMP + """ + vertex_groups.init_vertex_groups(scene, obj) + materials.init_materials(scene, obj) + modifers.init_modifiers(scene, obj) + constraints.init_constraints(scene, obj) + shape_keys.init_shape_keys(scene, obj) + attributes.init_attributes(scene, obj) + parent.init_parent(scene, obj) + + +def apply_transfer_data(context: bpy.types.Context, transfer_data_map) -> None: + """Apply all Transferable Data from Transferable Data map onto objects. + Copies any Transferable Data owned by local layer onto objects owned by external layers. 
+    Applies Transferable Data from external layers onto objects owned by local layers.
+
+    The transfer_data_map is generated by the 'AssetTransferMapping' class.
+
+    Args:
+        context (bpy.types.Context): context of .blend file
+        transfer_data_map: Map generated by class AssetTransferMapping
+    """
+
+    for name in transfer_data_map:
+        temp_transfer_data = context.scene.asset_pipeline.temp_transfer_data
+        transfer_data = transfer_data_map[name]
+        transfer_data_item = temp_transfer_data[
+            transfer_data.get('transfer_data_item_index')
+        ]
+        target_obj = transfer_data.get('target_obj')
+        source_obj = transfer_data.get('source_obj')
+        if target_obj is None:
+            print(f"Failed to transfer Transferable Data for {transfer_data_item.id_data.name}")
+            continue
+        if transfer_data_item is None:
+            continue
+        if source_obj != target_obj:
+            if transfer_data_item.type == constants.VERTEX_GROUP_KEY:
+                print(f"Transferring Data {constants.VERTEX_GROUP_KEY}: {name}")
+                vertex_groups.transfer_vertex_group(
+                    context=context,
+                    vertex_group_name=transfer_data_item.name,
+                    target_obj=target_obj,
+                    source_obj=source_obj,
+                )
+            if transfer_data_item.type == constants.MODIFIER_KEY:
+                print(f"Transferring Data {constants.MODIFIER_KEY}: {name}")
+                modifers.transfer_modifier(
+                    modifier_name=transfer_data_item.name,
+                    target_obj=target_obj,
+                    source_obj=source_obj,
+                )
+            if transfer_data_item.type == constants.CONSTRAINT_KEY:
+                constraints.transfer_constraint(
+                    constraint_name=transfer_data_item.name,
+                    target_obj=target_obj,
+                    source_obj=source_obj,
+                )
+            if transfer_data_item.type == constants.MATERIAL_SLOT_KEY:
+                print(f"Transferring Data {constants.MATERIAL_SLOT_KEY}: {name}")
+                materials.transfer_materials(
+                    target_obj=target_obj,
+                    source_obj=source_obj,
+                )
+            if transfer_data_item.type == constants.SHAPE_KEY_KEY:
+                shape_keys.transfer_shape_key(
+                    context=context,
+                    target_obj=target_obj,
+                    source_obj=source_obj,
+                    shape_key_name=transfer_data_item.name,
+                )
+            if transfer_data_item.type == constants.ATTRIBUTE_KEY:
+                attributes.transfer_attribute(
+                    target_obj=target_obj,
+                    source_obj=source_obj,
+                    attribute_name=transfer_data_item.name,
+                )
+            if transfer_data_item.type == constants.PARENT_KEY:
+                parent.transfer_parent(
+                    target_obj=target_obj,
+                    source_obj=source_obj,
+                )
+
+        copy_transfer_data_ownership(
+            transfer_data_item=transfer_data_item,
+            target_obj=target_obj,
+        )
diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/attributes.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/attributes.py
new file mode 100644
index 00000000..5bee5029
--- /dev/null
+++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/attributes.py
@@ -0,0 +1,247 @@
+import bpy
+import mathutils
+import bmesh
+import numpy as np
+from .transfer_function_util.proximity_core import (
+    tris_per_face,
+    closest_face_to_point,
+    closest_tri_on_face,
+    is_obdata_identical,
+    transfer_corner_data,
+)
+from ..transfer_util import check_transfer_data_entry
+from ...naming import merge_get_basename
+from ...task_layer import get_transfer_data_owner
+from .... import constants
+
+
+def attributes_get_editable(attributes):
+    return [
+        attribute
+        for attribute in attributes
+        if not (
+            attribute.is_internal
+            or attribute.is_required
+            # Material Index is part of material transfer and should be skipped
+            or attribute.name == 'material_index'
+        )
+    ]
+
+
+def attribute_clean(obj):
+    if obj.type != "MESH":
+        return
+    attributes = attributes_get_editable(obj.data.attributes)
+    attributes_to_remove = []
+    for attribute in attributes:
+        matches = check_transfer_data_entry(
+            obj.transfer_data_ownership,
+            merge_get_basename(attribute.name),
+            constants.ATTRIBUTE_KEY,
+        )
+        if len(matches) == 0:
+            attributes_to_remove.append(attribute.name)
+
+    for attribute_name_to_remove in reversed(attributes_to_remove):
+        attribute_to_remove = obj.data.attributes.get(attribute_name_to_remove)
+        print(f"Cleaning attribute {attribute_name_to_remove}")
+        obj.data.attributes.remove(attribute_to_remove)
+
+
+def attribute_is_missing(transfer_data_item):
+    obj = transfer_data_item.id_data
+    if obj.type != "MESH":
+        return
+    attributes = attributes_get_editable(obj.data.attributes)
+    attribute_names = [attribute.name for attribute in attributes]
+    if (
+        transfer_data_item.type == constants.ATTRIBUTE_KEY
+        and transfer_data_item["name"] not in attribute_names
+    ):
+        return True
+
+
+def init_attributes(scene, obj):
+    asset_pipe = scene.asset_pipeline
+    if obj.type != "MESH":
+        return
+    transfer_data = obj.transfer_data_ownership
+    td_type_key = constants.ATTRIBUTE_KEY
+    for attribute in attributes_get_editable(obj.data.attributes):
+        # Only add a new ownership transfer_data_item if the attribute doesn't have an owner
+        matches = check_transfer_data_entry(transfer_data, attribute.name, td_type_key)
+        if len(matches) == 0:
+            task_layer_owner, auto_surrender = get_transfer_data_owner(
+                asset_pipe, td_type_key, attribute.name
+            )
+            asset_pipe.add_temp_transfer_data(
+                name=attribute.name,
+                owner=task_layer_owner,
+                type=td_type_key,
+                obj=obj,
+                surrender=auto_surrender,
+            )
+
+
+def transfer_attribute(
+    attribute_name: str,
+    target_obj: bpy.types.Object,
+    source_obj: bpy.types.Object,
+):
+    source_attributes = source_obj.data.attributes
+    target_attributes = target_obj.data.attributes
+    source_attribute = source_attributes.get(attribute_name)
+    target_attribute = target_attributes.get(attribute_name)
+
+    if target_attribute:
+        target_attributes.remove(target_attribute)
+
+    target_attribute = target_attributes.new(
+        name=attribute_name,
+        type=source_attribute.data_type,
+        domain=source_attribute.domain,
+    )
+
+    if not is_obdata_identical(source_obj, target_obj):
+        proximity_transfer_single_attribute(
+            source_obj, target_obj, source_attribute, target_attribute
+        )
+        return
+
+    for source_data_item in source_attribute.data.items():
+        index = source_data_item[0]
+        source_data = source_data_item[1]
+        keys = set(source_data.bl_rna.properties.keys()) - set(
+            bpy.types.Attribute.bl_rna.properties.keys()
+        )
+        for key in list(keys):
+            target_data = target_attribute.data[index]
+            setattr(target_data, key, getattr(source_data, key))
+
+
+def proximity_transfer_single_attribute(
+    source_obj: bpy.types.Object,
+    target_obj: bpy.types.Object,
+    source_attribute: bpy.types.Attribute,
+    target_attribute: bpy.types.Attribute,
+):
+    # src_dat = source_obj.data
+    # tgt_dat = target_obj.data
+    # if type(src_dat) is not type(tgt_dat) or not (src_dat or tgt_dat):
+    #     return False
+    # if type(tgt_dat) is not bpy.types.Mesh:  # TODO: support more types
+    #     return False
+
+    # If target attribute
already exists, remove it. + # tgt_attr = tgt_dat.attributes.get(source_attribute.name) + # if tgt_attr is not None: + # try: + # tgt_dat.attributes.remove(tgt_attr) + # except RuntimeError: + # # Built-ins like "position" cannot be removed, and should be skipped. + # return + + # Create target attribute. + # target_attribute = tgt_dat.attributes.new( + # source_attribute.name, source_attribute.data_type, source_attribute.domain + # ) + + data_sfx = { + 'INT8': 'value', + 'INT': 'value', + 'FLOAT': 'value', + 'FLOAT2': 'vector', + 'BOOLEAN': 'value', + 'STRING': 'value', + 'BYTE_COLOR': 'color', + 'FLOAT_COLOR': 'color', + 'FLOAT_VECTOR': 'vector', + } + + data_sfx = data_sfx[source_attribute.data_type] + + # if topo_match: + # # TODO: optimize using foreach_get/set rather than loop + # for i in range(len(source_attribute.data)): + # setattr(tgt_attr.data[i], data_sfx, getattr(source_attribute.data[i], data_sfx)) + # return + + # proximity fallback + if source_attribute.data_type == 'STRING': + # TODO: add NEAREST transfer fallback for attributes without interpolation + print( + f'Proximity based transfer for generic attributes of type STRING not supported yet. Skipping attribute {source_attribute.name} on {target_obj}.' + ) + return + + domain = source_attribute.domain + if ( + domain == 'POINT' + ): # TODO: deduplicate interpolated point domain proximity transfer + bm_source = bmesh.new() + bm_source.from_mesh(source_obj.data) + bm_source.faces.ensure_lookup_table() + + bvh_tree = mathutils.bvhtree.BVHTree.FromBMesh(bm_source) + + tris_dict = tris_per_face(bm_source) + + for i, vert in enumerate(target_obj.data.vertices): + p = vert.co + face = closest_face_to_point(bm_source, p, bvh_tree) + + (tri, point) = closest_tri_on_face(tris_dict, face, p) + if not tri: + continue + weights = mathutils.interpolate.poly_3d_calc( + [tri[i].vert.co for i in range(3)], point + ) + + if data_sfx in ['color']: + vals_weighted = [ + weights[i] + * ( + np.array( + getattr(source_attribute.data[tri[i].vert.index], data_sfx) + ) + ) + for i in range(3) + ] + else: + vals_weighted = [ + weights[i] + * (getattr(source_attribute.data[tri[i].vert.index], data_sfx)) + for i in range(3) + ] + setattr(target_attribute.data[i], data_sfx, sum(np.array(vals_weighted))) + return + elif domain == 'EDGE': + # TODO support proximity fallback for generic edge attributes + print( + f'Proximity based transfer of generic edge attributes not supported yet. Skipping attribute {source_attribute.name} on {target_obj}.' 
+ ) + return + elif domain == 'FACE': + bm_source = bmesh.new() + bm_source.from_mesh(source_obj.data) + bm_source.faces.ensure_lookup_table() + + bvh_tree = mathutils.bvhtree.BVHTree.FromBMesh(bm_source) + for i, face in enumerate(target_obj.data.polygons): + p_target = face.center + closest_face = closest_face_to_point(bm_source, p_target, bvh_tree) + setattr( + target_attribute.data[i], + data_sfx, + getattr(source_attribute.data[closest_face.index], data_sfx), + ) + return + elif domain == 'CORNER': + transfer_corner_data( + source_obj, + target_obj, + source_attribute.data, + target_attribute.data, + data_suffix=data_sfx, + ) + return diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/constraints.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/constraints.py new file mode 100644 index 00000000..3ea5a2c8 --- /dev/null +++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/constraints.py @@ -0,0 +1,96 @@ +import bpy +from ..transfer_util import ( + transfer_data_clean, + transfer_data_item_is_missing, + check_transfer_data_entry, +) +from ...naming import task_layer_prefix_name_get +from .transfer_function_util.drivers import find_drivers, copy_driver +from .transfer_function_util.visibility import override_obj_visability +from ...task_layer import get_transfer_data_owner +from .... import constants + + +def constraints_clean(obj): + transfer_data_clean( + obj=obj, data_list=obj.constraints, td_type_key=constants.CONSTRAINT_KEY + ) + + +def constraint_is_missing(transfer_data_item): + return transfer_data_item_is_missing( + transfer_data_item=transfer_data_item, + td_type_key=constants.CONSTRAINT_KEY, + data_list=transfer_data_item.id_data.constraints, + ) + + +def init_constraints(scene, obj): + td_type_key = constants.CONSTRAINT_KEY + transfer_data = obj.transfer_data_ownership + asset_pipe = scene.asset_pipeline + task_layer_owner, auto_surrender = get_transfer_data_owner( + asset_pipe, + td_type_key, + ) + for const in obj.constraints: + const.name = task_layer_prefix_name_get(const.name, task_layer_owner) + # Only add new ownership transfer_data_item if vertex group doesn't have an owner + matches = check_transfer_data_entry(transfer_data, const.name, td_type_key) + if len(matches) == 0: + asset_pipe.add_temp_transfer_data( + name=const.name, + owner=task_layer_owner, + type=td_type_key, + obj=obj, + surrender=auto_surrender, + ) + + +def transfer_constraint(constraint_name, target_obj, source_obj): + context = bpy.context + # remove old and sync existing modifiers + old_mod = target_obj.constraints.get(constraint_name) + if old_mod: + target_obj.constraints.remove(old_mod) + + # transfer new modifiers + for i, constraint in enumerate(source_obj.constraints): + if constraint.name == constraint_name: + constraint_new = target_obj.constraints.new(constraint.type) + constraint_new.name = constraint.name + # sort new modifier at correct index (default to beginning of the stack) + idx = 0 + if i > 0: + name_prev = source_obj.constraints[i - 1].name + for target_mod_i, target_constraint in enumerate( + target_obj.constraints + ): + if target_constraint.name == name_prev: + idx = target_mod_i + 1 + + if idx != i: + with override_obj_visability(obj=target_obj, scene=context.scene): + with context.temp_override(object=target_obj): + bpy.ops.constraint.move_to_index( + constraint=constraint_new.name, index=idx + ) + constraint_target = target_obj.constraints.get(constraint.name) + props = [ + 
p.identifier for p in constraint.bl_rna.properties if not p.is_readonly + ] + for prop in props: + value = getattr(constraint, prop) + setattr(constraint_target, prop, value) + + # HACK to cover edge case of armature constraints + if constraint.type == "ARMATURE": + for target_item in constraint.targets: + new_target = constraint_new.targets.new() + new_target.target = target_item.target + new_target.subtarget = target_item.subtarget + + fcurves = find_drivers(source_obj, 'constraints', constraint_name) + + for fcurve in fcurves: + copy_driver(from_fcurve=fcurve, target=target_obj) diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/materials.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/materials.py new file mode 100644 index 00000000..264452bb --- /dev/null +++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/materials.py @@ -0,0 +1,86 @@ +import bpy +from .attributes import transfer_attribute +from ..transfer_util import check_transfer_data_entry +from ...task_layer import get_transfer_data_owner +from .... import constants + + +def materials_clean(obj): + # Material slots cannot use generic transfer_data_clean() function + + matches = check_transfer_data_entry( + obj.transfer_data_ownership, + constants.MATERIAL_TRANSFER_DATA_ITEM_NAME, + constants.MATERIAL_SLOT_KEY, + ) + + # Clear Materials if No Transferable Data is Found + if len(matches) != 0: + return + + if obj.data and hasattr(obj.data, 'materials'): + obj.data.materials.clear() + + +def materials_is_missing(transfer_data_item): + if ( + transfer_data_item.type == constants.MATERIAL_SLOT_KEY + and len(transfer_data_item.id_data.material_slots) == 0 + ): + return True + + +def init_materials(scene, obj): + asset_pipe = scene.asset_pipeline + td_type_key = constants.MATERIAL_SLOT_KEY + name = constants.MATERIAL_TRANSFER_DATA_ITEM_NAME + transfer_data = obj.transfer_data_ownership + + material_objects = [ + 'CURVE', + 'GPENCIL', + 'META', + 'MESH', + 'SURFACE', + 'FONT', + 'VOLUME', + ] + + # Only Execute if Material Slots exist on object + if obj.type not in material_objects: + return + matches = check_transfer_data_entry(transfer_data, name, td_type_key) + # Only add new ownership transfer_data_item if vertex group doesn't have an owner + if len(matches) == 0: + task_layer_owner, auto_surrender = get_transfer_data_owner( + asset_pipe, + td_type_key, + ) + asset_pipe.add_temp_transfer_data( + name=name, + owner=task_layer_owner, + type=td_type_key, + obj=obj, + surrender=auto_surrender, + ) + + +def transfer_materials(target_obj: bpy.types.Object, source_obj): + # Delete all material slots of target object. 
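+    # (Rebuilding every slot from the source object keeps the slot indices of
+    # both objects aligned, which the material_index attribute transfer at the
+    # end of this function relies on.)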
+ target_obj.data.materials.clear() + + # Transfer material slots + for idx in range(len(source_obj.material_slots)): + target_obj.data.materials.append(source_obj.material_slots[idx].material) + target_obj.material_slots[idx].link = source_obj.material_slots[idx].link + + # Transfer active material slot index + target_obj.active_material_index = source_obj.active_material_index + + # Transfer material slot assignments for curve + if target_obj.type == "CURVE": + for spl_to, spl_from in zip(target_obj.data.splines, source_obj.data.splines): + spl_to.material_index = spl_from.material_index + + if source_obj.data.attributes.get(constants.MATERIAL_ATTRIBUTE_NAME): + transfer_attribute(constants.MATERIAL_ATTRIBUTE_NAME, target_obj, source_obj) diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/modifers.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/modifers.py new file mode 100644 index 00000000..9e9296fe --- /dev/null +++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/modifers.py @@ -0,0 +1,112 @@ +import bpy +from .transfer_function_util.drivers import find_drivers, copy_driver +from .transfer_function_util.visibility import override_obj_visability +from ..transfer_util import ( + transfer_data_clean, + transfer_data_item_is_missing, + check_transfer_data_entry, +) +from ...naming import task_layer_prefix_name_get +from ...task_layer import get_transfer_data_owner +from .... import constants + + +def modifiers_clean(obj): + transfer_data_clean( + obj=obj, data_list=obj.modifiers, td_type_key=constants.MODIFIER_KEY + ) + + +def modifier_is_missing(transfer_data_item): + return transfer_data_item_is_missing( + transfer_data_item=transfer_data_item, + td_type_key=constants.MODIFIER_KEY, + data_list=transfer_data_item.id_data.modifiers, + ) + + +def init_modifiers(scene, obj): + asset_pipe = scene.asset_pipeline + td_type_key = constants.MODIFIER_KEY + transfer_data = obj.transfer_data_ownership + task_layer_owner, auto_surrender = get_transfer_data_owner( + asset_pipe, + td_type_key, + ) + + for mod in obj.modifiers: + mod.name = task_layer_prefix_name_get(mod.name, task_layer_owner) + # Only add new ownership transfer_data_item if vertex group doesn't have an owner + matches = check_transfer_data_entry(transfer_data, mod.name, td_type_key) + if len(matches) == 0: + asset_pipe.add_temp_transfer_data( + name=mod.name, + owner=task_layer_owner, + type=td_type_key, + obj=obj, + surrender=auto_surrender, + ) + + +def transfer_modifier(modifier_name, target_obj, source_obj): + # remove old and sync existing modifiers + context = bpy.context + scene = context.scene + old_mod = target_obj.modifiers.get(modifier_name) + if old_mod: + target_obj.modifiers.remove(old_mod) + + # transfer new modifiers + for i, mod in enumerate(source_obj.modifiers): + if mod.name == modifier_name: + mod_new = target_obj.modifiers.new(mod.name, mod.type) + # sort new modifier at correct index (default to beginning of the stack) + idx = 0 + if i > 0: + name_prev = source_obj.modifiers[i - 1].name + for target_mod_i, target_mod in enumerate(target_obj.modifiers): + if target_mod.name == name_prev: + idx = target_mod_i + 1 + with override_obj_visability(obj=target_obj, scene=scene): + with context.temp_override(object=target_obj): + bpy.ops.object.modifier_move_to_index( + modifier=mod_new.name, index=idx + ) + mod_target = target_obj.modifiers.get(mod.name) + props = [p.identifier for p in mod.bl_rna.properties if not 
p.is_readonly]
+            for prop in props:
+                value = getattr(mod, prop)
+                setattr(mod_target, prop, value)
+
+    # rebind modifiers (corrective smooth, surface deform, mesh deform)
+    for mod in target_obj.modifiers:
+        if mod.type == 'SURFACE_DEFORM':
+            if not mod.is_bound:
+                continue
+            for i in range(2):
+                with override_obj_visability(obj=target_obj, scene=scene):
+                    with context.temp_override(
+                        object=target_obj, active_object=target_obj
+                    ):
+                        bpy.ops.object.surfacedeform_bind(modifier=mod.name)
+        elif mod.type == 'MESH_DEFORM':
+            if not mod.is_bound:
+                continue
+            for i in range(2):
+                with override_obj_visability(obj=target_obj, scene=scene):
+                    with context.temp_override(
+                        object=target_obj, active_object=target_obj
+                    ):
+                        bpy.ops.object.meshdeform_bind(modifier=mod.name)
+        elif mod.type == 'CORRECTIVE_SMOOTH':
+            if not mod.is_bind:
+                continue
+            for i in range(2):
+                with override_obj_visability(obj=target_obj, scene=scene):
+                    with context.temp_override(
+                        object=target_obj, active_object=target_obj
+                    ):
+                        bpy.ops.object.correctivesmooth_bind(modifier=mod.name)
+    fcurves = find_drivers(source_obj, 'modifiers', modifier_name)
+    for fcurve in fcurves:
+        copy_driver(from_fcurve=fcurve, target=target_obj)
diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/parent.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/parent.py
new file mode 100644
index 00000000..10d3add0
--- /dev/null
+++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/parent.py
@@ -0,0 +1,59 @@
+import bpy
+from ..transfer_util import check_transfer_data_entry
+from ...task_layer import get_transfer_data_owner
+from ...naming import merge_get_basename
+from .... import constants
+
+
+def parent_clean(obj):
+    matches = check_transfer_data_entry(
+        obj.transfer_data_ownership,
+        merge_get_basename(constants.PARENT_TRANSFER_DATA_ITEM_NAME),
+        constants.PARENT_KEY,
+    )
+
+    if len(matches) != 0:
+        return
+
+    obj.parent = None
+    print("Cleaning Parent Relationship")
+
+
+def parent_is_missing(transfer_data_item):
+    if (
+        transfer_data_item.type == constants.PARENT_KEY
+        and transfer_data_item.id_data.parent is None
+    ):
+        return True
+
+
+def init_parent(scene, obj):
+    asset_pipe = scene.asset_pipeline
+    td_type_key = constants.PARENT_KEY
+    name = constants.PARENT_TRANSFER_DATA_ITEM_NAME
+    transfer_data = obj.transfer_data_ownership
+
+    # Only execute if the object has a parent
+    if obj.parent is None:
+        return
+    if obj.parent not in list(asset_pipe.asset_collection.all_objects):
+        raise Exception("Object parent cannot be outside of asset collection")
+    matches = check_transfer_data_entry(transfer_data, name, td_type_key)
+    # Only add a new ownership transfer_data_item if the parent relationship doesn't have an owner
+    if len(matches) == 0:
+        task_layer_owner, auto_surrender = get_transfer_data_owner(
+            asset_pipe,
+            td_type_key,
+        )
+        asset_pipe.add_temp_transfer_data(
+            name=name,
+            owner=task_layer_owner,
+            type=td_type_key,
+            obj=obj,
+            surrender=auto_surrender,
+        )
+
+
+def transfer_parent(target_obj, source_obj):
+    target_obj.parent = source_obj.parent
+    target_obj.matrix_parent_inverse = source_obj.parent.matrix_world.inverted()
diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/shape_keys.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/shape_keys.py
new file mode 100644
index 00000000..758f5ce1
--- /dev/null
+++
b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/shape_keys.py
@@ -0,0 +1,156 @@
+import bpy
+import mathutils
+import bmesh
+import numpy as np
+from .transfer_function_util.proximity_core import (
+    tris_per_face,
+    closest_face_to_point,
+    closest_tri_on_face,
+)
+from .transfer_function_util.drivers import find_drivers, copy_driver
+from ..transfer_util import (
+    transfer_data_item_is_missing,
+    transfer_data_item_init,
+    check_transfer_data_entry,
+)
+from ...naming import merge_get_basename
+from .... import constants
+
+
+def shape_key_set_active(obj, shape_key_name):
+    for index, shape_key in enumerate(obj.data.shape_keys.key_blocks):
+        if shape_key.name == shape_key_name:
+            obj.active_shape_key_index = index
+
+
+def shape_keys_clean(obj):
+    if obj.type != "MESH" or obj.data.shape_keys is None:
+        return
+
+    # Collect the keys to remove first; removing entries while iterating
+    # over key_blocks can skip elements.
+    to_remove = []
+    for shape_key in obj.data.shape_keys.key_blocks:
+        matches = check_transfer_data_entry(
+            obj.transfer_data_ownership,
+            merge_get_basename(shape_key.name),
+            constants.SHAPE_KEY_KEY,
+        )
+        if len(matches) == 0:
+            to_remove.append(shape_key.name)
+    for shape_key_name in to_remove:
+        obj.shape_key_remove(obj.data.shape_keys.key_blocks[shape_key_name])
+
+
+def shape_key_is_missing(transfer_data_item):
+    if not transfer_data_item.type == constants.SHAPE_KEY_KEY:
+        return
+    obj = transfer_data_item.id_data
+    if obj.type != 'MESH':
+        return
+    if not obj.data.shape_keys:
+        return True
+    return transfer_data_item_is_missing(
+        transfer_data_item=transfer_data_item,
+        td_type_key=constants.SHAPE_KEY_KEY,
+        data_list=obj.data.shape_keys.key_blocks,
+    )
+
+
+def init_shape_keys(scene, obj):
+    if obj.type != "MESH" or obj.data.shape_keys is None:
+        return
+
+    # Check that the order is legal.
+    # Key Blocks must be ordered after the key they are Relative To.
+    for i, kb in enumerate(obj.data.shape_keys.key_blocks):
+        if kb.relative_key:
+            base_shape_idx = obj.data.shape_keys.key_blocks.find(kb.relative_key.name)
+            if base_shape_idx > i:
+                raise Exception(
+                    f'Shape Key "{kb.name}" must be ordered after its base shape "{kb.relative_key.name}" on object "{obj.name}".'
+                )
+
+    transfer_data_item_init(
+        scene=scene,
+        obj=obj,
+        data_list=obj.data.shape_keys.key_blocks,
+        td_type_key=constants.SHAPE_KEY_KEY,
+    )
+
+
+def transfer_shape_key(
+    context: bpy.types.Context,
+    shape_key_name: str,
+    target_obj: bpy.types.Object,
+    source_obj: bpy.types.Object,
+):
+    if not source_obj.data.shape_keys:
+        return
+    sk_source = source_obj.data.shape_keys.key_blocks.get(shape_key_name)
+    assert sk_source
+
+    sk_target = None
+    if not target_obj.data.shape_keys:
+        sk_target = target_obj.shape_key_add()
+    if not sk_target:
+        sk_target = target_obj.data.shape_keys.key_blocks.get(shape_key_name)
+    if not sk_target:
+        sk_target = target_obj.shape_key_add()
+
+    sk_target.name = sk_source.name
+    sk_target.vertex_group = sk_source.vertex_group
+    if sk_source.relative_key != sk_source:
+        relative_key = None
+        if target_obj.data.shape_keys:
+            relative_key = target_obj.data.shape_keys.key_blocks.get(
+                sk_source.relative_key.name
+            )
+        if relative_key:
+            sk_target.relative_key = relative_key
+        else:
+            # If the base shape of one of our shapes was removed by another task layer,
+            # the result will probably be pretty bad, but it's not a catastrophic failure.
+            # Proceed with a warning.
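+            # (sk_target keeps its current relative_key here; on a freshly
+            # added key that defaults to the first key block, i.e. the Basis.)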
+            print(
+                f'Warning: Base shape "{sk_source.relative_key.name}" of Key "{sk_source.name}" was removed from "{target_obj.name}"'
+            )
+
+    sk_target.slider_min = sk_source.slider_min
+    sk_target.slider_max = sk_source.slider_max
+    sk_target.value = sk_source.value
+    sk_target.mute = sk_source.mute
+
+    bm_source = bmesh.new()
+    bm_source.from_mesh(source_obj.data)
+    bm_source.faces.ensure_lookup_table()
+
+    bvh_tree = mathutils.bvhtree.BVHTree.FromBMesh(bm_source)
+    tris_dict = tris_per_face(bm_source)
+    for i, vert in enumerate(target_obj.data.vertices):
+        p = vert.co
+        face = closest_face_to_point(bm_source, p, bvh_tree)
+
+        (tri, point) = closest_tri_on_face(tris_dict, face, p)
+        if not tri:
+            continue
+        weights = mathutils.interpolate.poly_3d_calc(
+            [tri[i].vert.co for i in range(3)], point
+        )
+
+        vals_weighted = [
+            weights[i]
+            * (
+                sk_source.data[tri[i].vert.index].co
+                - source_obj.data.vertices[tri[i].vert.index].co
+            )
+            for i in range(3)
+        ]
+        val = mathutils.Vector(sum(np.array(vals_weighted)))
+        sk_target.data[i].co = vert.co + val
+
+    if source_obj.data.shape_keys is None:
+        return
+
+    fcurves = find_drivers(
+        source_obj.data.shape_keys,
+        'key_blocks',
+        shape_key_name,
+    )
+    for fcurve in fcurves:
+        copy_driver(from_fcurve=fcurve, target=target_obj.data.shape_keys)
diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/transfer_function_util/drivers.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/transfer_function_util/drivers.py
new file mode 100644
index 00000000..505f665d
--- /dev/null
+++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/transfer_function_util/drivers.py
@@ -0,0 +1,73 @@
+import bpy
+from rigify.utils.misc import copy_attributes
+
+
+def copy_driver(
+    from_fcurve: bpy.types.FCurve, target: bpy.types.ID, data_path=None, index=None
+) -> bpy.types.FCurve:
+    """Copy an existing FCurve containing a driver to a new ID, by creating a copy
+    of the existing driver on the target ID.
+
+    Args:
+        from_fcurve (bpy.types.FCurve): FCurve containing a driver
+        target (bpy.types.ID): ID that can have drivers added to it
+        data_path (str, optional): Data path to drive on the target. Defaults to the source FCurve's data path.
+        index (int, optional): Array index of the driven property. Defaults to None.
+
+    Returns:
+        bpy.types.FCurve: FCurve containing copy of driver on target ID
+    """
+    if not data_path:
+        data_path = from_fcurve.data_path
+
+    new_fc = None
+    # Compare against None explicitly, so that array index 0 is still respected.
+    if index is not None:
+        new_fc = target.driver_add(data_path, index)
+    else:
+        new_fc = target.driver_add(data_path)
+
+    copy_attributes(from_fcurve, new_fc)
+    copy_attributes(from_fcurve.driver, new_fc.driver)
+
+    # Remove default modifiers, variables, etc.
+    for m in list(new_fc.modifiers):
+        new_fc.modifiers.remove(m)
+    for v in list(new_fc.driver.variables):
+        new_fc.driver.variables.remove(v)
+
+    # Copy modifiers
+    for m1 in from_fcurve.modifiers:
+        m2 = new_fc.modifiers.new(type=m1.type)
+        copy_attributes(m1, m2)
+
+    # Copy variables
+    for v1 in from_fcurve.driver.variables:
+        v2 = new_fc.driver.variables.new()
+        copy_attributes(v1, v2)
+        for i in range(len(v1.targets)):
+            copy_attributes(v1.targets[i], v2.targets[i])
+
+    return new_fc
+
+
+def find_drivers(
+    id: bpy.types.ID, target_type: str, target_name: str
+) -> list[bpy.types.FCurve]:
+    """Find all drivers on an ID whose data path references the given data type and name.
+
+    Args:
+        id (bpy.types.ID): ID whose animation data is searched for drivers
+        target_type (str): Name of data type found in driver data path, e.g.
"modifiers" + target_name (str): Name of data found in driver path, e.g. modifier's name + + Returns: + list[bpy.types.FCurve]: List of FCurves containing drivers that match type & name + """ + found_drivers = [] + if id.animation_data is None or id.animation_data.drivers is None: + return found_drivers + drivers = id.animation_data.drivers + for driver in drivers: + if f'{target_type}["{target_name}"]' in driver.data_path: + found_drivers.append(driver) + return found_drivers diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/transfer_function_util/proximity_core.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/transfer_function_util/proximity_core.py new file mode 100644 index 00000000..6dd6516e --- /dev/null +++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/transfer_function_util/proximity_core.py @@ -0,0 +1,231 @@ +import bpy +import mathutils +import bmesh +import numpy as np + + +def closest_face_to_point(bm_source, p_target, bvh_tree=None): + if not bvh_tree: + bvh_tree = mathutils.bvhtree.BVHTree.FromBMesh(bm_source) + (loc, norm, index, distance) = bvh_tree.find_nearest(p_target) + return bm_source.faces[index] + + +def tris_per_face(bm_source): + tris_source = bm_source.calc_loop_triangles() + tris_dict = dict() + for face in bm_source.faces: + tris_face = [] + for i in range(len(tris_source))[::-1]: + if tris_source[i][0] in face.loops: + tris_face.append(tris_source.pop(i)) + tris_dict[face] = tris_face + return tris_dict + + +def closest_tri_on_face(tris_dict, face, p): + points = [] + dist = [] + tris = [] + for tri in tris_dict[face]: + point = mathutils.geometry.closest_point_on_tri( + p, *[tri[i].vert.co for i in range(3)] + ) + tris.append(tri) + points.append(point) + dist.append((point - p).length) + min_idx = np.argmin(np.array(dist)) + point = points[min_idx] + tri = tris[min_idx] + return (tri, point) + + +def closest_edge_on_face_to_line(face, p1, p2, skip_edges=None): + """Returns edge of a face which is closest to line.""" + for edge in face.edges: + if skip_edges: + if edge in skip_edges: + continue + res = mathutils.geometry.intersect_line_line( + p1, p2, *[edge.verts[i].co for i in range(2)] + ) + if not res: + continue + (p_traversal, p_edge) = res + frac_1 = (edge.verts[1].co - edge.verts[0].co).dot( + p_edge - edge.verts[0].co + ) / (edge.verts[1].co - edge.verts[0].co).length ** 2.0 + frac_2 = (p2 - p1).dot(p_traversal - p1) / (p2 - p1).length ** 2.0 + if (frac_1 >= 0 and frac_1 <= 1) and (frac_2 >= 0 and frac_2 <= 1): + return edge + return None + + +def edge_data_split(edge, data_layer, data_suffix: str): + for vert in edge.verts: + vals = [] + for loop in vert.link_loops: + loops_edge_vert = set([loop for f in edge.link_faces for loop in f.loops]) + if loop not in loops_edge_vert: + continue + dat = data_layer[loop.index] + element = list(getattr(dat, data_suffix)) + if not vals: + vals.append(element) + elif not vals[0] == element: + vals.append(element) + if len(vals) > 1: + return True + return False + + +def interpolate_data_from_face( + bm_source, tris_dict, face, p, data_layer_source, data_suffix='' +): + """Returns interpolated value of a data layer within a face closest to a point.""" + + (tri, point) = closest_tri_on_face(tris_dict, face, p) + if not tri: + return None + weights = mathutils.interpolate.poly_3d_calc( + [tri[i].vert.co for i in range(3)], point + ) + + if not data_suffix: + cols_weighted = [ + weights[i] * 
np.array(data_layer_source[tri[i].index]) for i in range(3) + ] + col = sum(np.array(cols_weighted)) + else: + cols_weighted = [ + weights[i] * np.array(getattr(data_layer_source[tri[i].index], data_suffix)) + for i in range(3) + ] + col = sum(np.array(cols_weighted)) + return col + + +def transfer_corner_data( + obj_source, obj_target, data_layer_source, data_layer_target, data_suffix='' +): + """ + Transfers interpolated face corner data from data layer of a source object to data layer of a + target object, while approximately preserving data seams (e.g. necessary for UV Maps). + The transfer is face interpolated per target corner within the source face that is closest + to the target corner point and does not have any data seams on the way back to the + source face that is closest to the target face's center. + """ + + bm_source = bmesh.new() + bm_source.from_mesh(obj_source.data) + bm_source.faces.ensure_lookup_table() + bm_target = bmesh.new() + bm_target.from_mesh(obj_target.data) + bm_target.faces.ensure_lookup_table() + + bvh_tree = mathutils.bvhtree.BVHTree.FromBMesh(bm_source) + + tris_dict = tris_per_face(bm_source) + + for face_target in bm_target.faces: + face_target_center = face_target.calc_center_median() + + face_source = closest_face_to_point(bm_source, face_target_center, bvh_tree) + + for corner_target in face_target.loops: + # find nearest face on target compared to face that loop belongs to + p = corner_target.vert.co + + face_source_closest = closest_face_to_point(bm_source, p, bvh_tree) + enclosed = face_source_closest is face_source + face_source_int = face_source + if not enclosed: + # traverse faces between point and face center + traversed_faces = set() + traversed_edges = set() + while face_source_int is not face_source_closest: + traversed_faces.add(face_source_int) + edge = closest_edge_on_face_to_line( + face_source_int, + face_target_center, + p, + skip_edges=traversed_edges, + ) + if edge == None: + break + if len(edge.link_faces) != 2: + break + traversed_edges.add(edge) + + split = edge_data_split(edge, data_layer_source, data_suffix) + if split: + break + + # set new source face to other face belonging to edge + face_source_int = ( + edge.link_faces[1] + if edge.link_faces[1] is not face_source_int + else edge.link_faces[0] + ) + + # avoid looping behaviour + if face_source_int in traversed_faces: + face_source_int = face_source + break + + # interpolate data from selected face + col = interpolate_data_from_face( + bm_source, tris_dict, face_source_int, p, data_layer_source, data_suffix + ) + if col is None: + continue + if not data_suffix: + data_layer_target.data[corner_target.index] = col + else: + setattr(data_layer_target[corner_target.index], data_suffix, list(col)) + return + + +def is_mesh_identical(mesh_a, mesh_b) -> bool: + if len(mesh_a.vertices) != len(mesh_b.vertices): + return False + if len(mesh_a.edges) != len(mesh_b.edges): + return False + if len(mesh_a.polygons) != len(mesh_b.polygons): + return False + for e1, e2 in zip(mesh_a.edges, mesh_b.edges): + for v1, v2 in zip(e1.vertices, e2.vertices): + if v1 != v2: + return False + + return True + + +def is_curve_identical(curve_a: bpy.types.Curve, curve_b: bpy.types.Curve) -> bool: + if len(curve_a.splines) != len(curve_b.splines): + return False + for spline1, spline2 in zip(curve_a.splines, curve_b.splines): + if len(spline1.points) != len(spline2.points): + return False + return True + + +def is_obdata_identical( + a: bpy.types.Object or bpy.types.Mesh, b: bpy.types.Object or 
bpy.types.Mesh
+) -> bool:
+    """Checks if two objects have matching topology (efficiency over exactness)"""
+    if type(a) == bpy.types.Object:
+        a = a.data
+    if type(b) == bpy.types.Object:
+        b = b.data
+
+    if type(a) != type(b):
+        return False
+
+    if type(a) == bpy.types.Mesh:
+        return is_mesh_identical(a, b)
+    elif type(a) == bpy.types.Curve:
+        return is_curve_identical(a, b)
+    else:
+        # TODO: Support geometry types other than mesh or curve.
+        return
diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/transfer_function_util/visibility.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/transfer_function_util/visibility.py
new file mode 100644
index 00000000..f93ccbd5
--- /dev/null
+++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/transfer_function_util/visibility.py
@@ -0,0 +1,53 @@
+import bpy
+import contextlib
+
+from typing import Optional
+
+
+def get_visibility_driver(obj) -> Optional[bpy.types.FCurve]:
+    obj = bpy.data.objects.get(obj.name)
+    assert obj, "Object was renamed while its visibility was being ensured?"
+    if hasattr(obj, "animation_data") and obj.animation_data:
+        return obj.animation_data.drivers.find("hide_viewport")
+
+
+@contextlib.contextmanager
+def override_obj_visability(obj: bpy.types.Object, scene: bpy.types.Scene):
+    """Temporarily change the visibility of an object so that bpy.ops or other
+    functions that require the object to be visible can be called.
+
+    Args:
+        obj (bpy.types.Object): Object to un-hide
+        scene (bpy.types.Scene): Scene the Object is in
+    """
+    hide = obj.hide_get()  # eye icon
+    hide_viewport = obj.hide_viewport  # hide viewport
+    select = obj.hide_select  # selectable
+
+    driver = get_visibility_driver(obj)
+    if driver:
+        driver_mute = driver.mute
+
+    # Initialize before the try block, so the finally block can't hit an unbound name.
+    assigned_to_scene_root = False
+    try:
+        obj.hide_set(False)
+        obj.hide_viewport = False
+        obj.hide_select = False
+        if driver:
+            driver.mute = True
+
+        if obj.name not in scene.collection.objects:
+            assigned_to_scene_root = True
+            scene.collection.objects.link(obj)
+
+        yield
+
+    finally:
+        obj.hide_set(hide)
+        obj.hide_viewport = hide_viewport
+        obj.hide_select = select
+        if driver:
+            driver.mute = driver_mute
+
+        if assigned_to_scene_root and obj.name in scene.collection.objects:
+            scene.collection.objects.unlink(obj)
diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/vertex_groups.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/vertex_groups.py
new file mode 100644
index 00000000..ed9bbd8f
--- /dev/null
+++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_functions/vertex_groups.py
@@ -0,0 +1,193 @@
+import bpy
+from mathutils import Vector, kdtree
+from typing import Dict, Tuple, List
+from ..transfer_util import (
+    transfer_data_clean,
+    transfer_data_item_is_missing,
+    transfer_data_item_init,
+)
+from .... import constants
+
+
+def vertex_groups_clean(obj):
+    transfer_data_clean(
+        obj=obj, data_list=obj.vertex_groups, td_type_key=constants.VERTEX_GROUP_KEY
+    )
+
+
+def vertex_group_is_missing(transfer_data_item):
+    return transfer_data_item_is_missing(
+        transfer_data_item=transfer_data_item,
+        td_type_key=constants.VERTEX_GROUP_KEY,
+        data_list=transfer_data_item.id_data.vertex_groups,
+    )
+
+
+def init_vertex_groups(scene, obj):
+    transfer_data_item_init(
+        scene=scene,
+        obj=obj,
+        data_list=obj.vertex_groups,
+        td_type_key=constants.VERTEX_GROUP_KEY,
+    )
+
+
+def transfer_vertex_group(
+    context,
+    vertex_group_name: str,
+    target_obj: bpy.types.Object,
+    source_obj: bpy.types.Object,
+):
+    if target_obj == source_obj:
+        return
+
+    if not source_obj.vertex_groups.get(vertex_group_name):
+        print(f"ERROR Vertex Group {vertex_group_name} not found in {source_obj.name}")
+        return
+
+    precalc_and_transfer_single_group(
+        source_obj, target_obj, vertex_group_name, expand=2
+    )
+
+
+def precalc_and_transfer_single_group(source_obj, target_obj, vgroup_name, expand=2):
+    """Convenience function to transfer a single group. For transferring multiple groups,
+    this is very inefficient and shouldn't be used.
+
+    Instead, you should (see the sketch below):
+    - build_kdtree ONCE per source mesh.
+    - build_vert_influence_map and transfer_vertex_groups ONCE per object pair.
+    """
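+    # A minimal sketch of that batch pattern, using the helpers defined below
+    # (src_obj/tgt_obj stand in for any source/target object pair):
+    #
+    #   kd = build_kdtree(src_obj.data)
+    #   influence_map = build_vert_influence_map(src_obj, tgt_obj, kd, expand=2)
+    #   transfer_vertex_groups(src_obj, tgt_obj, influence_map, list(src_obj.vertex_groups))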
+    # Remove group from the target obj if it already exists.
+    tgt_vg = target_obj.vertex_groups.get(vgroup_name)
+    if tgt_vg:
+        target_obj.vertex_groups.remove(tgt_vg)
+
+    kd_tree = build_kdtree(source_obj.data)
+    vert_influence_map = build_vert_influence_map(
+        source_obj, target_obj, kd_tree, expand
+    )
+    transfer_vertex_groups(
+        source_obj,
+        target_obj,
+        vert_influence_map,
+        [source_obj.vertex_groups[vgroup_name]],
+    )
+
+
+def build_kdtree(mesh):
+    kd = kdtree.KDTree(len(mesh.vertices))
+    for i, v in enumerate(mesh.vertices):
+        kd.insert(v.co, i)
+    kd.balance()
+    return kd
+
+
+def build_vert_influence_map(obj_from, obj_to, kd_tree, expand=2):
+    verts_of_edge = {
+        i: (e.vertices[0], e.vertices[1]) for i, e in enumerate(obj_from.data.edges)
+    }
+
+    edges_of_vert: Dict[int, List[int]] = {}
+    for edge_idx, edge in enumerate(obj_from.data.edges):
+        for vert_idx in edge.vertices:
+            if vert_idx not in edges_of_vert:
+                edges_of_vert[vert_idx] = []
+            edges_of_vert[vert_idx].append(edge_idx)
+
+    # A mapping from target vertex index to a list of source vertex indices and
+    # their influence.
+    # This can be pre-calculated once per object pair, to minimize re-calculations
+    # of subsequent transferring of individual vertex groups.
+    vert_influence_map: Dict[int, List[Tuple[int, float]]] = {}
+    for i, dest_vert in enumerate(obj_to.data.vertices):
+        vert_influence_map[i] = get_source_vert_influences(
+            dest_vert, obj_from, kd_tree, expand, edges_of_vert, verts_of_edge
+        )
+
+    return vert_influence_map
+
+
+def get_source_vert_influences(
+    target_vert, obj_from, kd_tree, expand=2, edges_of_vert={}, verts_of_edge={}
+) -> List[Tuple[int, float]]:
+    _coord, idx, dist = get_nearest_vert(target_vert.co, kd_tree)
+    source_vert_indices = [idx]
+
+    if dist == 0:
+        # If the vertex position is a perfect match, just use that one vertex with max influence.
+        return [(idx, 1)]
+
+    for i in range(0, expand):
+        new_indices = []
+        for vert_idx in source_vert_indices:
+            for edge in edges_of_vert[vert_idx]:
+                vert_other = other_vert_of_edge(edge, vert_idx, verts_of_edge)
+                if vert_other not in source_vert_indices:
+                    new_indices.append(vert_other)
+        source_vert_indices.extend(new_indices)
+
+    distances: List[Tuple[int, float]] = []
+    distance_total = 0
+    for src_vert_idx in source_vert_indices:
+        distance = (target_vert.co - obj_from.data.vertices[src_vert_idx].co).length
+        distance_total += distance
+        distances.append((src_vert_idx, distance))
+
+    # Calculate influences such that the total of all influences adds up to 1.0,
+    # and the influence is inversely correlated with the distance.
+    parts = [1 / (dist / distance_total) for idx, dist in distances]
+    parts_sum = sum(parts)
+
+    # Unpack each (index, distance) pair so every influence is paired with its own
+    # source vertex index.
+    influences = [
+        (idx, part / parts_sum) for part, (idx, dist) in zip(parts, distances)
+    ]
+
+    return influences
+
+
+def get_nearest_vert(
+    coords: Vector, kd_tree: kdtree.KDTree
+) -> Tuple[Vector, int, float]:
+    """Return coordinate, index, and distance of nearest vert to coords in kd_tree."""
+    return kd_tree.find(coords)
+
+
+def other_vert_of_edge(
+    edge: int, vert: int, verts_of_edge: Dict[int, Tuple[int, int]]
+) -> int:
+    verts = verts_of_edge[edge]
+    assert vert in verts, f"Vert {vert} not part of edge {edge}."
+    return verts[0] if vert == verts[1] else verts[1]
+
+
+def transfer_vertex_groups(obj_from, obj_to, vert_influence_map, src_vgroups):
+    """Transfer src_vgroups in obj_from to obj_to using a pre-calculated vert_influence_map."""
+
+    for src_vg in src_vgroups:
+        target_vg = obj_to.vertex_groups.get(src_vg.name)
+        if target_vg is None:
+            target_vg = obj_to.vertex_groups.new(name=src_vg.name)
+
+    for i, dest_vert in enumerate(obj_to.data.vertices):
+        source_verts = vert_influence_map[i]
+
+        # Vertex Group Name : Weight
+        vgroup_weights = {}
+
+        for src_vert_idx, influence in source_verts:
+            for group in obj_from.data.vertices[src_vert_idx].groups:
+                group_idx = group.group
+                vg = obj_from.vertex_groups[group_idx]
+                if vg not in src_vgroups:
+                    continue
+                if vg.name not in vgroup_weights:
+                    vgroup_weights[vg.name] = 0
+                vgroup_weights[vg.name] += vg.weight(src_vert_idx) * influence
+
+        # Assign final weights of this vertex in the vertex groups.
+        for vg_name in vgroup_weights.keys():
+            target_vg = obj_to.vertex_groups.get(vg_name)
+            target_vg.add([dest_vert.index], vgroup_weights[vg_name], 'REPLACE')
diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_ui.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_ui.py
new file mode 100644
index 00000000..bda723e1
--- /dev/null
+++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_ui.py
@@ -0,0 +1,102 @@
+import bpy
+from ... import constants
+from ..task_layer import draw_task_layer_selection
+
+
+def draw_transfer_data_type(
+    layout: bpy.types.UILayout, transfer_data: bpy.types.CollectionProperty
+) -> None:
+    """Draw UI Element for items of a Transferable Data type"""
+    asset_pipe = bpy.context.scene.asset_pipeline
+    if not transfer_data:
+        return
+    name, icon, ui_bool = constants.TRANSFER_DATA_TYPES[transfer_data[0].type]
+    box = layout.box()
+    row = box.row()
+    row.prop(
+        asset_pipe,
+        ui_bool,
+        icon=icon,
+        text="",
+    )
+    row.label(text=name)
+    if not bool(asset_pipe.get(ui_bool)):
+        return
+
+    for transfer_data_item in transfer_data:
+        row = box.row()
+        row.label(text=f"{transfer_data_item.name}: ")
+
+        if transfer_data_item.get("surrender"):
+            # Disable the entire row if the item is surrendered (prevents the user from un-surrendering)
+            row.enabled = (
+                transfer_data_item.owner not in asset_pipe.get_local_task_layers()
+            )
+            col = row.column()
+            col.operator(
+                "assetpipe.update_surrendered_transfer_data"
+            ).transfer_data_item_name = transfer_data_item.name
+
+        # New Row inside a column because draw_task_layer_selection() will enable/disable the entire row
+        # Only need this to affect itself and the "surrender" property
+        col = row.column()
+        task_layer_row = col.row()
+
+        draw_task_layer_selection(
+            layout=task_layer_row,
+            data=transfer_data_item,
+        )
+        surrender_icon = (
+            "ORPHAN_DATA" if transfer_data_item.get("surrender") else "HEART"
+        )
+        task_layer_row.prop(
+            transfer_data_item, "surrender", text="", icon=surrender_icon
+        )
+
+
+def draw_transfer_data(
+    transfer_data: bpy.types.CollectionProperty, layout: bpy.types.UILayout
+) -> None:
+    """Draw UI List of Transferable Data"""
+    vertex_groups = []
+    # vertex_colors = []
+    material_slots = []
+    modifiers = []
+    constraints = []
+    # uv_layers = []
+    shape_keys = []
+    attributes = []
+    parent = []
+
+    for transfer_data_item in transfer_data:
+        if transfer_data_item.type == constants.VERTEX_GROUP_KEY:
+            vertex_groups.append(transfer_data_item)
+        # if transfer_data_item.type == constants.VERTEX_COLOR_KEY:
+        #     vertex_colors.append(transfer_data_item)
+        if transfer_data_item.type == constants.MATERIAL_SLOT_KEY:
+            material_slots.append(transfer_data_item)
+        if transfer_data_item.type == constants.MODIFIER_KEY:
+            modifiers.append(transfer_data_item)
+        if transfer_data_item.type == constants.CONSTRAINT_KEY:
+            constraints.append(transfer_data_item)
+        # if transfer_data_item.type == constants.UV_LAYERS_KEY:
+        #     uv_layers.append(transfer_data_item)
+        if transfer_data_item.type == constants.SHAPE_KEY_KEY:
+            shape_keys.append(transfer_data_item)
+        if transfer_data_item.type == constants.ATTRIBUTE_KEY:
+            attributes.append(transfer_data_item)
+        if transfer_data_item.type == constants.PARENT_KEY:
+            parent.append(transfer_data_item)
+
+    draw_transfer_data_type(layout, vertex_groups)
+    # draw_transfer_data_type(layout, vertex_colors)
+    draw_transfer_data_type(layout, modifiers)
+    draw_transfer_data_type(layout, material_slots)
+    draw_transfer_data_type(layout, constraints)
+    # draw_transfer_data_type(layout, uv_layers)
+    draw_transfer_data_type(layout, shape_keys)
+    draw_transfer_data_type(layout, attributes)
+    draw_transfer_data_type(layout, parent)
diff --git a/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_util.py b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_util.py
new file mode 100644
index 00000000..a3ca1b8b
--- /dev/null
+++ b/scripts-blender/addons/asset_pipeline/merge/transfer_data/transfer_util.py
@@ -0,0 +1,123 @@
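+# Shared bookkeeping helpers for Transferable Data ownership: checking for,
+# adding, and cleaning ownership entries on an object, independent of the
+# concrete data type (vertex groups, modifiers, constraints, etc.).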
+import bpy
+from ..naming import merge_get_basename
+from ..task_layer import get_transfer_data_owner
+
+
+def check_transfer_data_entry(
+    transfer_data: bpy.types.CollectionProperty, key: str, td_type_key: str
+) -> set:
+    """Verifies if Transferable Data entry exists
+
+    Args:
+        transfer_data (bpy.types.CollectionProperty): Transferable Data of an object
+        key (str): Name of item that is being verified
+        td_type_key (str): Type of Transferable Data
+
+    Returns:
+        set: Returns set of matches where name is found in the transfer data
+    """
+    existing_items = [
+        transfer_data_item.name
+        for transfer_data_item in transfer_data
+        if transfer_data_item.type == td_type_key
+    ]
+    return set([key]).intersection(set(existing_items))
+
+
+def transfer_data_add_entry(
+    transfer_data: bpy.types.CollectionProperty,
+    name: str,
+    td_type_key: str,
+    task_layer_name: str,
+    surrender: bool,
+):
+    """Add entry to Transferable Data ownership
+
+    Args:
+        transfer_data (bpy.types.CollectionProperty): Transferable Data of an object
+        name (str): Name of new Transferable Data item
+        td_type_key (str): Type of Transferable Data
+        task_layer_name (str): Name of current task layer
+        surrender (bool): Whether the new entry starts out surrendered
+    """
+    transfer_data_item = transfer_data.add()
+    transfer_data_item.name = name
+    transfer_data_item.owner = task_layer_name
+    transfer_data_item.type = td_type_key
+    transfer_data_item.surrender = surrender
+    return transfer_data_item
+
+
+def transfer_data_clean(
+    obj: bpy.types.Object, data_list: bpy.types.CollectionProperty, td_type_key: str
+):
+    """Removes data if a transfer_data_item doesn't exist but the data does exist
+    Args:
+        obj (bpy.types.Object): Object containing Transferable Data
+        data_list (bpy.types.CollectionProperty): Collection Property containing a type of possible Transferable Data e.g. obj.modifiers
+        td_type_key (str): Key for the Transferable Data type
+    """
+    # Iterate over a copy; removing items while iterating the live collection can skip entries.
+    for item in list(data_list):
+        matches = check_transfer_data_entry(
+            obj.transfer_data_ownership,
+            merge_get_basename(item.name),
+            td_type_key,
+        )
+        if len(matches) == 0:
+            data_list.remove(item)
+
+
+def transfer_data_item_is_missing(
+    transfer_data_item, data_list: bpy.types.CollectionProperty, td_type_key: str
+) -> bool:
+    """Returns True if a transfer_data_item exists but the matching data doesn't exist
+
+    Args:
+        transfer_data_item: Item of Transferable Data
+        data_list (bpy.types.CollectionProperty): Collection Property containing a type of possible Transferable Data e.g. obj.modifiers
+        td_type_key (str): Key for the Transferable Data type
+    Returns:
+        bool: Returns True if transfer_data_item is missing
+    """
+    if transfer_data_item.type == td_type_key and not data_list.get(
+        transfer_data_item["name"]
+    ):
+        return True
+
+
+def transfer_data_item_init(
+    scene: bpy.types.Scene,
+    obj: bpy.types.Object,
+    data_list: bpy.types.CollectionProperty,
+    td_type_key: str,
+):
+    """Initialize Transferable Data into a temporary collection property, used
+    to draw a display of new Transferable Data to the user before the merge process.
+
+    Args:
+        scene (bpy.types.Scene): Scene that contains the file's asset
+        obj (bpy.types.Object): Object containing possible Transferable Data
+        data_list (bpy.types.CollectionProperty): Collection Property containing a type of possible Transferable Data e.g. obj.modifiers
+        td_type_key (str): Key for the Transferable Data type
+    """
+    asset_pipe = scene.asset_pipeline
+    transfer_data = obj.transfer_data_ownership
+
+    for item in data_list:
+        # Only add a new ownership transfer_data_item if the item doesn't already have an owner
+        matches = check_transfer_data_entry(transfer_data, item.name, td_type_key)
+        if len(matches) == 0:
+            task_layer_owner, auto_surrender = get_transfer_data_owner(
+                asset_pipe,
+                td_type_key,
+            )
+            asset_pipe.add_temp_transfer_data(
+                name=item.name,
+                owner=task_layer_owner,
+                type=td_type_key,
+                obj=obj,
+                surrender=auto_surrender,
+            )
diff --git a/scripts-blender/addons/asset_pipeline/util.py b/scripts-blender/addons/asset_pipeline/merge/util.py
similarity index 58%
rename from scripts-blender/addons/asset_pipeline/util.py
rename to scripts-blender/addons/asset_pipeline/merge/util.py
index dbffa1bc..4aef3458 100644
--- a/scripts-blender/addons/asset_pipeline/util.py
+++ b/scripts-blender/addons/asset_pipeline/merge/util.py
@@ -1,188 +1,119 @@
-# ***** BEGIN GPL LICENSE BLOCK *****
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-#
-# ***** END GPL LICENCE BLOCK *****
-#
-# (c) 2021, Blender Foundation - Paul Golter
-
-from typing import List, Dict, Union, Any, Set, Optional, Tuple, Generator
-
-import bpy
-from bpy import types
-import addon_utils
-
-
-def redraw_ui() -> None:
-    """
-    Forces blender to redraw the UI.
- """ - for screen in bpy.data.screens: - for area in screen.areas: - area.tag_redraw() - - -def get_addon_prefs() -> bpy.types.AddonPreferences: - return bpy.context.preferences.addons[__package__].preferences - - -def is_file_saved() -> bool: - return bool(bpy.data.filepath) - - -def traverse_collection_tree( - collection: bpy.types.Collection, -) -> Generator[bpy.types.Collection, None, None]: - yield collection - for child in collection.children: - yield from traverse_collection_tree(child) - - -def del_collection(collection: bpy.types.Collection) -> None: - collection.user_clear() - bpy.data.collections.remove(collection) - -def unlink_collections_recursive( - parent_coll: bpy.types.Collection, - bad_colls: Set[bpy.types.Collection] - ): - for child_coll in parent_coll.children: - if child_coll in bad_colls: - parent_coll.children.unlink(child_coll) - child_coll.use_fake_user = False - else: - unlink_collections_recursive(child_coll, bad_colls) - -def is_addon_active(module_name, context=None): - """Returns whether an addon is loaded and enabled in the current workspace.""" - if not context: - context=bpy.context - is_enabled_in_prefs = addon_utils.check(module_name)[1] - if is_enabled_in_prefs and context.workspace.use_filter_by_owner: - is_enabled_in_workspace = module_name in context.workspace.owner_ids - return is_enabled_in_workspace - - return is_enabled_in_prefs - - -def reset_armature_pose( - rig: bpy.types.Object, - reset_transforms=True, - reset_properties=True, -): - bones = rig.pose.bones - for pb in bones: - if reset_transforms: - pb.location = ((0, 0, 0)) - pb.rotation_euler = ((0, 0, 0)) - pb.rotation_quaternion = ((1, 0, 0, 0)) - pb.scale = ((1, 1, 1)) - - if reset_properties and len(pb.keys()) > 0: - rna_properties = [prop.identifier for prop in pb.bl_rna.properties if prop.is_runtime] - - # Reset custom property values to their default value - for key in pb.keys(): - if key.startswith("$"): continue - if key in rna_properties: continue # Addon defined property. - - ui_data = None - try: - ui_data = pb.id_properties_ui(key) - if not ui_data: continue - ui_data = ui_data.as_dict() - if not 'default' in ui_data: continue - except TypeError: - # Some properties don't support UI data, and so don't have a default value. 
(like addon PropertyGroups) - pass - - if not ui_data: continue - - if type(pb[key]) not in (float, int, bool): continue - pb[key] = ui_data['default'] - - -ID_INFO = { - (types.WindowManager, 'WINDOWMANAGER', 'window_managers'), - (types.Scene, 'SCENE', 'scenes'), - (types.World, 'WORLD', 'worlds'), - (types.Collection, 'COLLECTION', 'collections'), - - (types.Armature, 'ARMATURE', 'armatures'), - (types.Mesh, 'MESH', 'meshes'), - (types.Camera, 'CAMERA', 'cameras'), - (types.Lattice, 'LATTICE', 'lattices'), - (types.Light, 'LIGHT', 'lights'), - (types.Speaker, 'SPEAKER', 'speakers'), - (types.Volume, 'VOLUME', 'volumes'), - (types.GreasePencil, 'GREASEPENCIL', 'grease_pencils'), - (types.Curve, 'CURVE', 'curves'), - (types.LightProbe, 'LIGHT_PROBE', 'lightprobes'), - - (types.MetaBall, 'METABALL', 'metaballs'), - (types.Object, 'OBJECT', 'objects'), - (types.Action, 'ACTION', 'actions'), - (types.Key, 'KEY', 'shape_keys'), - (types.Sound, 'SOUND', 'sounds'), - - (types.Material, 'MATERIAL', 'materials'), - (types.NodeTree, 'NODETREE', 'node_groups'), - (types.Image, 'IMAGE', 'images'), - - (types.Mask, 'MASK', 'masks'), - (types.FreestyleLineStyle, 'LINESTYLE', 'linestyles'), - (types.Library, 'LIBRARY', 'libraries'), - (types.VectorFont, 'FONT', 'fonts'), - (types.CacheFile, 'CACHE_FILE', 'cache_files'), - (types.PointCloud, 'POINT_CLOUD', 'pointclouds'), - (types.Curves, 'HAIR_CURVES', 'hair_curves'), - (types.Text, 'TEXT', 'texts'), - # (types.Simulation, 'SIMULATION', 'simulations'), - (types.ParticleSettings, 'PARTICLE', 'particles'), - (types.Palette, 'PALETTE', 'palettes'), - (types.PaintCurve, 'PAINT_CURVE', 'paint_curves'), - (types.MovieClip, 'MOVIE_CLIP', 'movieclips'), - - (types.WorkSpace, 'WORKSPACE', 'workspaces'), - (types.Screen, 'SCREEN', 'screens'), - (types.Brush, 'BRUSH', 'brushes'), - (types.Texture, 'TEXTURE', 'textures'), -} - -# Map datablock Python classes to their string representation. -ID_CLASS_TO_IDENTIFIER: Dict[type, Tuple[str, int]] = dict( - [(tup[0], (tup[1])) for tup in ID_INFO] -) - -# Map datablock Python classes to the name of their bpy.data container. -ID_CLASS_TO_STORAGE_NAME: Dict[type, str] = dict( - [(tup[0], (tup[2])) for tup in ID_INFO] -) - -def get_fundamental_id_type(datablock: bpy.types.ID) -> Any: - """Certain datablocks have very specific types. - This function should return their fundamental type, ie. parent class.""" - for id_type in ID_CLASS_TO_IDENTIFIER.keys(): - if isinstance(datablock, id_type): - return id_type - - -def get_storage_of_id(datablock: bpy.types.ID) -> 'bpy_prop_collection': - """Return the storage collection property of the datablock. - Eg. for an object, returns bpy.data.objects. - """ - - fundamental_type = get_fundamental_id_type(datablock) - return getattr(bpy.data, ID_CLASS_TO_STORAGE_NAME[fundamental_type]) +# ***** BEGIN GPL LICENSE BLOCK ***** +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# +# ***** END GPL LICENCE BLOCK ***** +# +# (c) 2021, Blender Foundation - Paul Golter + +from typing import Dict, Any, Tuple, Generator +from .. import constants +import bpy +from bpy import types + +ID_INFO = { + (types.WindowManager, 'WINDOWMANAGER', 'window_managers'), + (types.Scene, 'SCENE', 'scenes'), + (types.World, 'WORLD', 'worlds'), + (types.Collection, 'COLLECTION', 'collections'), + (types.Armature, 'ARMATURE', 'armatures'), + (types.Mesh, 'MESH', 'meshes'), + (types.Camera, 'CAMERA', 'cameras'), + (types.Lattice, 'LATTICE', 'lattices'), + (types.Light, 'LIGHT', 'lights'), + (types.Speaker, 'SPEAKER', 'speakers'), + (types.Volume, 'VOLUME', 'volumes'), + (types.GreasePencil, 'GREASEPENCIL', 'grease_pencils'), + (types.Curve, 'CURVE', 'curves'), + (types.LightProbe, 'LIGHT_PROBE', 'lightprobes'), + (types.MetaBall, 'METABALL', 'metaballs'), + (types.Object, 'OBJECT', 'objects'), + (types.Action, 'ACTION', 'actions'), + (types.Key, 'KEY', 'shape_keys'), + (types.Sound, 'SOUND', 'sounds'), + (types.Material, 'MATERIAL', 'materials'), + (types.NodeTree, 'NODETREE', 'node_groups'), + (types.Image, 'IMAGE', 'images'), + (types.Mask, 'MASK', 'masks'), + (types.FreestyleLineStyle, 'LINESTYLE', 'linestyles'), + (types.Library, 'LIBRARY', 'libraries'), + (types.VectorFont, 'FONT', 'fonts'), + (types.CacheFile, 'CACHE_FILE', 'cache_files'), + (types.PointCloud, 'POINT_CLOUD', 'pointclouds'), + (types.Curves, 'HAIR_CURVES', 'hair_curves'), + (types.Text, 'TEXT', 'texts'), + # (types.Simulation, 'SIMULATION', 'simulations'), + (types.ParticleSettings, 'PARTICLE', 'particles'), + (types.Palette, 'PALETTE', 'palettes'), + (types.PaintCurve, 'PAINT_CURVE', 'paint_curves'), + (types.MovieClip, 'MOVIE_CLIP', 'movieclips'), + (types.WorkSpace, 'WORKSPACE', 'workspaces'), + (types.Screen, 'SCREEN', 'screens'), + (types.Brush, 'BRUSH', 'brushes'), + (types.Texture, 'TEXTURE', 'textures'), +} + +# Map datablock Python classes to their string representation. +ID_CLASS_TO_IDENTIFIER: Dict[type, Tuple[str, int]] = dict( + [(tup[0], (tup[1])) for tup in ID_INFO] +) + +# Map datablock Python classes to the name of their bpy.data container. +ID_CLASS_TO_STORAGE_NAME: Dict[type, str] = dict( + [(tup[0], (tup[2])) for tup in ID_INFO] +) + + +def get_fundamental_id_type(datablock: bpy.types.ID) -> Any: + """Certain datablocks have very specific types. + This function should return their fundamental type, ie. parent class.""" + for id_type in ID_CLASS_TO_IDENTIFIER.keys(): + if isinstance(datablock, id_type): + return id_type + + +def get_storage_of_id(datablock: bpy.types.ID) -> 'bpy_prop_collection': + """Return the storage collection property of the datablock. + Eg. for an object, returns bpy.data.objects. 
+ """ + + fundamental_type = get_fundamental_id_type(datablock) + return getattr(bpy.data, ID_CLASS_TO_STORAGE_NAME[fundamental_type]) + + +def traverse_collection_tree( + collection: bpy.types.Collection, +) -> Generator[bpy.types.Collection, None, None]: + yield collection + for child in collection.children: + yield from traverse_collection_tree(child) + + +def data_type_from_transfer_data_key(obj: bpy.types.Object, td_type: str): + """Returns the data on an object that is referred to by the Transferable Data type""" + if td_type == constants.VERTEX_GROUP_KEY: + return obj.vertex_groups + if td_type == constants.MODIFIER_KEY: + return obj.modifiers + if td_type == constants.CONSTRAINT_KEY: + return obj.constraints + if td_type == constants.MATERIAL_SLOT_KEY: + return obj.material_slots + if td_type == constants.SHAPE_KEY_KEY: + return obj.data.shape_keys.key_blocks + if td_type == constants.ATTRIBUTE_KEY: + return obj.data.attributes + if td_type == constants.PARENT_KEY: + return obj.parent diff --git a/scripts-blender/addons/asset_pipeline/ops.py b/scripts-blender/addons/asset_pipeline/ops.py new file mode 100644 index 00000000..75f3450b --- /dev/null +++ b/scripts-blender/addons/asset_pipeline/ops.py @@ -0,0 +1,874 @@ +import bpy +import os +from pathlib import Path +from . import constants +from . import config +from .prefs import get_addon_prefs +from .merge.naming import task_layer_prefix_transfer_data_update +from .merge.task_layer import ( + draw_task_layer_selection, +) +from .merge.publish import get_next_published_file, find_all_published +from .images import save_images +from .sync import ( + sync_invoke, + sync_draw, + sync_execute_update_ownership, + sync_execute_prepare_sync, + sync_execute_pull, + sync_execute_push, +) + + +class ASSETPIPE_OT_create_new_asset(bpy.types.Operator): + bl_idname = "assetpipe.create_new_asset" + bl_label = "Create New Asset" + bl_description = """Create a new Asset Files and Folders at a given directory""" + + _name = "" + _prefix = "" + _json_path = None + _asset_pipe = None + + create_files: bpy.props.BoolProperty( + name="Create Files for Unselected Task Layers", default=True + ) + + @classmethod + def poll(cls, context: bpy.types.Context) -> bool: + asset_pipe = context.scene.asset_pipeline + if asset_pipe.new_file_mode == "KEEP": + if not asset_pipe.asset_collection: + cls.poll_message_set("Missing Top Level Collection") + return False + else: + if asset_pipe.name == "" or asset_pipe.dir == "": + cls.poll_message_set("Asset Name and Directory must be valid") + return False + return True + + def invoke(self, context: bpy.types.Context, event): + # Dynamically Create Task Layer Bools + self._asset_pipe = context.scene.asset_pipeline + + config.verify_json_data(Path(self._asset_pipe.task_layer_config_type)) + + all_task_layers = self._asset_pipe.all_task_layers + all_task_layers.clear() + + for task_layer_key in config.TASK_LAYER_TYPES: + if task_layer_key == "NONE": + continue + new_task_layer = all_task_layers.add() + new_task_layer.name = task_layer_key + return context.window_manager.invoke_props_dialog(self, width=400) + + def draw(self, context: bpy.types.Context): + box = self.layout.box() + all_task_layers = self._asset_pipe.all_task_layers + + box.label(text="Choose Which Task Layers will be local the current file") + for task_layer_bool in all_task_layers: + box.prop(task_layer_bool, "is_local", text=task_layer_bool.name) + self.layout.prop(self, "create_files") + + def _asset_name_set(self, context) -> None: + if 
self._asset_pipe.new_file_mode == "KEEP": + asset_col = self._asset_pipe.asset_collection + name = ( + asset_col.name + if constants.NAME_DELIMITER not in asset_col.name + else asset_col.name.split(constants.NAME_DELIMITER, 1)[1] + ) + prefix = ( + "" + if constants.NAME_DELIMITER not in asset_col.name + else asset_col.name.split(constants.NAME_DELIMITER, 1)[0] + ) + + else: + name = self._asset_pipe.name + prefix = self._asset_pipe.prefix + + # Set to easily access these properties + self._name = name + self._prefix = prefix + + # Store these in the asset pipeline props group + self._asset_pipe.name = name + self._asset_pipe.prefix = prefix + + def _asset_dir_get(self, context) -> str: + if self._asset_pipe.new_file_mode == "KEEP": + return Path(bpy.data.filepath).parent.__str__() + + else: + user_dir = bpy.path.abspath(self._asset_pipe.dir) + return os.path.join(user_dir, self._name) + + def _load_task_layers(self, context): + all_task_layers = self._asset_pipe.all_task_layers + local_tls = [] + for task_layer_bool in all_task_layers: + if task_layer_bool.is_local: + local_tls.append(task_layer_bool.name) + + if not any(task_layer_bool.is_local for task_layer_bool in all_task_layers): + self.report( + {'ERROR'}, + "Please select at least one task layer to be local to the current file", + ) + return {'CANCELLED'} + return local_tls + + def _create_publish_directories(self, context, asset_directory): + for publish_type in constants.PUBLISH_KEYS: + new_dir_path = os.path.join(asset_directory, publish_type) + if os.path.exists(new_dir_path): + self.report( + {'ERROR'}, + f"Directory for '{publish_type}' already exists", + ) + return {'CANCELLED'} + os.mkdir(new_dir_path) + + def _asset_collection_get(self, context, local_tls): + if self._asset_pipe.new_file_mode == "KEEP": + asset_col = self._asset_pipe.asset_collection + for col in asset_col.children: + col.asset_id_owner = local_tls[0] + else: + bpy.data.collections.new(self._name) + asset_col = bpy.data.collections.get(self._name) + context.scene.collection.children.link(asset_col) + self._asset_pipe.asset_collection = asset_col + return asset_col + + def _remove_collections(self, context): + # Remove Data From task layer Files except for asset_collection + for col in bpy.data.collections: + if not col == self._asset_pipe.asset_collection: + bpy.data.collections.remove(col) + for obj in bpy.data.objects: + bpy.data.objects.remove(obj) + + bpy.ops.outliner.orphans_purge( + do_local_ids=True, do_linked_ids=False, do_recursive=True + ) + + def _task_layer_collections_set(self, context, asset_col): + for task_layer_key in config.TASK_LAYER_TYPES: + bpy.data.collections.new(task_layer_key) + task_layer_col = bpy.data.collections.get(task_layer_key) + task_layer_col.asset_id_owner = task_layer_key + asset_col.children.link(task_layer_col) + + def _first_file_create(self, context, local_tls, asset_directory) -> str: + self._asset_pipe.is_asset_pipeline_file = True + + asset_col = self._asset_collection_get(context, local_tls) + self._task_layer_collections_set(context, asset_col) + + if bpy.data.filepath != "": + first_file_name = Path(bpy.data.filepath).name + else: + first_file_name = self._name + "." + local_tls[0].lower() + ".blend" + + first_file = os.path.join(asset_directory, first_file_name) + + self._asset_pipe.set_local_task_layers(local_tls) + + bpy.ops.wm.save_as_mainfile(filepath=first_file, copy=True) + return first_file + + def _task_layer_file_create(self, context, task_layer_key, asset_directory): + name = self._name + "." 
+ task_layer_key.lower() + ".blend"
+        self._asset_pipe.set_local_task_layers([task_layer_key])
+        self._task_layer_collections_set(context, self._asset_pipe.asset_collection)
+
+        task_layer_file = os.path.join(asset_directory, name)
+        bpy.ops.wm.save_as_mainfile(filepath=task_layer_file, copy=True)
+
+    def _publish_file_create(self, context, asset_directory):
+        publish_path = os.path.join(asset_directory, constants.ACTIVE_PUBLISH_KEY)
+        name = self._name + "." + "v001" + ".blend"
+        self._asset_pipe.asset_collection.asset_mark()
+        publish_file = os.path.join(publish_path, name)
+        bpy.ops.wm.save_as_mainfile(filepath=publish_file, copy=True)
+
+    def execute(self, context: bpy.types.Context):
+        self._asset_name_set(context)
+        asset_directory = self._asset_dir_get(context)
+        local_tls = self._load_task_layers(context)
+
+        if not os.path.exists(asset_directory):
+            os.mkdir(asset_directory)
+
+        self._create_publish_directories(context, asset_directory)
+
+        # Save Task Layer Config File
+        config.write_json_file(
+            asset_path=Path(asset_directory),
+            source_file_path=Path(self._asset_pipe.task_layer_config_type),
+        )
+
+        if self._asset_pipe.new_file_mode == "BLANK":
+            self._remove_collections(context)
+
+        starting_file = self._first_file_create(context, local_tls, asset_directory)
+
+        for task_layer_key in config.TASK_LAYER_TYPES:
+            if task_layer_key == "NONE" or task_layer_key in local_tls:
+                continue
+            self._remove_collections(context)
+            self._task_layer_file_create(context, task_layer_key, asset_directory)
+
+        # Create initial publish based on task layers.
+        self._remove_collections(context)
+        self._publish_file_create(context, asset_directory)
+        if starting_file:
+            bpy.ops.wm.open_mainfile(filepath=starting_file)
+        return {'FINISHED'}
+
+
+class ASSETPIPE_OT_update_ownership(bpy.types.Operator):
+    bl_idname = "assetpipe.update_ownership"
+    bl_label = "Update Ownership"
+    bl_description = """Update the Ownership of Objects and Transferable Data"""
+
+    _temp_transfer_data = None
+    _invalid_objs = []
+    _other_ids = []
+
+    expand: bpy.props.BoolProperty(
+        name="Show New Transferable Data",
+        default=False,
+        description="Show New Transferable Data",
+    )
+
+    def invoke(self, context: bpy.types.Context, event: bpy.types.Event):
+        sync_invoke(self, context)
+        return context.window_manager.invoke_props_dialog(self, width=400)
+
+    def draw(self, context: bpy.types.Context):
+        sync_draw(self, context)
+
+    def execute(self, context: bpy.types.Context):
+        sync_execute_update_ownership(self, context)
+        return {'FINISHED'}
+
+
+class ASSETPIPE_OT_sync_pull(bpy.types.Operator):
+    bl_idname = "assetpipe.sync_pull"
+    bl_label = "Pull from Publish"
+    bl_description = """Pull Task Layers from the published sync target"""
+
+    _temp_transfer_data = None
+    _invalid_objs = []
+    _other_ids = []
+    _temp_dir: Path = None
+    _current_file: Path = None
+    _task_layer_key: str = ""
+    _sync_target: Path = None
+
+    expand: bpy.props.BoolProperty(
+        name="Show New Transferable Data",
+        default=False,
+        description="Show New Transferable Data",
+    )
+    save: bpy.props.BoolProperty(
+        name="Save File & Images",
+        default=True,
+        description="Save Current File and Images before Push",
+    )
+
+    @classmethod
+    def poll(cls, context: bpy.types.Context) -> bool:
+        if context.mode == 'OBJECT':
+            return True
+        cls.poll_message_set("Pull is only available in Object Mode")
+        return False
+
+    def invoke(self, context: bpy.types.Context, event: bpy.types.Event):
+        sync_invoke(self, context)
+        return context.window_manager.invoke_props_dialog(self, width=400)
+    def draw(self, context: bpy.types.Context):
+        self.layout.prop(self, "save")
+        sync_draw(self, context)
+
+    def execute(self, context: bpy.types.Context):
+        if self.save:
+            save_images()
+            bpy.ops.wm.save_mainfile()
+        # Find current task Layer
+        sync_execute_update_ownership(self, context)
+        sync_execute_prepare_sync(self, context)
+        sync_execute_pull(self, context)
+        return {'FINISHED'}
+
+
+class ASSETPIPE_OT_sync_push(bpy.types.Operator):
+    bl_idname = "assetpipe.sync_push"
+    bl_label = "Push to Publish"
+    bl_description = """Push the current Task Layer to the published sync target"""
+
+    _temp_transfer_data = None
+    _invalid_objs = []
+    _other_ids = []
+    _temp_dir: Path = None
+    _current_file: Path = None
+    _task_layer_key: str = ""
+    _sync_target: Path = None
+
+    expand: bpy.props.BoolProperty(
+        name="Show New Transferable Data",
+        default=False,
+        description="Show New Transferable Data",
+    )
+    pull: bpy.props.BoolProperty(
+        name="Pull before Pushing",
+        default=True,
+        description="Pull in any new data from the Published file before Pushing",
+    )
+
+    save: bpy.props.BoolProperty(
+        name="Save File & Images",
+        default=True,
+        description="Save Current File and Images before Push",
+    )
+
+    @classmethod
+    def poll(cls, context: bpy.types.Context) -> bool:
+        if context.mode == 'OBJECT':
+            return True
+        cls.poll_message_set("Push is only available in Object Mode")
+        return False
+
+    def invoke(self, context: bpy.types.Context, event: bpy.types.Event):
+        sync_invoke(self, context)
+        return context.window_manager.invoke_props_dialog(self, width=400)
+
+    def draw(self, context: bpy.types.Context):
+        prefs = get_addon_prefs()
+        if prefs.is_advanced_mode:
+            self.layout.prop(self, "pull")
+        self.layout.prop(self, "save")
+        sync_draw(self, context)
+
+    def execute(self, context: bpy.types.Context):
+        if self.save:
+            save_images()
+            bpy.ops.wm.save_mainfile()
+
+        # Find current task Layer
+        sync_execute_update_ownership(self, context)
+        sync_execute_prepare_sync(self, context)
+
+        if self.pull:
+            sync_execute_pull(self, context)
+            bpy.ops.wm.save_mainfile(filepath=self._current_file.__str__())
+
+        sync_execute_push(self, context)
+        return {'FINISHED'}
+
+
+class ASSETPIPE_OT_publish_new_version(bpy.types.Operator):
+    bl_idname = "assetpipe.publish_new_version"
+    bl_label = "Publish New Version"
+    bl_description = """Create a new Published Version in the Publish Area"""
+
+    publish_types: bpy.props.EnumProperty(
+        name="Type",
+        items=constants.PUBLISH_TYPES,
+    )
+
+    def invoke(self, context: bpy.types.Context, event: bpy.types.Event):
+        return context.window_manager.invoke_props_dialog(self, width=400)
+
+    def draw(self, context: bpy.types.Context):
+        layout = self.layout
+        layout.prop(self, "publish_types")
+
+    def execute(self, context: bpy.types.Context):
+        if bpy.data.is_dirty:
+            self.report(
+                {'ERROR'},
+                "Please save the current file and/or Pull from last publish before creating new Publish",
+            )
+            return {'CANCELLED'}
+
+        current_file = Path(bpy.data.filepath)
+
+        if self.publish_types == constants.ACTIVE_PUBLISH_KEY:
+            context.scene.asset_pipeline.asset_collection.asset_mark()
+
+        push_targets = find_all_published(
+            Path(bpy.data.filepath), constants.ACTIVE_PUBLISH_KEY
+        )
+
+        for file in push_targets:
+            file_path = Path(file.__str__())
+
+            bpy.ops.wm.open_mainfile(filepath=file_path.__str__())
+
+            # Clear old Assets
+            context.scene.asset_pipeline.asset_collection.asset_clear()
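+            # Older active publishes lose their asset mark here, so that only
+            # the newest published version shows up in Blender's Asset Browser.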
+            bpy.ops.wm.save_as_mainfile(filepath=file_path.__str__())
+
+        # Re-open Current File to use as source for Publish
+        bpy.ops.wm.open_mainfile(filepath=current_file.__str__())
+        new_file_path = get_next_published_file(current_file, self.publish_types)
+
+        # Save Latest Publish File & Mark as Asset
+        context.scene.asset_pipeline.asset_collection.asset_mark()
+        bpy.ops.wm.save_as_mainfile(filepath=new_file_path.__str__(), copy=True)
+        context.scene.asset_pipeline.asset_collection.asset_clear()
+        return {'FINISHED'}
+
+
+class ASSETPIPE_OT_reset_ownership(bpy.types.Operator):
+    bl_idname = "assetpipe.reset_ownership"
+    bl_label = "Reset Ownership"
+    bl_description = (
+        """Reset the Object owner and Transferable Data on selected object(s)"""
+    )
+
+    @classmethod
+    def poll(cls, context: bpy.types.Context) -> bool:
+        if len(context.selected_objects) == 0:
+            cls.poll_message_set("No Objects Selected")
+            return False
+        return True
+
+    def execute(self, context: bpy.types.Context):
+        objs = context.selected_objects
+        for obj in objs:
+            obj.asset_id_owner = "NONE"
+            obj.transfer_data_ownership.clear()
+            self.report(
+                {'INFO'},
+                f"'{obj.name}' ownership data cleared ",
+            )
+        return {'FINISHED'}
+
+
+class ASSETPIPE_OT_update_local_task_layers(bpy.types.Operator):
+    bl_idname = "assetpipe.update_local_task_layers"
+    bl_label = "Update Local Task Layers"
+    bl_description = """Change the Task Layers that are Local to your file"""
+
+    @classmethod
+    def poll(cls, context: bpy.types.Context) -> bool:
+        asset_pipe = context.scene.asset_pipeline
+        new_local_tl = [tl.name for tl in asset_pipe.all_task_layers if tl.is_local]
+        local_tl = [tl.name for tl in asset_pipe.local_task_layers]
+        if new_local_tl == local_tl:
+            cls.poll_message_set("Local Task Layers already match current selection")
+            return False
+        return True
+
+    def invoke(self, context: bpy.types.Context, event: bpy.types.Event):
+        return context.window_manager.invoke_props_dialog(self, width=400)
+
+    def draw(self, context: bpy.types.Context):
+        layout = self.layout
+        layout.alert = True
+        layout.label(
+            text="Caution, this only affects the current file.",
+            icon="ERROR",
+        )
+        layout.label(
+            text="Two files owning the same task layer can break the merge process."
+        )
+
+    def execute(self, context: bpy.types.Context):
+        asset_pipe = context.scene.asset_pipeline
+        all_task_layers = asset_pipe.all_task_layers
+        local_tl = [tl.name for tl in all_task_layers if tl.is_local]
+        asset_pipe.set_local_task_layers(local_tl)
+        return {'FINISHED'}
+
+
+class ASSETPIPE_OT_revert_file(bpy.types.Operator):
+    bl_idname = "assetpipe.revert_file"
+    bl_label = "Revert File"
+    bl_description = (
+        """Revert File to Pre-Sync State.
+    Revert will not affect Published files"""
+    )
+
+    _temp_file = ""
+    _source_file = ""
+
+    def execute(self, context: bpy.types.Context):
+        asset_pipe = context.scene.asset_pipeline
+        self._temp_file = asset_pipe.temp_file
+        self._source_file = asset_pipe.source_file
+
+        if not Path(self._temp_file).exists():
+            self.report(
+                {'ERROR'},
+                "Revert failed; no file found",
+            )
+            return {'CANCELLED'}
+
+        bpy.ops.wm.open_mainfile(filepath=self._temp_file)
+        bpy.ops.wm.save_as_mainfile(filepath=self._source_file)
+        return {'FINISHED'}
+
+
+class ASSETPIPE_OT_fix_prefixes(bpy.types.Operator):
+    bl_idname = "assetpipe.fix_prefixes"
+    bl_label = "Fix Prefixes"
+    bl_description = """Fix Prefixes for Modifiers and Constraints so they match Transferable Data Owner on selected object(s)"""
+
+    _updated_prefix = False
+
+    @classmethod
+    def poll(cls, context: bpy.types.Context) -> bool:
+        if len(context.selected_objects) == 0:
+            cls.poll_message_set("No Objects Selected")
+            return False
+        return True
+
+    def execute(self, context: bpy.types.Context):
+        objs = context.selected_objects
+        for obj in objs:
+            transfer_data_items = obj.transfer_data_ownership
+            for transfer_data_item in transfer_data_items:
+                if task_layer_prefix_transfer_data_update(transfer_data_item):
+                    self.report(
+                        {'INFO'},
+                        f"Renamed {transfer_data_item.type} on '{obj.name}'",
+                    )
+                    self._updated_prefix = True
+
+        if not self._updated_prefix:
+            self.report(
+                {'WARNING'},
+                "No Prefixes found to update",
+            )
+
+        return {'FINISHED'}
+
+
+class ASSETPIPE_OT_update_surrendered_object(bpy.types.Operator):
+    bl_idname = "assetpipe.update_surrendered_object"
+    bl_label = "Claim Surrendered"
+    bl_description = """Claim Surrendered Object Owner"""
+
+    _obj = None
+    _old_owner = ""
+
+    def invoke(self, context: bpy.types.Context, event: bpy.types.Event):
+        self._obj = context.active_object
+        self._old_owner = self._obj.asset_id_owner
+        return context.window_manager.invoke_props_dialog(self, width=400)
+
+    def draw(self, context: bpy.types.Context):
+        layout = self.layout
+        row = layout.row()
+
+        draw_task_layer_selection(
+            layout=row,
+            data=self._obj,
+            show_all_task_layers=False,
+            show_local_task_layers=True,
+        )
+
+    def execute(self, context: bpy.types.Context):
+        if self._obj.asset_id_owner == self._old_owner:
+            self.report(
+                {'ERROR'},
+                "Object Owner was not updated",
+            )
+            return {'CANCELLED'}
+        self._obj.asset_id_surrender = False
+        return {'FINISHED'}
+
+
+class ASSETPIPE_OT_update_surrendered_transfer_data(bpy.types.Operator):
+    bl_idname = "assetpipe.update_surrendered_transfer_data"
+    bl_label = "Claim Surrendered"
+    bl_description = """Claim Surrendered Transferable Data Owner"""
+
+    transfer_data_item_name: bpy.props.StringProperty(
+        name="Transferable Data Item Name"
+    )
+
+    _surrendered_transfer_data = None
+    _old_owner = ""
+
+    def invoke(self, context: bpy.types.Context, event: bpy.types.Event):
+        obj = context.active_object
+        for transfer_data_item in obj.transfer_data_ownership:
+            if transfer_data_item.name == self.transfer_data_item_name:
+                self._surrendered_transfer_data = transfer_data_item
+                self._old_owner = self._surrendered_transfer_data.owner
+                print(f"Found Surrendered Item: {self._surrendered_transfer_data.name}")
+        return context.window_manager.invoke_props_dialog(self, width=400)
+
+    def draw(self, context: bpy.types.Context):
+        layout = self.layout
+        row = layout.row()
+
+        draw_task_layer_selection(
+            layout=row,
+            data=self._surrendered_transfer_data,
+            show_local_task_layers=True,
+        )
+
+
+class ASSETPIPE_OT_update_surrendered_transfer_data(bpy.types.Operator):
+    bl_idname = "assetpipe.update_surrendered_transfer_data"
+    bl_label = "Claim Surrendered"
+    bl_description = """Claim Surrendered Transferable Data Owner"""
+
+    transfer_data_item_name: bpy.props.StringProperty(
+        name="Transferable Data Item Name"
+    )
+
+    _surrendered_transfer_data = None
+    _old_owner = ""
+
+    def invoke(self, context: bpy.types.Context, event: bpy.types.Event):
+        obj = context.active_object
+        for transfer_data_item in obj.transfer_data_ownership:
+            if transfer_data_item.name == self.transfer_data_item_name:
+                self._surrendered_transfer_data = transfer_data_item
+                self._old_owner = self._surrendered_transfer_data.owner
+        print(f"Found Surrendered Item: {self._surrendered_transfer_data.name}")
+        return context.window_manager.invoke_props_dialog(self, width=400)
+
+    def draw(self, context: bpy.types.Context):
+        layout = self.layout
+        row = layout.row()
+
+        draw_task_layer_selection(
+            layout=row,
+            data=self._surrendered_transfer_data,
+            show_local_task_layers=True,
+        )
+
+    def execute(self, context: bpy.types.Context):
+        if self._surrendered_transfer_data.owner == self._old_owner:
+            self.report(
+                {'ERROR'},
+                "Transferable Data Owner was not updated",
+            )
+            return {'CANCELLED'}
+        self._surrendered_transfer_data.surrender = False
+        task_layer_prefix_transfer_data_update(self._surrendered_transfer_data)
+        return {'FINISHED'}
+
+
+class ASSETPIPE_OT_batch_ownership_change(bpy.types.Operator):
+    bl_idname = "assetpipe.batch_ownership_change"
+    bl_label = "Batch Set Ownership"
+    bl_description = """Re-Assign Ownership in a batch operation"""
+
+    name_filter: bpy.props.StringProperty(
+        name="Filter by Name",
+        description="Filter Object or Transferable Data items by name",
+        default="",
+    )
+
+    data_source: bpy.props.EnumProperty(
+        name="Objects",
+        items=(
+            ('SELECT', "Selected", "Update Selected Objects Only"),
+            ('ALL', "All", "Update All Objects"),
+        ),
+    )
+
+    data_type: bpy.props.EnumProperty(
+        name="Ownership Type",
+        items=(
+            (
+                'OBJECT',
+                "Object",
+                "Update Owner of Objects",
+            ),
+            (
+                'TRANSFER_DATA',
+                "Transferable Data",
+                "Update Owner of Transferable Data within Objects",
+            ),
+        ),
+    )
+
+    filter_owners: bpy.props.EnumProperty(
+        name="Owner Filter",
+        items=(
+            ('LOCAL', "If Locally Owned", "Only data that is owned locally"),
+            ('OWNED', "If Owned By Any", "Only data that already has an owner assigned"),
+            ('ALL', "No Filter", "Set Ownership on any data, even without an owner"),
+        ),
+    )
+
+    available_owners: bpy.props.EnumProperty(
+        name="Available Owners",
+        items=(
+            ('LOCAL', "Local Task Layers", "Only show local task layers as options"),
+            (
+                'ALL',
+                "All Task Layers",
+                "Show all task layers as options",
+            ),
+        ),
+    )
+
+    transfer_data_type: bpy.props.EnumProperty(
+        name="Type Filter", items=constants.TRANSFER_DATA_TYPES_ENUM_ITEMS
+    )
+    owner_selection: bpy.props.StringProperty(name="Set Owner")
+    surrender_selection: bpy.props.BoolProperty(
+        name="Set Surrender",
+        default=False,
+        description="Surrender can only be set on objects/Transferable Data that are locally owned. Ownership cannot be changed while surrendering",
+    )
+
+    def _filter_by_name(self, context, unfiltered_list: list):
+        if self.name_filter == "":
+            return unfiltered_list
+        return [item for item in unfiltered_list if self.name_filter in item.name]
+
+    def _get_transfer_data_to_update(self, context):
+        asset_pipe = context.scene.asset_pipeline
+        objs = self._get_objects(context)
+        transfer_data_items_to_update = []
+        if self.data_type == "TRANSFER_DATA":
+            for obj in objs:
+                filtered_transfer_data = self._filter_by_name(
+                    context, obj.transfer_data_ownership
+                )
+                for transfer_data_item in filtered_transfer_data:
+                    if self.transfer_data_type != "NONE":
+                        if transfer_data_item.type == self.transfer_data_type:
+                            transfer_data_items_to_update.append(transfer_data_item)
+                    else:
+                        transfer_data_items_to_update.append(transfer_data_item)
+        if self.filter_owners == "LOCAL":
+            return [
+                item
+                for item in transfer_data_items_to_update
+                if item.owner in asset_pipe.get_local_task_layers()
+            ]
+        return transfer_data_items_to_update
+
+    def _get_objects(self, context):
+        asset_objs = context.scene.asset_pipeline.asset_collection.all_objects
+        selected_asset_objs = [
+            obj for obj in asset_objs if obj in context.selected_objects
+        ]
+        return asset_objs if self.data_source == "ALL" else selected_asset_objs
+
+    def _get_filtered_objects(self, context):
+        asset_pipe = context.scene.asset_pipeline
+        objs = self._get_objects(context)
+        if self.filter_owners == "LOCAL" and self.data_type == "OBJECT":
+            return [
+                item
+                for item in self._filter_by_name(context, objs)
+                if item.asset_id_owner in asset_pipe.get_local_task_layers()
+            ]
+        if self.filter_owners == "OWNED" and self.data_type == "OBJECT":
+            return [
+                item
+                for item in self._filter_by_name(context, objs)
+                if item.asset_id_owner != "NONE"
+            ]
+        return self._filter_by_name(context, objs)
+
+    def invoke(self, context: bpy.types.Context, event: bpy.types.Event):
+        if not get_addon_prefs().is_advanced_mode:
+            self.filter_owners = 'LOCAL'
+            self.available_owners = 'LOCAL'
+        return context.window_manager.invoke_props_dialog(self, width=500)
+
+    def draw(self, context: bpy.types.Context):
+        prefs = get_addon_prefs()
+        advanced_mode = prefs.is_advanced_mode
+        owner_fields_enabled = True
+        if self.surrender_selection and self.data_type == "TRANSFER_DATA":
+            owner_fields_enabled = False
+            self.filter_owners = "LOCAL"
+
+        layout = self.layout
+        layout.use_property_split = True
+        layout.row(align=True).prop(self, "data_source", expand=True)
+
+        layout.prop(self, "data_type", expand=True)
+
+        filter_owner_row = layout.row()
+        filter_owner_row.enabled = owner_fields_enabled
+        if advanced_mode:
+            filter_owner_row.prop(self, "filter_owners")
+
+        if self.data_type == "TRANSFER_DATA":
+            layout.prop(self, "transfer_data_type")
+        layout.prop(self, "name_filter", text="Name Filter")
+
+        if self.available_owners == "LOCAL":
+            show_local = True
+            show_all_task_layers = False
+        else:
+            show_local = False
+            show_all_task_layers = True
+
+        layout.separator()
+
+        owner_row = layout.row(align=True)
+        owner_row.enabled = owner_fields_enabled
+
+        draw_task_layer_selection(
+            layout=owner_row,
+            data=self,
+            data_owner_name='owner_selection',
+            current_data_owner=self.owner_selection,
+            show_all_task_layers=show_all_task_layers,
+            show_local_task_layers=show_local,
+            text="Set To",
+        )
+
+        if advanced_mode:
+            owner_row.prop(self, "available_owners", text="")
+
+        if self.data_type == "TRANSFER_DATA":
+            layout.prop(self, "surrender_selection", expand=True)
+
+        objs = self._get_filtered_objects(context)
+        if self.data_type == 
"OBJECT": + data_type_name = "Object(s)" + length = len(objs) if objs else 0 + else: + transfer_data_items_to_update = self._get_transfer_data_to_update(context) + data_type_name = "Transferable Data Item(s)" + length = ( + len(transfer_data_items_to_update) + if transfer_data_items_to_update + else 0 + ) + bottom_label = layout.row() + bottom_label_split = bottom_label.split(factor=0.4) + bottom_label_split.row() + bottom_label_split.label(text=f"Change Ownership on {length} {data_type_name}") + + def execute(self, context: bpy.types.Context): + asset_pipe = context.scene.asset_pipeline + objs = self._get_filtered_objects(context) + + if self.data_type == "OBJECT": + for obj in objs: + obj.asset_id_owner = self.owner_selection + else: + transfer_data_items_to_update = self._get_transfer_data_to_update(context) + + for transfer_data_item_to_update in transfer_data_items_to_update: + if self.surrender_selection: + if ( + transfer_data_item_to_update.owner + in asset_pipe.get_local_task_layers() + ): + transfer_data_item_to_update.surrender = True + continue + transfer_data_item_to_update.owner = self.owner_selection + task_layer_prefix_transfer_data_update(transfer_data_item_to_update) + + return {'FINISHED'} + + +classes = ( + ASSETPIPE_OT_update_ownership, + ASSETPIPE_OT_sync_push, + ASSETPIPE_OT_sync_pull, + ASSETPIPE_OT_publish_new_version, + ASSETPIPE_OT_create_new_asset, + ASSETPIPE_OT_reset_ownership, + ASSETPIPE_OT_update_local_task_layers, + ASSETPIPE_OT_revert_file, + ASSETPIPE_OT_fix_prefixes, + ASSETPIPE_OT_update_surrendered_object, + ASSETPIPE_OT_update_surrendered_transfer_data, + ASSETPIPE_OT_batch_ownership_change, +) + + +def register(): + for i in classes: + bpy.utils.register_class(i) + + +def unregister(): + for i in classes: + bpy.utils.unregister_class(i) diff --git a/scripts-blender/addons/asset_pipeline/prefs.py b/scripts-blender/addons/asset_pipeline/prefs.py index 1ee2699e..d13789f0 100644 --- a/scripts-blender/addons/asset_pipeline/prefs.py +++ b/scripts-blender/addons/asset_pipeline/prefs.py @@ -1,94 +1,48 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
-# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter -import logging -from typing import Optional, Any, Set, Tuple, List -from pathlib import Path - -import bpy - - -logger = logging.getLogger(name="BSP") - - -class BSP_addon_preferences(bpy.types.AddonPreferences): - - bl_idname = __package__ - - def get_prod_task_layers_module_path(self) -> str: - if not self.prod_config_dir: - return "" - - return Path(self.prod_config_dir).joinpath("task_layers.py").as_posix() - - prod_config_dir: bpy.props.StringProperty( # type: ignore - name="Production Config Directory", - default="", - subtype="DIR_PATH", - ) - - prod_task_layers_module: bpy.props.StringProperty( # type: ignore - name="Production Task Layers Module", - default="", - get=get_prod_task_layers_module_path, - ) - - def is_prod_task_layers_module_path_valid(self) -> bool: - path = self.get_prod_task_layers_module_path() - if not path: - return False - - if not Path(path).exists(): - return False - return True - - def draw(self, context: bpy.types.Context) -> None: - layout: bpy.types.UILayout = self.layout - - # Production Settings. - box = layout.box() - box.label(text="Production", icon="FILEBROWSER") - - # Production Config Dir. - row = box.row(align=True) - row.prop(self, "prod_config_dir") - - # Production Task Layers Module. - icon = "NONE" - row = box.row(align=True) - - if not self.is_prod_task_layers_module_path_valid(): - icon = "ERROR" - - row.prop(self, "prod_task_layers_module", icon=icon) - - -# ----------------REGISTER--------------. - -classes = [BSP_addon_preferences] - - -def register() -> None: - for cls in classes: - bpy.utils.register_class(cls) - - -def unregister() -> None: - for cls in reversed(classes): - bpy.utils.unregister_class(cls) +import bpy +from . 
import constants
+
+
+def get_addon_prefs():
+    return bpy.context.preferences.addons[constants.ADDON_NAME].preferences
+
+
+class ASSET_PIPELINE_addon_preferences(bpy.types.AddonPreferences):
+    bl_idname = __package__
+
+    custom_task_layers_dir: bpy.props.StringProperty(  # type: ignore
+        name="Custom Task Layers",
+        description="Directory of additional Task Layer Presets to use as templates when creating new assets",
+        default="",
+        subtype="DIR_PATH",
+    )
+
+    save_images_path: bpy.props.StringProperty(  # type: ignore
+        name="Save Images Path",
+        description="Path to save unsaved images to; if left blank, images are saved in an 'images' folder relative to the asset",
+        default="",
+        subtype="DIR_PATH",
+    )
+
+    is_advanced_mode: bpy.props.BoolProperty(
+        name="Advanced Mode",
+        description="Show Advanced Options in Asset Pipeline Panels",
+        default=False,
+    )
+
+    def draw(self, context):
+        self.layout.prop(self, "custom_task_layers_dir")
+        self.layout.prop(self, "save_images_path")
+        self.layout.prop(self, "is_advanced_mode")
+
+
+classes = (ASSET_PIPELINE_addon_preferences,)
+
+
+def register():
+    for cls in classes:
+        bpy.utils.register_class(cls)
+
+
+def unregister():
+    for cls in reversed(classes):
+        bpy.utils.unregister_class(cls)
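A small, hypothetical sketch of what these preferences resolve to at runtime; get_addon_prefs() is the helper defined above, and the "asset_pipeline" key stands in for constants.ADDON_NAME:

```python
import bpy
from pathlib import Path

# Equivalent to get_addon_prefs(); the addon key is an assumption.
prefs = bpy.context.preferences.addons["asset_pipeline"].preferences

# Any *.json in the user's custom directory can then be offered as a
# task layer preset alongside the bundled ones (see props.py below).
custom_dir = Path(prefs.custom_task_layers_dir)
user_presets = sorted(custom_dir.glob("*.json")) if custom_dir.is_dir() else []
```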
diff --git a/scripts-blender/addons/asset_pipeline/prop_utils.py b/scripts-blender/addons/asset_pipeline/prop_utils.py
deleted file mode 100644
index 482e3540..00000000
--- a/scripts-blender/addons/asset_pipeline/prop_utils.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# ##### BEGIN GPL LICENSE BLOCK #####
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-#
-# ##### END GPL LICENSE BLOCK #####
-
-#
-
-from typing import List, Dict, Union, Any, Optional, Tuple, Generator
-
-import bpy
-
-
-def get_property_group_items(
-    property_group: bpy.types.PropertyGroup,
-) -> Generator[Tuple[str, bpy.types.Property], None, None]:
-
-    for i in range(len(property_group.bl_rna.properties.items())):
-        item = property_group.bl_rna.properties.items()[i]
-        iname, iprop = item
-
-        if iname in ["rna_type", "bl_rna", "name"]:
-            continue
-
-        yield item
diff --git a/scripts-blender/addons/asset_pipeline/props.py b/scripts-blender/addons/asset_pipeline/props.py
index 60f24870..1a5a1ffa 100644
--- a/scripts-blender/addons/asset_pipeline/props.py
+++ b/scripts-blender/addons/asset_pipeline/props.py
@@ -1,445 +1,174 @@
-# ***** BEGIN GPL LICENSE BLOCK *****
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-#
-# ***** END GPL LICENCE BLOCK *****
-#
-# (c) 2021, Blender Foundation - Paul Golter
-import os
-from typing import Optional, Dict, Any, List, Tuple
-
-from pathlib import Path
-
 import bpy
+from typing import List
+from . import constants
+from .config import get_task_layer_presets_path
+from pathlib import Path
+from .prefs import get_addon_prefs
 
-try:
-    from .util import is_addon_active
-    import blender_kitsu.cache
-    kitsu_available = True
-except:
-    kitsu_available = False
-from . import constants, builder, asset_files, lib_util
-from .builder.metadata import MetadataAsset, MetadataTaskLayer
-from .asset_files import AssetPublish
-
-import logging
-
-logger = logging.getLogger("BSP")
+""" NOTE: Items in these property groups should be generated by a function that finds the
+available task layers from the task_layer.json file that needs to be created.
+"""
 
-class FailedToGetAssetPublish(Exception):
-    pass
+def get_task_layer_presets(self, context):
+    prefs = get_addon_prefs()
+    user_tls = Path(prefs.custom_task_layers_dir)
+
+    presets_dir = get_task_layer_presets_path()
+    items = []
+
+    for file in presets_dir.glob('*.json'):
+        items.append((file.__str__(), file.name.replace(".json", ""), file.name))
+    if user_tls.exists() and user_tls.is_dir():
+        for file in user_tls.glob('*.json'):
+            items.append((file.__str__(), file.name.replace(".json", ""), file.name))
+    return items
 
-class BSP_ASSET_asset_collection(bpy.types.PropertyGroup):
-    """
-    Collection Properties for Blender Studio Asset Collections
+class AssetTransferData(bpy.types.PropertyGroup):
+    """Properties to track transferable data on an object"""
 
-    # Global is asset identifier.
-    is_asset: bpy.props.BoolProperty(  # type: ignore
-        name="Is Asset",
+    owner: bpy.props.StringProperty(name="Owner", default="NONE")
+    type: bpy.props.EnumProperty(
+        name="Transferable Data Type",
+        items=constants.TRANSFER_DATA_TYPES_ENUM_ITEMS,
+    )
+    surrender: bpy.props.BoolProperty(name="Surrender Ownership", default=False)
+
+
+class AssetTransferDataTemp(bpy.types.PropertyGroup):
+    """Class used when finding new ownership data so it can be drawn
+    with the same method as the existing ownership data from ASSET_TRANSFER_DATA"""
+
+    owner: bpy.props.StringProperty(name="Owner", default="NONE")
+    type: bpy.props.EnumProperty(
+        name="Transferable Data Type",
+        items=constants.TRANSFER_DATA_TYPES_ENUM_ITEMS,
+    )
+    surrender: bpy.props.BoolProperty(name="Surrender Ownership", default=False)
+    obj: bpy.props.PointerProperty(type=bpy.types.Object)
+
+
+class TaskLayerSettings(bpy.types.PropertyGroup):
+    is_local: bpy.props.BoolProperty(name="Task Layer is Local", default=False)
+
+
+class AssetPipeline(bpy.types.PropertyGroup):
+    """Properties to manage the status of asset pipeline files"""
+
+    is_asset_pipeline_file: bpy.props.BoolProperty(
+        name="Asset Pipeline File",
+        description="Asset Pipeline Files are used in the asset pipeline; if the file is not an asset pipeline file, the user will be prompted to create a new asset",
         default=False,
-        description="Controls if this Collection is recognized as an official Asset",
+    )
+    is_depreciated: bpy.props.BoolProperty(
+        name="Deprecated",
+        description="Deprecated files do not receive any updates when syncing from a task layer",
+        default=False,
+    )
+    asset_collection: bpy.props.PointerProperty(
+        type=bpy.types.Collection,
+        name="Asset",
+        description="Top Level Collection of the Asset; all other collections of the asset will be children of this collection",
     )
 
-    # Asset identification properties.
-    # We use entity_ prefix as blender uses .id as built in attribute already, which
-    # might be confusing.
-    entity_parent_id: bpy.props.StringProperty(name="Asset Type ID")  # type: ignore
-    entity_parent_name: bpy.props.StringProperty(name="Asset Type")  # type: ignore
-    entity_name: bpy.props.StringProperty(name="Asset Name")  # type: ignore
-    entity_id: bpy.props.StringProperty(name="Asset ID")  # type: ignore
-    project_id: bpy.props.StringProperty(name="Project ID")  # type: ignore
+    temp_transfer_data: bpy.props.CollectionProperty(type=AssetTransferDataTemp)
 
-    # For Asset Publish.
-    is_publish: bpy.props.BoolProperty(  # type: ignore
-        name="Is Publish",
-        description="Controls if this Collection is an Asset Publish to distinguish it from a 'working' Collection",
-    )
-    version: bpy.props.StringProperty(name="Asset Version")  # type: ignore
-    publish_path: bpy.props.StringProperty(name="Asset Publish")  # type: ignore
+    def add_temp_transfer_data(self, name, owner, type, obj, surrender):
+        new_transfer_data = self.temp_transfer_data
+        transfer_data_item = new_transfer_data.add()
+        transfer_data_item.name = name
+        transfer_data_item.owner = owner
+        transfer_data_item.type = type
+        transfer_data_item.obj = obj
+        transfer_data_item.surrender = surrender
 
-    # Other properties, useful for external scripts.
-    rig: bpy.props.PointerProperty(type=bpy.types.Armature, name="Rig")  # type: ignore
+    ## NEW FILE
 
-    # Metadata for Asset Builder.
-    transfer_suffix: bpy.props.StringProperty(name="Transfer Suffix")  # type: ignore
-
-    # Display properties that can't be set by User in UI.
- displ_entity_name: bpy.props.StringProperty(name="Asset Name", get=lambda self: self.entity_name) # type: ignore - displ_entity_id: bpy.props.StringProperty(name="Asset ID", get=lambda self: self.entity_id) # type: ignore - - displ_is_publish: bpy.props.BoolProperty(name="Is Publish", get=lambda self: self.is_publish) # type: ignore - displ_version: bpy.props.StringProperty(name="Asset Version", get=lambda self: self.version) # type: ignore - displ_publish_path: bpy.props.StringProperty(name="Asset Path", get=lambda self: self.publish_path) # type: ignore - - def clear(self) -> None: - """ - Gets called when uninitializing an Asset Collection for example. - """ - - self.is_asset = False - - self.entity_parent_id = "" - self.entity_parent_name = "" - self.entity_name = "" - self.entity_id = "" - self.project_id = "" - - self.is_publish = False - self.version = "" - - self.rig = None - - self.transfer_suffix = "" - - def gen_metadata_class(self) -> MetadataAsset: - # These keys represent all mandatory arguments for the data class metadata.MetaAsset - # The idea is, to be able to construct a MetaAsst from this dict. - # Note: This function will most likely only be called when creating the first asset version - # to get some data to start with. - keys = [ - "entity_name", - "entity_id", - "entity_parent_id", - "entity_parent_name", - "project_id", - "version", - ] - d = {} - for key in keys: - - # MetaAsset tries to mirror Kitsu data structure as much as possible. - # Remove entity_ prefix. - if key.startswith("entity_"): - d[key.replace("entity_", "")] = getattr(self, key) - else: - d[key] = getattr(self, key) - - # Set status to default asset status. - d["status"] = constants.DEFAULT_ASSET_STATUS - return MetadataAsset.from_dict(d) - - def update_props_by_asset_publish(self, asset_publish: AssetPublish) -> None: - self.is_publish = True - self.version = asset_publish.get_version() - self.status = asset_publish.metadata.meta_asset.status.name - - def get_asset_publish(self) -> AssetPublish: - if not self.is_publish: - raise FailedToGetAssetPublish( - f"The collection {self.id_data.name} is not an asset publish" - ) - - # Will throw error if item is not lib. - lib = lib_util.get_item_lib(self.id_data) - - return AssetPublish(Path(os.path.abspath(bpy.path.abspath(lib.filepath)))) - - -class BSP_task_layer(bpy.types.PropertyGroup): - - """ - Property Group that can represent a minimal TaskLayer. - Note: It misses properties compared to MetadataTaskLayer class, contains only the ones - needed for internal use. Also contains 'use' attribute to avoid creating a new property group - to mimic more the TaskLayer TaskLayerConfig setup. - Is used in BSP_ASSET_scene_properties as collection property. - """ - - task_layer_id: bpy.props.StringProperty( # type: ignore - name="Task Layer ID", - description="Unique Key that is used to query a Task Layer in TaskLayerAssembly.get_task_layer_config()", - ) - task_layer_name: bpy.props.StringProperty( # type: ignore - name="Task Layer Name", - ) - - is_locked: bpy.props.BoolProperty( # type: ignore - name="Is Locked", - ) - - use: bpy.props.BoolProperty( # type: ignore - name="Use", - ) - - def as_dict(self) -> Dict[str, Any]: - return { - "use": self.use, - "is_locked": self.is_locked, - "task_layer_id": self.task_layer_id, - "task_layer_name": self.task_layer_name, - } - - -class BSP_asset_file(bpy.types.PropertyGroup): - - """ - Property Group that can represent a minimal version of an Asset File. 
- """ - - path_str: bpy.props.StringProperty( # type: ignore - name="Path", - ) - task_layers: bpy.props.CollectionProperty(type=BSP_task_layer) # type: ignore - - status: bpy.props.StringProperty(name="Status") # type: ignore - - returncode_publish: bpy.props.IntProperty( - name="Return Code", - description=( - "This code represents the return code of the subprocess that gets " - "started when publishing. Is used to display a warning in UI if something went wrong" + new_file_mode: bpy.props.EnumProperty( + name="New File Mode", + items=( + ('KEEP', "Current File", "Setup the Existing File/Directory as an Asset"), + ('BLANK', "Blank File", "Create a New Blank Asset in a New Directory"), ), - default=-1, ) - @property - def path(self) -> Optional[Path]: - if not self.path_str: - return None - return Path(self.path_str) - - def as_dict(self) -> Dict[str, Any]: - return {"path": self.path} - - def add_task_layer_from_metaclass(self, metadata_task_layer: MetadataTaskLayer): - item = self.task_layers.add() - # TODO: could be made more procedural. - item.task_layer_id = metadata_task_layer.id - item.task_layer_name = metadata_task_layer.name - item.is_locked = metadata_task_layer.is_locked - - def update_props_by_asset_publish(self, asset_publish: AssetPublish) -> None: - self.name = asset_publish.path.name - self.path_str = asset_publish.path.as_posix() - self.status = asset_publish.metadata.meta_asset.status.name - - # Clear task layers. - self.task_layers.clear() - - # Add task layers. - for tl in asset_publish.metadata.meta_task_layers: - self.add_task_layer_from_metaclass(tl) - - -class BSP_ASSET_imported_asset_collection(bpy.types.PropertyGroup): - - # XXX: This is not a pointer due to a bug where disabled/excluded collections - # that have a pointer from the scene cause them to be partially evaluated. - collection_name: bpy.props.StringProperty(name="Collection Name", description="Name of the imported asset collection") # type: ignore - @property - def collection(self): - return bpy.data.collections.get(self.collection_name) - @collection.setter - def collection(self, value): - self.collection_name = value.name - - asset_publishes: bpy.props.CollectionProperty(type=BSP_asset_file) # type: ignore - - def get_asset_publishes_as_bl_enum( - self, context: bpy.types.Context - ) -> List[Tuple[str, str, str]]: - return [ - (p.name, asset_files.get_file_version(p.path), "") - for p in self.asset_publishes - ] - - target_publish: bpy.props.EnumProperty(items=get_asset_publishes_as_bl_enum) # type: ignore - - -class BSP_undo_context(bpy.types.PropertyGroup): - - """ """ - - files_created: bpy.props.CollectionProperty(type=BSP_asset_file) # type: ignore - - def add_step_asset_publish_create(self, asset_publish: AssetPublish) -> None: - item = self.files_created.add() - item.name = asset_publish.path.name - item.path_str = asset_publish.path.as_posix() - - def clear(self): - self.files_created.clear() - - -class BSP_task_layer_lock_plan(bpy.types.PropertyGroup): - - """ - Property Group that can represent a minimal version of a TaskLayerLockPlan. 
- """ - - path_str: bpy.props.StringProperty( # type: ignore - name="Path", + dir: bpy.props.StringProperty( + name="Directory", + description="Target Path for new asset files", + subtype="DIR_PATH", ) - task_layers: bpy.props.CollectionProperty(type=BSP_task_layer) # type: ignore + name: bpy.props.StringProperty(name="Name", description="Name for new Asset") - @property - def path(self) -> Optional[Path]: - if not self.path_str: - return None - return Path(self.path_str) - - -class BSP_ASSET_scene_properties(bpy.types.PropertyGroup): - """Scene Properties for Asset Pipeline""" - - def update_asset_collection(self, context): - """There should only be one asset collection per file, so before - initializing another asset collection, wipe any asset collection - data in the entire file. - """ - - for coll in bpy.data.collections: - # Clear Asset Collection attributes. - coll.bsp_asset.clear() - - if not self.asset_collection: - return - - bsp_asset = self.asset_collection.bsp_asset - bsp_asset.entity_name = self.asset_collection.name.split("-")[-1].title() - - # Unitialize Asset Context. - builder.ASSET_CONTEXT = None - - if kitsu_available and is_addon_active("blender_kitsu", context): - # Get active asset. - asset = blender_kitsu.cache.asset_active_get() - asset_type = blender_kitsu.cache.asset_type_active_get() - - if asset: - # Set Asset Collection attributes. - bsp_asset.is_asset = True - bsp_asset.entity_id = asset.id - bsp_asset.entity_name = asset.name - bsp_asset.project_id = asset.project_id - bsp_asset.entity_parent_id = asset_type.id - bsp_asset.entity_parent_name = asset_type.name - - logger.info( - f"Initiated Collection: {self.asset_collection.name} as Kitsu Asset: {asset.name}" - ) - - logger.info(f"Initiated Collection: {self.asset_collection.name}") - - # Init Asset Context. 
- if bpy.ops.bsp_asset.create_asset_context.poll(): - bpy.ops.bsp_asset.create_asset_context() - - # asset_collection: bpy.props.PointerProperty( - # type=bpy.types.Collection, - # name="Asset Collection", - # update=update_asset_collection, - # ) - @property - def asset_collection(self): - return bpy.data.collections.get(self.asset_collection_name) - - @asset_collection.setter - def asset_collection(self, value): - self.asset_collection_name = value.name - - asset_collection_name: bpy.props.StringProperty(name="Asset Collection", update=update_asset_collection) - - is_publish_in_progress: bpy.props.BoolProperty() # type: ignore - are_task_layers_pushed: bpy.props.BoolProperty() # type: ignore - - task_layers_push: bpy.props.CollectionProperty(type=BSP_task_layer) # type: ignore - task_layers_pull: bpy.props.CollectionProperty(type=BSP_task_layer) # type: ignore - - def task_layers(self, context): - return ( - [(tl.name, tl.name, tl.name) for tl in builder.PROD_CONTEXT.task_layers] - if builder.PROD_CONTEXT - else [] - ) - - asset_publishes: bpy.props.CollectionProperty(type=BSP_asset_file) # type: ignore - - task_layers_push_index: bpy.props.IntProperty(name="Task Layers Owned Index", min=0) # type: ignore - task_layers_pull_index: bpy.props.IntProperty(name="Task Layers Pull Index", min=0) # type: ignore - asset_publishes_index: bpy.props.IntProperty(name="Asset Publishes Index", min=0) # type: ignore - task_layer_lock_plans_index: bpy.props.IntProperty(name="Task Layer Lock Plans Index", min=0) # type: ignore - - undo_context: bpy.props.PointerProperty(type=BSP_undo_context) # type: ignore - - task_layer_lock_plans: bpy.props.CollectionProperty(type=BSP_task_layer_lock_plan) # type: ignore - - imported_asset_collections: bpy.props.CollectionProperty(type=BSP_ASSET_imported_asset_collection) # type: ignore - imported_asset_collections_index: bpy.props.IntProperty(min=0) # type: ignore - - -def get_asset_publish_source_path(context: bpy.types.Context) -> str: - if not builder.ASSET_CONTEXT: - return "" - - if not builder.ASSET_CONTEXT.asset_publishes: - return "" - - return builder.ASSET_CONTEXT.asset_publishes[-1].path.name - - -class BSP_ASSET_tmp_properties(bpy.types.PropertyGroup): - - # Asset publish source - asset_publish_source_path: bpy.props.StringProperty( # type: ignore - name="Source", get=get_asset_publish_source_path + prefix: bpy.props.StringProperty( + name="Prefix", description="Prefix for new Asset", default="" ) - new_asset_version: bpy.props.BoolProperty( # type: ignore - name="New Version", - description="Controls if new Version should be created when starting the publish", + task_layer_config_type: bpy.props.EnumProperty( + name="Task Layer Preset", + items=get_task_layer_presets, + ) + + temp_file: bpy.props.StringProperty(name="Pre-Sync Backup") + source_file: bpy.props.StringProperty(name="File that started Sync") + sync_error: bpy.props.BoolProperty(name="Sync Error", default=False) + + all_task_layers: bpy.props.CollectionProperty(type=TaskLayerSettings) + local_task_layers: bpy.props.CollectionProperty(type=TaskLayerSettings) + + def set_local_task_layers(self, task_layer_keys: List[str]): + # Update Local Task Layers for New File + self.local_task_layers.clear() + for task_layer in self.all_task_layers: + if task_layer.name in task_layer_keys: + new_local_task_layer = self.local_task_layers.add() + new_local_task_layer.name = task_layer.name + + def get_local_task_layers(self): + return [task_layer.name for task_layer in self.local_task_layers] + + # UI BOOLS: 
used to show/hide Transferable Data elements.
+    # The names are also hard coded in constants.py under TRANSFER_DATA_TYPES;
+    # any changes will need to be reflected both here and in that enum.
+    group_vertex_ui_bool: bpy.props.BoolProperty(
+        name="Show/Hide Vertex Groups", default=False
+    )
+    modifier_ui_bool: bpy.props.BoolProperty(name="Show/Hide Modifiers", default=False)
+    constraint_ui_bool: bpy.props.BoolProperty(
+        name="Show/Hide Constraints", default=False
+    )
+    material_ui_bool: bpy.props.BoolProperty(name="Show/Hide Materials", default=False)
+    shapekey_ui_bool: bpy.props.BoolProperty(name="Show/Hide Shape Keys", default=False)
+    attribute_ui_bool: bpy.props.BoolProperty(
+        name="Show/Hide Attributes", default=False
+    )
+    file_parent_ui_bool: bpy.props.BoolProperty(name="Show/Hide Parent", default=False)
+
+
+classes = (
+    AssetTransferData,
+    AssetTransferDataTemp,
+    TaskLayerSettings,
+    AssetPipeline,
+)
+
+
+def register():
+    for i in classes:
+        bpy.utils.register_class(i)
+    bpy.types.Object.transfer_data_ownership = bpy.props.CollectionProperty(
+        type=AssetTransferData
+    )
+    bpy.types.Scene.asset_pipeline = bpy.props.PointerProperty(type=AssetPipeline)
+    bpy.types.ID.asset_id_owner = bpy.props.StringProperty(name="Owner", default="NONE")
+    bpy.types.ID.asset_id_surrender = bpy.props.BoolProperty(
+        name="Surrender Ownership", default=False
+    )
 
 
-# ----------------REGISTER--------------.
-
-classes = [
-    BSP_task_layer,
-    BSP_asset_file,
-    BSP_undo_context,
-    BSP_ASSET_asset_collection,
-    BSP_task_layer_lock_plan,
-    BSP_ASSET_imported_asset_collection,
-    BSP_ASSET_scene_properties,
-    BSP_ASSET_tmp_properties,
-]
-
-
-def register() -> None:
-    for cls in classes:
-        bpy.utils.register_class(cls)
-
-    # Collection Asset Pipeline Properties.
-    bpy.types.Collection.bsp_asset = bpy.props.PointerProperty(
-        type=BSP_ASSET_asset_collection
-    )
-
-    # Scene Asset Pipeline Properties.
-    bpy.types.Scene.bsp_asset = bpy.props.PointerProperty(
-        type=BSP_ASSET_scene_properties
-    )
-
-    # Window Manager Properties.
-    bpy.types.WindowManager.bsp_asset = bpy.props.PointerProperty(
-        type=BSP_ASSET_tmp_properties
-    )
-
-
-def unregister() -> None:
-    for cls in reversed(classes):
-        bpy.utils.unregister_class(cls)
+def unregister():
+    for i in classes:
+        bpy.utils.unregister_class(i)
+    del bpy.types.Object.transfer_data_ownership
+    del bpy.types.Scene.asset_pipeline
+    del bpy.types.ID.asset_id_owner
+    del bpy.types.ID.asset_id_surrender
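To make the data model registered above concrete, a hypothetical console snippet that tags ownership the way the merge code expects; the object name and task layer names are assumptions:

```python
import bpy

obj = bpy.data.objects["GEO-cube"]  # hypothetical object in the asset collection

# Object-level ownership lives on bpy.types.ID.asset_id_owner.
obj.asset_id_owner = "Modeling"

# Per-datablock details live in the AssetTransferData collection.
item = obj.transfer_data_ownership.add()
item.name = "UVMap"
item.owner = "Shading"
item.type = "ATTRIBUTE"  # assumed member of TRANSFER_DATA_TYPES_ENUM_ITEMS
```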
diff --git a/scripts-blender/addons/asset_pipeline/sync.py b/scripts-blender/addons/asset_pipeline/sync.py
new file mode 100644
index 00000000..c9c626e2
--- /dev/null
+++ b/scripts-blender/addons/asset_pipeline/sync.py
@@ -0,0 +1,187 @@
+import bpy
+from pathlib import Path
+from .merge.publish import (
+    find_sync_target,
+    find_all_published,
+)
+from .merge.shared_ids import init_shared_ids
+from .merge.core import (
+    ownership_get,
+    ownership_set,
+    get_invalid_objects,
+    merge_task_layer,
+)
+from .merge.transfer_data.transfer_ui import draw_transfer_data
+from .merge.shared_ids import get_shared_id_icon
+from . import constants
+from . import config
+from .merge.task_layer import draw_task_layer_selection
+
+
+def sync_poll(cls, context):
+    if any([img.is_dirty for img in bpy.data.images]):
+        cls.poll_message_set("Please save unsaved Images")
+        return False
+    if bpy.data.is_dirty:
+        cls.poll_message_set("Please save current .blend file")
+        return False
+    return True
+
+
+def sync_invoke(self, context):
+    self._temp_transfer_data = context.scene.asset_pipeline.temp_transfer_data
+    self._temp_transfer_data.clear()
+    self._invalid_objs.clear()
+
+    asset_pipe = context.scene.asset_pipeline
+    local_col = asset_pipe.asset_collection
+    if not local_col:
+        self.report({'ERROR'}, "Top level collection could not be found")
+        return {'CANCELLED'}
+    # TODO Check if file contains a valid task layer
+    # task_layer_key = context.scene.asset_pipeline.task_layer_name
+    # if task_layer_key == "NONE":
+    #     self.report({'ERROR'}, "Current File Name doesn't contain valid task layer")
+    #     return {'CANCELLED'}
+
+    ownership_get(local_col, context.scene)
+
+    self._invalid_objs = get_invalid_objects(asset_pipe, local_col)
+    self._shared_ids = init_shared_ids(context.scene)
+
+
+def sync_draw(self, context):
+    layout = self.layout
+    row = layout.row()
+
+    if len(self._invalid_objs) != 0:
+        box = layout.box()
+        box.alert = True
+        box.label(text="Sync will clear Invalid Objects:", icon="ERROR")
+        for obj in self._invalid_objs:
+            box.label(text=obj.name, icon="OBJECT_DATA")
+
+    if len(self._shared_ids) != 0:
+        box = layout.box()
+        box.label(text="New 'Shared IDs' found")
+        for id in self._shared_ids:
+            row = box.row()
+            row.label(text=id.name, icon=get_shared_id_icon(id))
+            draw_task_layer_selection(
+                layout=row,
+                data=id,
+            )
+
+    if len(self._temp_transfer_data) == 0:
+        layout.label(text="No new local Transferable Data found")
+    else:
+        layout.label(text="New local Transferable Data will be Pushed to Publish")
+        row = layout.row()
+        row.prop(self, "expand", text="", icon="COLLAPSEMENU", toggle=False)
+        row.label(text="Show New Transferable Data")
+    objs = [transfer_data_item.obj for transfer_data_item in self._temp_transfer_data]
+
+    if not self.expand:
+        return
+
+    for obj in set(objs):
+        obj_ownership = [
+            transfer_data_item
+            for transfer_data_item in self._temp_transfer_data
+            if transfer_data_item.obj == obj
+        ]
+        box = layout.box()
+        box.label(text=obj.name, icon="OBJECT_DATA")
+        draw_transfer_data(obj_ownership, box)
+
+
+def sync_execute_update_ownership(self, context):
+    temp_transfer_data = context.scene.asset_pipeline.temp_transfer_data
+    ownership_set(temp_transfer_data)
+
+
+def sync_execute_prepare_sync(self, context):
+    asset_pipe = context.scene.asset_pipeline
+    self._current_file = Path(bpy.data.filepath)
+    self._temp_dir = Path(bpy.app.tempdir).parent
+    self._task_layer_keys = asset_pipe.get_local_task_layers()
+    # TODO Check if file contains a valid task layer
+    # if self._task_layer_key == "NONE":
+    #     self.report({'ERROR'}, "Current File Name doesn't contain valid task layer")
+    #     return {'CANCELLED'}
+
+    self._sync_target = find_sync_target(self._current_file)
+    if not self._sync_target.exists():
+        self.report({'ERROR'}, "Sync Target could not be determined")
+        return {'CANCELLED'}
+
+    for obj in self._invalid_objs:
+        bpy.data.objects.remove(obj)
+
+
+def create_temp_file_backup(self, context):
+    temp_file = self._temp_dir.joinpath(
+        self._current_file.name.replace(".blend", "") + "_Asset_Pipe_Backup.blend"
+    )
+    context.scene.asset_pipeline.temp_file = temp_file.__str__()
+    return temp_file.__str__()
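For context (not part of the new module), these helpers are driven by the operators registered in ops.py earlier in this patch; a typical scripted round trip might look like:

```python
import bpy

bpy.ops.assetpipe.sync_pull('INVOKE_DEFAULT')   # Pull from Publish
bpy.ops.assetpipe.sync_push('INVOKE_DEFAULT')   # Push to Publish

# If a merge fails, sync_error is set and the UI offers a revert
# to the pre-sync backup written by create_temp_file_backup() above:
bpy.ops.assetpipe.revert_file()
```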
+
+
+def update_temp_file_paths(self, context, temp_file_path):
+    asset_pipe = context.scene.asset_pipeline
+    asset_pipe.temp_file = temp_file_path
+    asset_pipe.source_file = self._current_file.__str__()
+
+
+def sync_execute_pull(self, context):
+    temp_file_path = create_temp_file_backup(self, context)
+    update_temp_file_paths(self, context, temp_file_path)
+    bpy.ops.wm.save_as_mainfile(filepath=temp_file_path, copy=True)
+
+    error_msg = merge_task_layer(
+        context,
+        local_tls=self._task_layer_keys,
+        external_file=self._sync_target,
+    )
+
+    if error_msg:
+        context.scene.asset_pipeline.sync_error = True
+        self.report({'ERROR'}, error_msg)
+        return {'CANCELLED'}
+
+
+def sync_execute_push(self, context):
+    temp_file_path = create_temp_file_backup(self, context)
+    push_targets = find_all_published(self._current_file, constants.ACTIVE_PUBLISH_KEY)
+
+    if self._sync_target not in push_targets:
+        push_targets.append(self._sync_target)
+
+    for file in push_targets:
+        file_path = file.__str__()
+        bpy.ops.wm.open_mainfile(filepath=file_path)
+
+        update_temp_file_paths(self, context, temp_file_path)
+
+        # Skip depreciated files.
+        if context.scene.asset_pipeline.is_depreciated:
+            continue
+
+        local_tls = [
+            task_layer
+            for task_layer in config.TASK_LAYER_TYPES
+            if task_layer not in self._task_layer_keys
+        ]
+
+        error_msg = merge_task_layer(
+            context,
+            local_tls=local_tls,
+            external_file=self._current_file,
+        )
+        if error_msg:
+            context.scene.asset_pipeline.sync_error = True
+            self.report({'ERROR'}, error_msg)
+            return {'CANCELLED'}
+
+        bpy.ops.wm.save_as_mainfile(filepath=file_path)
+    bpy.ops.wm.open_mainfile(filepath=self._current_file.__str__())
diff --git a/scripts-blender/addons/asset_pipeline/sys_utils.py b/scripts-blender/addons/asset_pipeline/sys_utils.py
deleted file mode 100644
index f9071dfb..00000000
--- a/scripts-blender/addons/asset_pipeline/sys_utils.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# ##### BEGIN GPL LICENSE BLOCK #####
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-#
-# ##### END GPL LICENSE BLOCK #####
-
-#
-
-# This file was made by Jeroen Bakker in the shot-builder repository:
-# https://developer.blender.org/diffusion/BSTS/browse/master/shot-builder/shot_builder/sys_utils
-
-
-import sys
-import pathlib
-import logging
-from typing import List, Dict, Union, Any, Optional
-
-logger = logging.getLogger("BSP")
-
-
-class SystemPathInclude:
-    """
-    Resource class to temporary include system paths to `sys.paths`.
-
-    Usage:
-    ```
-    paths = [pathlib.Path("/home/guest/my_python_scripts")]
-    with SystemPathInclude(paths) as t:
-        import my_module
-        reload(my_module)
-    ```
-
-    It is possible to nest multiple SystemPathIncludes.
-    """
-
-    def __init__(self, paths_to_add: List[pathlib.Path]):
-        # TODO: Check if all paths exist and are absolute.
- self.__paths = paths_to_add - self.__original_sys_path: List[str] = [] - - def __enter__(self): - self.__original_sys_path = sys.path - new_sys_path = [] - for path_to_add in self.__paths: - # Do not add paths that are already in the sys path. - # Report this to the logger as this might indicate wrong usage. - path_to_add_str = str(path_to_add) - if path_to_add_str in self.__original_sys_path: - logger.warn(f"{path_to_add_str} already added to `sys.path`") - continue - new_sys_path.append(path_to_add_str) - new_sys_path.extend(self.__original_sys_path) - sys.path = new_sys_path - - def __exit__(self, exc_type, exc_value, exc_traceback): - sys.path = self.__original_sys_path diff --git a/scripts-blender/addons/asset_pipeline/task_layer_configs/Character.json b/scripts-blender/addons/asset_pipeline/task_layer_configs/Character.json new file mode 100644 index 00000000..9951df62 --- /dev/null +++ b/scripts-blender/addons/asset_pipeline/task_layer_configs/Character.json @@ -0,0 +1,47 @@ +{ + "TASK_LAYER_TYPES": { + "Modeling": "MOD", + "Rigging": "RIG", + "Shading": "SHD" + }, + "TRANSFER_DATA_DEFAULTS": { + "GROUP_VERTEX": { + "default_owner": "Rigging", + "auto_surrender": false + }, + "MODIFIER": { + "default_owner": "Rigging", + "auto_surrender": false + }, + "CONSTRAINT": { + "default_owner": "Rigging", + "auto_surrender": false + }, + "MATERIAL": { + "default_owner": "Shading", + "auto_surrender": true + }, + "SHAPE_KEY": { + "default_owner": "Modeling", + "auto_surrender": false + }, + "ATTRIBUTE": { + "default_owner": "Rigging", + "auto_surrender": false + }, + "PARENT": { + "default_owner": "Rigging", + "auto_surrender": false + } + }, + "ATTRIBUTE_DEFAULTS": { + "sharp_face": { + "default_owner": "Modeling", + "auto_surrender": true + }, + "UVMap": { + "default_owner": "Shading", + "auto_surrender": true + } + } +} \ No newline at end of file diff --git a/scripts-blender/addons/asset_pipeline/task_layer_configs/Set.json b/scripts-blender/addons/asset_pipeline/task_layer_configs/Set.json new file mode 100644 index 00000000..27657d38 --- /dev/null +++ b/scripts-blender/addons/asset_pipeline/task_layer_configs/Set.json @@ -0,0 +1,47 @@ +{ + "TASK_LAYER_TYPES": { + "Modeling": "MOD", + "Rigging": "RIG", + "Shading": "SHD" + }, + "TRANSFER_DATA_DEFAULTS": { + "GROUP_VERTEX": { + "default_owner": "Modeling", + "auto_surrender": false + }, + "MODIFIER": { + "default_owner": "Modeling", + "auto_surrender": false + }, + "CONSTRAINT": { + "default_owner": "Modeling", + "auto_surrender": false + }, + "MATERIAL": { + "default_owner": "Shading", + "auto_surrender": true + }, + "SHAPE_KEY": { + "default_owner": "Modeling", + "auto_surrender": false + }, + "ATTRIBUTE": { + "default_owner": "Modeling", + "auto_surrender": false + }, + "PARENT": { + "default_owner": "Modeling", + "auto_surrender": false + } + }, + "ATTRIBUTE_DEFAULTS": { + "sharp_face": { + "default_owner": "Modeling", + "auto_surrender": true + }, + "UVMap": { + "default_owner": "Shading", + "auto_surrender": true + } + } +} \ No newline at end of file diff --git a/scripts-blender/addons/asset_pipeline/tests/__init__.py b/scripts-blender/addons/asset_pipeline/tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/scripts-blender/addons/asset_pipeline/tests/test_blender_studio_pipeline.py b/scripts-blender/addons/asset_pipeline/tests/test_blender_studio_pipeline.py deleted file mode 100644 index f8b3ba35..00000000 --- a/scripts-blender/addons/asset_pipeline/tests/test_blender_studio_pipeline.py +++ 
/dev/null
@@ -1,5 +0,0 @@
-from asset_pipeline import __version__
-
-
-def test_version():
-    assert __version__ == "0.1.0"
diff --git a/scripts-blender/addons/asset_pipeline/ui.py b/scripts-blender/addons/asset_pipeline/ui.py
new file mode 100644
index 00000000..f887fe65
--- /dev/null
+++ b/scripts-blender/addons/asset_pipeline/ui.py
@@ -0,0 +1,175 @@
+import bpy
+
+from pathlib import Path
+from .merge.transfer_data.transfer_ui import draw_transfer_data
+from .merge.task_layer import draw_task_layer_selection
+from .config import verify_json_data
+from .prefs import get_addon_prefs
+from . import constants
+
+
+class ASSETPIPE_PT_sync(bpy.types.Panel):
+    bl_space_type = 'VIEW_3D'
+    bl_region_type = 'UI'
+    bl_category = 'Asset Pipe 2'
+    bl_label = "Asset Management"
+
+    def draw(self, context: bpy.types.Context) -> None:
+        layout = self.layout
+        asset_pipe = context.scene.asset_pipeline
+        if not asset_pipe.is_asset_pipeline_file:
+            layout.prop(asset_pipe, "new_file_mode", expand=True)
+            layout.prop(asset_pipe, "task_layer_config_type")
+            if asset_pipe.new_file_mode == "BLANK":
+                layout.prop(asset_pipe, "name")
+                layout.prop(asset_pipe, "prefix")
+                layout.prop(asset_pipe, "dir")
+            else:
+                layout.prop(asset_pipe, "asset_collection")
+            layout.operator("assetpipe.create_new_asset")
+            return
+
+        if not Path(bpy.data.filepath).exists():
+            layout.label(text="File is not saved", icon="ERROR")
+            return
+
+        if asset_pipe.sync_error or asset_pipe.asset_collection.name.endswith(
+            constants.LOCAL_SUFFIX
+        ):
+            layout.alert = True
+            row = layout.row()
+            row.label(text="Merge Process has Failed", icon='ERROR')
+            row.operator("assetpipe.revert_file", text="Revert", icon="FILE_TICK")
+            return
+
+        # TODO Move this call out of the UI because we keep re-loading this file every draw
+        if not verify_json_data():
+            layout.label(text="Task Layer Config is invalid", icon="ERROR")
+            return
+
+        layout.label(text="Local Task Layers:")
+        box = layout.box()
+        row = box.row(align=True)
+        for task_layer in asset_pipe.local_task_layers:
+            row.label(text=task_layer.name)
+
+        layout.prop(asset_pipe, "asset_collection")
+
+        layout.operator("assetpipe.sync_push", text="Push to Publish", icon="TRIA_UP")
+        layout.operator(
+            "assetpipe.sync_pull", text="Pull from Publish", icon="TRIA_DOWN"
+        )
+        layout.separator()
+        layout.operator("assetpipe.publish_new_version", icon="PLUS")
+        layout.separator()
+        layout.operator("assetpipe.batch_ownership_change")
+        # TODO Find new way to determine if we are in a published file more explicitly
+        # if asset_pipe.is_asset_pipeline_file and asset_pipe.task_layer_name == "NONE":
+        #     asset_pipe = context.scene.asset_pipeline
+        #     box = layout.box()
+        #     box.label(text="Published File Settings")
+        #     box.prop(asset_pipe, "is_depreciated")
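The Advanced sub-panel that follows is gated by the is_advanced_mode preference from prefs.py; for example, it can be toggled from a script (the addon package key is an assumption):

```python
import bpy

# Mirrors ASSETPIPE_PT_sync_advanced.poll() below, which reads
# get_addon_prefs().is_advanced_mode.
addon = bpy.context.preferences.addons["asset_pipeline"]
addon.preferences.is_advanced_mode = True
```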
+
+
+class ASSETPIPE_PT_sync_advanced(bpy.types.Panel):
+    bl_space_type = 'VIEW_3D'
+    bl_region_type = 'UI'
+    bl_category = 'Asset Pipe 2'
+    bl_label = "Advanced"
+    bl_parent_id = "ASSETPIPE_PT_sync"
+    bl_options = {'DEFAULT_CLOSED'}
+
+    @classmethod
+    def poll(cls, context: bpy.types.Context) -> bool:
+        prefs = get_addon_prefs()
+        return prefs.is_advanced_mode
+
+    def draw(self, context: bpy.types.Context) -> None:
+        layout = self.layout
+        box = layout.box()
+        box.operator("assetpipe.update_ownership", text="Update Ownership")
+        box.operator("assetpipe.reset_ownership", icon="LOOP_BACK")
+        box = layout.box()
+        box.operator("assetpipe.fix_prefixes", icon="CHECKMARK")
+        box.operator("assetpipe.revert_file", icon="FILE_TICK")
+
+        # Task Layer Updater
+        box = layout.box()
+        box.label(text="Change Local Task Layers")
+
+        row = box.row()
+        asset_pipe = context.scene.asset_pipeline
+        all_task_layers = asset_pipe.all_task_layers
+        for task_layer in all_task_layers:
+            row.prop(task_layer, "is_local", text=task_layer.name)
+        box.operator("assetpipe.update_local_task_layers")
+
+
+class ASSETPIPE_PT_ownership_inspector(bpy.types.Panel):
+    bl_space_type = 'VIEW_3D'
+    bl_region_type = 'UI'
+    bl_category = 'Asset Pipe 2'
+    bl_label = "Ownership Inspector"
+
+    def draw(self, context: bpy.types.Context) -> None:
+        layout = self.layout
+        asset_pipe = context.scene.asset_pipeline
+        scene = context.scene
+        if not asset_pipe.is_asset_pipeline_file:
+            layout.label(text="Open valid 'Asset Pipeline' file", icon="ERROR")
+            return
+
+        if context.collection in list(asset_pipe.asset_collection.children):
+            col = context.collection
+            row = layout.row()
+            row.label(
+                text=f"{col.name}: ",
+                icon="OUTLINER_COLLECTION",
+            )
+            draw_task_layer_selection(layout=row, data=col)
+
+        if not context.active_object:
+            layout.label(text="Set an Active Object to Inspect", icon="OBJECT_DATA")
+            return
+        obj = context.active_object
+        transfer_data = obj.transfer_data_ownership
+        layout = layout.box()
+        row = layout.row()
+        row.label(text=f"{obj.name}: ", icon="OBJECT_DATA")
+
+        if obj.get("asset_id_surrender"):
+            row.enabled = obj.asset_id_owner not in asset_pipe.get_local_task_layers()
+            col = row.column()
+            col.operator("assetpipe.update_surrendered_object")
+
+        # New row inside a column, because draw_task_layer_selection() will
+        # enable/disable the entire row; this should only affect the selector
+        # itself and the "surrender" property.
+        col = row.column()
+        task_layer_row = col.row()
+
+        draw_task_layer_selection(layout=task_layer_row, data=obj)
+        surrender_icon = "ORPHAN_DATA" if obj.get("asset_id_surrender") else "HEART"
+        task_layer_row.prop(obj, "asset_id_surrender", text="", icon=surrender_icon)
+        draw_transfer_data(transfer_data, layout)
+
+
+classes = (
+    ASSETPIPE_PT_sync,
+    ASSETPIPE_PT_sync_advanced,
+    ASSETPIPE_PT_ownership_inspector,
+)
+
+
+def register():
+    for i in classes:
+        bpy.utils.register_class(i)
+
+
+def unregister():
+    for i in classes:
+        bpy.utils.unregister_class(i)
diff --git a/scripts-blender/addons/asset_pipeline/updater/__init__.py b/scripts-blender/addons/asset_pipeline/updater/__init__.py
deleted file mode 100644
index 7f0fe0a4..00000000
--- a/scripts-blender/addons/asset_pipeline/updater/__init__.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# ***** BEGIN GPL LICENSE BLOCK *****
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-#
-# ***** END GPL LICENCE BLOCK *****
-#
-# (c) 2021, Blender Foundation - Paul Golter
-import importlib
-
-from typing import List, Dict, Union, Any, Set, Optional
-
-from . 
import ops, ui -from .asset_updater import AssetUpdater - -# Initialize variables. -ASSET_UPDATER = AssetUpdater() - - -# ----------------REGISTER--------------. - - -def reload() -> None: - global ops - global ui - - importlib.reload(ops) - importlib.reload(ui) - - -def register() -> None: - ops.register() - ui.register() - - -def unregister() -> None: - ui.unregister() - ops.unregister() diff --git a/scripts-blender/addons/asset_pipeline/updater/asset_updater.py b/scripts-blender/addons/asset_pipeline/updater/asset_updater.py deleted file mode 100644 index bbd6f4bb..00000000 --- a/scripts-blender/addons/asset_pipeline/updater/asset_updater.py +++ /dev/null @@ -1,71 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter -import logging - -from typing import List, Dict, Union, Any, Set, Optional, Tuple, Callable -from pathlib import Path - -import bpy - -from .. import lib_util - -logger = logging.getLogger("BSP") - - -class AssetUpdater: - def __init__(self): - self._asset_collections: Set[bpy.types.Collection] = set() - - def collect_asset_collections_in_scene( - self, context: bpy.types.Context - ) -> List[bpy.types.Collection]: - """ - Collects all asset collections that have coll.bsp_asset.is_publish==True in current scene. - Only collects them if they are linked in or library overwritten. - """ - self._asset_collections.clear() - - for coll in context.scene.collection.children_recursive: - - # If item is not coming from a library: Skip. - if lib_util.is_item_local(coll): - continue - - if coll.bsp_asset.is_publish: - self._asset_collections.add(coll) - - @property - def asset_collections(self) -> Set[bpy.types.Collection]: - return self._asset_collections - - def update_asset_collection_libpath( - self, asset_collection: bpy.types.Collection, new_libpath: Path - ) -> bpy.types.Collection: - coll_name = asset_collection.name - lib = lib_util.get_item_lib(asset_collection) - self.update_libpath(lib, new_libpath) - return bpy.data.collections[coll_name] - - def update_libpath(self, lib: bpy.types.Library, new_libpath: Path) -> None: - bpy.ops.wm.lib_relocate( - library=lib.name, - directory=new_libpath.parent.as_posix(), - filename=new_libpath.name, - ) diff --git a/scripts-blender/addons/asset_pipeline/updater/ops.py b/scripts-blender/addons/asset_pipeline/updater/ops.py deleted file mode 100644 index 983a3059..00000000 --- a/scripts-blender/addons/asset_pipeline/updater/ops.py +++ /dev/null @@ -1,137 +0,0 @@ -# ***** BEGIN GPL LICENSE BLOCK ***** -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# ***** END GPL LICENCE BLOCK ***** -# -# (c) 2021, Blender Foundation - Paul Golter - -import os -import logging - -from typing import List, Dict, Union, Any, Set, Optional, Tuple -from pathlib import Path - -import bpy -from bpy.app.handlers import persistent - -from . import opsdata - -from .. import util, lib_util, updater - - -class BSP_ASSET_UPDATER_collect_assets(bpy.types.Operator): - bl_idname = "bsp_asset.collect_assets" - bl_label = "Collect Assets" - bl_description = "Scans Scene for imported Assets" - - def execute(self, context: bpy.types.Context) -> Set[str]: - - # Initialize Asset Updater and scan for scene. - updater.ASSET_UPDATER.collect_asset_collections_in_scene(context) - - # Populate context with collected asset collections. - opsdata.populate_context_with_imported_asset_colls( - context, updater.ASSET_UPDATER - ) - - # Redraw UI. - util.redraw_ui() - - return {"FINISHED"} - - -class BSP_ASSET_UPDATER_update_asset(bpy.types.Operator): - bl_idname = "bsp_asset.update_asset" - bl_label = "Update Assets" - bl_description = "Updates Asset to target version that is selected in the list view" - - index: bpy.props.IntProperty(name="Index", min=0) - - def execute(self, context: bpy.types.Context) -> Set[str]: - - prop_group = context.scene.bsp_asset.imported_asset_collections[self.index] - - collection: bpy.types.Collection = prop_group.collection - target_publish: str = prop_group.target_publish - asset_file: bpy.types.PropertyGroup = prop_group.asset_publishes[target_publish] - # asset_publish = AssetPublish(asset_file.path) - lib = lib_util.get_item_lib(collection) - libpath = Path(os.path.abspath(bpy.path.abspath(lib.filepath))) - - # Check if same version is loaded. - if Path(bpy.path.abspath(asset_file.path_str)) == libpath: - self.report({"WARNING"}, f"{libpath.name} is already loaded") - # lib.reload() # Crashes blender? TODO: report - return {"CANCELLED"} - - # Collection pointer gets lost after this operation. - updater.ASSET_UPDATER.update_asset_collection_libpath( - collection, asset_file.path - ) - - # TODO: save this to metadata file, so we can inspect this information - # without opening a blend file. - - # Redraw UI. - util.redraw_ui() - - return {"FINISHED"} - - -class BSP_ASSET_UPDATER_update_all(bpy.types.Operator): - bl_idname = "bsp_asset.update_all" - bl_label = "Update All Assets" - bl_description = ( - "Updates all Assets to target version that is selected in the list view" - ) - - def execute(self, context: bpy.types.Context) -> Set[str]: - - for idx, item in enumerate(context.scene.bsp_asset.imported_asset_collections): - bpy.ops.bsp_asset.update_asset(index=idx) - - return {"FINISHED"} - - -@persistent -def collect_assets_in_scene(_): - bpy.ops.bsp_asset.collect_assets() - - -# ----------------REGISTER--------------. - -classes = [ - BSP_ASSET_UPDATER_collect_assets, - BSP_ASSET_UPDATER_update_asset, - BSP_ASSET_UPDATER_update_all, -] - - -def register() -> None: - for cls in classes: - bpy.utils.register_class(cls) - - # Handlers. 
-    bpy.app.handlers.load_post.append(collect_assets_in_scene)
-
-
-def unregister() -> None:
-
-    # Handlers.
-    bpy.app.handlers.load_post.remove(collect_assets_in_scene)
-
-    for cls in reversed(classes):
-        bpy.utils.unregister_class(cls)
diff --git a/scripts-blender/addons/asset_pipeline/updater/opsdata.py b/scripts-blender/addons/asset_pipeline/updater/opsdata.py
deleted file mode 100644
index e7914c38..00000000
--- a/scripts-blender/addons/asset_pipeline/updater/opsdata.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# ***** BEGIN GPL LICENSE BLOCK *****
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-#
-# ***** END GPL LICENSE BLOCK *****
-#
-# (c) 2021, Blender Foundation - Paul Golter
-
-import logging
-
-from typing import List, Dict, Union, Any, Set, Optional, Tuple
-from pathlib import Path
-
-import bpy
-
-from .asset_updater import AssetUpdater
-
-from ..asset_files import AssetPublish
-from ..asset_status import AssetStatus
-
-
-logger = logging.getLogger("BSP")
-
-
-def add_imported_asset_coll_to_context(
-    context: bpy.types.Context, asset_coll: bpy.types.Collection
-) -> None:
-
-    asset_publish: AssetPublish = asset_coll.bsp_asset.get_asset_publish()
-
-    # Add item.
-    item = context.scene.bsp_asset.imported_asset_collections.add()
-
-    # Set collection property.
-    item.collection = asset_coll
-
-    # Collect all publishes on disk for that asset collection.
-    asset_dir = asset_publish.asset_dir
-    for publish in asset_dir.get_asset_publishes():
-
-        # Don't offer asset publishes that are still in review,
-        # but do keep the currently imported version (even if it's in review state).
-        if (
-            publish.metadata.meta_asset.status == AssetStatus.REVIEW
-            and asset_publish != publish
-        ):
-            logger.debug(
-                "Asset-Updater: %s skip %s as status is %s",
-                asset_publish.metadata.meta_asset.name,
-                publish.path.name,
-                AssetStatus.REVIEW.name,
-            )
-            continue
-
-        item_publish = item.asset_publishes.add()
-        item_publish.update_props_by_asset_publish(publish)
-        logger.debug(
-            "Asset-Updater: %s found: %s",
-            asset_publish.metadata.meta_asset.name,
-            publish.path.name,
-        )
-
-    # Set enum property to the latest version.
-    if item.asset_publishes:
-        item.target_publish = item.asset_publishes[-1].name
-
-
-def populate_context_with_imported_asset_colls(
-    context: bpy.types.Context, asset_updater: AssetUpdater
-) -> None:
-    def sorting_keys(coll: bpy.types.Collection) -> Tuple[bool, str]:
-        """
-        This sorting function moves deprecated assets to the top and sorts
-        the rest of the collections in alphabetical order.
-        """
-        asset_publish: AssetPublish = coll.bsp_asset.get_asset_publish()
-        return (
-            asset_publish.metadata.meta_asset.status != AssetStatus.DEPRECATED,
-            coll.name,
-        )
-
-    context.scene.bsp_asset.imported_asset_collections.clear()
-
-    asset_collections = sorted(asset_updater.asset_collections, key=sorting_keys)
-    # Add asset collections with their publishes.
-    for asset_coll in asset_collections:
-        add_imported_asset_coll_to_context(context, asset_coll)
diff --git a/scripts-blender/addons/asset_pipeline/updater/ui.py b/scripts-blender/addons/asset_pipeline/updater/ui.py
deleted file mode 100644
index 4943b689..00000000
--- a/scripts-blender/addons/asset_pipeline/updater/ui.py
+++ /dev/null
@@ -1,142 +0,0 @@
-# ***** BEGIN GPL LICENSE BLOCK *****
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-#
-# ***** END GPL LICENSE BLOCK *****
-#
-# (c) 2021, Blender Foundation - Paul Golter
-from pathlib import Path
-from typing import List, Dict, Union, Any, Set, Optional
-
-import bpy
-
-from .ops import (
-    BSP_ASSET_UPDATER_collect_assets,
-    BSP_ASSET_UPDATER_update_asset,
-    BSP_ASSET_UPDATER_update_all,
-)
-
-from .. import constants, lib_util
-from ..asset_status import AssetStatus
-
-
-def draw_imported_asset_collections_in_scene(
-    self: bpy.types.Panel,
-    context: bpy.types.Context,
-    disable: bool = False,
-    box: Optional[bpy.types.UILayout] = None,
-) -> bpy.types.UILayout:
-    layout: bpy.types.UILayout = self.layout
-
-    if not box:
-        box = layout.box()
-    row = box.row(align=True)
-    row.label(text="Asset Collections")
-    row.operator(
-        BSP_ASSET_UPDATER_collect_assets.bl_idname, icon="FILE_REFRESH", text=""
-    )
-
-    # UI list.
-    row = box.row()
-    row.template_list(
-        "BSP_UL_imported_asset_collections",
-        "imported_asset_collections_list",
-        context.scene.bsp_asset,
-        "imported_asset_collections",
-        context.scene.bsp_asset,
-        "imported_asset_collections_index",
-        rows=constants.DEFAULT_ROWS,
-        type="DEFAULT",
-    )
-    if disable:
-        row.enabled = False
-
-    return box
-
-
-class BSP_ASSET_UPDATER_main_panel:
-    bl_category = "Asset Updater"
-    bl_label = "Asset Updater"
-    bl_space_type = "VIEW_3D"
-    bl_region_type = "UI"
-
-
-class BSP_ASSET_UPDATER_PT_vi3d_assets(BSP_ASSET_UPDATER_main_panel, bpy.types.Panel):
-    def draw(self, context: bpy.types.Context) -> None:
-
-        layout: bpy.types.UILayout = self.layout
-        box = draw_imported_asset_collections_in_scene(self, context)
-
-        box.operator(
-            BSP_ASSET_UPDATER_update_all.bl_idname,
-            text="Update All",
-            icon="FILE_REFRESH",
-        )
-        return
-
-
-class BSP_UL_imported_asset_collections(bpy.types.UIList):
-    def draw_item(
-        self, context, layout, data, item, icon, active_data, active_propname, index
-    ):
-        # item: props.SET_imported_asset_collection
-
-        layout: bpy.types.UILayout = layout
-        coll = item.collection
-        if self.layout_type in {"DEFAULT", "COMPACT"}:
-
-            base_split = layout.split(factor=0.3, align=True)
-
-            # Asset name.
-            base_split.label(text=coll.bsp_asset.entity_name)
-
-            icon = "NONE"
-
-            lib = lib_util.get_item_lib(coll)
-            loaded_asset_publish = item.asset_publishes[Path(lib.filepath).name]
-
-            # If the currently loaded asset publish has deprecated status, display a warning icon.
-            if loaded_asset_publish.status == AssetStatus.DEPRECATED.name:
-                icon = "ERROR"
-
-            # Asset version.
-            base_split.label(text=coll.bsp_asset.version, icon=icon)
-
-            # Target version.
-            base_split.prop(item, "target_publish", text="")
-
-            # Update operator.
-            base_split.operator(
-                BSP_ASSET_UPDATER_update_asset.bl_idname, text="", icon="FILE_REFRESH"
-            ).index = index
-
-        elif self.layout_type in {"GRID"}:
-            layout.alignment = "CENTER"
-            layout.label(text=coll.bsp_asset.entity_name)
-
-
-# ----------------REGISTER--------------.
-
-classes = [BSP_UL_imported_asset_collections, BSP_ASSET_UPDATER_PT_vi3d_assets]
-
-
-def register() -> None:
-    for cls in classes:
-        bpy.utils.register_class(cls)
-
-
-def unregister() -> None:
-    for cls in reversed(classes):
-        bpy.utils.unregister_class(cls)
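
Note on the pattern used by the removed updater/ops.py above: it registers a @persistent load_post handler that simply invokes an operator, so the asset scan re-runs automatically whenever a .blend file is opened while staying invokable from the UI as well. Below is a minimal standalone sketch of that handler-plus-operator pattern; the EXAMPLE_OT_scan_assets class and the "example.scan_assets" idname are illustrative stand-ins, not part of the asset_pipeline add-on.

import bpy
from bpy.app.handlers import persistent


class EXAMPLE_OT_scan_assets(bpy.types.Operator):
    """Count collections linked from other .blend files in the current scene."""

    bl_idname = "example.scan_assets"
    bl_label = "Scan Assets"

    def execute(self, context):
        # Collections linked from another .blend file carry a
        # non-None .library pointer; local collections do not.
        linked = [
            coll
            for coll in context.scene.collection.children_recursive
            if coll.library is not None
        ]
        self.report({'INFO'}, f"Found {len(linked)} linked collections")
        return {'FINISHED'}


@persistent
def scan_on_load(_filepath):
    # load_post handlers must be marked @persistent, otherwise
    # Blender drops them when a new file is loaded.
    bpy.ops.example.scan_assets()


def register():
    bpy.utils.register_class(EXAMPLE_OT_scan_assets)
    bpy.app.handlers.load_post.append(scan_on_load)


def unregister():
    bpy.app.handlers.load_post.remove(scan_on_load)
    bpy.utils.unregister_class(EXAMPLE_OT_scan_assets)

Routing the work through an operator instead of doing it inline in the handler is what let the deleted code serve both the automatic post-load scan and the manual "Collect Assets" button with a single code path.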