Move Anim_Setup
module into Blender_Kitsu
#5
112
anim_setup/.gitignore
vendored
112
anim_setup/.gitignore
vendored
@ -1,112 +0,0 @@
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
env/
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
|
||||
# celery beat schedule file
|
||||
celerybeat-schedule
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# dotenv
|
||||
.env
|
||||
|
||||
# virtualenv
|
||||
.venv
|
||||
.venv*
|
||||
venv/
|
||||
ENV/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
|
||||
# IDE settings
|
||||
.vscode/
|
||||
|
||||
# utility bat files:
|
||||
*jump_in_venv.bat
|
||||
|
||||
#local tests
|
||||
tests/local*
|
@ -1,24 +0,0 @@
|
||||
# anim-setup
|
||||
anim-setup is a Blender Add-on that automates the setup of animation scenes for the Sprite-Fright project.
|
||||
## Installation
|
||||
Download or clone this repository.
|
||||
In the root project folder you will find the 'anim_setup' folder. Place this folder in your Blender addons directory or create a sym link to it.
|
||||
|
||||
After install you need to configure the addon in the addon preferences.
|
||||
|
||||
## Features
|
||||
The addon relies on the correct naming of asset and camera actions in the corresponding previs file of the shot.
|
||||
Check the <a href="https://www.notion.so/Animation-Setup-Checklist-ba4d044ec2354b8baae2b3472b757569"> Animation Setup Checklist</a>.
|
||||
|
||||
Operators of the addon:
|
||||
- Setup Workspace for animation
|
||||
- Load latest edit from edit export directory
|
||||
- Import camera action from the previs file
|
||||
- Import actions for found assets from previs file
|
||||
- Shift animation of camera and asset actions to start at layout cut in
|
||||
- Create missing actions for found assets in scene
|
||||
|
||||
## Development
|
||||
In the project root you will find a `pyproject.toml` and `poetry.lock` file.
|
||||
With `poetry` you can easily generate a virtual env for the project, which should get you set up quickly.
|
||||
Basic Usage: https://python-poetry.org/docs/basic-usage/
|
@ -1,57 +0,0 @@
|
||||
"""Blender add-on entry point for anim-setup.

Declares the add-on metadata, wires up script-reload support for
development, and registers/unregisters all submodules.
"""

import bpy

from . import asglobals
from . import prefs
from . import kitsu
from . import props
from . import opsdata
from . import ops
from . import ui
from .log import LoggerFactory

logger = LoggerFactory.getLogger(__name__)

# Add-on metadata shown in Blender's preferences.
bl_info = {
    "name": "Anim Setup",
    "author": "Paul Golter",
    "description": "Blender addon to setup animation scenes for the spritefright project",
    "blender": (3, 0, 0),
    "version": (0, 1, 0),
    "location": "View3D",
    "warning": "",
    "doc_url": "",
    "tracker_url": "",
    "category": "Generic",
}

# True when this module is executed a second time (e.g. Blender's
# "Reload Scripts"): "ops" is already bound from the first import.
_need_reload = "ops" in locals()

if _need_reload:
    import importlib

    # Re-import submodules so code changes take effect without restarting.
    asglobals = importlib.reload(asglobals)
    prefs = importlib.reload(prefs)
    kitsu = importlib.reload(kitsu)
    props = importlib.reload(props)
    opsdata = importlib.reload(opsdata)
    ops = importlib.reload(ops)
    ui = importlib.reload(ui)


def register():
    """Register all add-on submodules (prefs first, UI last)."""
    prefs.register()
    props.register()
    ops.register()
    ui.register()
    logger.info("Registered anim-setup")


def unregister():
    """Unregister all add-on submodules in reverse registration order."""
    ui.unregister()
    ops.unregister()
    props.unregister()
    prefs.unregister()


if __name__ == "__main__":
    register()
|
@ -1,17 +0,0 @@
|
||||
# Project-wide constants for the anim-setup add-on.

# Kitsu production this add-on targets.
PROJECT_NAME = "SpriteFright"
PROJECT_ID = "fc77c0b9-bb76-41c3-b843-c9b156f9b3ec"

# Collection names of assets that get exactly one action imported/created
# per shot (characters "CH-" and props "PR-").
ACTION_ASSETS = [
    "CH-ellie",
    "CH-jay",
    "CH-phil",
    "CH-rex",
    "CH-elder_sprite",
    "CH-victoria",
    "CH-bird",
    "PR-bbq_grill",
    "PR-boombox",
    "PR-tree_chasm",
    "PR-log_bridge_trunk"
]
# Assets that can appear multiple times in a scene.
MULTI_ASSETS = ["CH-sprite"]
# Collections to hide during animation setup.
HIDE_COLLS = ["mushrooms_center", "treetop_leaves"]
|
@ -1,315 +0,0 @@
|
||||
from __future__ import annotations
|
||||
import requests
|
||||
|
||||
from dataclasses import asdict, dataclass, field
|
||||
from typing import Any, Dict, List, Optional, Union
|
||||
|
||||
from .log import LoggerFactory
|
||||
|
||||
logger = LoggerFactory.getLogger()
|
||||
|
||||
|
||||
class KitsuException(Exception):
    """Raised when a request to the Kitsu backend fails."""

    pass
|
||||
|
||||
|
||||
class KitsuConnector:
    """Thin wrapper around the Kitsu HTTP API.

    On construction, validates the Kitsu settings stored in the add-on
    preferences and logs in, keeping the JWT access token for later
    requests made through :meth:`api_get`.
    """

    def __init__(self, preferences: "AS_AddonPreferences"):
        self._preferences = preferences
        self.__access_token = ""
        self.__validate()
        self.__authorize()

    def __validate(self) -> None:
        # Delegates to the preferences; raises if settings are incomplete.
        self._preferences.kitsu._validate()

    def __authorize(self) -> None:
        """Log in against the configured backend and store the access token.

        Raises:
            KitsuException: if the backend does not answer with HTTP 200.
        """
        kitsu_pref = self._preferences.kitsu
        backend = kitsu_pref.backend
        email = kitsu_pref.email
        password = kitsu_pref.password

        logger.info(f"authorize {email} against {backend}")
        response = requests.post(
            url=f"{backend}/auth/login", data={"email": email, "password": password}
        )
        if response.status_code != 200:
            self.__access_token = ""
            raise KitsuException(
                f"unable to authorize (status code={response.status_code})"
            )
        json_response = response.json()
        self.__access_token = json_response["access_token"]

    def api_get(self, api: str) -> Any:
        """GET an API path (e.g. ``data/projects``) and return the parsed JSON.

        Raises:
            KitsuException: if the backend does not answer with HTTP 200.
        """
        kitsu_pref = self._preferences.kitsu
        backend = kitsu_pref.backend

        response = requests.get(
            url=f"{backend}{api}",
            headers={"Authorization": f"Bearer {self.__access_token}"},
        )
        if response.status_code != 200:
            raise KitsuException(
                f"unable to call kitsu (api={api}, status code={response.status_code})"
            )
        return response.json()

    @classmethod
    def fetch_first(
        cls, json_response: Dict[str, Any], filter: Dict[str, Any]
    ) -> Optional[Dict[str, Any]]:
        """Return the first item of *json_response* matching every *filter* pair.

        Returns None (after logging an error) when nothing matches.

        Raises:
            ValueError: if *json_response* is not a list.
        """
        if not isinstance(json_response, list):
            raise ValueError(
                # FIX: corrected "excpected" typo in the error message.
                f"Failed to fetch one, expected list object: {json_response}"
            )

        for item in json_response:
            # An item matches when every filter key exists with an equal value.
            if all(f in item and item[f] == filter[f] for f in filter):
                return item

        logger.error("Filter had no match %s on json response.", str(filter))
        return None

    @classmethod
    def fetch_all(
        cls, json_response: Dict[str, Any], filter: Dict[str, Any]
    ) -> List[Dict[str, Any]]:
        """Return all items of *json_response* matching every *filter* pair.

        Raises:
            ValueError: if *json_response* is not a list.
        """
        if not isinstance(json_response, list):
            raise ValueError(
                # FIX: corrected "excpected" typo in the error message.
                f"Failed to fetch all, expected list object: {json_response}"
            )

        return [
            item
            for item in json_response
            if all(f in item and item[f] == filter[f] for f in filter)
        ]
|
||||
|
||||
|
||||
class ProjectList(KitsuConnector):
    """
    Class to get object oriented representation of backend productions data structure.
    """

    # NOTE(review): __init__ never calls KitsuConnector.__init__, so
    # self._preferences / the access token are not set up here — it looks
    # like api_get() in _init_projects() would fail on a fresh instance.
    # Confirm how this class is intended to be constructed.
    def __init__(self):
        self._projects: List[Project] = []
        self._init_projects()

    @property
    def names(self) -> List[str]:
        # Names of all fetched productions.
        return [p.name for p in self._projects]

    @property
    def projects(self) -> List[Project]:
        return self._projects

    def _init_projects(self) -> None:
        # Fetch every production from the backend and wrap it in a Project.
        api_url = "data/projects"

        for project in self.api_get(api_url):
            self._projects.append(Project(**project))
|
||||
|
||||
|
||||
@dataclass
class Project(KitsuConnector):
    """
    Class to get object oriented representation of backend project data structure.
    Can shortcut some functions from gazu api because active project is given through class instance.
    Has multiple constructor functions (by_name, by_id, init>by_dict)
    """

    # Field names mirror the Kitsu project JSON payload so instances can be
    # built directly via Project(**project_dict).
    id: str = ""
    created_at: str = ""
    updated_at: str = ""
    name: str = ""
    code: Optional[str] = None
    description: Optional[str] = None
    shotgun_id: Optional[str] = None
    data: None = None
    has_avatar: bool = False
    fps: Optional[str] = None
    ratio: Optional[str] = None
    resolution: Optional[str] = None
    production_type: str = ""
    start_date: Optional[str] = None
    end_date: Optional[str] = None
    man_days: Optional[str] = None
    nb_episodes: int = 0
    episode_span: int = 0
    project_status_id: str = ""
    type: str = ""
    project_status_name: str = ""
    file_tree: Dict[str, Any] = field(default_factory=dict)
    team: List[Any] = field(default_factory=list)
    asset_types: List[Any] = field(default_factory=list)
    task_types: List[Any] = field(default_factory=list)
    task_statuses: List[Any] = field(default_factory=list)

    @classmethod
    def by_id(cls, connector: KitsuConnector, project_id: str) -> Project:
        """Fetch a single project by its Kitsu id."""
        api_url = f"data/projects/{project_id}"
        project_dict = connector.api_get(api_url)
        return cls(**project_dict)

    # SEQUENCES
    # ---------------

    def get_sequence(self, connector: KitsuConnector, seq_id: str) -> Sequence:
        """Fetch a sequence by its Kitsu id."""
        return Sequence.by_id(connector, seq_id)

    def get_sequence_by_name(
        self, connector: KitsuConnector, seq_name: str
    ) -> Optional[Sequence]:
        """Fetch a sequence of this project by name; None if not found."""
        return Sequence.by_name(connector, self, seq_name)

    def get_sequences_all(self, connector: KitsuConnector) -> List[Sequence]:
        """Fetch all sequences of this project, sorted by name."""
        api_url = f"data/projects/{self.id}/sequences"
        seq_dicts = connector.api_get(api_url)

        sequences = [Sequence(**s) for s in seq_dicts]
        return sorted(sequences, key=lambda x: x.name)

    # SHOT
    # ---------------

    def get_shot(self, connector: KitsuConnector, shot_id: str) -> Shot:
        """Fetch a shot by its Kitsu id."""
        return Shot.by_id(connector, shot_id)

    def get_shots_all(self, connector: KitsuConnector) -> List[Shot]:
        """Fetch all shots of this project, sorted by name."""
        api_url = f"data/projects/{self.id}/shots"
        shot_dicts = connector.api_get(api_url)

        shots = [Shot(**s) for s in shot_dicts]
        return sorted(shots, key=lambda x: x.name)

    def get_shot_by_name(
        self, connector: KitsuConnector, sequence: Sequence, name: str
    ) -> Optional[Shot]:
        """Fetch a shot of *sequence* by name; None if not found."""
        # FIX: dropped an unused self.get_shots_all(connector) call that
        # fetched every shot from the backend and discarded the result.
        return Shot.by_name(connector, sequence, name)

    def __bool__(self):
        # Truthy only when the project carries a backend id.
        return bool(self.id)
|
||||
|
||||
|
||||
@dataclass
class Sequence(KitsuConnector):
    """
    Class to get object oriented representation of backend sequence data structure.
    Has multiple constructor functions (by_name, by_id, init>by_dict)
    """

    # Field names mirror the Kitsu sequence JSON payload so instances can be
    # built directly via Sequence(**seq_dict).
    id: str = ""
    created_at: str = ""
    updated_at: str = ""
    name: str = ""
    code: Optional[str] = None
    description: Optional[str] = None
    shotgun_id: Optional[str] = None
    canceled: bool = False
    nb_frames: Optional[int] = None
    project_id: str = ""
    entity_type_id: str = ""
    parent_id: str = ""
    source_id: Optional[str] = None
    preview_file_id: Optional[str] = None
    data: Optional[Dict[str, Any]] = None
    type: str = ""
    project_name: str = ""

    @classmethod
    def by_id(cls, connector: KitsuConnector, seq_id: str) -> Sequence:
        """Fetch a single sequence by its Kitsu id."""
        api_url = f"data/sequences/{seq_id}"
        # FIX: was connector.api_get(seq_id) — the constructed api_url was
        # never used, so the request hit a bogus endpoint.
        seq_dict = connector.api_get(api_url)
        return cls(**seq_dict)

    @classmethod
    def by_name(
        cls, connector: KitsuConnector, project: Project, seq_name: str
    ) -> Optional[Sequence]:
        """Fetch a sequence of *project* by name; None if not found."""
        api_url = f"data/projects/{project.id}/sequences"
        seq_dicts = connector.api_get(api_url)
        seq_dict = connector.fetch_first(seq_dicts, {"name": seq_name})

        # Can be None if name not found.
        if not seq_dict:
            return None

        return cls(**seq_dict)

    def __bool__(self):
        # Truthy only when the sequence carries a backend id.
        return bool(self.id)
|
||||
|
||||
|
||||
@dataclass
class Shot(KitsuConnector):
    """
    Class to get object oriented representation of backend shot data structure.
    Has multiple constructor functions (by_name, by_id, init>by_dict
    """

    # Field names mirror the Kitsu shot JSON payload so instances can be
    # built directly via Shot(**shot_dict).
    id: str = ""
    created_at: str = ""
    updated_at: str = ""
    name: str = ""
    canceled: bool = False
    code: Optional[str] = None
    description: Optional[str] = None
    entity_type_id: str = ""
    episode_id: Optional[str] = None
    episode_name: str = ""
    fps: str = ""
    frame_in: str = ""
    frame_out: str = ""
    nb_frames: int = 0
    parent_id: str = ""
    preview_file_id: Optional[str] = None
    project_id: str = ""
    project_name: str = ""
    sequence_id: str = ""
    sequence_name: str = ""
    source_id: Optional[str] = None
    shotgun_id: Optional[str] = None
    type: str = ""
    data: Dict[str, Any] = field(default_factory=dict)
    tasks: List[Dict[str, Any]] = field(default_factory=list)

    @classmethod
    def by_id(cls, connector: KitsuConnector, shot_id: str) -> Shot:
        """Fetch a single shot by its Kitsu id."""
        api_url = f"data/shots/{shot_id}"
        # FIX: was connector.api_get(shot_id) — the constructed api_url was
        # never used, so the request hit a bogus endpoint.
        shot_dict = connector.api_get(api_url)
        return cls(**shot_dict)

    @classmethod
    def by_name(
        cls, connector: KitsuConnector, sequence: Sequence, shot_name: str
    ) -> Optional[Shot]:
        """Fetch a shot of *sequence* by name; None if not found."""
        api_url = f"data/projects/{sequence.project_id}/shots"
        shot_dicts = connector.api_get(api_url)
        shot_dict = connector.fetch_first(
            shot_dicts, {"parent_id": sequence.id, "name": shot_name}
        )

        # Can be None if name not found.
        if not shot_dict:
            return None

        return cls(**shot_dict)

    def __bool__(self):
        # Truthy only when the shot carries a backend id.
        return bool(self.id)
|
@ -1,16 +0,0 @@
|
||||
import logging
|
||||
import sys
|
||||
from typing import List, Tuple
|
||||
|
||||
|
||||
class LoggerFactory:

    """
    Utility class to streamline logger creation
    """

    @staticmethod
    def getLogger(name=__name__):
        """Return the logging.Logger registered under *name*."""
        # FIX: dropped the no-op `name = name` statement and the
        # redundant intermediate variable.
        return logging.getLogger(name)
|
@ -1,835 +0,0 @@
|
||||
import re
|
||||
from pathlib import Path
|
||||
import types
|
||||
from typing import Container, Dict, List, Set, Optional
|
||||
|
||||
import bpy
|
||||
|
||||
from .log import LoggerFactory
|
||||
from .kitsu import KitsuConnector, Shot, Project, Sequence
|
||||
from . import opsdata, prefs, asglobals
|
||||
|
||||
logger = LoggerFactory.getLogger()
|
||||
|
||||
|
||||
def ui_redraw() -> None:
    """Tag every area of every screen for redraw, forcing a UI refresh."""
    for ui_screen in bpy.data.screens:
        for ui_area in ui_screen.areas:
            ui_area.tag_redraw()
|
||||
|
||||
|
||||
class AS_OT_create_actions(bpy.types.Operator):
    # Creates and assigns an action (with fake user) for every valid asset
    # collection that has a rig but no action assigned yet.
    bl_idname = "as.create_action"
    bl_label = "Create action"
    bl_description = (
        "Creates action for all found assets that have no assigned yet. "
        "Names them following the blender-studio convention"
    )

    @classmethod
    def poll(cls, context: bpy.types.Context) -> bool:
        # Requires a saved file and an active collection to search under.
        act_coll = context.view_layer.active_layer_collection.collection
        return bool(bpy.data.filepath and act_coll)

    def execute(self, context: bpy.types.Context) -> Set[str]:
        # Bookkeeping lists for the final report.
        assigned: List[bpy.types.Action] = []
        created: List[bpy.types.Action] = []
        failed: List[bpy.types.Collection] = []
        collections = opsdata.get_valid_collections(context)
        exists: List[bpy.types.Collection] = []

        if not collections:
            self.report({"WARNING"}, "No valid collections available")
            return {"CANCELLED"}

        for coll in collections:
            print("\n")
            rig = opsdata.find_rig(coll)

            if not rig:
                logger.warning(f"{coll.name} contains no rig.")
                failed.append(coll)
                continue

            # Create animation data if not existent.
            if not rig.animation_data:
                rig.animation_data_create()
                logger.info("%s created animation data", rig.name)

            # If action already exists check for fake user and then continue.
            if rig.animation_data.action:
                logger.info("%s already has an action assigned", rig.name)

                if not rig.animation_data.action.use_fake_user:
                    rig.animation_data.action.use_fake_user = True
                    logger.info("%s assigned existing action fake user", rig.name)
                exists.append(coll)
                continue

            # Create new action (reuse a same-named datablock if present).
            action_name_new = opsdata.gen_action_name(coll)
            try:
                action = bpy.data.actions[action_name_new]
            except KeyError:
                action = bpy.data.actions.new(action_name_new)
                logger.info("Created action: %s", action.name)
                created.append(action)
            else:
                logger.info("Action %s already exists. Will take that.", action.name)

            # Assign action.
            rig.animation_data.action = action
            logger.info("%s assigned action %s", rig.name, action.name)

            # Add fake user so the action survives file save without users.
            action.use_fake_user = True
            assigned.append(action)

        self.report(
            {"INFO"},
            "Actions: Created %s | Assigned %s | Exists %s | Failed %s"
            % (len(created), len(assigned), len(exists), len(failed)),
        )
        return {"FINISHED"}
|
||||
|
||||
|
||||
class AS_OT_setup_workspaces(bpy.types.Operator):
    # Strips the file down to only the "Animation" workspace.
    bl_idname = "as.setup_workspaces"
    bl_label = "Setup Workspace"
    bl_description = "Sets up the workspaces for the animation task"

    def execute(self, context: bpy.types.Context) -> Set[str]:

        # Remove non anim workspaces.
        # NOTE(review): this mutates bpy.data.workspaces while iterating
        # over it — appears to work in practice, but confirm against
        # Blender's API guarantees.
        for ws in bpy.data.workspaces:
            if ws.name != "Animation":
                bpy.ops.workspace.delete({"workspace": ws})

        self.report({"INFO"}, "Deleted non Animation workspaces")

        return {"FINISHED"}
|
||||
|
||||
|
||||
class AS_OT_load_latest_edit(bpy.types.Operator):
    """Load the newest edit movie into the VSE and shift it to Kitsu timing."""

    bl_idname = "as.load_latest_edit"
    bl_label = "Load edit"
    bl_description = (
        "Loads latest edit from shot_preview_folder "
        "Shifts edit so current shot starts at 3d_in metadata shot key from Kitsu"
    )

    @classmethod
    def can_load_edit(cls, context: bpy.types.Context) -> bool:
        """Check if shared dir and VSE area are available"""
        addon_prefs = prefs.addon_prefs_get(context)
        edit_export_path = Path(addon_prefs.edit_export_path)

        # Needs to be run in sequence editor area
        # TODO: temporarily create a VSE area if not available.
        area_override = None
        for area in bpy.context.screen.areas:
            if area.type == "SEQUENCE_EDITOR":
                area_override = area

        return bool(area_override and edit_export_path)

    @classmethod
    def poll(cls, context: bpy.types.Context) -> bool:
        return cls.can_load_edit(context)

    @classmethod
    def description(cls, context, properties):
        # Dynamic tooltip explaining why the operator may be disabled.
        if cls.can_load_edit(context):
            return "Load latest edit from shared folder"
        else:
            return "Shared folder not set, or VSE area not available in this workspace"

    def execute(self, context: bpy.types.Context) -> Set[str]:
        """Add the newest edit strip and align all strips with Kitsu frame data."""
        addon_prefs = prefs.addon_prefs_get(context)
        edit_export_path = Path(addon_prefs.edit_export_path)
        strip_channel = 1
        latest_file = self._get_latest_edit(context)
        if not latest_file:
            self.report(
                {"ERROR"}, f"Found no edit file in: {edit_export_path.as_posix()}"
            )
            # FIX: previously fell through and crashed on latest_file.as_posix().
            return {"CANCELLED"}
        strip_filepath = latest_file.as_posix()
        strip_frame_start = 101

        # Needs to be run in sequence editor area.
        area_override = None
        for area in bpy.context.screen.areas:
            if area.type == "SEQUENCE_EDITOR":
                area_override = area

        if not area_override:
            # FIX: message previously read "No sequence editor are found".
            self.report({"ERROR"}, "No sequence editor area found")
            return {"CANCELLED"}

        override = bpy.context.copy()
        override["area"] = area_override

        bpy.ops.sequencer.movie_strip_add(
            override,
            filepath=strip_filepath,
            relative_path=False,
            frame_start=strip_frame_start,
            channel=strip_channel,
            fit_method="FIT",
        )

        # Get sequence name.
        seqname = opsdata.get_sequence_from_file()
        if not seqname:
            self.report({"ERROR"}, "Failed to retrieve seqname from current file.")
            return {"CANCELLED"}

        # Get shotname.
        shotname = opsdata.get_shot_name_from_file()
        if not shotname:
            self.report({"ERROR"}, "Failed to retrieve shotname from current file.")
            return {"CANCELLED"}

        # Setup connector and get data from kitsu.
        connector = KitsuConnector(addon_prefs)
        project = Project.by_id(connector, addon_prefs.kitsu.project_id)
        sequence = project.get_sequence_by_name(connector, seqname)

        if not sequence:
            self.report({"ERROR"}, f"Failed to find {seqname} on kitsu.")
            return {"CANCELLED"}

        shot = project.get_shot_by_name(connector, sequence, shotname)

        if not shot:
            self.report({"ERROR"}, f"Failed to find shot {shotname} on kitsu.")
            return {"CANCELLED"}

        # Update shift frame range prop.
        frame_in = shot.data["frame_in"]
        frame_out = shot.data["frame_out"]
        frame_3d_in = shot.data["3d_in"]
        frame_3d_offset = frame_3d_in - 101

        if not frame_in:
            self.report(
                {"ERROR"}, f"On kitsu 'frame_in' is not defined for shot {shotname}."
            )
            return {"CANCELLED"}

        # Set sequence strip start kitsu data.
        for strip in context.scene.sequence_editor.sequences_all:
            strip.frame_start = -frame_in + (strip_frame_start * 2) + frame_3d_offset

        self.report({"INFO"}, f"Loaded latest edit: {latest_file.name}")

        return {"FINISHED"}

    def _get_latest_edit(self, context: bpy.types.Context) -> Optional[Path]:
        """Return the newest valid edit file in the export dir, or None."""
        addon_prefs = prefs.addon_prefs_get(context)

        edit_export_path = Path(addon_prefs.edit_export_path)

        files_list = [
            f
            for f in edit_export_path.iterdir()
            if f.is_file() and self._is_valid_edit_name(f.name)
        ]
        # Highest version first (names embed a zero-padded version number).
        files_list = sorted(files_list, reverse=True)

        # FIX: previously raised IndexError when no edit files were present.
        if not files_list:
            return None
        return files_list[0]

    def _is_valid_edit_name(self, filename: str) -> bool:
        """Check *filename* against the sf-edit-v### naming convention."""
        # FIX: escape the dot so e.g. "sf-edit-v001xmp4" no longer matches.
        pattern = r"sf-edit-v\d\d\d\.mp4"

        match = re.search(pattern, filename)
        if match:
            return True
        return False
|
||||
|
||||
|
||||
class AS_OT_import_camera(bpy.types.Operator):
    # Links the studio camera rig collection into the scene and creates a
    # library override for it.
    bl_idname = "as.import_camera"
    bl_label = "Import Camera"
    bl_description = "Imports camera rig and makes library override"

    @classmethod
    def poll(cls, context: bpy.types.Context) -> bool:
        # Requires a valid project root in the preferences and a saved file.
        addon_prefs = prefs.addon_prefs_get(context)
        return bool(addon_prefs.is_project_root_valid and bpy.data.filepath)

    def execute(self, context: bpy.types.Context) -> Set[str]:

        addon_prefs = prefs.addon_prefs_get(context)

        # Import camera rig and make override.
        camera_rig_path = addon_prefs.camera_rig_path
        if not camera_rig_path:
            self.report({"ERROR"}, "Failed to import camera rig")
            return {"CANCELLED"}

        cam_lib_coll = opsdata.import_data_from_lib(
            "collections",
            "CA-camera_rig",
            camera_rig_path,
        )
        opsdata.instance_coll_to_scene_and_override(context, cam_lib_coll)
        # (name, library) lookup: library None selects the local datablock
        # (the override copy), not the linked one.
        cam_coll = bpy.data.collections[cam_lib_coll.name, None]

        self.report({"INFO"}, f"Imported camera: {cam_coll.name}")
        return {"FINISHED"}
|
||||
|
||||
|
||||
class AS_OT_import_camera_action(bpy.types.Operator):
    """Import the matching camera action from the shot's previs file."""

    bl_idname = "as.import_camera_action"
    bl_label = "Import Camera Action"
    bl_description = (
        "Imports camera action of previs file that matches current shot and assigns it"
    )

    @classmethod
    def poll(cls, context: bpy.types.Context) -> bool:
        # Requires a valid project root in the preferences and a saved file.
        addon_prefs = prefs.addon_prefs_get(context)
        return bool(addon_prefs.is_project_root_valid and bpy.data.filepath)

    def execute(self, context: bpy.types.Context) -> Set[str]:

        try:
            # (name, library) lookup: library None selects the local datablock.
            cam_coll = bpy.data.collections["CA-camera_rig", None]
        except KeyError:
            self.report({"ERROR"}, f"Camera collection CA-camera_rig is not imported")
            # FIX: was {"CANCELELD"} — an invalid operator return value that
            # Blender rejects at runtime.
            return {"CANCELLED"}

        # Import camera action from previz file.

        # Get shotname and previs filepath.
        shotname = opsdata.get_shot_name_from_file()
        if not shotname:
            self.report({"ERROR"}, "Failed to retrieve shotname from current file.")
            return {"CANCELLED"}

        previs_path = opsdata.get_previs_file(context)
        if not previs_path:
            self.report({"ERROR"}, "Failed to find previz file")
            return {"CANCELLED"}

        # Check if cam action name exists in previs library.
        cam_action_name_new = opsdata.get_cam_action_name_from_lib(
            shotname, previs_path
        )
        if not cam_action_name_new:
            self.report(
                {"ERROR"},
                f"Camera action: {cam_action_name_new} not found in lib: {previs_path.name}",
            )
            return {"CANCELLED"}

        # Import cam action data block.
        cam_action = opsdata.import_data_from_lib(
            "actions", cam_action_name_new, previs_path, link=False
        )

        # Find rig to assing action to.
        rig = opsdata.find_rig(cam_coll)
        if not rig:
            self.report({"WARNING"}, f"{cam_coll.name} contains no rig.")
            return {"CANCELLED"}

        # Assign action.
        rig.animation_data.action = cam_action
        logger.info("%s assigned action %s", rig.name, cam_action.name)

        # Add fake user so the action survives file save without users.
        cam_action.use_fake_user = True

        # Ensure version suffix to action data bloc.
        opsdata.ensure_name_version_suffix(cam_action)

        self.report({"INFO"}, f"{rig.name} imported camera action: {cam_action.name}")
        return {"FINISHED"}
|
||||
|
||||
|
||||
class AS_OT_import_asset_actions(bpy.types.Operator):
    """Imports asset action of previs file that matches current shot and assigns it"""

    bl_idname = "as.import_asset_actions"
    bl_label = "Import Asset Actions"
    bl_description = (
        "For each found asset tries to find action in previs file. "
        "Imports it to current file, renames it, adds fake user and assigns it"
    )

    @classmethod
    def poll(cls, context: bpy.types.Context) -> bool:
        # Requires a valid project root in the preferences and a saved file.
        addon_prefs = prefs.addon_prefs_get(context)
        return bool(addon_prefs.is_project_root_valid and bpy.data.filepath)

    def execute(self, context: bpy.types.Context) -> Set[str]:

        # Bookkeeping for the final report.
        succeeded = []
        failed = []
        actions_imported = []
        renamed_actions = []

        # Get shotname and previs filepath.
        shotname = opsdata.get_shot_name_from_file()
        if not shotname:
            self.report({"ERROR"}, "Failed to retrieve shotname from current file.")
            return {"CANCELLED"}

        previs_path = opsdata.get_previs_file(context)
        if not previs_path:
            self.report({"ERROR"}, "Failed to find previz file")
            return {"CANCELLED"}

        # Check if cam action name exists in previs library.
        # Maps asset collection name -> candidate action names from previs.
        action_candidates: Dict[str, List[str]] = {}
        asset_colls = []

        # Browse the previs library without linking anything (data_to unused).
        with bpy.data.libraries.load(
            previs_path.as_posix(), relative=True, link=False
        ) as (
            data_from,
            data_to,
        ):

            for asset in asglobals.ACTION_ASSETS:

                # Check if asset is in current scene.
                try:
                    coll = bpy.data.collections[asset]
                except KeyError:
                    # can continue here if not in scene we
                    # cant load action anyway
                    continue
                else:
                    logger.info("Found asset in scene: %s", coll.name)
                    asset_colls.append(coll)

                # Find if actions exists for that asset in previs file.
                asset_name = opsdata.find_asset_name(asset)
                for action in data_from.actions:
                    if action.startswith(f"ANI-{asset_name}."):

                        # Create key if not existent yet.
                        if asset not in action_candidates:
                            action_candidates[asset] = []

                        # Append action to that asset.
                        action_candidates[asset].append(action)

        # Load and assign actions for asset colls.
        for coll in asset_colls:

            # Find rig.
            rig = opsdata.find_rig(coll)
            if not rig:
                logger.warning("%s contains no rig.", coll.name)
                continue

            # Check if action was found in previs file for that asset.
            if not coll.name in action_candidates:
                logger.warning("%s no action found in previs file", coll.name)
                continue
            else:
                # NOTE(review): `asset` here is the leftover value from the
                # loop above (always the last asset checked), not coll.name —
                # the log line likely names the wrong asset. Verify intent.
                logger.info(
                    "%s found actions in previs file: %s",
                    asset,
                    str(action_candidates[coll.name]),
                )

            # Check if multiple actions are in the prvis file for that asset.
            if len(action_candidates[coll.name]) > 1:
                logger.warning(
                    "%s Multiple actions found in previs file: %s",
                    coll.name,
                    str(action_candidates[coll.name]),
                )
                continue

            # Import action from previs file.
            actions = action_candidates[coll.name]
            action = opsdata.import_data_from_lib(
                "actions", actions[0], previs_path, link=False
            )
            if not action:
                continue

            actions_imported.append(action)

            # Create animation data if not existent.
            if not rig.animation_data:
                rig.animation_data_create()
                logger.info("%s created animation data", rig.name)

            # Assign action.
            rig.animation_data.action = action
            logger.info("%s assigned action %s", rig.name, action.name)

            # Add fake user so the action survives file save without users.
            action.use_fake_user = True

            # Rename actions.
            action_name_new = opsdata.gen_action_name(coll)
            try:
                action_existing = bpy.data.actions[action_name_new]
            except KeyError:
                # Action does not exists can rename.
                old_name = action.name
                action.name = action_name_new
                logger.info("Renamed action: %s to %s", old_name, action.name)
                renamed_actions.append(action)
            else:
                # Action name already exists in this scene.
                logger.info(
                    "Failed to rename action action %s to %s. Already exists",
                    action.name,
                    action_name_new,
                )
                continue

        self.report(
            {"INFO"},
            f"Found Assets: {len(asset_colls)} | Imported Actions: {len(actions_imported)} | Renamed Actions: {len(renamed_actions)}",
        )
        return {"FINISHED"}
|
||||
|
||||
|
||||
class AS_OT_import_multi_assets(bpy.types.Operator):
    """Import previs actions for every configured multi asset.

    For each asset in ``asglobals.MULTI_ASSETS`` present in the current
    scene, finds matching ``ANI-<asset>`` actions in the previs file,
    creates one override collection per action (reusing the shot-builder
    collection for the first one) and assigns the imported action to the
    collection's rig.
    """

    bl_idname = "as.import_multi_assets"
    bl_label = "Import Multi Assets"
    bl_description = (
        "For each found multi asset tries to find action in previs file. "
        "Imports it to current file, renames it, adds fake user and assigns it"
    )

    @classmethod
    def poll(cls, context: bpy.types.Context) -> bool:
        # Needs a valid project root and a saved file (shot name is derived from it).
        addon_prefs = prefs.addon_prefs_get(context)
        return bool(addon_prefs.is_project_root_valid and bpy.data.filepath)

    def execute(self, context: bpy.types.Context) -> Set[str]:
        actions_imported = []
        new_colls = []

        # Get shotname and previs filepath.
        shotname = opsdata.get_shot_name_from_file()
        if not shotname:
            self.report({"ERROR"}, "Failed to retrieve shotname from current file.")
            return {"CANCELLED"}

        previs_path = opsdata.get_previs_file(context)
        if not previs_path:
            self.report({"ERROR"}, "Failed to find previz file")
            return {"CANCELLED"}

        # Maps asset collection name -> candidate action names in the previs file.
        action_candidates: Dict[str, List[str]] = {}
        asset_colls: List[bpy.types.Collection] = []

        with bpy.data.libraries.load(
            previs_path.as_posix(), relative=True, link=False
        ) as (
            data_from,
            data_to,
        ):
            data_from_actions: List[str] = data_from.actions
            data_from_actions.sort()

            # Find all multi-asset actions.
            for asset in asglobals.MULTI_ASSETS:
                # Check if asset is in current scene.
                try:
                    coll = bpy.data.collections[asset]
                except KeyError:
                    # Not in scene: no point loading an action for it.
                    continue
                else:
                    logger.info("Found asset in scene: %s", coll.name)
                    asset_colls.append(coll)

                # Find if actions exist for that asset in the previs file.
                asset_name = opsdata.find_asset_name(asset)
                for action in data_from_actions:
                    if action.startswith(f"ANI-{asset_name}"):
                        # Create key if not existent yet.
                        if asset not in action_candidates:
                            action_candidates[asset] = []

                        # Append action to that asset.
                        action_candidates[asset].append(action)

        # Load and assign actions for asset colls.
        color_tag: str = ""
        for coll in asset_colls:

            # Check if an action was found in the previs file for that asset.
            if coll.name not in action_candidates:
                logger.warning("%s no action found in previs file", coll.name)
                continue
            else:
                # Fix: log the current collection, not the stale `asset`
                # loop variable left over from the library scan above.
                logger.info(
                    "%s found actions in previs file: %s",
                    coll.name,
                    str(action_candidates[coll.name]),
                )

            # Create a duplicate collection for each candidate action.
            for idx, action_candidate in enumerate(action_candidates[coll.name]):

                # First index uses the existing collection that was already
                # created by the shot builder.
                if idx == 0:
                    new_coll = bpy.data.collections[asset, None]
                    logger.info("First index will use existing coll: %s", new_coll.name)
                    color_tag = new_coll.color_tag  # Take color from first collection.
                else:
                    ref_coll = opsdata.get_ref_coll(coll)
                    new_coll = ref_coll.override_hierarchy_create(
                        context.scene, context.view_layer, reference=coll
                    )
                    new_coll.color_tag = color_tag
                    logger.info("Created new override collection: %s", new_coll.name)
                    new_colls.append(new_coll)

                # Find rig of new coll.
                rig = opsdata.find_rig(new_coll)
                if not rig:
                    logger.warning("%s contains no rig.", coll.name)
                    continue

                # Import action.
                action = opsdata.import_data_from_lib(
                    "actions", action_candidate, previs_path, link=False
                )
                if not action:
                    continue

                actions_imported.append(action)

                # Create animation data if not existent.
                if not rig.animation_data:
                    rig.animation_data_create()
                    logger.info("%s created animation data", rig.name)

                # Assign action.
                rig.animation_data.action = action
                logger.info("%s assigned action %s", rig.name, action.name)

        self.report(
            {"INFO"},
            f"Found Assets: {len(asset_colls)} | Imported Actions: {len(actions_imported)} | New collections: {len(new_colls)}",
        )
        return {"FINISHED"}
|
||||
|
||||
|
||||
class AS_OT_shift_anim(bpy.types.Operator):
    """Shift keyframes (and noise-modifier frame values) of found rigs.

    Collects the camera rig and action assets (or multi-asset overrides when
    ``multi_assets`` is set) and shifts all their keyframes, handles and
    NOISE modifier offsets/ranges by the computed frame offset.
    """

    bl_idname = "as.shift_anim"
    bl_label = "Shift Anim"
    bl_description = (
        "Shifts the animation of found assets by number of frames. "
        "It also shifts the camera animation as well as its modifier values"
    )

    # When True, operate on MULTI_ASSETS override collections instead of
    # the camera rig + ACTION_ASSETS.
    multi_assets: bpy.props.BoolProperty(name="Do Multi Assets")

    def execute(self, context: bpy.types.Context) -> Set[str]:
        # Define the frame offset by:
        # Subtracting the layout cut in frame (to set the 0)
        # Adding 101 (the animation start for a shot)
        # For example, layout frame 520 becomes frames_offset -520 + 101 = -419.
        frames_offset = -context.scene.anim_setup.layout_cut_in + 101
        rigs: List[bpy.types.Armature] = []

        if not self.multi_assets:
            # Get cam rig object.
            try:
                rig = bpy.data.objects["RIG-camera", None]
            except KeyError:
                logger.warning("Failed to find camera object 'RIG-camera'")
            else:
                rigs.append(rig)

            # Find assets.
            for asset in asglobals.ACTION_ASSETS:
                # Check if asset is in current scene.
                try:
                    coll = bpy.data.collections[asset]
                except KeyError:
                    # Not in scene: nothing to shift.
                    continue
                else:
                    logger.info("Found asset in scene: %s", coll.name)
                    # Find rig.
                    rig = opsdata.find_rig(coll)
                    if not rig:
                        logger.warning("%s contains no rig.", coll.name)
                        continue
                    rigs.append(rig)
        else:
            for asset in asglobals.MULTI_ASSETS:
                for coll in bpy.data.collections:
                    if not opsdata.is_item_lib_override(coll):
                        continue

                    if not coll.name.startswith(asset):
                        continue

                    logger.info("Found asset in scene: %s", coll.name)
                    # Find rig.
                    rig = opsdata.find_rig(coll)
                    if not rig:
                        logger.warning("%s contains no rig.", coll.name)
                        continue
                    rigs.append(rig)

        if not rigs:
            self.report(
                {"ERROR"}, "Failed to find any assets or cameras to shift animation."
            )
            return {"CANCELLED"}

        for rig in rigs:
            # Fix: guard against rigs without animation data or an assigned
            # action — the original code raised AttributeError here.
            if not rig.animation_data or not rig.animation_data.action:
                logger.warning("%s has no action, skipping shift.", rig.name)
                continue

            for fcurve in rig.animation_data.action.fcurves:
                # Shift all keyframes.
                for point in fcurve.keyframe_points:
                    point.co.x += frames_offset
                    # Don't forget the keyframe's handles.
                    point.handle_left.x += frames_offset
                    point.handle_right.x += frames_offset

                # Shift all noise modifier values.
                for m in fcurve.modifiers:
                    if m.type != "NOISE":
                        continue

                    m.offset += frames_offset

                    if m.use_restricted_range:
                        m.frame_start += frames_offset
                        m.frame_end += frames_offset

                    logger.info(
                        "%s shifted %s modifier values by %i frames",
                        m.id_data.name,
                        m.type.lower(),
                        frames_offset,
                    )

            logger.info(
                "%s: %s shifted all keyframes by %i frames",
                rig.name,
                rig.animation_data.action.name,
                frames_offset,
            )

        self.report(
            {"INFO"}, f"Shifted animation of {len(rigs)} actions by {frames_offset}"
        )
        return {"FINISHED"}
|
||||
|
||||
|
||||
class AS_OT_apply_additional_settings(bpy.types.Operator):
    # Operator that disables sequencer proxies and forces full-resolution
    # display in the first Sequence Editor area found.

    bl_idname = "as.apply_additional_settings"
    bl_label = "Apply Additional Settings"
    bl_description = (
        "Apply some additional settings that are important " "for animation scenes"
    )

    @classmethod
    def poll(cls, context: bpy.types.Context) -> bool:
        # Only available when a Sequence Editor area exists in any window.
        sqe_area = cls._get_sqe_area(context)
        return bool(sqe_area)

    def execute(self, context: bpy.types.Context) -> Set[str]:

        sqe_area = self._get_sqe_area(context)

        # Disable proxies and display full resolution in the sequencer.
        sqe_area.spaces.active.use_proxies = False
        sqe_area.spaces.active.proxy_render_size = "PROXY_100"

        self.report({"INFO"}, "Set: use_proxies | proxy_render_size")
        return {"FINISHED"}

    @classmethod
    def _get_sqe_area(cls, context: bpy.types.Context):
        # Scan every window's screen and return the first Sequence Editor
        # area, or None when no such area is open.
        for window in context.window_manager.windows:
            screen = window.screen

            for area in screen.areas:
                if area.type == "SEQUENCE_EDITOR":
                    return area

        return None
|
||||
|
||||
|
||||
class AS_OT_exclude_colls(bpy.types.Operator):
    """Excludes Collections that are not needed for animation"""

    bl_idname = "as.exclude_colls"
    bl_label = "Exclude Collections"
    bl_description = (
        "Exclude some collections by name that are not needed in animation scenes"
    )

    def execute(self, context: bpy.types.Context) -> Set[str]:
        # All layer collections of the active view layer, flattened.
        view_layer_colls = opsdata.get_all_view_layer_colls(context)

        excluded = []
        for coll_name in asglobals.HIDE_COLLS:
            # Find view layer collection; if the same collection is linked in
            # 2 different colls in the same scene, these are 2 different view
            # layer colls — we need to grab all of them.
            valid_view_layer_colls = [
                vc for vc in view_layer_colls if vc.name == coll_name
            ]

            if not valid_view_layer_colls:
                logger.info("No view layer collections named: %s", coll_name)
                continue

            for view_layer_coll in valid_view_layer_colls:
                view_layer_coll.exclude = True
                logger.info("Excluded view layer collection: %s", view_layer_coll.name)
                excluded.append(view_layer_coll)

        # Fix: dropped the redundant list() wrapper around the list
        # comprehension; the rendered message is identical.
        self.report(
            {"INFO"}, f"Excluded Collections: {[v.name for v in excluded]}"
        )
        return {"FINISHED"}
|
||||
|
||||
|
||||
# ---------REGISTER ----------.
|
||||
|
||||
# Operator classes registered by this module (registration order is not
# significant; unregistration happens in reverse).
classes = [
    AS_OT_create_actions,
    AS_OT_setup_workspaces,
    AS_OT_load_latest_edit,
    AS_OT_import_camera,
    AS_OT_import_camera_action,
    AS_OT_shift_anim,
    AS_OT_apply_additional_settings,
    AS_OT_import_asset_actions,
    AS_OT_exclude_colls,
    AS_OT_import_multi_assets,
]
|
||||
|
||||
|
||||
def register():
    # Register all operator classes of this module with Blender.
    for cls in classes:
        bpy.utils.register_class(cls)
|
||||
|
||||
|
||||
def unregister():
    # Unregister in reverse order so dependent classes unload cleanly.
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)
|
@ -1,344 +0,0 @@
|
||||
import re
|
||||
from pathlib import Path
|
||||
from typing import Optional, Dict, Union, Any, List, Generator
|
||||
import bpy
|
||||
from bpy.types import Key
|
||||
|
||||
from . import prefs
|
||||
|
||||
|
||||
from .log import LoggerFactory
|
||||
|
||||
logger = LoggerFactory.getLogger()
|
||||
|
||||
|
||||
def get_shot_name_from_file() -> Optional[str]:
    """Derive the shot name from the current blend file name.

    Example: "110_0030_A.anim.blend" -> "110_0030_A". Returns None when the
    file has not been saved yet.
    """
    filepath = bpy.data.filepath
    if not filepath:
        return None

    filename = Path(filepath).name
    return filename.split(".")[0]
|
||||
|
||||
|
||||
def get_sequence_from_file() -> Optional[str]:
    """Derive the sequence folder name from the current blend file path.

    Example: ".../shots/110_rextoria/110_0010_A/110_0010_A.anim.blend"
    -> "110_rextoria". Returns None when the file has not been saved yet.
    """
    filepath = bpy.data.filepath
    if not filepath:
        return None

    # The sequence directory is two levels above the blend file.
    return Path(filepath).parents[1].name
|
||||
|
||||
|
||||
def get_seqeunce_short_from_shot_name(shotname: str) -> str:
    """Return the sequence prefix of a shot name, e.g. "110_0030_A" -> "110"."""
    prefix, _, _ = shotname.partition("_")
    return prefix
|
||||
|
||||
|
||||
def get_cam_action_name_from_shot(shotname: str) -> str:
    """Build the camera action name for a shot, e.g. "ANI-camera.070_0010_A"."""
    return "ANI-camera." + shotname
|
||||
|
||||
|
||||
def get_cam_action_name_from_lib(shotname: str, libpath: Path) -> Optional[str]:
    # Return the best camera action name for `shotname` found in the library
    # at `libpath`, or None if no action matches. Scans the library's action
    # names without keeping any data linked.

    valid_actions = []

    with bpy.data.libraries.load(libpath.as_posix(), relative=True) as (
        data_from,
        data_to,
    ):

        for action in data_from.actions:
            if action.startswith(get_cam_action_name_from_shot(shotname)):
                valid_actions.append(action)

    if not valid_actions:
        return None

    # Highest name in reverse sort order — presumably the latest version
    # suffix wins; TODO confirm the versioning convention.
    return sorted(valid_actions, reverse=True)[0]
|
||||
|
||||
|
||||
def get_previs_file(context: bpy.types.Context) -> Optional[Path]:
    # Locate the previs .blend file for the current shot.
    #
    # A few shots are special-cased because their previs was split into
    # separate files; otherwise the first sequence-prefixed .blend in the
    # previs root with no extra name qualifier is returned. Returns None
    # when the shot name or a matching file cannot be determined.

    addon_prefs = prefs.addon_prefs_get(context)

    shotname = get_shot_name_from_file()
    if not shotname:
        return None

    seqname = get_seqeunce_short_from_shot_name(shotname)
    previs_path = Path(addon_prefs.previs_root_path)

    # Catch custom cases when previs files are split up for specific shots.
    if shotname == "020_0010_A":
        return previs_path / "020_grove.020_0010_A.blend"

    elif shotname == "020_0020_A" or shotname == "020_0050_A":
        return previs_path / "020_grove.shove.blend"

    elif shotname in ["020_0060_A", "020_0070_A"]:
        return previs_path / "020_grove.crowdcamping_alt.blend"

    elif shotname in ["020_0160_A", "020_0170_A", "020_0173_A", "020_0176_A"]:
        return previs_path / "020_grove.weenie_alt.blend"

    else:
        for f in previs_path.iterdir():
            if f.is_file() and f.suffix == ".blend" and f.name.startswith(seqname):
                # Skip files with extra qualifiers (e.g. "020_grove.alt.blend"):
                # only plain "<seq>….blend" names are the main previs file.
                if len(f.name.split(".")) > 2:
                    continue
                return f
    return None
|
||||
|
||||
|
||||
def traverse_collection_tree(
    collection: bpy.types.Collection,
) -> Generator[bpy.types.Collection, None, None]:
    """Yield *collection* and all collections nested below it, depth-first."""
    yield collection
    for sub_coll in collection.children:
        yield from traverse_collection_tree(sub_coll)
|
||||
|
||||
|
||||
def import_data_from_lib(
    data_category: str,
    data_name: str,
    libpath: Path,
    link: bool = True,
):
    """Link or append a datablock from another blend file.

    Args:
        data_category: Name of the ``bpy.data`` collection, e.g. "actions".
        data_name: Name of the datablock inside the library file.
        libpath: Path to the library .blend file.
        link: Link (True) or append (False) the datablock.

    Returns:
        The imported datablock, or None when it does not exist in the
        library or a datablock of that name already exists locally.
    """
    noun = "Linked" if link else "Appended"

    # Fix: replaced the eval()-based dynamic attribute access with
    # getattr()/indexing — equivalent behavior, but no code execution from
    # interpolated strings and no breakage on names containing quotes.
    with bpy.data.libraries.load(libpath.as_posix(), relative=True, link=link) as (
        data_from,
        data_to,
    ):

        if data_name not in getattr(data_from, data_category):
            logger.error(
                "Failed to import %s %s from %s. Doesn't exist in file.",
                data_category,
                data_name,
                libpath.as_posix(),
            )
            return None

        # Check if datablock with same name already exists in blend file.
        if data_name in getattr(bpy.data, data_category):
            logger.info(
                "%s already in bpy.data.%s of this blendfile.", data_name, data_category
            )
            return None

        # Append data block.
        getattr(data_to, data_category).append(data_name)
        logger.info(
            "%s: %s from library: %s",
            noun,
            data_name,
            libpath.as_posix(),
        )

    if link:
        # Linked datablocks are keyed by (name, library filepath).
        return getattr(bpy.data, data_category)[
            data_name, bpy.path.relpath(libpath.as_posix())
        ]

    return getattr(bpy.data, data_category)[data_name]
|
||||
|
||||
|
||||
def instance_coll_to_scene_and_override(
    context: bpy.types.Context, source_collection: bpy.types.Collection
) -> bpy.types.Collection:
    # Instance `source_collection` into the active scene via an empty and
    # turn the instance into a library override; returns the resulting
    # local override collection (keyed by (name, None) — i.e. no library).
    instance_obj = _create_collection_instance(source_collection)
    _make_library_override(context, instance_obj)
    return bpy.data.collections[source_collection.name, None]
|
||||
|
||||
|
||||
def _create_collection_instance(
    source_collection: bpy.types.Collection,
) -> bpy.types.Object:
    # Create an empty object that instances `source_collection` and link it
    # under the active layer collection; returns the instance object.

    # Name has no effect on how the overridden library collection ends up;
    # use an empty to instance the source collection.
    instance_obj = bpy.data.objects.new(name="", object_data=None)
    instance_obj.instance_collection = source_collection
    instance_obj.instance_type = "COLLECTION"

    parent_collection = bpy.context.view_layer.active_layer_collection
    parent_collection.collection.objects.link(instance_obj)

    logger.info(
        "Instanced collection: %s as: %s",
        source_collection.name,
        instance_obj.name,
    )

    return instance_obj
|
||||
|
||||
|
||||
def _make_library_override(
    context: bpy.types.Context,
    instance_obj: bpy.types.Object,
) -> None:
    # Run the make-override-library operator on `instance_obj`.
    # NOTE: the operator replaces/removes the instance object, so its name
    # is captured first for logging.
    log_name = instance_obj.name
    # Deselect all.
    bpy.ops.object.select_all(action="DESELECT")

    # Operator needs an active object (the collection instance).
    context.view_layer.objects.active = instance_obj
    instance_obj.select_set(True)

    # Add library override.
    bpy.ops.object.make_override_library()

    logger.info(
        "%s make library override.",
        log_name,
    )
|
||||
|
||||
|
||||
def find_asset_name(name: str) -> str:
    """Return the bare asset name, e.g. "CH-rex" -> "rex", "CH-rex_rig" -> "rex"."""
    base = name[:-4] if name.endswith("_rig") else name
    # Keep only the part after the type prefix dash (e.g. "CH-").
    return base.rsplit("-", 1)[-1]
|
||||
|
||||
|
||||
def find_rig(coll: bpy.types.Collection) -> Optional[bpy.types.Armature]:
    """Return the single "RIG"-prefixed armature object inside *coll*.

    Returns None when no rig is found, or when multiple candidates exist
    (ambiguity is logged as an error).
    """
    # Fix: removed the unused local `coll_suffix = find_asset_name(coll.name)`
    # that the original computed but never read.
    valid_rigs = []

    for obj in coll.all_objects:
        # Default rig name: 'RIG-rex' / 'RIG-Rex'.
        if obj.type != "ARMATURE":
            continue

        if not obj.name.startswith("RIG"):
            continue

        valid_rigs.append(obj)

    if not valid_rigs:
        return None

    elif len(valid_rigs) == 1:
        logger.info("Found rig: %s", valid_rigs[0].name)
        return valid_rigs[0]
    else:
        logger.error("%s found multiple rigs %s", coll.name, str(valid_rigs))
        return None
|
||||
|
||||
|
||||
def ensure_name_version_suffix(datablock: Any) -> Any:
    """Append ".v001" to datablock.name unless it already carries a vNNN tag."""
    has_version = re.search(r"v\d\d\d", datablock.name) is not None
    if not has_version:
        datablock.name = f"{datablock.name}.v001"
    return datablock
|
||||
|
||||
|
||||
def get_valid_collections(context: bpy.types.Context) -> List[bpy.types.Collection]:
    """Return the scene's root collections whose name starts with "CH-" or "PR-"."""
    valid_prefixes = ("CH-", "PR-")
    return [
        coll
        for coll in context.scene.collection.children
        if coll.name[:3] in valid_prefixes
    ]
|
||||
|
||||
|
||||
def is_multi_asset(asset_name: str) -> bool:
    """Return True for assets that appear multiple times in a shot.

    "thorn*"-prefixed names and the known multi assets (sprite, snail,
    spider, case-insensitive) count as multi assets.
    """
    if asset_name.startswith("thorn"):
        return True
    return asset_name.lower() in ("sprite", "snail", "spider")
|
||||
|
||||
|
||||
def gen_action_name(coll: bpy.types.Collection):
    """Generate the canonical action name for *coll*'s asset in the current shot.

    Pattern: "ANI-<asset>.<shot>.v001"; multi assets get an "_A" variant
    letter after the asset name.
    """
    prefix = "ANI"
    version = "v001"
    shot_name = get_shot_name_from_file()
    asset_name = find_asset_name(coll.name).lower().replace(".", "_")

    variant = "_A" if is_multi_asset(asset_name) else ""
    return f"{prefix}-{asset_name}{variant}.{shot_name}.{version}"
|
||||
|
||||
|
||||
def set_layer_coll_exlcude(
    layer_collections: List[bpy.types.LayerCollection], exclude: bool
) -> None:
    """Set the exclude flag on each layer collection, logging actual changes only."""
    noun = "Excluded" if exclude else "Included"

    for lcoll in layer_collections:
        # Skip collections already in the requested state (no log entry).
        if lcoll.exclude == exclude:
            continue

        lcoll.exclude = exclude
        logger.info("%s %s", noun, lcoll.name)
|
||||
|
||||
|
||||
def get_all_view_layer_colls(
    context: bpy.types.Context,
) -> List[bpy.types.LayerCollection]:
    """Return every layer collection of the active view layer, flattened."""
    root = context.view_layer.layer_collection
    return list(traverse_collection_tree(root))
|
||||
|
||||
|
||||
def get_ref_coll(coll: bpy.types.Collection) -> bpy.types.Collection:
    """Return the library reference of an override collection, else *coll* itself."""
    override = coll.override_library
    if override:
        return override.reference
    return coll
|
||||
|
||||
|
||||
def is_item_local(
    item: Union[bpy.types.Collection, bpy.types.Object, bpy.types.Camera]
) -> bool:
    """True for datablocks local to this blend file (no library, no override)."""
    return not item.override_library and not item.library
|
||||
|
||||
|
||||
def is_item_lib_override(
    item: Union[bpy.types.Collection, bpy.types.Object, bpy.types.Camera]
) -> bool:
    """True for a local override of a datablock that comes from a library file."""
    return bool(item.override_library) and not item.library
|
||||
|
||||
|
||||
def is_item_lib_source(
    item: Union[bpy.types.Collection, bpy.types.Object, bpy.types.Camera]
) -> bool:
    """True for a datablock linked straight from a library file (not overridden)."""
    return not item.override_library and bool(item.library)
|
@ -1,176 +0,0 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Union, Optional, Any, Dict, Set
|
||||
|
||||
import bpy
|
||||
|
||||
from .kitsu import KitsuException
|
||||
from . import asglobals
|
||||
|
||||
|
||||
class KitsuPreferences(bpy.types.PropertyGroup):
    """Connection settings for the Kitsu production tracker."""

    backend: bpy.props.StringProperty(  # type: ignore
        name="Server URL",
        description="Kitsu server address",
        default="https://kitsu.blender.cloud/api",
    )

    email: bpy.props.StringProperty(  # type: ignore
        name="Email",
        description="Email to connect to Kitsu",
    )

    password: bpy.props.StringProperty(  # type: ignore
        name="Password",
        description="Password to connect to Kitsu",
        subtype="PASSWORD",
    )

    project_id: bpy.props.StringProperty(  # type: ignore
        name="Project ID",
        description="Server Id that refers to the last active project",
        default=asglobals.PROJECT_ID,
        options={"HIDDEN", "SKIP_SAVE"},
    )

    def draw(self, layout: bpy.types.UILayout, context: bpy.types.Context) -> None:
        # Drawn inside the add-on preferences panel (called by the
        # enclosing AddonPreferences draw()).
        box = layout.box()
        box.label(text="Kitsu")
        box.prop(self, "backend")
        box.prop(self, "email")
        box.prop(self, "password")
        box.prop(self, "project_id")

    def _validate(self):
        # Raise when any required connection field is missing.
        if not (self.backend and self.email and self.password and self.project_id):
            raise KitsuException(
                "Kitsu connector has not been configured in the add-on preferences"
            )
|
||||
|
||||
|
||||
class AS_AddonPreferences(bpy.types.AddonPreferences):
    """Add-on preferences: project paths plus embedded Kitsu settings.

    Exposes validated path helpers (project root, previs root, edit export
    dir, camera rig) that return None when unset or nonexistent.
    """

    bl_idname = __package__

    project_root: bpy.props.StringProperty(  # type: ignore
        name="Project Root",
        default="",
        options={"HIDDEN", "SKIP_SAVE"},
        subtype="DIR_PATH",
    )
    edit_export_dir: bpy.props.StringProperty(  # type: ignore
        name="Edit Export Directory",
        default="",
        options={"HIDDEN", "SKIP_SAVE"},
        subtype="DIR_PATH",
    )

    kitsu: bpy.props.PointerProperty(  # type: ignore
        name="Kitsu Preferences", type=KitsuPreferences
    )

    def draw(self, context: bpy.types.Context) -> None:
        layout = self.layout
        box = layout.box()
        box.row().prop(self, "project_root")

        if not self.project_root:
            row = box.row()
            row.label(text="Please specify the project root directory.", icon="ERROR")

        # Relative ("//") paths only resolve once the file is saved.
        if not bpy.data.filepath and self.project_root.startswith("//"):
            row = box.row()
            row.label(
                text="In order to use a relative path as root cache directory the current file needs to be saved.",
                icon="ERROR",
            )

        box.row().prop(self, "edit_export_dir")

        if not self.edit_export_dir:
            row = box.row()
            # Fix: corrected the "edit edxport" typo in the user-facing label.
            row.label(text="Please specify the edit export directory.", icon="ERROR")

        if not bpy.data.filepath and self.edit_export_dir.startswith("//"):
            row = box.row()
            row.label(
                text="In order to use a relative path as edit export directory the current file needs to be saved.",
                icon="ERROR",
            )

        self.kitsu.draw(layout, context)

    @property
    def project_root_path(self) -> Optional[Path]:
        """Absolute project root as a Path, or None when invalid."""
        if not self.is_project_root_valid:
            return None
        return Path(os.path.abspath(bpy.path.abspath(self.project_root)))

    @property
    def is_project_root_valid(self) -> bool:
        """True when a project root is set and resolvable right now."""
        # An empty setting is invalid.
        if not self.project_root:
            return False

        # A relative path cannot resolve before the file is saved.
        if not bpy.data.filepath and self.project_root.startswith("//"):
            return False

        return True

    @property
    def is_editorial_valid(self) -> bool:
        """True when the edit export directory is set and exists on disk."""
        if not self.edit_export_dir:
            return False

        return Path(self.edit_export_dir).exists()

    @property
    def edit_export_path(self) -> Optional[Path]:
        """Edit export directory as a Path, or None when invalid."""
        if not self.is_editorial_valid:
            return None

        return Path(self.edit_export_dir)

    @property
    def previs_root_path(self) -> Optional[Path]:
        """"<project_root>/previz" directory, or None when missing."""
        if not self.is_project_root_valid:
            return None

        previs_path = self.project_root_path / "previz"

        if not previs_path.exists():
            return None

        return previs_path

    @property
    def camera_rig_path(self) -> Optional[Path]:
        """Path of the shared camera rig blend file, or None when missing."""
        if not self.is_project_root_valid:
            return None

        camera_rig_path = self.project_root_path / "pro/lib/cam/camera_rig.blend"

        if not camera_rig_path.exists():
            return None

        return camera_rig_path
|
||||
|
||||
|
||||
def addon_prefs_get(context: bpy.types.Context) -> bpy.types.AddonPreferences:
    """Shortcut to get this add-on's preferences."""
    # Fix: look up the preferences under __package__ instead of the
    # hard-coded "anim_setup" key — AS_AddonPreferences registers with
    # bl_idname = __package__, so the hard-coded name breaks as soon as
    # the add-on directory is renamed or moved (as this change does).
    return context.preferences.addons[__package__].preferences
|
||||
|
||||
|
||||
# ---------REGISTER ----------.
|
||||
|
||||
# Classes registered by this module; AS_AddonPreferences depends on
# KitsuPreferences, so KitsuPreferences must come first.
classes = [KitsuPreferences, AS_AddonPreferences]
|
||||
|
||||
|
||||
def register():
    # Register preference classes with Blender.
    for cls in classes:
        bpy.utils.register_class(cls)
|
||||
|
||||
|
||||
def unregister():
    # Unregister in reverse order so dependent classes unload cleanly.
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)
|
@ -1,34 +0,0 @@
|
||||
from typing import List, Any, Generator, Optional
|
||||
|
||||
import bpy
|
||||
|
||||
|
||||
class CM_property_group_scene(bpy.types.PropertyGroup):
    """Scene-level properties of the add-on (attached as Scene.anim_setup)."""

    # Frame of the shot's camera marker in the layout file; used to compute
    # the shift offset for AS_OT_shift_anim.
    layout_cut_in: bpy.props.IntProperty(
        name="Layout Cut In",
        description="Frame where the camera marker is set for the shot, in the layout file",
        default=0,
        step=1,
    )
|
||||
|
||||
|
||||
# ---------REGISTER ----------.
|
||||
|
||||
# Property group classes registered by this module.
classes: List[Any] = [
    CM_property_group_scene,
]
|
||||
|
||||
|
||||
def register():
    # Register property group classes with Blender.

    for cls in classes:
        bpy.utils.register_class(cls)

    # Scene Properties: expose the group as Scene.anim_setup.
    bpy.types.Scene.anim_setup = bpy.props.PointerProperty(type=CM_property_group_scene)
|
||||
|
||||
|
||||
def unregister():
    # Unregister in reverse order so dependent classes unload cleanly.
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)
|
134
anim_setup/ui.py
134
anim_setup/ui.py
@ -1,134 +0,0 @@
|
||||
import bpy
|
||||
|
||||
from . import opsdata
|
||||
|
||||
from .ops import (
|
||||
AS_OT_create_actions,
|
||||
AS_OT_setup_workspaces,
|
||||
AS_OT_load_latest_edit,
|
||||
AS_OT_import_camera,
|
||||
AS_OT_import_camera_action,
|
||||
AS_OT_shift_anim,
|
||||
AS_OT_apply_additional_settings,
|
||||
AS_OT_import_asset_actions,
|
||||
AS_OT_exclude_colls,
|
||||
AS_OT_import_multi_assets
|
||||
)
|
||||
|
||||
|
||||
class AS_PT_view3d_general(bpy.types.Panel):
    """
    Animation Setup general operators.
    """

    bl_category = "Anim Setup"
    bl_label = "General"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"
    bl_order = 10

    def draw(self, context: bpy.types.Context) -> None:
        # Fix: removed the unused `valid_colls = opsdata.get_valid_collections(...)`
        # local — its value was never read and the call has no side effects.
        layout = self.layout
        col = layout.column(align=True)

        # Workspace.
        col.operator(AS_OT_setup_workspaces.bl_idname)

        # Load edit.
        col.operator(AS_OT_load_latest_edit.bl_idname)
|
||||
|
||||
|
||||
|
||||
class AS_PT_view3d_animation_and_actions(bpy.types.Panel):
    """
    Animation Setup main operators and properties.
    """

    bl_category = "Anim Setup"
    bl_label = "Animation and Actions"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"
    bl_order = 12

    def draw(self, context: bpy.types.Context) -> None:

        layout = self.layout
        layout.use_property_split = True
        layout.use_property_decorate = False  # No animation.

        layout.label(text=f"Previs file: {opsdata.get_previs_file(context)}")

        col = layout.column(align=True)

        # Import camera action.
        col.operator(AS_OT_import_camera_action.bl_idname)

        # Import actions.
        # Fix: dropped the f-string prefixes on literals with no
        # placeholders (rendered text is unchanged).
        col.operator(
            AS_OT_import_asset_actions.bl_idname, text="Import Char Actions"
        )

        col.operator(
            AS_OT_import_multi_assets.bl_idname, text="Import Multi Asset Actions"
        )

        col.separator()
        col = layout.column()

        # Shift animation.
        col.prop(context.scene.anim_setup, "layout_cut_in")
        col.separator()
        split = col.split(factor=0.5, align=True)
        split.operator(AS_OT_shift_anim.bl_idname, text="Shift Char/Cam")
        split.operator(AS_OT_shift_anim.bl_idname, text="Shift Multi").multi_assets = True

        col.separator()

        # Create actions.
        valid_collections_count = len(opsdata.get_valid_collections(context))
        row = col.row(align=True)
        row.operator(
            AS_OT_create_actions.bl_idname, text=f"Create {valid_collections_count} actions"
        )
|
||||
|
||||
|
||||
class AS_PT_view3d_scene(bpy.types.Panel):
    """
    Animation Setup scene operators.
    """

    bl_category = "Anim Setup"
    bl_label = "Scene"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"
    bl_order = 13

    def draw(self, context: bpy.types.Context) -> None:
        layout = self.layout

        # Exclude collections.
        exclude_row = layout.row(align=True)
        exclude_row.operator(AS_OT_exclude_colls.bl_idname, text="Exclude Collections")
|
||||
|
||||
|
||||
# ---------REGISTER ----------.
|
||||
|
||||
# Panel classes registered by this module, in display order.
classes = [
    AS_PT_view3d_general,
    AS_PT_view3d_animation_and_actions,
    AS_PT_view3d_scene,
]
|
||||
|
||||
|
||||
def register():
    # Register all panel classes with Blender.
    for cls in classes:
        bpy.utils.register_class(cls)
|
||||
|
||||
|
||||
def unregister():
    # Unregister in reverse order so dependent classes unload cleanly.
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)
|
Loading…
Reference in New Issue
Block a user