Move Anim_Setup module into Blender_Kitsu #5

112  anim_setup/.gitignore  vendored
@@ -1,112 +0,0 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# dotenv
.env

# virtualenv
.venv
.venv*
venv/
ENV/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/

# IDE settings
.vscode/

# utility bat files:
*jump_in_venv.bat

# local tests
tests/local*
@@ -1,24 +0,0 @@
# anim-setup
anim-setup is a Blender Add-on that automates the setup of animation scenes for the Sprite-Fright project.
## Installation
Download or clone this repository.
In the root project folder you will find the 'anim_setup' folder. Place this folder in your Blender addons directory or create a symlink to it.

After installation, configure the add-on in its add-on preferences.

## Features
The add-on relies on the correct naming of asset and camera actions in the corresponding previs file of the shot; a short sketch of the expected names follows this file.
Check the <a href="https://www.notion.so/Animation-Setup-Checklist-ba4d044ec2354b8baae2b3472b757569">Animation Setup Checklist</a>.

Operators of the add-on:
- Setup Workspace for animation
- Load latest edit from the edit export directory
- Import the camera action from the previs file
- Import actions for found assets from the previs file
- Shift the animation of camera and asset actions to start at the layout cut-in
- Create missing actions for found assets in the scene

## Development
In the project root you will find a `pyproject.toml` and a `poetry.lock` file.
With `poetry` you can easily generate a virtual env for the project, which should get you set up quickly.
Basic usage: https://python-poetry.org/docs/basic-usage/
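A minimal sketch of the action-naming convention the operators look for, derived from `opsdata.gen_action_name` and `opsdata.get_cam_action_name_from_shot` further down in this diff; the shot and asset names below are illustrative only, not taken from a real production file:

```python
# Illustrative only: the shot/asset names are made up, but the patterns
# mirror opsdata.gen_action_name() and opsdata.get_cam_action_name_from_shot()
# shown later in this diff.
shot_name = "110_0030_A"   # e.g. taken from a file named 110_0030_A.anim.blend
asset_name = "rex"         # derived from a collection named "CH-rex"

camera_action = f"ANI-camera.{shot_name}"             # "ANI-camera.110_0030_A"
asset_action = f"ANI-{asset_name}.{shot_name}.v001"   # "ANI-rex.110_0030_A.v001"

print(camera_action, asset_action)
```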
@@ -1,57 +0,0 @@
import bpy

from . import asglobals
from . import prefs
from . import kitsu
from . import props
from . import opsdata
from . import ops
from . import ui
from .log import LoggerFactory

logger = LoggerFactory.getLogger(__name__)

bl_info = {
    "name": "Anim Setup",
    "author": "Paul Golter",
    "description": "Blender addon to setup animation scenes for the spritefright project",
    "blender": (3, 0, 0),
    "version": (0, 1, 0),
    "location": "View3D",
    "warning": "",
    "doc_url": "",
    "tracker_url": "",
    "category": "Generic",
}

_need_reload = "ops" in locals()

if _need_reload:
    import importlib

    asglobals = importlib.reload(asglobals)
    prefs = importlib.reload(prefs)
    kitsu = importlib.reload(kitsu)
    props = importlib.reload(props)
    opsdata = importlib.reload(opsdata)
    ops = importlib.reload(ops)
    ui = importlib.reload(ui)


def register():
    prefs.register()
    props.register()
    ops.register()
    ui.register()
    logger.info("Registered anim-setup")


def unregister():
    ui.unregister()
    ops.unregister()
    props.unregister()
    prefs.unregister()


if __name__ == "__main__":
    register()
@@ -1,17 +0,0 @@
PROJECT_NAME = "SpriteFright"
PROJECT_ID = "fc77c0b9-bb76-41c3-b843-c9b156f9b3ec"
ACTION_ASSETS = [
    "CH-ellie",
    "CH-jay",
    "CH-phil",
    "CH-rex",
    "CH-elder_sprite",
    "CH-victoria",
    "CH-bird",
    "PR-bbq_grill",
    "PR-boombox",
    "PR-tree_chasm",
    "PR-log_bridge_trunk",
]
MULTI_ASSETS = ["CH-sprite"]
HIDE_COLLS = ["mushrooms_center", "treetop_leaves"]
@@ -1,315 +0,0 @@
from __future__ import annotations
import requests

from dataclasses import asdict, dataclass, field
from typing import Any, Dict, List, Optional, Union

from .log import LoggerFactory

logger = LoggerFactory.getLogger()


class KitsuException(Exception):
    pass


class KitsuConnector:
    def __init__(self, preferences: "AS_AddonPreferences"):
        self._preferences = preferences
        self.__access_token = ""
        self.__validate()
        self.__authorize()

    def __validate(self) -> None:
        self._preferences.kitsu._validate()

    def __authorize(self) -> None:
        kitsu_pref = self._preferences.kitsu
        backend = kitsu_pref.backend
        email = kitsu_pref.email
        password = kitsu_pref.password

        logger.info(f"authorize {email} against {backend}")
        response = requests.post(
            url=f"{backend}/auth/login", data={"email": email, "password": password}
        )
        if response.status_code != 200:
            self.__access_token = ""
            raise KitsuException(
                f"unable to authorize (status code={response.status_code})"
            )
        json_response = response.json()
        self.__access_token = json_response["access_token"]

    def api_get(self, api: str) -> Any:
        kitsu_pref = self._preferences.kitsu
        backend = kitsu_pref.backend

        response = requests.get(
            url=f"{backend}{api}",
            headers={"Authorization": f"Bearer {self.__access_token}"},
        )
        if response.status_code != 200:
            raise KitsuException(
                f"unable to call kitsu (api={api}, status code={response.status_code})"
            )
        return response.json()

    @classmethod
    def fetch_first(
        cls, json_response: Dict[str, Any], filter: Dict[str, Any]
    ) -> Optional[Dict[str, Any]]:

        if not isinstance(json_response, list):
            raise ValueError(
                f"Failed to fetch one, expected list object: {json_response}"
            )

        for item in json_response:
            matches = 0
            for f in filter:
                if f in item and item[f] == filter[f]:
                    matches += 1

            if matches == len(filter):
                return item

        logger.error("Filter had no match %s on json response.", str(filter))
        return None

    @classmethod
    def fetch_all(
        cls, json_response: Dict[str, Any], filter: Dict[str, Any]
    ) -> List[Dict[str, Any]]:

        if not isinstance(json_response, list):
            raise ValueError(
                f"Failed to fetch all, expected list object: {json_response}"
            )

        valid_items: List[Dict[str, Any]] = []

        for item in json_response:
            matches = 0
            for f in filter:
                if f in item and item[f] == filter[f]:
                    matches += 1

            if matches == len(filter):
                valid_items.append(item)

        return valid_items


class ProjectList(KitsuConnector):
    """
    Class to get object oriented representation of backend productions data structure.
    """

    def __init__(self):
        self._projects: List[Project] = []
        self._init_projects()

    @property
    def names(self) -> List[str]:
        return [p.name for p in self._projects]

    @property
    def projects(self) -> List[Project]:
        return self._projects

    def _init_projects(self) -> None:
        api_url = "data/projects"

        for project in self.api_get(api_url):
            self._projects.append(Project(**project))


@dataclass
class Project(KitsuConnector):
    """
    Class to get object oriented representation of backend project data structure.
    Can shortcut some functions from gazu api because active project is given through class instance.
    Has multiple constructor functions (by_name, by_id, init>by_dict)
    """

    id: str = ""
    created_at: str = ""
    updated_at: str = ""
    name: str = ""
    code: Optional[str] = None
    description: Optional[str] = None
    shotgun_id: Optional[str] = None
    data: None = None
    has_avatar: bool = False
    fps: Optional[str] = None
    ratio: Optional[str] = None
    resolution: Optional[str] = None
    production_type: str = ""
    start_date: Optional[str] = None
    end_date: Optional[str] = None
    man_days: Optional[str] = None
    nb_episodes: int = 0
    episode_span: int = 0
    project_status_id: str = ""
    type: str = ""
    project_status_name: str = ""
    file_tree: Dict[str, Any] = field(default_factory=dict)
    team: List[Any] = field(default_factory=list)
    asset_types: List[Any] = field(default_factory=list)
    task_types: List[Any] = field(default_factory=list)
    task_statuses: List[Any] = field(default_factory=list)

    @classmethod
    def by_id(cls, connector: KitsuConnector, project_id: str) -> Project:
        api_url = f"data/projects/{project_id}"
        project_dict = connector.api_get(api_url)
        return cls(**project_dict)

    # SEQUENCES
    # ---------------

    def get_sequence(self, connector: KitsuConnector, seq_id: str) -> Sequence:
        return Sequence.by_id(connector, seq_id)

    def get_sequence_by_name(
        self, connector: KitsuConnector, seq_name: str
    ) -> Optional[Sequence]:
        return Sequence.by_name(connector, self, seq_name)

    def get_sequences_all(self, connector: KitsuConnector) -> List[Sequence]:
        api_url = f"data/projects/{self.id}/sequences"
        seq_dicts = connector.api_get(api_url)

        sequences = [Sequence(**s) for s in seq_dicts]
        return sorted(sequences, key=lambda x: x.name)

    # SHOT
    # ---------------

    def get_shot(self, connector: KitsuConnector, shot_id: str) -> Shot:
        return Shot.by_id(connector, shot_id)

    def get_shots_all(self, connector: KitsuConnector) -> List[Shot]:
        api_url = f"data/projects/{self.id}/shots"
        shot_dicts = connector.api_get(api_url)

        shots = [Shot(**s) for s in shot_dicts]
        return sorted(shots, key=lambda x: x.name)

    def get_shot_by_name(
        self, connector: KitsuConnector, sequence: Sequence, name: str
    ) -> Optional[Shot]:
        return Shot.by_name(connector, sequence, name)

    def __bool__(self):
        return bool(self.id)


@dataclass
class Sequence(KitsuConnector):
    """
    Class to get object oriented representation of backend sequence data structure.
    Has multiple constructor functions (by_name, by_id, init>by_dict)
    """

    id: str = ""
    created_at: str = ""
    updated_at: str = ""
    name: str = ""
    code: Optional[str] = None
    description: Optional[str] = None
    shotgun_id: Optional[str] = None
    canceled: bool = False
    nb_frames: Optional[int] = None
    project_id: str = ""
    entity_type_id: str = ""
    parent_id: str = ""
    source_id: Optional[str] = None
    preview_file_id: Optional[str] = None
    data: Optional[Dict[str, Any]] = None
    type: str = ""
    project_name: str = ""

    @classmethod
    def by_id(cls, connector: KitsuConnector, seq_id: str) -> Sequence:
        api_url = f"data/sequences/{seq_id}"
        seq_dict = connector.api_get(api_url)
        return cls(**seq_dict)

    @classmethod
    def by_name(
        cls, connector: KitsuConnector, project: Project, seq_name: str
    ) -> Optional[Sequence]:
        api_url = f"data/projects/{project.id}/sequences"
        seq_dicts = connector.api_get(api_url)
        seq_dict = connector.fetch_first(seq_dicts, {"name": seq_name})

        # Can be None if name not found.
        if not seq_dict:
            return None

        return cls(**seq_dict)

    def __bool__(self):
        return bool(self.id)


@dataclass
class Shot(KitsuConnector):
    """
    Class to get object oriented representation of backend shot data structure.
    Has multiple constructor functions (by_name, by_id, init>by_dict)
    """

    id: str = ""
    created_at: str = ""
    updated_at: str = ""
    name: str = ""
    canceled: bool = False
    code: Optional[str] = None
    description: Optional[str] = None
    entity_type_id: str = ""
    episode_id: Optional[str] = None
    episode_name: str = ""
    fps: str = ""
    frame_in: str = ""
    frame_out: str = ""
    nb_frames: int = 0
    parent_id: str = ""
    preview_file_id: Optional[str] = None
    project_id: str = ""
    project_name: str = ""
    sequence_id: str = ""
    sequence_name: str = ""
    source_id: Optional[str] = None
    shotgun_id: Optional[str] = None
    type: str = ""
    data: Dict[str, Any] = field(default_factory=dict)
    tasks: List[Dict[str, Any]] = field(default_factory=list)

    @classmethod
    def by_id(cls, connector: KitsuConnector, shot_id: str) -> Shot:
        api_url = f"data/shots/{shot_id}"
        shot_dict = connector.api_get(api_url)
        return cls(**shot_dict)

    @classmethod
    def by_name(
        cls, connector: KitsuConnector, sequence: Sequence, shot_name: str
    ) -> Optional[Shot]:
        api_url = f"data/projects/{sequence.project_id}/shots"
        shot_dicts = connector.api_get(api_url)
        shot_dict = connector.fetch_first(
            shot_dicts, {"parent_id": sequence.id, "name": shot_name}
        )

        # Can be None if name not found.
        if not shot_dict:
            return None

        return cls(**shot_dict)

    def __bool__(self):
        return bool(self.id)
@@ -1,16 +0,0 @@
import logging
import sys
from typing import List, Tuple


class LoggerFactory:

    """
    Utility class to streamline logger creation
    """

    @staticmethod
    def getLogger(name=__name__):
        logger = logging.getLogger(name)
        return logger
@@ -1,835 +0,0 @@
import re
from pathlib import Path
import types
from typing import Container, Dict, List, Set, Optional

import bpy

from .log import LoggerFactory
from .kitsu import KitsuConnector, Shot, Project, Sequence
from . import opsdata, prefs, asglobals

logger = LoggerFactory.getLogger()


def ui_redraw() -> None:
    """Forces blender to redraw the UI."""
    for screen in bpy.data.screens:
        for area in screen.areas:
            area.tag_redraw()


class AS_OT_create_actions(bpy.types.Operator):
    bl_idname = "as.create_action"
    bl_label = "Create action"
    bl_description = (
        "Creates actions for all found assets that have none assigned yet. "
        "Names them following the blender-studio convention"
    )

    @classmethod
    def poll(cls, context: bpy.types.Context) -> bool:
        act_coll = context.view_layer.active_layer_collection.collection
        return bool(bpy.data.filepath and act_coll)

    def execute(self, context: bpy.types.Context) -> Set[str]:
        assigned: List[bpy.types.Action] = []
        created: List[bpy.types.Action] = []
        failed: List[bpy.types.Collection] = []
        collections = opsdata.get_valid_collections(context)
        exists: List[bpy.types.Collection] = []

        if not collections:
            self.report({"WARNING"}, "No valid collections available")
            return {"CANCELLED"}

        for coll in collections:
            print("\n")
            rig = opsdata.find_rig(coll)

            if not rig:
                logger.warning(f"{coll.name} contains no rig.")
                failed.append(coll)
                continue

            # Create animation data if not existent.
            if not rig.animation_data:
                rig.animation_data_create()
                logger.info("%s created animation data", rig.name)

            # If action already exists check for fake user and then continue.
            if rig.animation_data.action:
                logger.info("%s already has an action assigned", rig.name)

                if not rig.animation_data.action.use_fake_user:
                    rig.animation_data.action.use_fake_user = True
                    logger.info("%s assigned existing action fake user", rig.name)
                exists.append(coll)
                continue

            # Create new action.
            action_name_new = opsdata.gen_action_name(coll)
            try:
                action = bpy.data.actions[action_name_new]
            except KeyError:
                action = bpy.data.actions.new(action_name_new)
                logger.info("Created action: %s", action.name)
                created.append(action)
            else:
                logger.info("Action %s already exists. Will take that.", action.name)

            # Assign action.
            rig.animation_data.action = action
            logger.info("%s assigned action %s", rig.name, action.name)

            # Add fake user.
            action.use_fake_user = True
            assigned.append(action)

        self.report(
            {"INFO"},
            "Actions: Created %s | Assigned %s | Exists %s | Failed %s"
            % (len(created), len(assigned), len(exists), len(failed)),
        )
        return {"FINISHED"}


class AS_OT_setup_workspaces(bpy.types.Operator):
    bl_idname = "as.setup_workspaces"
    bl_label = "Setup Workspace"
    bl_description = "Sets up the workspaces for the animation task"

    def execute(self, context: bpy.types.Context) -> Set[str]:

        # Remove non anim workspaces.
        for ws in bpy.data.workspaces:
            if ws.name != "Animation":
                bpy.ops.workspace.delete({"workspace": ws})

        self.report({"INFO"}, "Deleted non Animation workspaces")

        return {"FINISHED"}


class AS_OT_load_latest_edit(bpy.types.Operator):
    bl_idname = "as.load_latest_edit"
    bl_label = "Load edit"
    bl_description = (
        "Loads latest edit from shot_preview_folder. "
        "Shifts edit so current shot starts at 3d_in metadata shot key from Kitsu"
    )

    @classmethod
    def can_load_edit(cls, context: bpy.types.Context) -> bool:
        """Check if shared dir and VSE area are available."""
        addon_prefs = prefs.addon_prefs_get(context)
        edit_export_path = Path(addon_prefs.edit_export_path)

        # Needs to be run in a sequence editor area.
        # TODO: temporarily create a VSE area if not available.
        area_override = None
        for area in bpy.context.screen.areas:
            if area.type == "SEQUENCE_EDITOR":
                area_override = area

        return bool(area_override and edit_export_path)

    @classmethod
    def poll(cls, context: bpy.types.Context) -> bool:
        return cls.can_load_edit(context)

    @classmethod
    def description(cls, context, properties):
        if cls.can_load_edit(context):
            return "Load latest edit from shared folder"
        else:
            return "Shared folder not set, or VSE area not available in this workspace"

    def execute(self, context: bpy.types.Context) -> Set[str]:

        addon_prefs = prefs.addon_prefs_get(context)
        edit_export_path = Path(addon_prefs.edit_export_path)
        strip_channel = 1
        latest_file = self._get_latest_edit(context)
        if not latest_file:
            self.report(
                {"ERROR"}, f"Found no edit file in: {edit_export_path.as_posix()}"
            )
            return {"CANCELLED"}
        strip_filepath = latest_file.as_posix()
        strip_frame_start = 101

        # Needs to be run in a sequence editor area.
        area_override = None
        for area in bpy.context.screen.areas:
            if area.type == "SEQUENCE_EDITOR":
                area_override = area

        if not area_override:
            self.report({"ERROR"}, "No sequence editor area found")
            return {"CANCELLED"}

        override = bpy.context.copy()
        override["area"] = area_override

        bpy.ops.sequencer.movie_strip_add(
            override,
            filepath=strip_filepath,
            relative_path=False,
            frame_start=strip_frame_start,
            channel=strip_channel,
            fit_method="FIT",
        )

        # Get sequence name.
        seqname = opsdata.get_sequence_from_file()
        if not seqname:
            self.report({"ERROR"}, "Failed to retrieve seqname from current file.")
            return {"CANCELLED"}

        # Get shotname.
        shotname = opsdata.get_shot_name_from_file()
        if not shotname:
            self.report({"ERROR"}, "Failed to retrieve shotname from current file.")
            return {"CANCELLED"}

        # Setup connector and get data from kitsu.
        connector = KitsuConnector(addon_prefs)
        project = Project.by_id(connector, addon_prefs.kitsu.project_id)
        sequence = project.get_sequence_by_name(connector, seqname)

        if not sequence:
            self.report({"ERROR"}, f"Failed to find {seqname} on kitsu.")
            return {"CANCELLED"}

        shot = project.get_shot_by_name(connector, sequence, shotname)

        if not shot:
            self.report({"ERROR"}, f"Failed to find shot {shotname} on kitsu.")
            return {"CANCELLED"}

        # Update shift frame range prop.
        frame_in = shot.data["frame_in"]
        frame_out = shot.data["frame_out"]
        frame_3d_in = shot.data["3d_in"]
        frame_3d_offset = frame_3d_in - 101

        if not frame_in:
            self.report(
                {"ERROR"}, f"On kitsu 'frame_in' is not defined for shot {shotname}."
            )
            return {"CANCELLED"}

        # Set sequence strip start from kitsu data.
        for strip in context.scene.sequence_editor.sequences_all:
            strip.frame_start = -frame_in + (strip_frame_start * 2) + frame_3d_offset

        self.report({"INFO"}, f"Loaded latest edit: {latest_file.name}")

        return {"FINISHED"}

    def _get_latest_edit(self, context: bpy.types.Context):
        addon_prefs = prefs.addon_prefs_get(context)

        edit_export_path = Path(addon_prefs.edit_export_path)

        files_list = [
            f
            for f in edit_export_path.iterdir()
            if f.is_file() and self._is_valid_edit_name(f.name)
        ]
        files_list = sorted(files_list, reverse=True)

        return files_list[0]

    def _is_valid_edit_name(self, filename: str) -> bool:
        pattern = r"sf-edit-v\d\d\d.mp4"

        match = re.search(pattern, filename)
        if match:
            return True
        return False


class AS_OT_import_camera(bpy.types.Operator):
    bl_idname = "as.import_camera"
    bl_label = "Import Camera"
    bl_description = "Imports camera rig and makes library override"

    @classmethod
    def poll(cls, context: bpy.types.Context) -> bool:
        addon_prefs = prefs.addon_prefs_get(context)
        return bool(addon_prefs.is_project_root_valid and bpy.data.filepath)

    def execute(self, context: bpy.types.Context) -> Set[str]:

        addon_prefs = prefs.addon_prefs_get(context)

        # Import camera rig and make override.
        camera_rig_path = addon_prefs.camera_rig_path
        if not camera_rig_path:
            self.report({"ERROR"}, "Failed to import camera rig")
            return {"CANCELLED"}

        cam_lib_coll = opsdata.import_data_from_lib(
            "collections",
            "CA-camera_rig",
            camera_rig_path,
        )
        opsdata.instance_coll_to_scene_and_override(context, cam_lib_coll)
        cam_coll = bpy.data.collections[cam_lib_coll.name, None]

        self.report({"INFO"}, f"Imported camera: {cam_coll.name}")
        return {"FINISHED"}


class AS_OT_import_camera_action(bpy.types.Operator):
    bl_idname = "as.import_camera_action"
    bl_label = "Import Camera Action"
    bl_description = (
        "Imports camera action of previs file that matches current shot and assigns it"
    )

    @classmethod
    def poll(cls, context: bpy.types.Context) -> bool:
        addon_prefs = prefs.addon_prefs_get(context)
        return bool(addon_prefs.is_project_root_valid and bpy.data.filepath)

    def execute(self, context: bpy.types.Context) -> Set[str]:

        try:
            cam_coll = bpy.data.collections["CA-camera_rig", None]
        except KeyError:
            self.report({"ERROR"}, "Camera collection CA-camera_rig is not imported")
            return {"CANCELLED"}

        # Import camera action from previs file.

        # Get shotname and previs filepath.
        shotname = opsdata.get_shot_name_from_file()
        if not shotname:
            self.report({"ERROR"}, "Failed to retrieve shotname from current file.")
            return {"CANCELLED"}

        previs_path = opsdata.get_previs_file(context)
        if not previs_path:
            self.report({"ERROR"}, "Failed to find previs file")
            return {"CANCELLED"}

        # Check if cam action name exists in previs library.
        cam_action_name_new = opsdata.get_cam_action_name_from_lib(
            shotname, previs_path
        )
        if not cam_action_name_new:
            self.report(
                {"ERROR"},
                f"Camera action: {cam_action_name_new} not found in lib: {previs_path.name}",
            )
            return {"CANCELLED"}

        # Import cam action data block.
        cam_action = opsdata.import_data_from_lib(
            "actions", cam_action_name_new, previs_path, link=False
        )

        # Find rig to assign the action to.
        rig = opsdata.find_rig(cam_coll)
        if not rig:
            self.report({"WARNING"}, f"{cam_coll.name} contains no rig.")
            return {"CANCELLED"}

        # Assign action.
        rig.animation_data.action = cam_action
        logger.info("%s assigned action %s", rig.name, cam_action.name)

        # Add fake user.
        cam_action.use_fake_user = True

        # Ensure version suffix on action data block.
        opsdata.ensure_name_version_suffix(cam_action)

        self.report({"INFO"}, f"{rig.name} imported camera action: {cam_action.name}")
        return {"FINISHED"}


class AS_OT_import_asset_actions(bpy.types.Operator):
    """Imports asset actions of previs file that match current shot and assigns them"""

    bl_idname = "as.import_asset_actions"
    bl_label = "Import Asset Actions"
    bl_description = (
        "For each found asset tries to find an action in the previs file. "
        "Imports it to the current file, renames it, adds a fake user and assigns it"
    )

    @classmethod
    def poll(cls, context: bpy.types.Context) -> bool:
        addon_prefs = prefs.addon_prefs_get(context)
        return bool(addon_prefs.is_project_root_valid and bpy.data.filepath)

    def execute(self, context: bpy.types.Context) -> Set[str]:

        succeeded = []
        failed = []
        actions_imported = []
        renamed_actions = []

        # Get shotname and previs filepath.
        shotname = opsdata.get_shot_name_from_file()
        if not shotname:
            self.report({"ERROR"}, "Failed to retrieve shotname from current file.")
            return {"CANCELLED"}

        previs_path = opsdata.get_previs_file(context)
        if not previs_path:
            self.report({"ERROR"}, "Failed to find previs file")
            return {"CANCELLED"}

        # Collect action candidates for each asset from the previs library.
        action_candidates: Dict[str, List[str]] = {}
        asset_colls = []

        with bpy.data.libraries.load(
            previs_path.as_posix(), relative=True, link=False
        ) as (
            data_from,
            data_to,
        ):

            for asset in asglobals.ACTION_ASSETS:

                # Check if asset is in current scene.
                try:
                    coll = bpy.data.collections[asset]
                except KeyError:
                    # If the asset is not in the scene we
                    # cannot load an action for it anyway.
                    continue
                else:
                    logger.info("Found asset in scene: %s", coll.name)
                    asset_colls.append(coll)

                # Find out if actions exist for that asset in the previs file.
                asset_name = opsdata.find_asset_name(asset)
                for action in data_from.actions:
                    if action.startswith(f"ANI-{asset_name}."):

                        # Create key if not existent yet.
                        if asset not in action_candidates:
                            action_candidates[asset] = []

                        # Append action to that asset.
                        action_candidates[asset].append(action)

        # Load and assign actions for asset colls.
        for coll in asset_colls:

            # Find rig.
            rig = opsdata.find_rig(coll)
            if not rig:
                logger.warning("%s contains no rig.", coll.name)
                continue

            # Check if an action was found in the previs file for that asset.
            if coll.name not in action_candidates:
                logger.warning("%s no action found in previs file", coll.name)
                continue
            else:
                logger.info(
                    "%s found actions in previs file: %s",
                    coll.name,
                    str(action_candidates[coll.name]),
                )

            # Check if multiple actions are in the previs file for that asset.
            if len(action_candidates[coll.name]) > 1:
                logger.warning(
                    "%s Multiple actions found in previs file: %s",
                    coll.name,
                    str(action_candidates[coll.name]),
                )
                continue

            # Import action from previs file.
            actions = action_candidates[coll.name]
            action = opsdata.import_data_from_lib(
                "actions", actions[0], previs_path, link=False
            )
            if not action:
                continue

            actions_imported.append(action)

            # Create animation data if not existent.
            if not rig.animation_data:
                rig.animation_data_create()
                logger.info("%s created animation data", rig.name)

            # Assign action.
            rig.animation_data.action = action
            logger.info("%s assigned action %s", rig.name, action.name)

            # Add fake user.
            action.use_fake_user = True

            # Rename actions.
            action_name_new = opsdata.gen_action_name(coll)
            try:
                action_existing = bpy.data.actions[action_name_new]
            except KeyError:
                # Action does not exist yet, safe to rename.
                old_name = action.name
                action.name = action_name_new
                logger.info("Renamed action: %s to %s", old_name, action.name)
                renamed_actions.append(action)
            else:
                # Action name already exists in this scene.
                logger.info(
                    "Failed to rename action %s to %s. Already exists",
                    action.name,
                    action_name_new,
                )
                continue

        self.report(
            {"INFO"},
            f"Found Assets: {len(asset_colls)} | Imported Actions: {len(actions_imported)} | Renamed Actions: {len(renamed_actions)}",
        )
        return {"FINISHED"}


class AS_OT_import_multi_assets(bpy.types.Operator):
    bl_idname = "as.import_multi_assets"
    bl_label = "Import Multi Assets"
    bl_description = (
        "For each found multi asset tries to find an action in the previs file. "
        "Imports it to the current file, renames it, adds a fake user and assigns it"
    )

    @classmethod
    def poll(cls, context: bpy.types.Context) -> bool:
        addon_prefs = prefs.addon_prefs_get(context)
        return bool(addon_prefs.is_project_root_valid and bpy.data.filepath)

    def execute(self, context: bpy.types.Context) -> Set[str]:
        actions_imported = []
        new_colls = []

        # Get shotname and previs filepath.
        shotname = opsdata.get_shot_name_from_file()
        if not shotname:
            self.report({"ERROR"}, "Failed to retrieve shotname from current file.")
            return {"CANCELLED"}

        previs_path = opsdata.get_previs_file(context)
        if not previs_path:
            self.report({"ERROR"}, "Failed to find previs file")
            return {"CANCELLED"}

        # Collect action candidates for each multi asset from the previs library.
        action_candidates: Dict[str, List[str]] = {}
        asset_colls: List[bpy.types.Collection] = []

        with bpy.data.libraries.load(
            previs_path.as_posix(), relative=True, link=False
        ) as (
            data_from,
            data_to,
        ):
            data_from_actions: List[str] = data_from.actions
            data_from_actions.sort()

            # Find all sprites actions.
            for asset in asglobals.MULTI_ASSETS:
                # Check if asset is in current scene.
                try:
                    coll = bpy.data.collections[asset]
                except KeyError:
                    # If the asset is not in the scene we
                    # cannot load an action for it anyway.
                    continue
                else:
                    logger.info("Found asset in scene: %s", coll.name)
                    asset_colls.append(coll)

                # Find out if actions exist for that asset in the previs file.
                asset_name = opsdata.find_asset_name(asset)
                for action in data_from_actions:
                    if action.startswith(f"ANI-{asset_name}"):

                        # Create key if not existent yet.
                        if asset not in action_candidates:
                            action_candidates[asset] = []

                        # Append action to that asset.
                        action_candidates[asset].append(action)

        # Load and assign actions for asset colls.
        color_tag: str = ""
        for coll in asset_colls:

            # Check if an action was found in the previs file for that asset.
            if coll.name not in action_candidates:
                logger.warning("%s no action found in previs file", coll.name)
                continue
            else:
                logger.info(
                    "%s found actions in previs file: %s",
                    coll.name,
                    str(action_candidates[coll.name]),
                )

            # Create duplicate for each action.
            for idx, action_candidate in enumerate(action_candidates[coll.name]):

                # First index uses the existing collection that was already created by the shot builder.
                if idx == 0:
                    new_coll = bpy.data.collections[coll.name, None]
                    logger.info("First index will use existing coll: %s", new_coll.name)
                    color_tag = new_coll.color_tag  # Take color from first collection.
                else:
                    ref_coll = opsdata.get_ref_coll(coll)
                    new_coll = ref_coll.override_hierarchy_create(
                        context.scene, context.view_layer, reference=coll
                    )
                    new_coll.color_tag = color_tag
                    logger.info("Created new override collection: %s", new_coll.name)
                    new_colls.append(new_coll)

                # Find rig of new coll.
                rig = opsdata.find_rig(new_coll)
                if not rig:
                    logger.warning("%s contains no rig.", coll.name)
                    continue

                # Import action.
                action = opsdata.import_data_from_lib(
                    "actions", action_candidate, previs_path, link=False
                )
                if not action:
                    continue

                actions_imported.append(action)

                # Create animation data if not existent.
                if not rig.animation_data:
                    rig.animation_data_create()
                    logger.info("%s created animation data", rig.name)

                # Assign action.
                rig.animation_data.action = action
                logger.info("%s assigned action %s", rig.name, action.name)

        self.report(
            {"INFO"},
            f"Found Assets: {len(asset_colls)} | Imported Actions: {len(actions_imported)} | New collections: {len(new_colls)}",
        )
        return {"FINISHED"}


class AS_OT_shift_anim(bpy.types.Operator):
    bl_idname = "as.shift_anim"
    bl_label = "Shift Anim"
    bl_description = (
        "Shifts the animation of found assets by a number of frames. "
        "It also shifts the camera animation as well as its modifier values"
    )

    multi_assets: bpy.props.BoolProperty(name="Do Multi Assets")

    def execute(self, context: bpy.types.Context) -> Set[str]:
        # Define the frame offset by:
        # subtracting the layout cut-in frame (to set the zero point)
        # and adding 101 (the animation start for a shot).
        # For example, layout frame 520 becomes frames_offset = -520 + 101 = -419.

        frames_offset = -context.scene.anim_setup.layout_cut_in + 101
        rigs: List[bpy.types.Armature] = []

        if not self.multi_assets:
            # Get cam coll.
            try:
                rig = bpy.data.objects["RIG-camera", None]
            except KeyError:
                logger.warning("Failed to find camera object 'RIG-camera'")
            else:
                rigs.append(rig)

            # Find assets.
            for asset in asglobals.ACTION_ASSETS:

                # Check if asset is in current scene.
                try:
                    coll = bpy.data.collections[asset]
                except KeyError:
                    # If the asset is not in the scene there is
                    # no animation to shift anyway.
                    continue
                else:
                    logger.info("Found asset in scene: %s", coll.name)
                    # Find rig.
                    rig = opsdata.find_rig(coll)
                    if not rig:
                        logger.warning("%s contains no rig.", coll.name)
                        continue
                    rigs.append(rig)
        else:
            for asset in asglobals.MULTI_ASSETS:
                for coll in bpy.data.collections:

                    if not opsdata.is_item_lib_override(coll):
                        continue

                    if not coll.name.startswith(asset):
                        continue

                    logger.info("Found asset in scene: %s", coll.name)
                    # Find rig.
                    rig = opsdata.find_rig(coll)
                    if not rig:
                        logger.warning("%s contains no rig.", coll.name)
                        continue
                    rigs.append(rig)

        if not rigs:
            self.report(
                {"ERROR"}, "Failed to find any assets or cameras to shift animation."
            )
            return {"CANCELLED"}

        for rig in rigs:
            for fcurve in rig.animation_data.action.fcurves:

                # Shift all keyframes.
                for point in fcurve.keyframe_points:
                    # print(f"{fcurve.data_path}|{fcurve.array_index}: {point.co.x}|{point.co.y}")
                    point.co.x += frames_offset
                    # Don't forget the keyframe's handles.
                    point.handle_left.x += frames_offset
                    point.handle_right.x += frames_offset

                # Shift all noise modifier values.
                for m in fcurve.modifiers:
                    if m.type != "NOISE":
                        continue

                    m.offset += frames_offset

                    if m.use_restricted_range:
                        frame_start = m.frame_start
                        frame_end = m.frame_end
                        m.frame_start = frame_start + frames_offset
                        m.frame_end = frame_end + frames_offset

                    logger.info(
                        "%s shifted %s modifier values by %i frames",
                        m.id_data.name,
                        m.type.lower(),
                        frames_offset,
                    )
            logger.info(
                "%s: %s shifted all keyframes by %i frames",
                rig.name,
                rig.animation_data.action.name,
                frames_offset,
            )

        self.report(
            {"INFO"}, f"Shifted animation of {len(rigs)} actions by {frames_offset}"
        )
        return {"FINISHED"}


class AS_OT_apply_additional_settings(bpy.types.Operator):

    bl_idname = "as.apply_additional_settings"
    bl_label = "Apply Additional Settings"
    bl_description = (
        "Apply some additional settings that are important for animation scenes"
    )

    @classmethod
    def poll(cls, context: bpy.types.Context) -> bool:
        sqe_area = cls._get_sqe_area(context)
        return bool(sqe_area)

    def execute(self, context: bpy.types.Context) -> Set[str]:

        sqe_area = self._get_sqe_area(context)

        sqe_area.spaces.active.use_proxies = False
        sqe_area.spaces.active.proxy_render_size = "PROXY_100"

        self.report({"INFO"}, "Set: use_proxies | proxy_render_size")
        return {"FINISHED"}

    @classmethod
    def _get_sqe_area(cls, context: bpy.types.Context):
        for window in context.window_manager.windows:
            screen = window.screen

            for area in screen.areas:
                if area.type == "SEQUENCE_EDITOR":
                    return area

        return None


class AS_OT_exclude_colls(bpy.types.Operator):
    """Excludes collections that are not needed for animation"""

    bl_idname = "as.exclude_colls"
    bl_label = "Exclude Collections"
    bl_description = (
        "Exclude some collections by name that are not needed in animation scenes"
    )

    def execute(self, context: bpy.types.Context) -> Set[str]:
        view_layer_colls = opsdata.get_all_view_layer_colls(context)

        excluded = []
        for coll_name in asglobals.HIDE_COLLS:
            # Find the view layer collections. If the same collection is linked in
            # two different collections in the same scene, these are two different
            # view layer collections, so we need to grab all of them.
            valid_view_layer_colls = [
                vc for vc in view_layer_colls if vc.name == coll_name
            ]

            if not valid_view_layer_colls:
                logger.info("No view layer collections named: %s", coll_name)
                continue

            for view_layer_coll in valid_view_layer_colls:
                view_layer_coll.exclude = True
                logger.info("Excluded view layer collection: %s", view_layer_coll.name)
                excluded.append(view_layer_coll)

        self.report(
            {"INFO"}, f"Excluded Collections: {[v.name for v in excluded]}"
        )
        return {"FINISHED"}


# ---------REGISTER ----------

classes = [
    AS_OT_create_actions,
    AS_OT_setup_workspaces,
    AS_OT_load_latest_edit,
    AS_OT_import_camera,
    AS_OT_import_camera_action,
    AS_OT_shift_anim,
    AS_OT_apply_additional_settings,
    AS_OT_import_asset_actions,
    AS_OT_exclude_colls,
    AS_OT_import_multi_assets,
]


def register():
    for cls in classes:
        bpy.utils.register_class(cls)


def unregister():
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)
@@ -1,344 +0,0 @@
import re
from pathlib import Path
from typing import Optional, Dict, Union, Any, List, Generator
import bpy
from bpy.types import Key

from . import prefs


from .log import LoggerFactory

logger = LoggerFactory.getLogger()


def get_shot_name_from_file() -> Optional[str]:
    if not bpy.data.filepath:
        return None

    # Default: 110_0030_A.anim.blend.
    return Path(bpy.data.filepath).name.split(".")[0]


def get_sequence_from_file() -> Optional[str]:
    if not bpy.data.filepath:
        return None

    # ./spritefright/pro/shots/110_rextoria/110_0010_A/110_0010_A.anim.blend.
    return Path(bpy.data.filepath).parents[1].name


def get_seqeunce_short_from_shot_name(shotname: str) -> str:
    return shotname.split("_")[0]


def get_cam_action_name_from_shot(shotname: str) -> str:
    # ANI-camera.070_0010_A.
    return f"ANI-camera.{shotname}"


def get_cam_action_name_from_lib(shotname: str, libpath: Path) -> Optional[str]:

    valid_actions = []

    with bpy.data.libraries.load(libpath.as_posix(), relative=True) as (
        data_from,
        data_to,
    ):

        for action in data_from.actions:
            if action.startswith(get_cam_action_name_from_shot(shotname)):
                valid_actions.append(action)

    if not valid_actions:
        return None

    return sorted(valid_actions, reverse=True)[0]


def get_previs_file(context: bpy.types.Context) -> Optional[Path]:

    addon_prefs = prefs.addon_prefs_get(context)

    shotname = get_shot_name_from_file()
    if not shotname:
        return None

    seqname = get_seqeunce_short_from_shot_name(shotname)
    previs_path = Path(addon_prefs.previs_root_path)

    # Catch custom cases when previs files are split up for specific shots.
    if shotname == "020_0010_A":
        return previs_path / "020_grove.020_0010_A.blend"

    elif shotname in ["020_0020_A", "020_0050_A"]:
        return previs_path / "020_grove.shove.blend"

    elif shotname in ["020_0060_A", "020_0070_A"]:
        return previs_path / "020_grove.crowdcamping_alt.blend"

    elif shotname in ["020_0160_A", "020_0170_A", "020_0173_A", "020_0176_A"]:
        return previs_path / "020_grove.weenie_alt.blend"

    else:
        for f in previs_path.iterdir():
            if f.is_file() and f.suffix == ".blend" and f.name.startswith(seqname):
                if len(f.name.split(".")) > 2:
                    continue
                return f
    return None


def traverse_collection_tree(
    collection: bpy.types.Collection,
) -> Generator[bpy.types.Collection, None, None]:
    yield collection
    for child in collection.children:
        yield from traverse_collection_tree(child)


def import_data_from_lib(
    data_category: str,
    data_name: str,
    libpath: Path,
    link: bool = True,
):

    noun = "Appended"
    if link:
        noun = "Linked"

    with bpy.data.libraries.load(libpath.as_posix(), relative=True, link=link) as (
        data_from,
        data_to,
    ):

        if data_name not in eval(f"data_from.{data_category}"):
            logger.error(
                "Failed to import %s %s from %s. Doesn't exist in file.",
                data_category,
                data_name,
                libpath.as_posix(),
            )
            return None

        # Check if a datablock with the same name already exists in the blend file.
        try:
            eval(f"bpy.data.{data_category}['{data_name}']")
        except KeyError:
            pass
        else:
            logger.info(
                "%s already in bpy.data.%s of this blendfile.", data_name, data_category
            )
            return None

        # Append data block.
        eval(f"data_to.{data_category}.append('{data_name}')")
        logger.info(
            "%s: %s from library: %s",
            noun,
            data_name,
            libpath.as_posix(),
        )

    if link:
        return eval(
            f"bpy.data.{data_category}['{data_name}', '{bpy.path.relpath(libpath.as_posix())}']"
        )

    return eval(f"bpy.data.{data_category}['{data_name}']")


def instance_coll_to_scene_and_override(
    context: bpy.types.Context, source_collection: bpy.types.Collection
) -> bpy.types.Collection:
    instance_obj = _create_collection_instance(source_collection)
    _make_library_override(context, instance_obj)
    return bpy.data.collections[source_collection.name, None]


def _create_collection_instance(
    source_collection: bpy.types.Collection,
) -> bpy.types.Object:

    # The name has no effect on the final overridden library collection;
    # use an empty to instance the source collection.
    instance_obj = bpy.data.objects.new(name="", object_data=None)
    instance_obj.instance_collection = source_collection
    instance_obj.instance_type = "COLLECTION"

    parent_collection = bpy.context.view_layer.active_layer_collection
    parent_collection.collection.objects.link(instance_obj)

    logger.info(
        "Instanced collection: %s as: %s",
        source_collection.name,
        instance_obj.name,
    )

    return instance_obj


def _make_library_override(
    context: bpy.types.Context,
    instance_obj: bpy.types.Object,
) -> None:
    log_name = instance_obj.name
    # Deselect all.
    bpy.ops.object.select_all(action="DESELECT")

    # Needs active object (coll instance).
    context.view_layer.objects.active = instance_obj
    instance_obj.select_set(True)

    # Add library override.
    bpy.ops.object.make_override_library()

    logger.info(
        "%s made library override.",
        log_name,
    )


def find_asset_name(name: str) -> str:

    if name.endswith("_rig"):
        name = name[:-4]
    return name.split("-")[-1]  # CH-rex -> 'rex'


def find_rig(coll: bpy.types.Collection) -> Optional[bpy.types.Armature]:

    coll_suffix = find_asset_name(coll.name)

    valid_rigs = []

    for obj in coll.all_objects:
        # Default rig name: 'RIG-rex' / 'RIG-Rex'.
        if obj.type != "ARMATURE":
            continue

        if not obj.name.startswith("RIG"):
            continue

        valid_rigs.append(obj)

    if not valid_rigs:
        return None

    elif len(valid_rigs) == 1:
        logger.info("Found rig: %s", valid_rigs[0].name)
        return valid_rigs[0]
    else:
        logger.error("%s found multiple rigs %s", coll.name, str(valid_rigs))
        return None


def ensure_name_version_suffix(datablock: Any) -> Any:
    version_pattern = r"v\d\d\d"
    match = re.search(version_pattern, datablock.name)

    if not match:
        datablock.name = datablock.name + ".v001"

    return datablock


def get_valid_collections(context: bpy.types.Context) -> List[bpy.types.Collection]:
    valid_prefixes = ["CH-", "PR-"]
    valid_colls: List[bpy.types.Collection] = []

    for coll in context.scene.collection.children:
        if coll.name[:3] not in valid_prefixes:
            continue
        valid_colls.append(coll)

    return valid_colls


def is_multi_asset(asset_name: str) -> bool:
    if asset_name.startswith("thorn"):
        return True
    multi_assets = ["sprite", "snail", "spider"]
    if asset_name.lower() in multi_assets:
        return True
    return False


def gen_action_name(coll: bpy.types.Collection):
    action_prefix = "ANI"
    asset_name = find_asset_name(coll.name).lower()
    asset_name = asset_name.replace(".", "_")
    version = "v001"
    shot_name = get_shot_name_from_file()

    action_name_new = f"{action_prefix}-{asset_name}.{shot_name}.{version}"

    if is_multi_asset(asset_name):
        action_name_new = f"{action_prefix}-{asset_name}_A.{shot_name}.{version}"

    return action_name_new


def set_layer_coll_exlcude(
    layer_collections: List[bpy.types.LayerCollection], exclude: bool
) -> None:

    noun = "Excluded" if exclude else "Included"

    for lcoll in layer_collections:

        if exclude:
            if lcoll.exclude:
                continue

            lcoll.exclude = True

        else:
            if not lcoll.exclude:
                continue

            lcoll.exclude = False

        logger.info("%s %s", noun, lcoll.name)


def get_all_view_layer_colls(
    context: bpy.types.Context,
) -> List[bpy.types.LayerCollection]:
    return list(traverse_collection_tree(context.view_layer.layer_collection))


def get_ref_coll(coll: bpy.types.Collection) -> bpy.types.Collection:
    if not coll.override_library:
        return coll

    return coll.override_library.reference


def is_item_local(
    item: Union[bpy.types.Collection, bpy.types.Object, bpy.types.Camera]
) -> bool:
    # Local collection of blend file.
    if not item.override_library and not item.library:
        return True
    return False


def is_item_lib_override(
    item: Union[bpy.types.Collection, bpy.types.Object, bpy.types.Camera]
) -> bool:
|
||||||
# Collection from libfile and overwritten.
|
|
||||||
if item.override_library and not item.library:
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def is_item_lib_source(
|
|
||||||
item: Union[bpy.types.Collection, bpy.types.Object, bpy.types.Camera]
|
|
||||||
) -> bool:
|
|
||||||
# Source collection from libfile not overwritten.
|
|
||||||
if not item.override_library and item.library:
|
|
||||||
return True
|
|
||||||
return False
|
|
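For reference, a minimal sketch of how the helpers above fit together. The anim_setup.opsdata import path is an assumption (the add-on's folder name), and the snippet only runs inside Blender:

# Minimal sketch, assuming the deleted module is importable as anim_setup.opsdata.
import bpy
from anim_setup import opsdata  # hypothetical import path

for coll in opsdata.get_valid_collections(bpy.context):
    if opsdata.is_item_local(coll):
        state = "local"
    elif opsdata.is_item_lib_override(coll):
        state = "library override"
    elif opsdata.is_item_lib_source(coll):
        state = "linked library source"
    else:
        state = "unknown"
    # e.g. a 'CH-rex' collection yields 'ANI-rex.<shot>.v001'
    # (or 'ANI-rex_A.<shot>.v001' for multi assets).
    print(coll.name, state, opsdata.gen_action_name(coll))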
@@ -1,176 +0,0 @@
import os
from pathlib import Path
from typing import Union, Optional, Any, Dict, Set

import bpy

from .kitsu import KitsuException
from . import asglobals


class KitsuPreferences(bpy.types.PropertyGroup):
    backend: bpy.props.StringProperty(  # type: ignore
        name="Server URL",
        description="Kitsu server address",
        default="https://kitsu.blender.cloud/api",
    )

    email: bpy.props.StringProperty(  # type: ignore
        name="Email",
        description="Email to connect to Kitsu",
    )

    password: bpy.props.StringProperty(  # type: ignore
        name="Password",
        description="Password to connect to Kitsu",
        subtype="PASSWORD",
    )

    project_id: bpy.props.StringProperty(  # type: ignore
        name="Project ID",
        description="Server Id that refers to the last active project",
        default=asglobals.PROJECT_ID,
        options={"HIDDEN", "SKIP_SAVE"},
    )

    def draw(self, layout: bpy.types.UILayout, context: bpy.types.Context) -> None:
        box = layout.box()
        box.label(text="Kitsu")
        box.prop(self, "backend")
        box.prop(self, "email")
        box.prop(self, "password")
        box.prop(self, "project_id")

    def _validate(self):
        if not (self.backend and self.email and self.password and self.project_id):
            raise KitsuException(
                "Kitsu connector has not been configured in the add-on preferences"
            )


class AS_AddonPreferences(bpy.types.AddonPreferences):
    bl_idname = __package__

    project_root: bpy.props.StringProperty(  # type: ignore
        name="Project Root",
        default="",
        options={"HIDDEN", "SKIP_SAVE"},
        subtype="DIR_PATH",
    )
    edit_export_dir: bpy.props.StringProperty(  # type: ignore
        name="Edit Export Directory",
        default="",
        options={"HIDDEN", "SKIP_SAVE"},
        subtype="DIR_PATH",
    )

    kitsu: bpy.props.PointerProperty(  # type: ignore
        name="Kitsu Preferences", type=KitsuPreferences
    )

    def draw(self, context: bpy.types.Context) -> None:
        layout = self.layout
        box = layout.box()
        box.row().prop(self, "project_root")

        if not self.project_root:
            row = box.row()
            row.label(text="Please specify the project root directory.", icon="ERROR")

        if not bpy.data.filepath and self.project_root.startswith("//"):
            row = box.row()
            row.label(
                text="In order to use a relative path as root cache directory the current file needs to be saved.",
                icon="ERROR",
            )

        box.row().prop(self, "edit_export_dir")

        if not self.edit_export_dir:
            row = box.row()
            row.label(text="Please specify the edit export directory.", icon="ERROR")

        if not bpy.data.filepath and self.edit_export_dir.startswith("//"):
            row = box.row()
            row.label(
                text="In order to use a relative path as edit export directory the current file needs to be saved.",
                icon="ERROR",
            )

        self.kitsu.draw(layout, context)

    @property
    def project_root_path(self) -> Optional[Path]:
        if not self.is_project_root_valid:
            return None
        return Path(os.path.abspath(bpy.path.abspath(self.project_root)))

    @property
    def is_project_root_valid(self) -> bool:

        # Check if file is saved.
        if not self.project_root:
            return False

        if not bpy.data.filepath and self.project_root.startswith("//"):
            return False

        return True

    @property
    def is_editorial_valid(self) -> bool:
        if not self.edit_export_dir:
            return False

        return Path(self.edit_export_dir).exists()

    @property
    def edit_export_path(self) -> Optional[Path]:
        if not self.is_editorial_valid:
            return None

        return Path(self.edit_export_dir)

    @property
    def previs_root_path(self) -> Optional[Path]:
        if not self.is_project_root_valid:
            return None

        previs_path = self.project_root_path / "previz"

        if not previs_path.exists():
            return None

        return previs_path

    @property
    def camera_rig_path(self) -> Optional[Path]:
        if not self.is_project_root_valid:
            return None

        camera_rig_path = self.project_root_path / "pro/lib/cam/camera_rig.blend"

        if not camera_rig_path.exists():
            return None

        return camera_rig_path


def addon_prefs_get(context: bpy.types.Context) -> bpy.types.AddonPreferences:
    """Shortcut to get the anim_setup addon preferences"""
    return context.preferences.addons["anim_setup"].preferences


# ---------REGISTER ----------.

classes = [KitsuPreferences, AS_AddonPreferences]


def register():
    for cls in classes:
        bpy.utils.register_class(cls)


def unregister():
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)
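All of the path properties above return None instead of raising when a preference is missing or the folder does not exist, so callers only need a truthiness check. A minimal sketch, assuming the add-on is installed under the folder name anim_setup:

import bpy
from anim_setup.prefs import addon_prefs_get  # hypothetical import path

addon_prefs = addon_prefs_get(bpy.context)
if not addon_prefs.edit_export_path:
    print("Edit export directory is not configured or does not exist.")
else:
    print("Latest edits are read from:", addon_prefs.edit_export_path)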
@@ -1,34 +0,0 @@
from typing import List, Any, Generator, Optional

import bpy


class CM_property_group_scene(bpy.types.PropertyGroup):

    layout_cut_in: bpy.props.IntProperty(
        name="Layout Cut In",
        description="Frame where the camera marker is set for the shot, in the layout file",
        default=0,
        step=1,
    )


# ---------REGISTER ----------.

classes: List[Any] = [
    CM_property_group_scene,
]


def register():

    for cls in classes:
        bpy.utils.register_class(cls)

    # Scene Properties.
    bpy.types.Scene.anim_setup = bpy.props.PointerProperty(type=CM_property_group_scene)


def unregister():
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)
134  anim_setup/ui.py
@@ -1,134 +0,0 @@
import bpy

from . import opsdata

from .ops import (
    AS_OT_create_actions,
    AS_OT_setup_workspaces,
    AS_OT_load_latest_edit,
    AS_OT_import_camera,
    AS_OT_import_camera_action,
    AS_OT_shift_anim,
    AS_OT_apply_additional_settings,
    AS_OT_import_asset_actions,
    AS_OT_exclude_colls,
    AS_OT_import_multi_assets,
)


class AS_PT_view3d_general(bpy.types.Panel):
    """
    Animation Setup general operators.
    """

    bl_category = "Anim Setup"
    bl_label = "General"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"
    bl_order = 10

    def draw(self, context: bpy.types.Context) -> None:
        valid_colls = opsdata.get_valid_collections(context)
        layout = self.layout
        col = layout.column(align=True)

        # Workspace.
        col.operator(AS_OT_setup_workspaces.bl_idname)

        # Load edit.
        col.operator(AS_OT_load_latest_edit.bl_idname)


class AS_PT_view3d_animation_and_actions(bpy.types.Panel):
    """
    Animation Setup main operators and properties.
    """

    bl_category = "Anim Setup"
    bl_label = "Animation and Actions"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"
    bl_order = 12

    def draw(self, context: bpy.types.Context) -> None:

        layout = self.layout
        layout.use_property_split = True
        layout.use_property_decorate = False  # No animation.

        layout.label(text=f"Previs file: {opsdata.get_previs_file(context)}")

        col = layout.column(align=True)

        # Import camera action.
        col.operator(AS_OT_import_camera_action.bl_idname)

        # Import action.
        col.operator(
            AS_OT_import_asset_actions.bl_idname, text="Import Char Actions"
        )

        col.operator(
            AS_OT_import_multi_assets.bl_idname, text="Import Multi Asset Actions"
        )

        col.separator()
        col = layout.column()

        # Shift animation.
        col.prop(context.scene.anim_setup, "layout_cut_in")
        col.separator()
        split = col.split(factor=0.5, align=True)
        split.operator(AS_OT_shift_anim.bl_idname, text="Shift Char/Cam")
        split.operator(AS_OT_shift_anim.bl_idname, text="Shift Multi").multi_assets = True

        col.separator()

        # Create actions.
        valid_collections_count = len(opsdata.get_valid_collections(context))
        row = col.row(align=True)
        row.operator(
            AS_OT_create_actions.bl_idname, text=f"Create {valid_collections_count} actions"
        )


class AS_PT_view3d_scene(bpy.types.Panel):
    """
    Animation Setup scene operators.
    """

    bl_category = "Anim Setup"
    bl_label = "Scene"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"
    bl_order = 13

    def draw(self, context: bpy.types.Context) -> None:

        layout = self.layout

        # Exclude collections.
        row = layout.row(align=True)
        row.operator(
            AS_OT_exclude_colls.bl_idname, text="Exclude Collections"
        )


# ---------REGISTER ----------.

classes = [
    AS_PT_view3d_general,
    AS_PT_view3d_animation_and_actions,
    AS_PT_view3d_scene,
]


def register():
    for cls in classes:
        bpy.utils.register_class(cls)


def unregister():
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)
@@ -40,6 +40,8 @@ from blender_kitsu import (
     ui,
 )


 from blender_kitsu.logger import LoggerFactory, LoggerLevelManager

 logger = LoggerFactory.getLogger(__name__)
@@ -94,6 +96,7 @@ def register():
     playblast.register()
     anim.register()
     shot_builder.register()

     LoggerLevelManager.configure_levels()
     logger.info("Registered blender-kitsu")
@@ -21,6 +21,7 @@
 import hashlib
 import sys
 import os
+import re

 from typing import Optional, Any, Set, Tuple, List
 from pathlib import Path
@@ -39,6 +40,8 @@ from blender_kitsu.auth.ops import (
 )
 from blender_kitsu.context.ops import KITSU_OT_con_productions_load
 from blender_kitsu.lookdev.prefs import LOOKDEV_preferences
+from blender_kitsu.shot_builder.editorial.core import editorial_export_check_latest


 logger = LoggerFactory.getLogger()
@@ -249,6 +252,10 @@ class KITSU_addon_preferences(bpy.types.AddonPreferences):
         name="Show Advanced Settings",
         description="Show advanced settings that should already have good defaults",
     )
+    shot_builder_show_advanced: bpy.props.BoolProperty(  # type: ignore
+        name="Show Advanced Settings",
+        description="Show advanced settings that should already have good defaults",
+    )

     shot_pattern: bpy.props.StringProperty(  # type: ignore
         name="Shot Pattern",
@@ -294,6 +301,51 @@
         subtype='DIR_PATH',
     )

+    edit_export_dir: bpy.props.StringProperty(  # type: ignore
+        name="Editorial Export Directory",
+        options={"HIDDEN", "SKIP_SAVE"},
+        description="Directory path to editorial's export folder containing storyboard/animatic exports. Path should be similar to '~/shared-{proj_name}/editorial/export/'",
+        subtype="DIR_PATH",
+    )
+
+    edit_export_file_pattern: bpy.props.StringProperty(  # type: ignore
+        name="Editorial Export File Pattern",
+        options={"HIDDEN", "SKIP_SAVE"},
+        description="File pattern to search for latest editorial export. Typically '{proj_name}_v\d\d\d.mp4'",
+        default="petprojects_v\d\d\d.mp4",
+    )
+
+    edit_export_frame_offset: bpy.props.IntProperty(  # type: ignore
+        name="Editorial Export Offset",
+        description="Shift Editorial Export by this frame-range after set-up.",
+        default=-102,  # HARD CODED FOR PET PROJECTS BLENDER FILM
+    )
+
+    shot_builder_frame_offset: bpy.props.IntProperty(  # type: ignore
+        name="Start Frame Offset",
+        description="All Shots built by 'Shot_builder' should begin at this frame",
+        default=101,
+    )
+
+    shot_builder_armature_prefix: bpy.props.StringProperty(  # type: ignore
+        name="Armature Prefix",
+        description="Naming convention prefix that exists on published assets containing armatures. Used to create/name actions during 'Shot_Build'. Armature name example: '{prefix}{base_name}'",
+        default="RIG-",
+    )
+
+    shot_builder_action_prefix: bpy.props.StringProperty(  # type: ignore
+        name="Action Prefix",
+        description="Naming convention prefix to add to new actions. Actions will be named '{prefix}{base_name}.{shot_name}.v001' and set to fake user during 'Shot_Build'",
+        default="ANI-",
+    )
+
+    user_exec_code: bpy.props.StringProperty(  # type: ignore
+        name="Post Execution Command",
+        description="Run this command after shot_builder is complete, but before the file is saved.",
+        default="",
+    )
+
     session: Session = Session()

     tasks: bpy.props.CollectionProperty(type=KITSU_task)
@@ -372,6 +424,21 @@
             icon="ADD",
             emboss=False,
         )

+        # Shot_Builder settings.
+        box = layout.box()
+        box.label(text="Shot Builder", icon="MOD_BUILD")
+        box.row().prop(self, "edit_export_dir")
+        box.row().prop(self, "edit_export_file_pattern")
+        box.row().prop(self, "edit_export_frame_offset")
+        box.row().prop(self, "shot_builder_show_advanced")
+        if self.shot_builder_show_advanced:
+            start_frame_row = box.row()
+            start_frame_row.label(text="Start Frame Offset")
+            start_frame_row.prop(self, "shot_builder_frame_offset", text="")
+            box.row().prop(self, "shot_builder_armature_prefix")
+            box.row().prop(self, "shot_builder_action_prefix")
+            box.row().prop(self, "user_exec_code")

         # Misc settings.
         box = layout.box()
@@ -386,6 +453,7 @@
         box.row().prop(self, "shot_counter_digits")
         box.row().prop(self, "shot_counter_increment")


     @property
     def playblast_root_path(self) -> Optional[Path]:
         if not self.is_playblast_root_valid:
@@ -427,7 +495,15 @@
             return False

         return True

+    @property
+    def is_editorial_dir_valid(self) -> bool:
+        if editorial_export_check_latest(bpy.context) is None:
+            logger.error(
+                "Failed to initialize editorial export file model. Invalid path/pattern. Check addon preferences"
+            )
+            return False
+        return True
+

 def session_get(context: bpy.types.Context) -> Session:
     """
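The edit_export_file_pattern preference is a regular expression that the new editorial module (further down) matches against file names with re.search. A quick illustration of how the default pattern behaves:

import re

pattern = r"petprojects_v\d\d\d.mp4"  # the default above, hard coded for the Pet Projects film
for name in ("petprojects_v001.mp4", "petprojects_v023.mp4", "petprojects_final.mp4"):
    print(name, bool(re.search(pattern, name)))
# petprojects_v001.mp4 True
# petprojects_v023.mp4 True
# petprojects_final.mp4 False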
@@ -22,6 +22,8 @@ from blender_kitsu.shot_builder.ui import *
 from blender_kitsu.shot_builder.connectors.kitsu import *
 from blender_kitsu.shot_builder.operators import *
 import bpy
+from blender_kitsu.shot_builder.anim_setup import ops as anim_setup_ops  # TODO: Fix registration
+from blender_kitsu.shot_builder.editorial import ops as editorial_ops  # TODO: Fix registration

 # import logging
 # logging.basicConfig(level=logging.DEBUG)
@@ -45,12 +47,18 @@ classes = (


 def register():
+    anim_setup_ops.register()
+    editorial_ops.register()
     for cls in classes:
         bpy.utils.register_class(cls)
     bpy.types.TOPBAR_MT_file_new.append(topbar_file_new_draw_handler)



 def unregister():
+    anim_setup_ops.unregister()
+    editorial_ops.unregister()
     bpy.types.TOPBAR_MT_file_new.remove(topbar_file_new_draw_handler)
     for cls in classes:
         bpy.utils.unregister_class(cls)
29  blender_kitsu/shot_builder/anim_setup/core.py  Normal file
@@ -0,0 +1,29 @@
import bpy
import re
from pathlib import Path
from typing import Set
from blender_kitsu import prefs
from blender_kitsu import cache


def animation_workspace_vse_area_add(context: bpy.types.Context):
    """Split smallest 3D View in current workspace"""
    for workspace in [workspace for workspace in bpy.data.workspaces if workspace.name == "Animation"]:
        context.window.workspace = workspace
        context.view_layer.update()
        areas = workspace.screens[0].areas
        view_3d_areas = sorted([area for area in areas if area.ui_type == "VIEW_3D"], key=lambda x: x.width, reverse=False)
        small_view_3d = view_3d_areas[0]
        with context.temp_override(window=context.window, area=small_view_3d):
            bpy.ops.screen.area_split(direction='HORIZONTAL', factor=0.5)
        small_view_3d.ui_type = "SEQUENCE_EDITOR"
        small_view_3d.spaces[0].view_type = "PREVIEW"


def animation_workspace_delete_others():
    """Delete any workspace that is not an animation workspace"""
    for ws in bpy.data.workspaces:
        if ws.name != "Animation":
            bpy.ops.workspace.delete({"workspace": ws})
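Both helpers look up the workspace named "Animation" by its hard-coded name and are meant to run inside Blender; context.temp_override used above requires Blender 3.2 or newer. A minimal usage sketch from a script or the interactive console:

import bpy
from blender_kitsu.shot_builder.anim_setup.core import (
    animation_workspace_delete_others,
    animation_workspace_vse_area_add,
)

animation_workspace_delete_others()            # keep only the "Animation" workspace
animation_workspace_vse_area_add(bpy.context)  # add a VSE preview area to it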
35  blender_kitsu/shot_builder/anim_setup/ops.py  Normal file
@@ -0,0 +1,35 @@
import bpy
from typing import Set
from blender_kitsu.shot_builder.anim_setup.core import animation_workspace_delete_others, animation_workspace_vse_area_add


class ANIM_SETUP_OT_setup_workspaces(bpy.types.Operator):
    bl_idname = "anim_setup.setup_workspaces"
    bl_label = "Setup Workspace"
    bl_description = "Sets up the workspaces for the animation task"

    def execute(self, context: bpy.types.Context) -> Set[str]:
        animation_workspace_delete_others()
        self.report({"INFO"}, "Deleted non Animation workspaces")
        return {"FINISHED"}


class ANIM_SETUP_OT_animation_workspace_vse_area_add(bpy.types.Operator):
    bl_idname = "anim_setup.animation_workspace_vse_area_add"
    bl_label = "Split Viewport"
    bl_description = "Split smallest 3D View in current workspace"

    def execute(self, context: bpy.types.Context) -> Set[str]:
        animation_workspace_vse_area_add(context)
        return {"FINISHED"}


classes = [
    ANIM_SETUP_OT_setup_workspaces,
    ANIM_SETUP_OT_animation_workspace_vse_area_add,
]


def register():
    for cls in classes:
        bpy.utils.register_class(cls)


def unregister():
    for cls in classes:
        bpy.utils.unregister_class(cls)
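Once registered, the two operators are available through their bl_idname, e.g.:

import bpy

bpy.ops.anim_setup.setup_workspaces()
bpy.ops.anim_setup.animation_workspace_vse_area_add()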
@@ -7,7 +7,6 @@ from blender_kitsu.shot_builder.builder.init_shot import InitShotStep
 from blender_kitsu.shot_builder.builder.set_render_settings import SetRenderSettingsStep
 from blender_kitsu.shot_builder.builder.new_scene import NewSceneStep
 from blender_kitsu.shot_builder.builder.invoke_hook import InvokeHookStep
-from blender_kitsu.shot_builder.builder.save_file import SaveFileStep

 import bpy

@@ -76,8 +75,6 @@ class ShotBuilder:
         for hook in production.hooks.filter(match_task_type=task_type.name, match_asset_type=asset.asset_type):
             self._steps.append(InvokeHookStep(hook))

-        self._steps.append(SaveFileStep())

     def build(self) -> None:
         num_steps = len(self._steps)
         step_number = 1

@@ -11,6 +11,14 @@ import logging
 logger = logging.getLogger(__name__)


+def save_shot_builder_file(file_path: str):
+    """Save Shot File within Folder of matching name.
+    Set Shot File to relative Paths."""
+    dir_path = pathlib.Path(file_path)
+    dir_path.mkdir(parents=True, exist_ok=True)
+    bpy.ops.wm.save_mainfile(filepath=file_path, relative_remap=True)
+

 class SaveFileStep(BuildStep):
     def __str__(self) -> str:
         return "save file"
@@ -18,7 +26,5 @@ class SaveFileStep(BuildStep):
     def execute(self, build_context: BuildContext) -> None:
         shot = build_context.shot
         file_path = pathlib.Path(shot.file_path)
-        file_path.mkdir(parents=True, exist_ok=True)
+        save_shot_builder_file(file_path)

         logger.info(f"save file {shot.file_path}")
-        bpy.ops.wm.save_mainfile(filepath=shot.file_path, relative_remap=True)
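With SaveFileStep removed from the default step list, saving now goes through save_shot_builder_file(), which the new shot-file operator further down calls from its modal handler when 'Save after building' is enabled. A minimal direct call might look like this (the path is a made-up example):

from blender_kitsu.shot_builder.builder.save_file import save_shot_builder_file

# Hypothetical shot file path; the function runs mkdir(parents=True, exist_ok=True)
# on the given path and then saves the current .blend with relative_remap=True.
save_shot_builder_file("/projects/petprojects/shots/010_0010_A/010_0010_A.anim.blend")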
30  blender_kitsu/shot_builder/editorial/__init__.py  Normal file
@@ -0,0 +1,30 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####

# <pep8 compliant>

import bpy
from blender_kitsu.shot_builder.editorial import ops


def register():
    ops.register()


def unregister():
    ops.unregister()
75  blender_kitsu/shot_builder/editorial/core.py  Normal file
@@ -0,0 +1,75 @@
import bpy
import re
from pathlib import Path
from typing import Set
from blender_kitsu import prefs
from blender_kitsu import cache


def editorial_export_get_latest(context: bpy.types.Context, shot) -> list[bpy.types.Sequence]:  # TODO: add info to shot
    """Loads latest export from editorial department"""
    addon_prefs = prefs.addon_prefs_get(context)
    strip_channel = 1
    latest_file = editorial_export_check_latest(context)
    if not latest_file:
        return None
    # Check if Kitsu server returned empty shot.
    if shot.get("id") == '':
        return None
    strip_filepath = latest_file.as_posix()
    strip_frame_start = addon_prefs.shot_builder_frame_offset

    scene = context.scene
    if not scene.sequence_editor:
        scene.sequence_editor_create()
    seq_editor = scene.sequence_editor
    movie_strip = seq_editor.sequences.new_movie(
        latest_file.name,
        strip_filepath,
        strip_channel + 1,
        strip_frame_start,
        fit_method="FIT",
    )
    sound_strip = seq_editor.sequences.new_sound(
        latest_file.name,
        strip_filepath,
        strip_channel,
        strip_frame_start,
    )
    new_strips = [movie_strip, sound_strip]

    # Update shift frame range prop.
    frame_in = shot["data"].get("frame_in")
    frame_3d_in = shot["data"].get("3d_in")
    frame_3d_offset = frame_3d_in - addon_prefs.shot_builder_frame_offset
    edit_export_offset = addon_prefs.edit_export_frame_offset

    # Set sequence strip start from Kitsu data.
    for strip in new_strips:
        strip.frame_start = -frame_in + (strip_frame_start * 2) + frame_3d_offset + edit_export_offset
    return new_strips


def editorial_export_check_latest(context: bpy.types.Context):
    """Find latest export in editorial export directory"""
    addon_prefs = prefs.addon_prefs_get(context)

    edit_export_path = Path(addon_prefs.edit_export_dir)

    files_list = [
        f
        for f in edit_export_path.iterdir()
        if f.is_file() and editorial_export_is_valid_edit_name(addon_prefs.edit_export_file_pattern, f.name)
    ]
    if len(files_list) >= 1:
        files_list = sorted(files_list, reverse=True)
        return files_list[0]
    return None


def editorial_export_is_valid_edit_name(file_pattern: str, filename: str) -> bool:
    """Verify file name matches file pattern set in preferences"""
    match = re.search(file_pattern, filename)
    if match:
        return True
    return False
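The frame_start assignment above shifts both strips so the shot's cut-in from the edit lines up with the scene's start frame. A worked example of the arithmetic with illustrative numbers (not taken from the production):

# Illustrative values only, using the preference defaults shown earlier.
strip_frame_start = 101     # addon_prefs.shot_builder_frame_offset
frame_in = 520              # Kitsu "frame_in": the shot's cut-in frame in the edit
frame_3d_in = 101           # Kitsu "3d_in" metadata
edit_export_offset = -102   # addon_prefs.edit_export_frame_offset

frame_3d_offset = frame_3d_in - strip_frame_start   # 0
frame_start = -frame_in + (strip_frame_start * 2) + frame_3d_offset + edit_export_offset
print(frame_start)  # -520 + 202 + 0 - 102 = -420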
38  blender_kitsu/shot_builder/editorial/ops.py  Normal file
@@ -0,0 +1,38 @@
import bpy
from typing import Set
from blender_kitsu.shot_builder.editorial.core import editorial_export_get_latest
from blender_kitsu import cache, gazu


class ANIM_SETUP_OT_load_latest_editorial(bpy.types.Operator):
    bl_idname = "asset_setup.load_latest_editorial"
    bl_label = "Load Editorial Export"
    bl_description = (
        "Loads latest edit from shot_preview_folder. "
        "Shifts edit so current shot starts at 3d_in metadata shot key from Kitsu"
    )

    def execute(self, context: bpy.types.Context) -> Set[str]:
        cache_shot = cache.shot_active_get()
        shot = gazu.shot.get_shot(cache_shot.id)  # TODO: inefficient to load the shot twice
        strips = editorial_export_get_latest(context, shot)
        if strips is None:
            self.report(
                {"ERROR"}, "No valid editorial export in editorial export path."
            )
            return {"CANCELLED"}

        self.report({"INFO"}, f"Loaded latest edit: {strips[0].name}")
        return {"FINISHED"}


classes = [
    ANIM_SETUP_OT_load_latest_editorial,
]


def register():
    for cls in classes:
        bpy.utils.register_class(cls)


def unregister():
    for cls in classes:
        bpy.utils.unregister_class(cls)
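As with the anim_setup operators, this one is invoked by its bl_idname once registered; it expects an active Kitsu shot in the blender_kitsu cache and a valid editorial export directory in the add-on preferences:

import bpy

bpy.ops.asset_setup.load_latest_editorial()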
@@ -17,13 +17,18 @@
 # ##### END GPL LICENSE BLOCK #####

 # <pep8 compliant>
+import pathlib
 from typing import *
 import bpy
 from blender_kitsu.shot_builder.shot import ShotRef
 from blender_kitsu.shot_builder.project import ensure_loaded_production, get_active_production
 from blender_kitsu.shot_builder.builder import ShotBuilder
 from blender_kitsu.shot_builder.task_type import TaskType
-from blender_kitsu import prefs, cache
+from blender_kitsu import prefs, cache, gazu
+from blender_kitsu.shot_builder.anim_setup.core import animation_workspace_delete_others, animation_workspace_vse_area_add
+from blender_kitsu.shot_builder.editorial.core import editorial_export_get_latest
+from blender_kitsu.shot_builder.builder.save_file import save_shot_builder_file


 _production_task_type_items: List[Tuple[str, str, str]] = []

@@ -73,6 +78,11 @@ class SHOTBUILDER_OT_NewShotFile(bpy.types.Operator):
     bl_idname = "shotbuilder.new_shot_file"
     bl_label = "New Production Shot File"

+    _timer = None
+    _built_shot = False
+    _add_vse_area = False
+    _file_path = ''
+
     production_root: bpy.props.StringProperty(  # type: ignore
         name="Production Root",
         description="Root of the production",
@@ -102,6 +112,36 @@
         description="Task to create the shot file for",
         items=production_task_type_items
     )
+    auto_save: bpy.props.BoolProperty(
+        name="Save after building.",
+        description="Automatically save build file after 'Shot Builder' is complete.",
+        default=True,
+    )
+
+    def modal(self, context, event):
+
+        if event.type == 'TIMER' and not self._add_vse_area:
+            # Show Storyboard/Animatic from VSE.
+            """Runs as a modal event because functions called in execute(), like
+            animation_workspace_delete_others(), change UI context that needs to be refreshed.
+            https://docs.blender.org/api/current/info_gotcha.html#no-updates-after-changing-ui-context"""
+            animation_workspace_vse_area_add(context)
+            self._add_vse_area = True
+
+        if self._built_shot and self._add_vse_area:
+            if self.auto_save:
+                file_path = pathlib.Path(self._file_path)
+                try:
+                    save_shot_builder_file(self._file_path)
+                    self.report({"INFO"}, f"Saved Shot {self.shot_id} at {self._file_path}")
+                    return {'FINISHED'}
+                except FileExistsError:
+                    self.report({"ERROR"}, f"Cannot create a file/folder when that file/folder already exists {file_path}")
+                    return {'CANCELLED'}
+            self.report({"INFO"}, f"Built Shot {self.shot_id}, file is not saved!")
+            return {'FINISHED'}
+
+        return {'PASS_THROUGH'}

     def invoke(self, context: bpy.types.Context, event: bpy.types.Event) -> Set[str]:
         addon_prefs = prefs.addon_prefs_get(bpy.context)
@@ -122,6 +162,11 @@
                 {'ERROR'}, "Operator is not able to determine the project root directory. Check project root directory is configured in 'Blender Kitsu' addon preferences.")
             return {'CANCELLED'}

+        if not addon_prefs.is_editorial_dir_valid:
+            self.report(
+                {'ERROR'}, "Shot builder is dependent on a valid editorial export path and file pattern. Check Preferences, errors appear in console")
+            return {'CANCELLED'}
+
         self.production_root = addon_prefs.project_root_dir
         self.production_name = project.name

@@ -146,19 +191,51 @@
         return cast(Set[str], context.window_manager.invoke_props_dialog(self, width=400))

     def execute(self, context: bpy.types.Context) -> Set[str]:
+        wm = context.window_manager
+        self._timer = wm.event_timer_add(0.1, window=context.window)
+        wm.modal_handler_add(self)
         if not self.production_root:
             self.report(
                 {'ERROR'}, "Shot builder can only be started from the File menu. Shortcuts like CTRL-N don't work")
             return {'CANCELLED'}
+        if self._built_shot:
+            return {'RUNNING_MODAL'}
+        addon_prefs = bpy.context.preferences.addons["blender_kitsu"].preferences
         ensure_loaded_production(context)
         production = get_active_production()
         shot_builder = ShotBuilder(
             context=context, production=production, shot_name=self.shot_id, task_type=TaskType(self.task_type))
         shot_builder.create_build_steps()
         shot_builder.build()

+        # Build Kitsu context.
+        sequence = gazu.shot.get_sequence_by_name(production.config['KITSU_PROJECT_ID'], self.seq_id)
+        shot = gazu.shot.get_shot_by_name(sequence, self.shot_id)
+
+        # Load edit.
+        editorial_export_get_latest(context, shot)
+
+        # Load Anim Workspace.
+        animation_workspace_delete_others()
+
+        # Initialize armatures.
+        for obj in [obj for obj in bpy.data.objects if obj.type == "ARMATURE"]:
+            base_name = obj.name.split(addon_prefs.shot_builder_armature_prefix)[-1]
+            new_action = bpy.data.actions.new(f"{addon_prefs.shot_builder_action_prefix}{base_name}.{self.shot_id}.v001")
+            new_action.use_fake_user = True
+            obj.animation_data.action = new_action
+
+        # Set Shot Frame Range.
+        frame_length = shot.get('nb_frames')
+        context.scene.frame_start = addon_prefs.shot_builder_frame_offset
+        context.scene.frame_end = frame_length + addon_prefs.shot_builder_frame_offset
+
+        # Run User Script.
+        exec(addon_prefs.user_exec_code)
+
+        self._file_path = shot_builder.build_context.shot.file_path
+        self._built_shot = True
+        return {'RUNNING_MODAL'}

-        return {'FINISHED'}

     def draw(self, context: bpy.types.Context) -> None:
         layout = self.layout
@@ -168,3 +245,4 @@
         layout.prop(self, "seq_id")
         layout.prop(self, "shot_id")
         layout.prop(self, "task_type")
+        layout.prop(self, "auto_save")