Asset Pipeline v2 #145

Closed
Nick Alberelli wants to merge 431 commits from (deleted):feature/asset-pipeline-v2 into main

When changing the target branch, be careful to rebase the branch in your fork to match. See documentation.
2 changed files with 28 additions and 22 deletions
Showing only changes of commit 129a2936db - Show all commits

View File

@@ -101,7 +101,7 @@ def find_file_version(file):
def get_next_published_file(current_file: Path, publish_type="publish"): def get_next_published_file(current_file: Path, publish_type="publish"):
last_publish = find_published(current_file, publish_type) last_publish = find_latest_publish(current_file, publish_type)
base_name = current_file.name.split(".")[0] base_name = current_file.name.split(".")[0]
publish_dir = current_file.parent.joinpath(publish_type) publish_dir = current_file.parent.joinpath(publish_type)
if not last_publish: if not last_publish:
@@ -113,21 +113,26 @@ def get_next_published_file(current_file: Path, publish_type="publish"):
return publish_dir.joinpath(base_name + f".v" + new_version + ".blend") return publish_dir.joinpath(base_name + f".v" + new_version + ".blend")
def find_all_published(current_file: Path, publish_type="publish"):
    """Return every published .blend file for *current_file*, sorted by version.

    Args:
        current_file: Working file whose sibling publish directory is scanned.
        publish_type: Name of the publish sub-directory ("publish" or "staged")
            relative to the working file's parent. Defaults to "publish" for
            consistency with the sibling helpers.

    Returns:
        list[Path]: Published files sorted ascending by ``find_file_version``.
        An empty list when the publish directory does not exist, so callers
        (e.g. the sync operator's publish loop) can iterate unconditionally.
    """
    publish_dir = current_file.parent.joinpath(publish_type)
    if not publish_dir.exists():
        # Bug fix: a bare `return` yielded None, which crashes callers that
        # iterate the result. An empty list is falsy too, so truthiness
        # checks in find_latest_publish keep working.
        return []
    published_files = list(publish_dir.glob('*.blend'))
    published_files.sort(key=find_file_version)
    return published_files
def find_latest_publish(current_file: Path, publish_type="publish"):
    """Return the newest published file for *current_file*.

    Delegates to ``find_all_published`` (which sorts ascending by version)
    and picks the last entry. Returns None when nothing has been published.
    """
    all_published = find_all_published(current_file, publish_type)
    if not all_published:
        return None
    return all_published[-1]
def find_sync_target(current_file: Path):
    """Return the publish file to sync against.

    Prefers the latest "staged" publish when one exists; otherwise falls
    back to the latest regular "publish" (which may itself be None).
    """
    staged = find_latest_publish(current_file, publish_type="staged")
    return staged if staged else find_latest_publish(current_file, publish_type="publish")
def import_data_from_lib( def import_data_from_lib(

View File

@@ -96,8 +96,6 @@ class ASSETPIPE_OT_sync_with_publish(bpy.types.Operator):
return {'CANCELLED'} return {'CANCELLED'}
pub_file = core.find_sync_target(current_file) pub_file = core.find_sync_target(current_file)
pub_file_path = pub_file.__str__()
if self.pull: if self.pull:
error_msg = core.merge_task_layer( error_msg = core.merge_task_layer(
context, context,
@@ -116,25 +114,28 @@ class ASSETPIPE_OT_sync_with_publish(bpy.types.Operator):
if not self.push: if not self.push:
return {'FINISHED'} return {'FINISHED'}
bpy.ops.wm.open_mainfile(filepath=pub_file_path) # TODO Include staged file if it exists in this loop
for file in core.find_all_published(current_file, "publish"):
file_path = file.__str__()
bpy.ops.wm.open_mainfile(filepath=file_path)
local_tls = [ local_tls = [
item for item in constants.TASK_LAYER_KEYS if item != task_layer_name item for item in constants.TASK_LAYER_KEYS if item != task_layer_name
] ]
error_msg = core.merge_task_layer( error_msg = core.merge_task_layer(
context, context,
col_base_name=get_parent_col_name(), col_base_name=get_parent_col_name(),
local_tls=local_tls, local_tls=local_tls,
target_file=current_file, target_file=current_file,
) )
if error_msg: if error_msg:
bpy.ops.wm.open_mainfile(filepath=current_file.__str__())
self.report({'ERROR'}, error_msg)
return {'CANCELLED'}
bpy.ops.wm.save_as_mainfile(filepath=file_path)
bpy.ops.wm.open_mainfile(filepath=current_file.__str__()) bpy.ops.wm.open_mainfile(filepath=current_file.__str__())
self.report({'ERROR'}, error_msg)
return {'CANCELLED'}
bpy.ops.wm.save_as_mainfile(filepath=pub_file_path)
bpy.ops.wm.open_mainfile(filepath=current_file.__str__())
return {'FINISHED'} return {'FINISHED'}