Asset Pipeline v2 #145
@@ -101,7 +101,7 @@ def find_file_version(file):


 def get_next_published_file(current_file: Path, publish_type="publish"):
-    last_publish = find_published(current_file, publish_type)
+    last_publish = find_latest_publish(current_file, publish_type)
     base_name = current_file.name.split(".")[0]
     publish_dir = current_file.parent.joinpath(publish_type)
     if not last_publish:
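For context: find_file_version, named in the hunk header above and used as the sort key in the next hunk, is not touched by this diff. A minimal sketch of such a key function, assuming the "<base>.v<version>.blend" naming that get_next_published_file produces, could look like the following (hypothetical, not the addon's actual implementation):

from pathlib import Path


def find_file_version_sketch(file: Path) -> int:
    # Assumed pattern from get_next_published_file: "chair.v003.blend" -> 3.
    # Hypothetical helper for illustration only.
    return int(file.name.split(".")[1].removeprefix("v"))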
@@ -113,21 +113,26 @@ def get_next_published_file(current_file: Path, publish_type="publish"):
     return publish_dir.joinpath(base_name + f".v" + new_version + ".blend")


-def find_published(current_file: Path, publish_type="publish"):
+def find_all_published(current_file, publish_type):
     publish_dir = current_file.parent.joinpath(publish_type)
     if not publish_dir.exists():
         return
     published_files = list(publish_dir.glob('*.blend'))
     published_files.sort(key=find_file_version)
     return published_files
+
+
+def find_latest_publish(current_file: Path, publish_type="publish"):
+    published_files = find_all_published(current_file, publish_type)
+    if published_files:
+        return published_files[-1]


 def find_sync_target(current_file: Path):
-    latest_staged = find_published(current_file, publish_type="staged")
+    latest_staged = find_latest_publish(current_file, publish_type="staged")
     if latest_staged:
         return latest_staged
-    return find_published(current_file, publish_type="publish")
+    return find_latest_publish(current_file, publish_type="publish")


 def import_data_from_lib(
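Taken together, the old single-purpose find_published is split into a list-returning helper plus a thin "latest" wrapper, and find_sync_target still prefers a staged publish over a regular one. A rough usage sketch of the new helpers (the file path is invented for illustration):

from pathlib import Path

# Hypothetical working file; in the addon this comes from the current blend file.
current_file = Path("/project/chair/chair.blend")

all_pubs = find_all_published(current_file, "publish")   # sorted publish/*.blend, or None if no publish dir exists
latest = find_latest_publish(current_file)                # last entry of that list, or None
target = find_sync_target(current_file)                   # latest "staged" file if present, else latest "publish"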
@@ -96,8 +96,6 @@ class ASSETPIPE_OT_sync_with_publish(bpy.types.Operator):
             return {'CANCELLED'}

         pub_file = core.find_sync_target(current_file)
         pub_file_path = pub_file.__str__()

         if self.pull:
             error_msg = core.merge_task_layer(
                 context,
@@ -116,25 +114,28 @@ class ASSETPIPE_OT_sync_with_publish(bpy.types.Operator):
         if not self.push:
             return {'FINISHED'}

-        bpy.ops.wm.open_mainfile(filepath=pub_file_path)
-
-        local_tls = [
-            item for item in constants.TASK_LAYER_KEYS if item != task_layer_name
-        ]
-
-        error_msg = core.merge_task_layer(
-            context,
-            col_base_name=get_parent_col_name(),
-            local_tls=local_tls,
-            target_file=current_file,
-        )
-        if error_msg:
-            bpy.ops.wm.open_mainfile(filepath=current_file.__str__())
-            self.report({'ERROR'}, error_msg)
-            return {'CANCELLED'}
-
-        bpy.ops.wm.save_as_mainfile(filepath=pub_file_path)
+        # TODO Include staged file if it exists in this loop
+        for file in core.find_all_published(current_file, "publish"):
+            file_path = file.__str__()
+            bpy.ops.wm.open_mainfile(filepath=file_path)
+
+            local_tls = [
+                item for item in constants.TASK_LAYER_KEYS if item != task_layer_name
+            ]
+
+            error_msg = core.merge_task_layer(
+                context,
+                col_base_name=get_parent_col_name(),
+                local_tls=local_tls,
+                target_file=current_file,
+            )
+            if error_msg:
+                bpy.ops.wm.open_mainfile(filepath=current_file.__str__())
+                self.report({'ERROR'}, error_msg)
+                return {'CANCELLED'}
+
+            bpy.ops.wm.save_as_mainfile(filepath=file_path)
+
         bpy.ops.wm.open_mainfile(filepath=current_file.__str__())
         return {'FINISHED'}
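The TODO in the push loop points at the staged file not being written yet. One possible shape for that, reusing the helpers introduced above (a sketch only, not part of this diff):

# Sketch for the TODO above: also push to the staged file if one exists.
# Hypothetical follow-up, not part of this PR.
push_targets = list(core.find_all_published(current_file, "publish") or [])
staged = core.find_latest_publish(current_file, publish_type="staged")
if staged:
    push_targets.append(staged)
for file in push_targets:
    ...  # same open / merge_task_layer / save steps as in the loop above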