Compare commits
79 Commits
version-1. ... version-1.
SHA1
---
1d1c8cf3d6
fc01e32f0d
7577b348a5
be99bcb250
2190bd795e
76d1f88c4e
f0b7a0451d
6eab5ba0af
d457c77b19
ef70d20a77
db10495e7f
586905a183
822c8daf07
e044607d91
e484d6496c
78d567793e
7e105167c0
d53938e03b
0f26551368
645529bf35
4d2314e08f
a5df609d95
e9a08c11b3
7bdfa28a3f
e73e9d3df7
671e9f31fa
6de026c8e2
6470feac7c
6462561f2d
2080f92558
a6f5a16583
6f376027e5
2ee9d1ebfa
ed02816872
d100232428
9044bfadb9
4cdf2cee9c
9c527520a9
56137c485f
eb77461ca0
884d68ebe8
36d62082f3
af53d61cf2
332c32ca9c
988dc72ba1
82c7560c7b
73e2fd77e2
483e847ffe
ef822208c8
791b3f480c
efb1456596
58785977e7
8a5efc18db
b970530f44
ded05b6ca9
5f5f0d8db9
30f71ac9fc
bdef942b0b
2a0ef39b12
c57a3bc902
b94998d12e
1cd42e246e
079689a532
597ba6de1c
7b59391872
8201ba7691
8f2b0f8faa
33b52cc8a9
be46b9cf81
ba4c951d32
5c7343f8c9
64d36818fe
07f28d3072
48ca91a364
7ee052f71b
2bb859efd9
ac3943fe6c
5eaee872bf
6ce4399407
README.md
@@ -24,6 +24,8 @@ Installing the addon

* If you don't have one already, sign up for an account at
  the [Blender ID site](https://www.blender.org/id/).
* If you had a previous version of the addon installed, deactivate it
  and restart Blender.
* Install and log in with the
  [Blender ID addon](https://developer.blender.org/diffusion/BIA/).
* Install the Blender Cloud addon in Blender (User Preferences →
blender_cloud/__init__.py
@@ -19,19 +19,21 @@
# <pep8 compliant>

bl_info = {
    'name': 'Blender Cloud Texture Browser',
    'name': 'Blender Cloud',
    'author': 'Sybren A. Stüvel and Francesco Siddi',
    'version': (0, 2, 0),
    'version': (1, 2, 2),
    'blender': (2, 77, 0),
    'location': 'Ctrl+Shift+Alt+A anywhere',
    'description': 'Allows downloading of textures from the Blender Cloud. Requires '
                   'the Blender ID addon and Blender 2.77a or newer.',
    'location': 'Addon Preferences panel, and Ctrl+Shift+Alt+A anywhere for texture browser',
    'description': 'Texture library browser and Blender Sync. Requires the Blender ID addon '
                   'and Blender 2.77a or newer.',
    'wiki_url': 'http://wiki.blender.org/index.php/Extensions:2.6/Py/'
                'Scripts/System/BlenderCloud',
    'category': 'System',
    'support': 'TESTING'
    'support': 'OFFICIAL'
}

import logging

# Support reloading
if 'pillar' in locals():
    import importlib
@@ -43,16 +45,21 @@ if 'pillar' in locals():
    cache = importlib.reload(cache)
else:
    from . import wheels

    wheels.load_wheels()

    from . import pillar, cache

log = logging.getLogger(__name__)


def register():
    """Late-loads and registers the Blender-dependent submodules."""

    import sys

    _monkey_patch_requests()

    # Support reloading
    if '%s.blender' % __name__ in sys.modules:
        import importlib
@@ -66,20 +73,40 @@ def register():
        blender = reload_mod('blender')
        gui = reload_mod('gui')
        async_loop = reload_mod('async_loop')
        settings_sync = reload_mod('settings_sync')
        reload_mod('blendfile')
    else:
        from . import blender, gui, async_loop
        from . import blender, gui, async_loop, settings_sync, blendfile

    async_loop.setup_asyncio_executor()
    async_loop.register()

    blender.register()
    gui.register()
    blender.register()
    settings_sync.register()


def _monkey_patch_requests():
    """Monkey-patch old versions of Requests.

    This is required for the Mac version of Blender 2.77a.
    """

    import requests

    if requests.__build__ >= 0x020601:
        return

    log.info('Monkey-patching requests version %s', requests.__version__)
    from requests.packages.urllib3.response import HTTPResponse
    HTTPResponse.chunked = False
    HTTPResponse.chunk_left = None


def unregister():
    from . import blender, gui, async_loop
    from . import blender, gui, async_loop, settings_sync

    gui.unregister()
    settings_sync.unregister()
    blender.unregister()
    gui.unregister()
    async_loop.unregister()
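
The 0x020601 compared against requests.__build__ above is a hexadecimal build number, one byte per version component. A minimal sketch of the encoding (the helper name is hypothetical, not part of the addon):

    # Illustration only: how a hex build number such as requests.__build__
    # packs a version, one byte per component, so 0x020601 == version 2.6.1.
    def hex_build(major: int, minor: int, patch: int) -> int:
        return (major << 16) | (minor << 8) | patch

    assert hex_build(2, 6, 1) == 0x020601
    # Requests builds at or above 2.6.1 are left alone; the monkey-patch
    # above returns early for them.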
blender_cloud/async_loop.py
@@ -87,6 +87,15 @@ def ensure_async_loop():
    log.debug('Result of starting modal operator is %r', result)


def erase_async_loop():
    global _loop_kicking_operator_running

    log.debug('Erasing async loop')

    loop = asyncio.get_event_loop()
    loop.stop()


class AsyncLoopModalOperator(bpy.types.Operator):
    bl_idname = 'asyncio.loop'
    bl_label = 'Runs the asyncio main loop'
@@ -94,6 +103,14 @@ class AsyncLoopModalOperator(bpy.types.Operator):
    timer = None
    log = logging.getLogger(__name__ + '.AsyncLoopModalOperator')

    def __del__(self):
        global _loop_kicking_operator_running

        # This can be required when the operator is running while Blender
        # (re)loads a file. The operator then doesn't get the chance to
        # finish the async tasks, hence stop_after_this_kick is never True.
        _loop_kicking_operator_running = False

    def execute(self, context):
        return self.invoke(context, None)

@@ -115,6 +132,12 @@ class AsyncLoopModalOperator(bpy.types.Operator):
    def modal(self, context, event):
        global _loop_kicking_operator_running

        # If _loop_kicking_operator_running is set to False, someone called
        # erase_async_loop(). This is a signal that we really should stop
        # running.
        if not _loop_kicking_operator_running:
            return {'FINISHED'}

        if event.type != 'TIMER':
            return {'PASS_THROUGH'}

@@ -130,6 +153,84 @@ class AsyncLoopModalOperator(bpy.types.Operator):
        return {'RUNNING_MODAL'}


# noinspection PyAttributeOutsideInit
class AsyncModalOperatorMixin:
    async_task = None  # asyncio task for fetching thumbnails
    signalling_future = None  # asyncio future for signalling that we want to cancel everything.
    log = logging.getLogger('%s.AsyncModalOperatorMixin' % __name__)

    _state = 'INITIALIZING'

    def invoke(self, context, event):
        context.window_manager.modal_handler_add(self)
        self.timer = context.window_manager.event_timer_add(1 / 15, context.window)
        return {'RUNNING_MODAL'}

    def modal(self, context, event):
        task = self.async_task

        if self._state != 'EXCEPTION' and task and task.done() and not task.cancelled():
            ex = task.exception()
            if ex is not None:
                self._state = 'EXCEPTION'
                self.log.error('Exception while running task: %s', ex)
                return {'RUNNING_MODAL'}

        if self._state == 'QUIT':
            self._finish(context)
            return {'FINISHED'}

        return {'PASS_THROUGH'}

    def _finish(self, context):
        self._stop_async_task()
        context.window_manager.event_timer_remove(self.timer)

    def _new_async_task(self, async_task: asyncio.coroutine, future: asyncio.Future = None):
        """Stops the currently running async task, and starts another one."""

        self.log.debug('Setting up a new task %r, so any existing task must be stopped', async_task)
        self._stop_async_task()

        # Download the previews asynchronously.
        self.signalling_future = future or asyncio.Future()
        self.async_task = asyncio.ensure_future(async_task)
        self.log.debug('Created new task %r', self.async_task)

        # Start the async manager so everything happens.
        ensure_async_loop()

    def _stop_async_task(self):
        self.log.debug('Stopping async task')
        if self.async_task is None:
            self.log.debug('No async task, trivially stopped')
            return

        # Signal that we want to stop.
        self.async_task.cancel()
        if not self.signalling_future.done():
            self.log.info("Signalling that we want to cancel anything that's running.")
            self.signalling_future.cancel()

        # Wait until the asynchronous task is done.
        if not self.async_task.done():
            self.log.info("blocking until async task is done.")
            loop = asyncio.get_event_loop()
            try:
                loop.run_until_complete(self.async_task)
            except asyncio.CancelledError:
                self.log.info('Asynchronous task was cancelled')
                return

        # noinspection PyBroadException
        try:
            self.async_task.result()  # This re-raises any exception of the task.
        except asyncio.CancelledError:
            self.log.info('Asynchronous task was cancelled')
        except Exception:
            self.log.exception("Exception from asynchronous task")


def register():
    bpy.utils.register_class(AsyncLoopModalOperator)
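
For reference, a minimal hypothetical operator built on the mixin follows the same pattern the texture browser uses later in this diff: delegate invoke() to the mixin, then hand it a coroutine via _new_async_task(). Only the mixin API shown above is assumed:

    import asyncio
    import bpy

    class EXAMPLE_OT_async_demo(AsyncModalOperatorMixin, bpy.types.Operator):
        """Sketch only; the operator and idname are made up for illustration."""
        bl_idname = 'example.async_demo'
        bl_label = 'Async demo'

        def invoke(self, context, event):
            AsyncModalOperatorMixin.invoke(self, context, event)
            # The mixin wraps the coroutine in a task and kicks the shared
            # event loop via ensure_async_loop().
            self._new_async_task(self.async_execute(context))
            return {'RUNNING_MODAL'}

        async def async_execute(self, context):
            await asyncio.sleep(1)  # stand-in for real asynchronous work
            self._state = 'QUIT'    # makes the mixin's modal() call _finish()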
blender_cloud/blender.py
@@ -6,8 +6,8 @@ Separated from __init__.py so that we can import & run from non-Blender environm
import logging

import bpy
from bpy.types import AddonPreferences, Operator, WindowManager, Scene
from bpy.props import StringProperty
from bpy.types import AddonPreferences, Operator, WindowManager, Scene, PropertyGroup
from bpy.props import StringProperty, EnumProperty, PointerProperty

from . import pillar, gui

@@ -18,26 +18,82 @@ ADDON_NAME = 'blender_cloud'
log = logging.getLogger(__name__)


def redraw(self, context):
    context.area.tag_redraw()


def blender_syncable_versions(self, context):
    bss = context.window_manager.blender_sync_status
    versions = bss.available_blender_versions
    if not versions:
        return [('', 'No settings stored in your Blender Cloud', '')]
    return [(v, v, '') for v in versions]


class SyncStatusProperties(PropertyGroup):
    status = EnumProperty(
        items=[
            ('NONE', 'NONE', 'We have done nothing at all yet.'),
            ('IDLE', 'IDLE', 'User requested something, which is done, and we are now idle.'),
            ('SYNCING', 'SYNCING', 'Synchronising with Blender Cloud.'),
        ],
        name='status',
        description='Current status of Blender Sync.',
        update=redraw)

    version = EnumProperty(
        items=blender_syncable_versions,
        name='Version of Blender from which to pull',
        description='Version of Blender from which to pull')

    message = StringProperty(name='message', update=redraw)
    level = EnumProperty(
        items=[
            ('INFO', 'INFO', ''),
            ('WARNING', 'WARNING', ''),
            ('ERROR', 'ERROR', ''),
            ('SUBSCRIBE', 'SUBSCRIBE', ''),
        ],
        name='level',
        update=redraw)

    def report(self, level: set, message: str):
        assert len(level) == 1, 'level should be a set of one string, not %r' % level
        self.level = level.pop()
        self.message = message

        # Message can also be empty, just to erase it from the GUI.
        # No need to actually log those.
        if message:
            try:
                loglevel = logging._nameToLevel[self.level]
            except KeyError:
                loglevel = logging.WARNING
            log.log(loglevel, message)

    # List of syncable versions is stored in 'available_blender_versions' ID property,
    # because I don't know how to store a variable list of strings in a proper RNA property.
    @property
    def available_blender_versions(self) -> list:
        return self.get('available_blender_versions', [])

    @available_blender_versions.setter
    def available_blender_versions(self, new_versions):
        self['available_blender_versions'] = new_versions
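
As a usage sketch (assuming the group is registered on the window manager, which register() below does via a PointerProperty):

    bss = context.window_manager.blender_sync_status
    bss.report({'INFO'}, 'Synchronisation finished')  # logs and redraws the GUI label
    bss.report({'INFO'}, '')  # an empty message only clears the label; nothing is logged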
class BlenderCloudPreferences(AddonPreferences):
    bl_idname = ADDON_NAME

    # The following two properties are read-only to limit the scope of the
    # addon and allow for proper testing within this scope.
    pillar_server = bpy.props.StringProperty(
    pillar_server = StringProperty(
        name='Blender Cloud Server',
        description='URL of the Blender Cloud backend server',
        default=PILLAR_SERVER_URL,
        get=lambda self: PILLAR_SERVER_URL
    )

    # TODO: Move to the Scene properties?
    project_uuid = bpy.props.StringProperty(
        name='Project UUID',
        description='UUID of the current Blender Cloud project',
        default='5672beecc0261b2005ed1a33',
        get=lambda self: '5672beecc0261b2005ed1a33'
    )

    local_texture_dir = StringProperty(
        name='Default Blender Cloud texture storage directory',
        subtype='DIR_PATH',
@@ -65,7 +121,7 @@ class BlenderCloudPreferences(AddonPreferences):
            icon = 'ERROR'
            text = 'You are logged out.'
            help_text = 'To login, go to the Blender ID add-on preferences.'
        elif pillar.SUBCLIENT_ID not in blender_id_profile.subclients:
        elif bpy.app.debug and pillar.SUBCLIENT_ID not in blender_id_profile.subclients:
            icon = 'QUESTION'
            text = 'No Blender Cloud credentials.'
            help_text = ('You are logged in on Blender ID, but your credentials have not '
@@ -77,30 +133,85 @@ class BlenderCloudPreferences(AddonPreferences):
            help_text = ('To logout or change profile, '
                         'go to the Blender ID add-on preferences.')

        sub = layout.column(align=True)
        sub.label(text=text, icon=icon)
        # Authentication stuff
        auth_box = layout.box()
        auth_box.label(text=text, icon=icon)

        help_lines = textwrap.wrap(help_text, 80)
        for line in help_lines:
            sub.label(text=line)
            auth_box.label(text=line)
        if bpy.app.debug:
            auth_box.operator("pillar.credentials_update")

        sub = layout.column()
        # Texture browser stuff
        texture_box = layout.box()
        texture_box.enabled = icon != 'ERROR'
        sub = texture_box.column()
        sub.label(text='Local directory for downloaded textures')
        sub.prop(self, "local_texture_dir", text='Default')
        sub.prop(context.scene, "local_texture_dir", text='Current scene')

        # options for Pillar
        sub = layout.column()
        sub.enabled = icon != 'ERROR'
        # Blender Sync stuff
        bss = context.window_manager.blender_sync_status
        bsync_box = layout.box()
        bsync_box.enabled = icon != 'ERROR'
        row = bsync_box.row().split(percentage=0.33)
        row.label('Blender Sync with Blender Cloud')

        # TODO: let users easily pick a project. For now, we just use the
        # hard-coded server URL and UUID of the textures project.
        # sub.prop(self, "pillar_server")
        # sub.prop(self, "project_uuid")
        sub.operator("pillar.credentials_update")
        icon_for_level = {
            'INFO': 'NONE',
            'WARNING': 'INFO',
            'ERROR': 'ERROR',
            'SUBSCRIBE': 'ERROR',
        }
        icon = icon_for_level[bss.level] if bss.message else 'NONE'
        message_container = row.row()
        message_container.label(bss.message, icon=icon)

        sub = bsync_box.column()

        if bss.level == 'SUBSCRIBE':
            self.draw_subscribe_button(sub)
        else:
            self.draw_sync_buttons(sub, bss)

    def draw_subscribe_button(self, layout):
        layout.operator('pillar.subscribe', icon='WORLD')

    def draw_sync_buttons(self, layout, bss):
        layout.enabled = bss.status in {'NONE', 'IDLE'}

        buttons = layout.column()
        row_buttons = buttons.row().split(percentage=0.5)
        row_push = row_buttons.row()
        row_pull = row_buttons.row(align=True)

        row_push.operator('pillar.sync',
                          text='Save %i.%i settings' % bpy.app.version[:2],
                          icon='TRIA_UP').action = 'PUSH'

        versions = bss.available_blender_versions
        version = bss.version
        if bss.status in {'NONE', 'IDLE'}:
            if not versions or not version:
                row_pull.operator('pillar.sync',
                                  text='Find version to load',
                                  icon='TRIA_DOWN').action = 'REFRESH'
            else:
                props = row_pull.operator('pillar.sync',
                                          text='Load %s settings' % version,
                                          icon='TRIA_DOWN')
                props.action = 'PULL'
                props.blender_version = version
            row_pull.operator('pillar.sync',
                              text='',
                              icon='DOTSDOWN').action = 'SELECT'
        else:
            row_pull.label('Cloud Sync is running.')


class PillarCredentialsUpdate(Operator):
class PillarCredentialsUpdate(pillar.PillarOperatorMixin,
                              Operator):
    """Updates the Pillar URL and tests the new URL."""
    bl_idname = 'pillar.credentials_update'
    bl_label = 'Update credentials'
@@ -130,7 +241,7 @@ class PillarCredentialsUpdate(Operator):

        try:
            loop = asyncio.get_event_loop()
            loop.run_until_complete(pillar.refresh_pillar_credentials())
            loop.run_until_complete(self.check_credentials(context, set()))
        except blender_id.BlenderIdCommError as ex:
            log.exception('Error sending subclient-specific token to Blender ID')
            self.report({'ERROR'}, 'Failed to sync Blender ID to Blender Cloud')
@@ -144,6 +255,20 @@ class PillarCredentialsUpdate(Operator):
        return {'FINISHED'}


class PILLAR_OT_subscribe(Operator):
    """Opens a browser to subscribe the user to the Cloud."""
    bl_idname = 'pillar.subscribe'
    bl_label = 'Subscribe to the Cloud'

    def execute(self, context):
        import webbrowser

        webbrowser.open_new_tab('https://cloud.blender.org/join')
        self.report({'INFO'}, 'We just started a browser for you.')

        return {'FINISHED'}


def preferences() -> BlenderCloudPreferences:
    return bpy.context.user_preferences.addons[ADDON_NAME].preferences

@@ -151,17 +276,15 @@ def preferences() -> BlenderCloudPreferences:
def register():
    bpy.utils.register_class(BlenderCloudPreferences)
    bpy.utils.register_class(PillarCredentialsUpdate)

    WindowManager.blender_cloud_project = StringProperty(
        name="Blender Cloud project UUID",
        default='5672beecc0261b2005ed1a33')  # TODO: don't hard-code this

    WindowManager.blender_cloud_node = StringProperty(
        name="Blender Cloud node UUID",
        default='')  # empty == top-level of project
    bpy.utils.register_class(SyncStatusProperties)
    bpy.utils.register_class(PILLAR_OT_subscribe)

    addon_prefs = preferences()

    WindowManager.last_blender_cloud_location = StringProperty(
        name="Last Blender Cloud browser location",
        default="/")

    def default_if_empty(scene, context):
        """The scene's local_texture_dir, if empty, reverts to the addon prefs."""

@@ -174,13 +297,16 @@ def register():
        default=addon_prefs.local_texture_dir,
        update=default_if_empty)

    WindowManager.blender_sync_status = PointerProperty(type=SyncStatusProperties)


def unregister():
    gui.unregister()

    bpy.utils.unregister_class(PillarCredentialsUpdate)
    bpy.utils.unregister_class(BlenderCloudPreferences)
    bpy.utils.unregister_class(SyncStatusProperties)
    bpy.utils.unregister_class(PILLAR_OT_subscribe)

    del WindowManager.blender_cloud_project
    del WindowManager.blender_cloud_node
    del WindowManager.blender_cloud_thumbnails
    del WindowManager.last_blender_cloud_location
    del WindowManager.blender_sync_status
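
Because these are plain Operator subclasses, Blender exposes them under their bl_idname once registered; a hypothetical smoke test from the interactive console:

    import bpy

    bpy.ops.pillar.subscribe()           # opens https://cloud.blender.org/join in a browser
    bpy.ops.pillar.credentials_update()  # re-checks credentials against the Cloud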
blender_cloud/blendfile.py (new file, 929 lines)
@@ -0,0 +1,929 @@
# ***** BEGIN GPL LICENSE BLOCK *****
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# ***** END GPL LICENCE BLOCK *****
#
# (c) 2009, At Mind B.V. - Jeroen Bakker
# (c) 2014, Blender Foundation - Campbell Barton

import gzip
import logging
import os
import struct
import tempfile

log = logging.getLogger("blendfile")

FILE_BUFFER_SIZE = 1024 * 1024


# -----------------------------------------------------------------------------
# module global routines
#
# read routines
# open a filename
# determine if the file is compressed
# and returns a handle
def open_blend(filename, access="rb"):
    """Opens a blend file for reading or writing pending on the access
    supports 2 kind of blend files. Uncompressed and compressed.
    Known issue: does not support packaged blend files
    """
    handle = open(filename, access)
    magic_test = b"BLENDER"
    magic = handle.read(len(magic_test))
    if magic == magic_test:
        log.debug("normal blendfile detected")
        handle.seek(0, os.SEEK_SET)
        bfile = BlendFile(handle)
        bfile.is_compressed = False
        bfile.filepath_orig = filename
        return bfile
    elif magic[:2] == b'\x1f\x8b':
        log.debug("gzip blendfile detected")
        handle.close()
        log.debug("decompressing started")
        fs = gzip.open(filename, "rb")
        data = fs.read(FILE_BUFFER_SIZE)
        magic = data[:len(magic_test)]
        if magic == magic_test:
            handle = tempfile.TemporaryFile()
            while data:
                handle.write(data)
                data = fs.read(FILE_BUFFER_SIZE)
            log.debug("decompressing finished")
            fs.close()
            log.debug("resetting decompressed file")
            handle.seek(os.SEEK_SET, 0)
            bfile = BlendFile(handle)
            bfile.is_compressed = True
            bfile.filepath_orig = filename
            return bfile
        else:
            raise Exception("filetype inside gzip not a blend")
    else:
        raise Exception("filetype not a blend or a gzip blend")


def pad_up_4(offset):
    return (offset + 3) & ~3
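
For example, pad_up_4(5) == 8 and pad_up_4(8) == 8: offsets are rounded up to the next multiple of four, matching the 4-byte alignment of the name, type, and struct records inside the DNA1 block that decode_structs() walks below.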
# -----------------------------------------------------------------------------
# module classes


class BlendFile:
    """
    Blend file.
    """
    __slots__ = (
        # file (result of open())
        "handle",
        # str (original name of the file path)
        "filepath_orig",
        # BlendFileHeader
        "header",
        # struct.Struct
        "block_header_struct",
        # BlendFileBlock
        "blocks",
        # [DNAStruct, ...]
        "structs",
        # dict {b'StructName': sdna_index}
        # (where the index is an index into 'structs')
        "sdna_index_from_id",
        # dict {addr_old: block}
        "block_from_offset",
        # int
        "code_index",
        # bool (did we make a change)
        "is_modified",
        # bool (is file gzipped)
        "is_compressed",
    )

    def __init__(self, handle):
        log.debug("initializing reading blend-file")
        self.handle = handle
        self.header = BlendFileHeader(handle)
        self.block_header_struct = self.header.create_block_header_struct()
        self.blocks = []
        self.code_index = {}

        block = BlendFileBlock(handle, self)
        while block.code != b'ENDB':
            if block.code == b'DNA1':
                (self.structs,
                 self.sdna_index_from_id,
                 ) = BlendFile.decode_structs(self.header, block, handle)
            else:
                handle.seek(block.size, os.SEEK_CUR)

            self.blocks.append(block)
            self.code_index.setdefault(block.code, []).append(block)

            block = BlendFileBlock(handle, self)
        self.is_modified = False
        self.blocks.append(block)

        # cache (could lazy init, incase we never use?)
        self.block_from_offset = {block.addr_old: block for block in self.blocks if block.code != b'ENDB'}

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def find_blocks_from_code(self, code):
        assert(type(code) == bytes)
        if code not in self.code_index:
            return []
        return self.code_index[code]

    def find_block_from_offset(self, offset):
        # same as looking looping over all blocks,
        # then checking ``block.addr_old == offset``
        assert(type(offset) is int)
        return self.block_from_offset.get(offset)

    def close(self):
        """
        Close the blend file
        writes the blend file to disk if changes has happened
        """
        handle = self.handle

        if self.is_modified:
            if self.is_compressed:
                log.debug("close compressed blend file")
                handle.seek(os.SEEK_SET, 0)
                log.debug("compressing started")
                fs = gzip.open(self.filepath_orig, "wb")
                data = handle.read(FILE_BUFFER_SIZE)
                while data:
                    fs.write(data)
                    data = handle.read(FILE_BUFFER_SIZE)
                fs.close()
                log.debug("compressing finished")

        handle.close()

    def ensure_subtype_smaller(self, sdna_index_curr, sdna_index_next):
        # never refine to a smaller type
        if (self.structs[sdna_index_curr].size >
                self.structs[sdna_index_next].size):

            raise RuntimeError("cant refine to smaller type (%s -> %s)" %
                               (self.structs[sdna_index_curr].dna_type_id.decode('ascii'),
                                self.structs[sdna_index_next].dna_type_id.decode('ascii')))

    @staticmethod
    def decode_structs(header, block, handle):
        """
        DNACatalog is a catalog of all information in the DNA1 file-block
        """
        log.debug("building DNA catalog")
        shortstruct = DNA_IO.USHORT[header.endian_index]
        shortstruct2 = struct.Struct(header.endian_str + b'HH')
        intstruct = DNA_IO.UINT[header.endian_index]

        data = handle.read(block.size)
        types = []
        names = []

        structs = []
        sdna_index_from_id = {}

        offset = 8
        names_len = intstruct.unpack_from(data, offset)[0]
        offset += 4

        log.debug("building #%d names" % names_len)
        for i in range(names_len):
            tName = DNA_IO.read_data0_offset(data, offset)
            offset = offset + len(tName) + 1
            names.append(DNAName(tName))
        del names_len

        offset = pad_up_4(offset)
        offset += 4
        types_len = intstruct.unpack_from(data, offset)[0]
        offset += 4
        log.debug("building #%d types" % types_len)
        for i in range(types_len):
            dna_type_id = DNA_IO.read_data0_offset(data, offset)
            # None will be replaced by the DNAStruct, below
            types.append(DNAStruct(dna_type_id))
            offset += len(dna_type_id) + 1

        offset = pad_up_4(offset)
        offset += 4
        log.debug("building #%d type-lengths" % types_len)
        for i in range(types_len):
            tLen = shortstruct.unpack_from(data, offset)[0]
            offset = offset + 2
            types[i].size = tLen
        del types_len

        offset = pad_up_4(offset)
        offset += 4

        structs_len = intstruct.unpack_from(data, offset)[0]
        offset += 4
        log.debug("building #%d structures" % structs_len)
        for sdna_index in range(structs_len):
            d = shortstruct2.unpack_from(data, offset)
            struct_type_index = d[0]
            offset += 4
            dna_struct = types[struct_type_index]
            sdna_index_from_id[dna_struct.dna_type_id] = sdna_index
            structs.append(dna_struct)

            fields_len = d[1]
            dna_offset = 0

            for field_index in range(fields_len):
                d2 = shortstruct2.unpack_from(data, offset)
                field_type_index = d2[0]
                field_name_index = d2[1]
                offset += 4
                dna_type = types[field_type_index]
                dna_name = names[field_name_index]
                if dna_name.is_pointer or dna_name.is_method_pointer:
                    dna_size = header.pointer_size * dna_name.array_size
                else:
                    dna_size = dna_type.size * dna_name.array_size

                field = DNAField(dna_type, dna_name, dna_size, dna_offset)
                dna_struct.fields.append(field)
                dna_struct.field_from_name[dna_name.name_only] = field
                dna_offset += dna_size

        return structs, sdna_index_from_id
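
A brief usage sketch of the reader so far ('scene.blend' is a placeholder path); open_blend() transparently handles gzipped files, and BlendFile doubles as a context manager:

    with open_blend('scene.blend') as bfile:
        for block in bfile.find_blocks_from_code(b'SC'):  # all Scene datablocks
            print(block)  # BlendFileBlock.__str__ shows DNA type, code, size, address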
class BlendFileBlock:
    """
    Instance of a struct.
    """
    __slots__ = (
        # BlendFile
        "file",
        "code",
        "size",
        "addr_old",
        "sdna_index",
        "count",
        "file_offset",
        "user_data",
    )

    def __str__(self):
        return ("<%s.%s (%s), size=%d at %s>" %
                # fields=[%s]
                (self.__class__.__name__,
                 self.dna_type.dna_type_id.decode('ascii'),
                 self.code.decode(),
                 self.size,
                 # b", ".join(f.dna_name.name_only for f in self.dna_type.fields).decode('ascii'),
                 hex(self.addr_old),
                 ))

    def __init__(self, handle, bfile):
        OLDBLOCK = struct.Struct(b'4sI')

        self.file = bfile
        self.user_data = None

        data = handle.read(bfile.block_header_struct.size)
        # header size can be 8, 20, or 24 bytes long
        # 8: old blend files ENDB block (exception)
        # 20: normal headers 32 bit platform
        # 24: normal headers 64 bit platform
        if len(data) > 15:

            blockheader = bfile.block_header_struct.unpack(data)
            self.code = blockheader[0].partition(b'\0')[0]
            if self.code != b'ENDB':
                self.size = blockheader[1]
                self.addr_old = blockheader[2]
                self.sdna_index = blockheader[3]
                self.count = blockheader[4]
                self.file_offset = handle.tell()
            else:
                self.size = 0
                self.addr_old = 0
                self.sdna_index = 0
                self.count = 0
                self.file_offset = 0
        else:
            blockheader = OLDBLOCK.unpack(data)
            self.code = blockheader[0].partition(b'\0')[0]
            self.code = DNA_IO.read_data0(blockheader[0])
            self.size = 0
            self.addr_old = 0
            self.sdna_index = 0
            self.count = 0
            self.file_offset = 0

    @property
    def dna_type(self):
        return self.file.structs[self.sdna_index]

    def refine_type_from_index(self, sdna_index_next):
        assert(type(sdna_index_next) is int)
        sdna_index_curr = self.sdna_index
        self.file.ensure_subtype_smaller(sdna_index_curr, sdna_index_next)
        self.sdna_index = sdna_index_next

    def refine_type(self, dna_type_id):
        assert(type(dna_type_id) is bytes)
        self.refine_type_from_index(self.file.sdna_index_from_id[dna_type_id])

    def get_file_offset(self, path,
                        default=...,
                        sdna_index_refine=None,
                        base_index=0,
                        ):
        """
        Return (offset, length)
        """
        assert(type(path) is bytes)

        ofs = self.file_offset
        if base_index != 0:
            assert(base_index < self.count)
            ofs += (self.size // self.count) * base_index
        self.file.handle.seek(ofs, os.SEEK_SET)

        if sdna_index_refine is None:
            sdna_index_refine = self.sdna_index
        else:
            self.file.ensure_subtype_smaller(self.sdna_index, sdna_index_refine)

        dna_struct = self.file.structs[sdna_index_refine]
        field = dna_struct.field_from_path(
            self.file.header, self.file.handle, path)

        return (self.file.handle.tell(), field.dna_name.array_size)

    def get(self, path,
            default=...,
            sdna_index_refine=None,
            use_nil=True, use_str=True,
            base_index=0,
            ):

        ofs = self.file_offset
        if base_index != 0:
            assert(base_index < self.count)
            ofs += (self.size // self.count) * base_index
        self.file.handle.seek(ofs, os.SEEK_SET)

        if sdna_index_refine is None:
            sdna_index_refine = self.sdna_index
        else:
            self.file.ensure_subtype_smaller(self.sdna_index, sdna_index_refine)

        dna_struct = self.file.structs[sdna_index_refine]
        return dna_struct.field_get(
            self.file.header, self.file.handle, path,
            default=default,
            use_nil=use_nil, use_str=use_str,
            )

    def get_recursive_iter(self, path, path_root=b"",
                           default=...,
                           sdna_index_refine=None,
                           use_nil=True, use_str=True,
                           base_index=0,
                           ):
        if path_root:
            path_full = (
                (path_root if type(path_root) is tuple else (path_root, )) +
                (path if type(path) is tuple else (path, )))
        else:
            path_full = path

        try:
            yield (path_full, self.get(path_full, default, sdna_index_refine, use_nil, use_str, base_index))
        except NotImplementedError as ex:
            msg, dna_name, dna_type = ex.args
            struct_index = self.file.sdna_index_from_id.get(dna_type.dna_type_id, None)
            if struct_index is None:
                yield (path_full, "<%s>" % dna_type.dna_type_id.decode('ascii'))
            else:
                struct = self.file.structs[struct_index]
                for f in struct.fields:
                    yield from self.get_recursive_iter(
                        f.dna_name.name_only, path_full, default, None, use_nil, use_str, 0)

    def items_recursive_iter(self):
        for k in self.keys():
            yield from self.get_recursive_iter(k, use_str=False)

    def get_data_hash(self):
        """
        Generates a 'hash' that can be used instead of addr_old as block id, and that should be 'stable' across .blend
        file load & save (i.e. it does not changes due to pointer addresses variations).
        """
        # TODO This implementation is most likely far from optimal... and CRC32 is not renown as the best hashing
        # algo either. But for now does the job!
        import zlib
        def _is_pointer(self, k):
            return self.file.structs[self.sdna_index].field_from_path(
                self.file.header, self.file.handle, k).dna_name.is_pointer

        hsh = 1
        for k, v in self.items_recursive_iter():
            if not _is_pointer(self, k):
                hsh = zlib.adler32(str(v).encode(), hsh)
        return hsh

    def set(self, path, value,
            sdna_index_refine=None,
            ):

        if sdna_index_refine is None:
            sdna_index_refine = self.sdna_index
        else:
            self.file.ensure_subtype_smaller(self.sdna_index, sdna_index_refine)

        dna_struct = self.file.structs[sdna_index_refine]
        self.file.handle.seek(self.file_offset, os.SEEK_SET)
        self.file.is_modified = True
        return dna_struct.field_set(
            self.file.header, self.file.handle, path, value)

    # ---------------
    # Utility get/set
    #
    # avoid inline pointer casting
    def get_pointer(
            self, path,
            default=...,
            sdna_index_refine=None,
            base_index=0,
            ):
        if sdna_index_refine is None:
            sdna_index_refine = self.sdna_index
        result = self.get(path, default, sdna_index_refine=sdna_index_refine, base_index=base_index)

        # default
        if type(result) is not int:
            return result

        assert(self.file.structs[sdna_index_refine].field_from_path(
            self.file.header, self.file.handle, path).dna_name.is_pointer)
        if result != 0:
            # possible (but unlikely)
            # that this fails and returns None
            # maybe we want to raise some exception in this case
            return self.file.find_block_from_offset(result)
        else:
            return None

    # ----------------------
    # Python convenience API

    # dict like access
    def __getitem__(self, item):
        return self.get(item, use_str=False)

    def __setitem__(self, item, value):
        self.set(item, value)

    def keys(self):
        return (f.dna_name.name_only for f in self.dna_type.fields)

    def values(self):
        for k in self.keys():
            try:
                yield self[k]
            except NotImplementedError as ex:
                msg, dna_name, dna_type = ex.args
                yield "<%s>" % dna_type.dna_type_id.decode('ascii')

    def items(self):
        for k in self.keys():
            try:
                yield (k, self[k])
            except NotImplementedError as ex:
                msg, dna_name, dna_type = ex.args
                yield (k, "<%s>" % dna_type.dna_type_id.decode('ascii'))
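
Putting the convenience API together, a hypothetical read through the dict-style accessors (the 'id.name' path follows the example given in DNAStruct.field_from_path below):

    with open_blend('scene.blend') as bf:           # placeholder file name
        block = bf.find_blocks_from_code(b'SC')[0]  # first Scene datablock
        print(list(block.keys()))                   # field names from the DNA struct
        print(block[b'id', b'name'])                # nested tuple path, returns bytes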
# -----------------------------------------------------------------------------
# Read Magic
#
# magic = str
# pointer_size = int
# is_little_endian = bool
# version = int


class BlendFileHeader:
    """
    BlendFileHeader allocates the first 12 bytes of a blend file
    it contains information about the hardware architecture
    """
    __slots__ = (
        # str
        "magic",
        # int 4/8
        "pointer_size",
        # bool
        "is_little_endian",
        # int
        "version",
        # str, used to pass to 'struct'
        "endian_str",
        # int, used to index common types
        "endian_index",
    )

    def __init__(self, handle):
        FILEHEADER = struct.Struct(b'7s1s1s3s')

        log.debug("reading blend-file-header")
        values = FILEHEADER.unpack(handle.read(FILEHEADER.size))
        self.magic = values[0]
        pointer_size_id = values[1]
        if pointer_size_id == b'-':
            self.pointer_size = 8
        elif pointer_size_id == b'_':
            self.pointer_size = 4
        else:
            assert(0)
        endian_id = values[2]
        if endian_id == b'v':
            self.is_little_endian = True
            self.endian_str = b'<'
            self.endian_index = 0
        elif endian_id == b'V':
            self.is_little_endian = False
            self.endian_index = 1
            self.endian_str = b'>'
        else:
            assert(0)

        version_id = values[3]
        self.version = int(version_id)

    def create_block_header_struct(self):
        return struct.Struct(b''.join((
            self.endian_str,
            b'4sI',
            b'I' if self.pointer_size == 4 else b'Q',
            b'II',
            )))
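
As a worked check of create_block_header_struct() against the "20 or 24 bytes" comment in BlendFileBlock, the format is: code (4s), length (I), old pointer address (I or Q), SDNA index (I), count (I). With an explicit '<' or '>' prefix, struct uses standard sizes and no padding:

    import struct

    assert struct.Struct(b'<4sIIII').size == 20  # 32-bit file: 4 + 4 + 4 + 4 + 4
    assert struct.Struct(b'<4sIQII').size == 24  # 64-bit file: 4 + 4 + 8 + 4 + 4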
class DNAName:
    """
    DNAName is a C-type name stored in the DNA
    """
    __slots__ = (
        "name_full",
        "name_only",
        "is_pointer",
        "is_method_pointer",
        "array_size",
    )

    def __init__(self, name_full):
        self.name_full = name_full
        self.name_only = self.calc_name_only()
        self.is_pointer = self.calc_is_pointer()
        self.is_method_pointer = self.calc_is_method_pointer()
        self.array_size = self.calc_array_size()

    def __repr__(self):
        return '%s(%r)' % (type(self).__qualname__, self.name_full)

    def as_reference(self, parent):
        if parent is None:
            result = b''
        else:
            result = parent + b'.'

        result = result + self.name_only
        return result

    def calc_name_only(self):
        result = self.name_full.strip(b'*()')
        index = result.find(b'[')
        if index != -1:
            result = result[:index]
        return result

    def calc_is_pointer(self):
        return (b'*' in self.name_full)

    def calc_is_method_pointer(self):
        return (b'(*' in self.name_full)

    def calc_array_size(self):
        result = 1
        temp = self.name_full
        index = temp.find(b'[')

        while index != -1:
            index_2 = temp.find(b']')
            result *= int(temp[index + 1:index_2])
            temp = temp[index_2 + 1:]
            index = temp.find(b'[')

        return result
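
A worked example of the name parsing above, on a made-up DNA name:

    name = DNAName(b'*vertexCos[4][3]')
    assert name.name_only == b'vertexCos'
    assert name.is_pointer               # the leading '*' marks a pointer
    assert not name.is_method_pointer    # that would need '(*'
    assert name.array_size == 12         # all [..] dimensions multiplied: 4 * 3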
class DNAField:
    """
    DNAField is a coupled DNAStruct and DNAName
    and cache offset for reuse
    """
    __slots__ = (
        # DNAName
        "dna_name",
        # tuple of 3 items
        # [bytes (struct name), int (struct size), DNAStruct]
        "dna_type",
        # size on-disk
        "dna_size",
        # cached info (avoid looping over fields each time)
        "dna_offset",
    )

    def __init__(self, dna_type, dna_name, dna_size, dna_offset):
        self.dna_type = dna_type
        self.dna_name = dna_name
        self.dna_size = dna_size
        self.dna_offset = dna_offset


class DNAStruct:
    """
    DNAStruct is a C-type structure stored in the DNA
    """
    __slots__ = (
        "dna_type_id",
        "size",
        "fields",
        "field_from_name",
        "user_data",
    )

    def __init__(self, dna_type_id):
        self.dna_type_id = dna_type_id
        self.fields = []
        self.field_from_name = {}
        self.user_data = None

    def __repr__(self):
        return '%s(%r)' % (type(self).__qualname__, self.dna_type_id)

    def field_from_path(self, header, handle, path):
        """
        Support lookups as bytes or a tuple of bytes and optional index.

        C style 'id.name' --> (b'id', b'name')
        C style 'array[4]' --> ('array', 4)
        """
        if type(path) is tuple:
            name = path[0]
            if len(path) >= 2 and type(path[1]) is not bytes:
                name_tail = path[2:]
                index = path[1]
                assert(type(index) is int)
            else:
                name_tail = path[1:]
                index = 0
        else:
            name = path
            name_tail = None
            index = 0

        assert(type(name) is bytes)

        field = self.field_from_name.get(name)

        if field is not None:
            handle.seek(field.dna_offset, os.SEEK_CUR)
            if index != 0:
                if field.dna_name.is_pointer:
                    index_offset = header.pointer_size * index
                else:
                    index_offset = field.dna_type.size * index
                assert(index_offset < field.dna_size)
                handle.seek(index_offset, os.SEEK_CUR)
            if not name_tail:  # None or ()
                return field
            else:
                return field.dna_type.field_from_path(header, handle, name_tail)

    def field_get(self, header, handle, path,
                  default=...,
                  use_nil=True, use_str=True,
                  ):
        field = self.field_from_path(header, handle, path)
        if field is None:
            if default is not ...:
                return default
            else:
                raise KeyError("%r not found in %r (%r)" %
                               (path, [f.dna_name.name_only for f in self.fields], self.dna_type_id))

        dna_type = field.dna_type
        dna_name = field.dna_name

        if dna_name.is_pointer:
            return DNA_IO.read_pointer(handle, header)
        elif dna_type.dna_type_id == b'int':
            if dna_name.array_size > 1:
                return [DNA_IO.read_int(handle, header) for i in range(dna_name.array_size)]
            return DNA_IO.read_int(handle, header)
        elif dna_type.dna_type_id == b'short':
            if dna_name.array_size > 1:
                return [DNA_IO.read_short(handle, header) for i in range(dna_name.array_size)]
            return DNA_IO.read_short(handle, header)
        elif dna_type.dna_type_id == b'uint64_t':
            if dna_name.array_size > 1:
                return [DNA_IO.read_ulong(handle, header) for i in range(dna_name.array_size)]
            return DNA_IO.read_ulong(handle, header)
        elif dna_type.dna_type_id == b'float':
            if dna_name.array_size > 1:
                return [DNA_IO.read_float(handle, header) for i in range(dna_name.array_size)]
            return DNA_IO.read_float(handle, header)
        elif dna_type.dna_type_id == b'char':
            if use_str:
                if use_nil:
                    return DNA_IO.read_string0(handle, dna_name.array_size)
                else:
                    return DNA_IO.read_string(handle, dna_name.array_size)
            else:
                if use_nil:
                    return DNA_IO.read_bytes0(handle, dna_name.array_size)
                else:
                    return DNA_IO.read_bytes(handle, dna_name.array_size)
        else:
            raise NotImplementedError("%r exists but isn't pointer, can't resolve field %r" %
                                      (path, dna_name.name_only), dna_name, dna_type)

    def field_set(self, header, handle, path, value):
        assert(type(path) == bytes)

        field = self.field_from_path(header, handle, path)
        if field is None:
            raise KeyError("%r not found in %r" %
                           (path, [f.dna_name.name_only for f in self.fields]))

        dna_type = field.dna_type
        dna_name = field.dna_name

        if dna_type.dna_type_id == b'char':
            if type(value) is str:
                return DNA_IO.write_string(handle, value, dna_name.array_size)
            else:
                return DNA_IO.write_bytes(handle, value, dna_name.array_size)
        elif dna_type.dna_type_id == b'int':
            DNA_IO.write_int(handle, header, value)
        else:
            raise NotImplementedError("Setting %r is not yet supported for %r" %
                                      (dna_type, dna_name), dna_name, dna_type)
class DNA_IO:
    """
    Module like class, for read-write utility functions.

    Only stores static methods & constants.
    """

    __slots__ = ()

    def __new__(cls, *args, **kwargs):
        raise RuntimeError("%s should not be instantiated" % cls)

    @staticmethod
    def write_string(handle, astring, fieldlen):
        assert(isinstance(astring, str))
        if len(astring) >= fieldlen:
            stringw = astring[0:fieldlen]
        else:
            stringw = astring + '\0'
        handle.write(stringw.encode('utf-8'))

    @staticmethod
    def write_bytes(handle, astring, fieldlen):
        assert(isinstance(astring, (bytes, bytearray)))
        if len(astring) >= fieldlen:
            stringw = astring[0:fieldlen]
        else:
            stringw = astring + b'\0'

        handle.write(stringw)

    @staticmethod
    def read_bytes(handle, length):
        data = handle.read(length)
        return data

    @staticmethod
    def read_bytes0(handle, length):
        data = handle.read(length)
        return DNA_IO.read_data0(data)

    @staticmethod
    def read_string(handle, length):
        return DNA_IO.read_bytes(handle, length).decode('utf-8')

    @staticmethod
    def read_string0(handle, length):
        return DNA_IO.read_bytes0(handle, length).decode('utf-8')

    @staticmethod
    def read_data0_offset(data, offset):
        add = data.find(b'\0', offset) - offset
        return data[offset:offset + add]

    @staticmethod
    def read_data0(data):
        add = data.find(b'\0')
        return data[:add]

    USHORT = struct.Struct(b'<H'), struct.Struct(b'>H')

    @staticmethod
    def read_ushort(handle, fileheader):
        st = DNA_IO.USHORT[fileheader.endian_index]
        return st.unpack(handle.read(st.size))[0]

    SSHORT = struct.Struct(b'<h'), struct.Struct(b'>h')

    @staticmethod
    def read_short(handle, fileheader):
        st = DNA_IO.SSHORT[fileheader.endian_index]
        return st.unpack(handle.read(st.size))[0]

    UINT = struct.Struct(b'<I'), struct.Struct(b'>I')

    @staticmethod
    def read_uint(handle, fileheader):
        st = DNA_IO.UINT[fileheader.endian_index]
        return st.unpack(handle.read(st.size))[0]

    SINT = struct.Struct(b'<i'), struct.Struct(b'>i')

    @staticmethod
    def read_int(handle, fileheader):
        st = DNA_IO.SINT[fileheader.endian_index]
        return st.unpack(handle.read(st.size))[0]

    @staticmethod
    def write_int(handle, fileheader, value):
        assert isinstance(value, int), 'value must be int, but is %r: %r' % (type(value), value)
        st = DNA_IO.SINT[fileheader.endian_index]
        to_write = st.pack(value)
        handle.write(to_write)

    FLOAT = struct.Struct(b'<f'), struct.Struct(b'>f')

    @staticmethod
    def read_float(handle, fileheader):
        st = DNA_IO.FLOAT[fileheader.endian_index]
        return st.unpack(handle.read(st.size))[0]

    ULONG = struct.Struct(b'<Q'), struct.Struct(b'>Q')

    @staticmethod
    def read_ulong(handle, fileheader):
        st = DNA_IO.ULONG[fileheader.endian_index]
        return st.unpack(handle.read(st.size))[0]

    @staticmethod
    def read_pointer(handle, header):
        """
        reads an pointer from a file handle
        the pointer size is given by the header (BlendFileHeader)
        """
        if header.pointer_size == 4:
            st = DNA_IO.UINT[header.endian_index]
            return st.unpack(handle.read(st.size))[0]
        if header.pointer_size == 8:
            st = DNA_IO.ULONG[header.endian_index]
            return st.unpack(handle.read(st.size))[0]
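
Each reader picks its byte order by indexing the paired little/big-endian Structs with BlendFileHeader.endian_index (0 for little-endian, 1 for big-endian); a quick illustration of the mechanism:

    import struct

    UINT = struct.Struct(b'<I'), struct.Struct(b'>I')
    data = b'\x01\x00\x00\x00'
    assert UINT[0].unpack(data)[0] == 1           # little-endian reading
    assert UINT[1].unpack(data)[0] == 0x01000000  # same bytes, big-endian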
@@ -27,14 +27,11 @@ import bgl
|
||||
import blf
|
||||
import os
|
||||
|
||||
from bpy.types import AddonPreferences
|
||||
from bpy.props import (BoolProperty, EnumProperty,
|
||||
FloatProperty, FloatVectorProperty,
|
||||
IntProperty, StringProperty)
|
||||
|
||||
import pillarsdk
|
||||
from . import async_loop, pillar, cache
|
||||
|
||||
REQUIRED_ROLES_FOR_TEXTURE_BROWSER = {'subscriber', 'demo'}
|
||||
|
||||
icon_width = 128
|
||||
icon_height = 128
|
||||
target_item_width = 400
|
||||
@@ -44,13 +41,27 @@ library_path = '/tmp'
|
||||
library_icons_path = os.path.join(os.path.dirname(__file__), "icons")
|
||||
|
||||
|
||||
class UpNode(pillarsdk.Node):
|
||||
class SpecialFolderNode(pillarsdk.Node):
|
||||
pass
|
||||
|
||||
|
||||
class UpNode(SpecialFolderNode):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self['_id'] = 'UP'
|
||||
self['node_type'] = 'UP'
|
||||
|
||||
|
||||
class ProjectNode(SpecialFolderNode):
|
||||
def __init__(self, project):
|
||||
super().__init__()
|
||||
|
||||
assert isinstance(project, pillarsdk.Project), 'wrong type for project: %r' % type(project)
|
||||
|
||||
self.merge(project.to_dict())
|
||||
self['node_type'] = 'PROJECT'
|
||||
|
||||
|
||||
class MenuItem:
|
||||
"""GUI menu item for the 3D View GUI."""
|
||||
|
||||
@@ -66,19 +77,30 @@ class MenuItem:
|
||||
'SPINNER': os.path.join(library_icons_path, 'spinner.png'),
|
||||
}
|
||||
|
||||
SUPPORTED_NODE_TYPES = {'UP', 'group_texture', 'texture'}
|
||||
SUPPORTED_NODE_TYPES = {'UP', 'PROJECT', 'group_texture', 'texture'}
|
||||
|
||||
def __init__(self, node, file_desc, thumb_path: str, label_text):
|
||||
self.log = logging.getLogger('%s.MenuItem' % __name__)
|
||||
if node['node_type'] not in self.SUPPORTED_NODE_TYPES:
|
||||
self.log.info('Invalid node type in node: %s', node)
|
||||
raise TypeError('Node of type %r not supported; supported are %r.' % (
|
||||
node.group_texture, self.SUPPORTED_NODE_TYPES))
|
||||
node['node_type'], self.SUPPORTED_NODE_TYPES))
|
||||
|
||||
assert isinstance(node, pillarsdk.Node), 'wrong type for node: %r' % type(node)
|
||||
assert isinstance(node['_id'], str), 'wrong type for node["_id"]: %r' % type(node['_id'])
|
||||
self.node = node # pillarsdk.Node, contains 'node_type' key to indicate type
|
||||
self.file_desc = file_desc # pillarsdk.File object, or None if a 'folder' node.
|
||||
self.label_text = label_text
|
||||
self._thumb_path = ''
|
||||
self.icon = None
|
||||
self._is_folder = node['node_type'] == 'group_texture' or isinstance(node, UpNode)
|
||||
self._is_folder = (node['node_type'] == 'group_texture' or
|
||||
isinstance(node, SpecialFolderNode))
|
||||
|
||||
# Determine sorting order.
|
||||
# by default, sort all the way at the end and folders first.
|
||||
self._order = 0 if self._is_folder else 10000
|
||||
if node and node.properties and node.properties.order is not None:
|
||||
self._order = node.properties.order
|
||||
|
||||
self.thumb_path = thumb_path
|
||||
|
||||
@@ -88,6 +110,10 @@ class MenuItem:
|
||||
self.width = 0
|
||||
self.height = 0
|
||||
|
||||
def sort_key(self):
|
||||
"""Key for sorting lists of MenuItems."""
|
||||
return self._order, self.label_text
|
||||
|
||||
@property
|
||||
def thumb_path(self) -> str:
|
||||
return self._thumb_path
|
||||
@@ -176,29 +202,24 @@ class MenuItem:
|
||||
return self.x < mouse_x < self.x + self.width and self.y < mouse_y < self.y + self.height
|
||||
|
||||
|
||||
class BlenderCloudBrowser(bpy.types.Operator):
|
||||
class BlenderCloudBrowser(pillar.PillarOperatorMixin,
|
||||
async_loop.AsyncModalOperatorMixin,
|
||||
bpy.types.Operator):
|
||||
bl_idname = 'pillar.browser'
|
||||
bl_label = 'Blender Cloud Texture Browser'
|
||||
|
||||
_draw_handle = None
|
||||
|
||||
_state = 'INITIALIZING'
|
||||
|
||||
project_uuid = '5672beecc0261b2005ed1a33' # Blender Cloud project UUID
|
||||
node = None # The Node object we're currently showing, or None if we're at the project top.
|
||||
node_uuid = '' # Blender Cloud node UUID we're currently showing, i.e. None-safe self.node['_id']
|
||||
current_path = pillar.CloudPath('/')
|
||||
project_name = ''
|
||||
|
||||
# This contains a stack of Node objects that lead up to the currently browsed node.
|
||||
# This allows us to display the "up" item.
|
||||
path_stack = []
|
||||
|
||||
async_task = None # asyncio task for fetching thumbnails
|
||||
signalling_future = None # asyncio future for signalling that we want to cancel everything.
|
||||
timer = None
|
||||
log = logging.getLogger('%s.BlenderCloudBrowser' % __name__)
|
||||
|
||||
_menu_item_lock = threading.Lock()
|
||||
current_path = ''
|
||||
current_display_content = []
|
||||
loaded_images = set()
|
||||
thumbnails_cache = ''
|
||||
@@ -209,15 +230,15 @@ class BlenderCloudBrowser(bpy.types.Operator):

    def invoke(self, context, event):
        # Refuse to start if the file hasn't been saved.
        if not context.blend_data.is_saved:
        if context.blend_data.is_dirty:
            self.report({'ERROR'}, 'Please save your Blend file before using '
                                   'the Blender Cloud addon.')
            return {'CANCELLED'}

        wm = context.window_manager
        self.project_uuid = wm.blender_cloud_project
        self.node_uuid = wm.blender_cloud_node
        self.path_stack = []

        self.current_path = pillar.CloudPath(wm.last_blender_cloud_location)
        self.path_stack = []  # list of nodes that make up the current path.

        self.thumbnails_cache = cache.cache_directory('thumbnails')
        self.mouse_x = event.mouse_x
@@ -235,25 +256,17 @@ class BlenderCloudBrowser(bpy.types.Operator):

        self.current_display_content = []
        self.loaded_images = set()
        self.check_credentials()

        context.window_manager.modal_handler_add(self)
        self.timer = context.window_manager.event_timer_add(1 / 30, context.window)
        context.window.cursor_modal_set('DEFAULT')
        async_loop.AsyncModalOperatorMixin.invoke(self, context, event)
        self._new_async_task(self.async_execute(context))

        return {'RUNNING_MODAL'}

    def modal(self, context, event):
        task = self.async_task
        if self._state != 'EXCEPTION' and task.done() and not task.cancelled():
            ex = task.exception()
            if ex is not None:
                self._state = 'EXCEPTION'
                self.log.error('Exception while running task: %s', ex)
                return {'RUNNING_MODAL'}

        if self._state == 'QUIT':
            self._finish(context)
            return {'FINISHED'}
        result = async_loop.AsyncModalOperatorMixin.modal(self, context, event)
        if not {'PASS_THROUGH', 'RUNNING_MODAL'}.intersection(result):
            return result

        if event.type == 'TAB' and event.value == 'RELEASE':
            self.log.info('Ensuring async loop is running')
@@ -268,9 +281,21 @@ class BlenderCloudBrowser(bpy.types.Operator):
        self.mouse_x = event.mouse_x
        self.mouse_y = event.mouse_y

        if self._state == 'BROWSING' and event.type == 'LEFTMOUSE' and event.value == 'RELEASE':
        left_mouse_release = event.type == 'LEFTMOUSE' and event.value == 'RELEASE'
        if self._state == 'PLEASE_SUBSCRIBE' and left_mouse_release:
            self.open_browser_subscribe()
            self._finish(context)
            return {'FINISHED'}

        if self._state == 'BROWSING':
            selected = self.get_clicked()

            if selected:
                context.window.cursor_set('HAND')
            else:
                context.window.cursor_set('DEFAULT')

            if left_mouse_release:
                if selected is None:
                    # No item clicked, ignore it.
                    return {'RUNNING_MODAL'}
@@ -282,43 +307,37 @@ class BlenderCloudBrowser(bpy.types.Operator):
                    # This can happen when the thumbnail information isn't loaded yet.
                    # Just ignore the click for now.
                    # TODO: think of a way to handle this properly.
                    self.log.debug('Selected item %r has no file_desc', selected)
                    return {'RUNNING_MODAL'}
                self.handle_item_selection(context, selected)

        elif event.type in {'RIGHTMOUSE', 'ESC'}:
        if event.type in {'RIGHTMOUSE', 'ESC'}:
            self._finish(context)
            return {'CANCELLED'}

        return {'RUNNING_MODAL'}

    def check_credentials(self):
    async def async_execute(self, context):
        self._state = 'CHECKING_CREDENTIALS'
        self.log.debug('Checking credentials')
        self._new_async_task(self._check_credentials())

    async def _check_credentials(self):
        """Checks credentials with Pillar, and if ok goes to the BROWSING state."""

        try:
            await pillar.check_pillar_credentials()
        except pillar.CredentialsNotSyncedError:
            self.log.info('Credentials not synced, re-syncing automatically.')
        else:
            self.log.info('Credentials okay, browsing assets.')
            await self.async_download_previews()
            return

        try:
            await pillar.refresh_pillar_credentials()
        except pillar.UserNotLoggedInError:
            self.error('User not logged in on Blender ID.')
        else:
            self.log.info('Credentials refreshed and ok, browsing assets.')
            await self.async_download_previews()
            return
            user_id = await self.check_credentials(context, REQUIRED_ROLES_FOR_TEXTURE_BROWSER)
        except pillar.NotSubscribedToCloudError:
            self.log.info('User not subscribed to Blender Cloud.')
            self._show_subscribe_screen()
            return None

        if user_id is None:
            raise pillar.UserNotLoggedInError()
        # self._new_async_task(self._check_credentials())

        await self.async_download_previews()

    def _show_subscribe_screen(self):
        """Shows the "You need to subscribe" screen."""

        self._state = 'PLEASE_SUBSCRIBE'
        bpy.context.window.cursor_set('HAND')

    def descend_node(self, node):
        """Descends the node hierarchy by visiting this node.
@@ -326,58 +345,40 @@ class BlenderCloudBrowser(bpy.types.Operator):
        Also keeps track of the current node, so that we know where the "up" button should go.
        """

        # Going up or down?
        if self.path_stack and isinstance(node, UpNode):
            self.log.debug('Going up, pop the stack; pre-pop stack is %r', self.path_stack)
            node = self.path_stack.pop()
        assert isinstance(node, pillarsdk.Node), 'Wrong type %s' % node

        if isinstance(node, UpNode):
            # Going up.
            self.log.debug('Going up to %r', self.current_path)
            self.current_path = self.current_path.parent
            if self.path_stack:
                self.path_stack.pop()
            if not self.path_stack:
                self.project_name = ''
        else:
            # Going down, keep track of where we were (project top-level is None)
            self.path_stack.append(self.node)
            self.log.debug('Going up, push the stack; post-push stack is %r', self.path_stack)
            # Going down, keep track of where we were
            if isinstance(node, ProjectNode):
                self.project_name = node['name']

            self.current_path /= node['_id']
            self.log.debug('Going down to %r', self.current_path)
            self.path_stack.append(node)

        # Set 'current' to the given node
        self.node_uuid = node['_id'] if node else None
        self.node = node
        self.browse_assets()

    def _stop_async_task(self):
        self.log.debug('Stopping async task')
        if self.async_task is None:
            self.log.debug('No async task, trivially stopped')
            return

        # Signal that we want to stop.
        self.async_task.cancel()
        if not self.signalling_future.done():
            self.log.info("Signalling that we want to cancel anything that's running.")
            self.signalling_future.cancel()

        # Wait until the asynchronous task is done.
        if not self.async_task.done():
            self.log.info("blocking until async task is done.")
            loop = asyncio.get_event_loop()
            try:
                loop.run_until_complete(self.async_task)
            except asyncio.CancelledError:
                self.log.info('Asynchronous task was cancelled')
                return

        # noinspection PyBroadException
        try:
            self.async_task.result()  # This re-raises any exception of the task.
        except asyncio.CancelledError:
            self.log.info('Asynchronous task was cancelled')
        except Exception:
            self.log.exception("Exception from asynchronous task")
    @property
    def node(self):
        if not self.path_stack:
            return None
        return self.path_stack[-1]

    def _finish(self, context):
        self.log.debug('Finishing the modal operator')
        self._stop_async_task()
        async_loop.AsyncModalOperatorMixin._finish(self, context)
        self.clear_images()

        context.space_data.draw_handler_remove(self._draw_handle, 'WINDOW')
        context.window_manager.event_timer_remove(self.timer)
        context.window.cursor_modal_restore()

        if self.maximized_area:
            bpy.ops.screen.screen_full_area(use_hide_panels=True)
@@ -406,6 +407,8 @@ class BlenderCloudBrowser(bpy.types.Operator):
        self.current_display_content.append(menu_item)
        self.loaded_images.add(menu_item.icon.filepath_raw)

        self.sort_menu()

        return menu_item

    def update_menu_item(self, node, *args) -> MenuItem:
@@ -421,11 +424,23 @@ class BlenderCloudBrowser(bpy.types.Operator):
        else:
            raise ValueError('Unable to find MenuItem(node_uuid=%r)' % node_uuid)

        self.sort_menu()

    def sort_menu(self):
        """Sorts the self.current_display_content list."""

        if not self.current_display_content:
            return

        with self._menu_item_lock:
            self.current_display_content.sort(key=MenuItem.sort_key)

    async def async_download_previews(self):
        self._state = 'BROWSING'

        thumbnails_directory = self.thumbnails_cache
        self.log.info('Asynchronously downloading previews to %r', thumbnails_directory)
        self.log.info('Current BCloud path is %r', self.current_path)
        self.clear_images()

        def thumbnail_loading(node, texture_node):
@@ -434,63 +449,58 @@ class BlenderCloudBrowser(bpy.types.Operator):
        def thumbnail_loaded(node, file_desc, thumb_path):
            self.update_menu_item(node, file_desc, thumb_path, file_desc['filename'])

        # Download either by group_texture node UUID or by project UUID (which
        # shows all top-level nodes)
        if self.node_uuid:
            self.log.debug('Getting subnodes for parent node %r', self.node_uuid)
            children = await pillar.get_nodes(parent_node_uuid=self.node_uuid,
                                              node_type='group_textures')

            # Make sure we can go up again.
            if self.path_stack:
                self.add_menu_item(UpNode(), None, 'FOLDER', '.. up ..')
        elif self.project_uuid:
            self.log.debug('Getting subnodes for project node %r', self.project_uuid)
            children = await pillar.get_nodes(self.project_uuid, '')
        project_uuid = self.current_path.project_uuid
        node_uuid = self.current_path.node_uuid

        if node_uuid:
            # Query for sub-nodes of this node.
            self.log.debug('Getting subnodes for parent node %r', node_uuid)
            children = await pillar.get_nodes(parent_node_uuid=node_uuid,
                                              node_type='group_texture')
        elif project_uuid:
            # Query for top-level nodes.
            self.log.debug('Getting subnodes for project node %r', project_uuid)
            children = await pillar.get_nodes(project_uuid=project_uuid,
                                              parent_node_uuid='',
                                              node_type='group_texture')
        else:
            # TODO: add "nothing here" icon and trigger re-draw
            self.log.warning("Not node UUID and no project UUID, I can't do anything!")
            # Query for projects
            self.log.debug('No node UUID and no project UUID, listing available projects')
            children = await pillar.get_texture_projects()
            for proj_dict in children:
                self.add_menu_item(ProjectNode(proj_dict), None, 'FOLDER', proj_dict['name'])
            return

        # Make sure we can go up again.
        self.add_menu_item(UpNode(), None, 'FOLDER', '.. up ..')

        # Download all child nodes
        self.log.debug('Iterating over child nodes of %r', self.node_uuid)
        self.log.debug('Iterating over child nodes of %r', self.current_path)
        for child in children:
            # print('  - %(_id)s = %(name)s' % child)
            if child['node_type'] not in MenuItem.SUPPORTED_NODE_TYPES:
                self.log.debug('Skipping node of type %r', child['node_type'])
                continue
            self.add_menu_item(child, None, 'FOLDER', child['name'])

        # There are only sub-nodes at the project level, no texture nodes,
        # so we won't have to bother looking for textures.
        if not self.node_uuid:
        if not node_uuid:
            return

        directory = os.path.join(thumbnails_directory, self.project_uuid, self.node_uuid)
        directory = os.path.join(thumbnails_directory, project_uuid, node_uuid)
        os.makedirs(directory, exist_ok=True)

        self.log.debug('Fetching texture thumbnails for node %r', self.node_uuid)
        await pillar.fetch_texture_thumbs(self.node_uuid, 's', directory,
        self.log.debug('Fetching texture thumbnails for node %r', node_uuid)
        await pillar.fetch_texture_thumbs(node_uuid, 's', directory,
                                          thumbnail_loading=thumbnail_loading,
                                          thumbnail_loaded=thumbnail_loaded,
                                          future=self.signalling_future)

    def browse_assets(self):
        self.log.debug('Browsing assets at project %r node %r', self.project_uuid, self.node_uuid)
        self.log.debug('Browsing assets at %r', self.current_path)
        self._new_async_task(self.async_download_previews())

    def _new_async_task(self, async_task: asyncio.coroutine, future: asyncio.Future = None):
        """Stops the currently running async task, and starts another one."""

        self.log.debug('Setting up a new task %r, so any existing task must be stopped', async_task)
        self._stop_async_task()

        # Download the previews asynchronously.
        self.signalling_future = future or asyncio.Future()
        self.async_task = asyncio.ensure_future(async_task)
        self.log.debug('Created new task %r', self.async_task)

        # Start the async manager so everything happens.
        async_loop.ensure_async_loop()

    def draw_menu(self, context):
        """Draws the GUI with OpenGL."""
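Aside: the `_stop_async_task()` / `_new_async_task()` pair above is a cancel-then-restart pattern that is independent of Blender. A minimal sketch of the same idea in plain asyncio (the `TaskJuggler` name and structure are illustrative, not part of the addon):

import asyncio

class TaskJuggler:
    """Keeps at most one background task alive at a time."""

    def __init__(self):
        self.task = None

    def start(self, coro):
        loop = asyncio.get_event_loop()
        if self.task is not None and not self.task.done():
            # Cancel the old task and wait for the cancellation to land,
            # mirroring _stop_async_task() above.
            self.task.cancel()
            try:
                loop.run_until_complete(self.task)
            except asyncio.CancelledError:
                pass
        # Schedule the new coroutine; something else must keep driving the
        # loop, which is what async_loop.ensure_async_loop() does in the addon.
        self.task = asyncio.ensure_future(coro)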
@@ -499,6 +509,7 @@ class BlenderCloudBrowser(bpy.types.Operator):
            'BROWSING': self._draw_browser,
            'DOWNLOADING_TEXTURE': self._draw_downloading,
            'EXCEPTION': self._draw_exception,
            'PLEASE_SUBSCRIBE': self._draw_subscribe,
        }

        if self._state in drawers:
@@ -510,7 +521,7 @@ class BlenderCloudBrowser(bpy.types.Operator):
        bgl.glColor4f(1.0, 1.0, 1.0, 1.0)
        blf.size(font_id, 20, 72)
        blf.position(font_id, 5, 5, 0)
        blf.draw(font_id, self._state)
        blf.draw(font_id, '%s %s' % (self._state, self.project_name))
        bgl.glDisable(bgl.GL_BLEND)

    @staticmethod
@@ -641,6 +652,11 @@ class BlenderCloudBrowser(bpy.types.Operator):
            blf.draw(font_id, line)
        bgl.glDisable(bgl.GL_BLEND)

    def _draw_subscribe(self, context):
        self._draw_text_on_colour(context,
                                  'Click to subscribe to the Blender Cloud',
                                  (0.0, 0.0, 0.2, 0.6))

    def get_clicked(self) -> MenuItem:

        for item in self.current_display_content:
@@ -652,11 +668,13 @@ class BlenderCloudBrowser(bpy.types.Operator):
    def handle_item_selection(self, context, item: MenuItem):
        """Called when the user clicks on a menu item that doesn't represent a folder."""

        from pillarsdk.utils import sanitize_filename

        self.clear_images()
        self._state = 'DOWNLOADING_TEXTURE'

        node_path_components = [node['name'] for node in self.path_stack if node is not None]
        local_path_components = [self.project_uuid] + node_path_components + [self.node['name']]
        node_path_components = (node['name'] for node in self.path_stack if node is not None)
        local_path_components = [sanitize_filename(comp) for comp in node_path_components]

        top_texture_directory = bpy.path.abspath(context.scene.local_texture_dir)
        local_path = os.path.join(top_texture_directory, *local_path_components)
@@ -689,6 +707,13 @@ class BlenderCloudBrowser(bpy.types.Operator):
                                             future=signalling_future))
        self.async_task.add_done_callback(texture_download_completed)

    def open_browser_subscribe(self):
        import webbrowser

        webbrowser.open_new_tab('https://cloud.blender.org/join')

        self.report({'INFO'}, 'We just started a browser for you.')


# store keymaps here to access after registration
addon_keymaps = []
@@ -717,13 +742,10 @@ def register():


def unregister():
    bpy.utils.unregister_class(BlenderCloudBrowser)

    # handle the keymap
    for km, kmi in addon_keymaps:
        km.keymap_items.remove(kmi)
    addon_keymaps.clear()


if __name__ == "__main__":
    register()
if 'bl_rna' in BlenderCloudBrowser.__dict__:  # <-- check if we already removed!
    bpy.utils.unregister_class(BlenderCloudBrowser)
@@ -4,6 +4,7 @@ import os
import functools
import logging
from contextlib import closing, contextmanager
import urllib.parse
import pathlib

import requests
@@ -17,7 +18,7 @@ from . import cache

SUBCLIENT_ID = 'PILLAR'

_pillar_api = None  # will become a pillarsdk.Api object.
_pillar_api = {}  # will become a mapping from bool (cached/non-cached) to pillarsdk.Api objects.
log = logging.getLogger(__name__)
uncached_session = requests.session()
_testing_blender_id_profile = None  # Just for testing, overrides what is returned by blender_id_profile.
@@ -31,14 +32,15 @@ class UserNotLoggedInError(RuntimeError):
    """

    def __str__(self):
        return 'UserNotLoggedInError'
        return self.__class__.__name__


class CredentialsNotSyncedError(UserNotLoggedInError):
    """Raised when the user may be logged in on Blender ID, but has no Blender Cloud token."""

    def __str__(self):
        return 'CredentialsNotSyncedError'

class NotSubscribedToCloudError(UserNotLoggedInError):
    """Raised when the user may be logged in on Blender ID, but has no Blender Cloud token."""


class PillarError(RuntimeError):
@@ -62,6 +64,8 @@ class CloudPath(pathlib.PurePosixPath):
    @property
    def project_uuid(self) -> str:
        assert self.parts[0] == '/'
        if len(self.parts) <= 1:
            return None
        return self.parts[1]

    @property
@@ -71,11 +75,10 @@ class CloudPath(pathlib.PurePosixPath):

    @property
    def node_uuid(self) -> str:
        node_uuids = self.node_uuids

        if not node_uuids:
        if len(self.parts) <= 2:
            return None
        return node_uuids[-1]

        return self.parts[-1]

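Aside: with these two changes a cloud path is simply `/<project uuid>/<node uuids...>`, with both properties returning None near the root. A quick illustration (the UUIDs are placeholders, and the import path assumes the addon's package layout):

from blender_cloud.pillar import CloudPath

p = CloudPath('/5672beecc0261b2005ed1a33/0123456789abcdef01234567')
assert p.project_uuid == '5672beecc0261b2005ed1a33'  # parts[1]
assert p.node_uuid == '0123456789abcdef01234567'     # parts[-1]
assert CloudPath('/').project_uuid is None           # fewer than 2 parts
assert CloudPath('/5672beecc0261b2005ed1a33').node_uuid is None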
@contextmanager
@@ -107,60 +110,81 @@ def blender_id_profile() -> 'blender_id.BlenderIdProfile':
    return blender_id.get_active_profile()


def pillar_api(pillar_endpoint: str = None) -> pillarsdk.Api:
def blender_id_subclient() -> dict:
    """Returns the subclient dict, containing the 'subclient_user_id' and 'token' keys."""

    profile = blender_id_profile()
    if not profile:
        raise UserNotLoggedInError()

    subclient = profile.subclients.get(SUBCLIENT_ID)
    if not subclient:
        raise CredentialsNotSyncedError()

    return subclient


def pillar_api(pillar_endpoint: str = None, caching=True) -> pillarsdk.Api:
    """Returns the Pillar SDK API object for the current user.

    The user must be logged in.

    :param pillar_endpoint: URL of the Pillar server, for testing purposes. If not specified,
        it will use the addon preferences.
    :param caching: whether to return a caching or non-caching API
    """

    global _pillar_api

    # Only return the Pillar API object if the user is still logged in.
    profile = blender_id_profile()
    if not profile:
        raise UserNotLoggedInError()
    subclient = blender_id_subclient()

    subclient = profile.subclients.get(SUBCLIENT_ID)
    if not subclient:
        raise CredentialsNotSyncedError()

    if _pillar_api is None:
    if not _pillar_api:
        # Allow overriding the endpoint before importing Blender-specific stuff.
        if pillar_endpoint is None:
            from . import blender
            pillar_endpoint = blender.preferences().pillar_server

        pillarsdk.Api.requests_session = cache.requests_session()

        _pillar_api = pillarsdk.Api(endpoint=pillar_endpoint,
        _caching_api = pillarsdk.Api(endpoint=pillar_endpoint,
                                     username=subclient['subclient_user_id'],
                                     password=SUBCLIENT_ID,
                                     token=subclient['token'])
        _caching_api.requests_session = cache.requests_session()

    return _pillar_api
        _noncaching_api = pillarsdk.Api(endpoint=pillar_endpoint,
                                        username=subclient['subclient_user_id'],
                                        password=SUBCLIENT_ID,
                                        token=subclient['token'])
        _noncaching_api.requests_session = uncached_session

        _pillar_api = {
            True: _caching_api,
            False: _noncaching_api,
        }

    return _pillar_api[caching]


# No more than this many Pillar calls should be made simultaneously
pillar_semaphore = asyncio.Semaphore(3)

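Aside: after this change callers pick a caching or non-caching API object per call; both share the same credentials and differ only in the requests session behind them. A sketch of the intended use (both calls raise if the user is not logged in):

api_cached = pillar_api()              # backed by cache.requests_session()
api_fresh = pillar_api(caching=False)  # backed by uncached_session
assert api_cached is not api_fresh     # two Api objects, built once, then reused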
async def pillar_call(pillar_func, *args, **kwargs):
    partial = functools.partial(pillar_func, *args, api=pillar_api(), **kwargs)
async def pillar_call(pillar_func, *args, caching=True, **kwargs):
    partial = functools.partial(pillar_func, *args, api=pillar_api(caching=caching), **kwargs)
    loop = asyncio.get_event_loop()

    async with pillar_semaphore:
        return await loop.run_in_executor(None, partial)


async def check_pillar_credentials():
async def check_pillar_credentials(required_roles: set):
    """Tries to obtain the user at Pillar using the user's credentials.

    :param required_roles: set of roles to require -- having one of those is enough.
    :raises UserNotLoggedInError: when the user is not logged in on Blender ID.
    :raises CredentialsNotSyncedError: when the user is logged in on Blender ID but
        doesn't have a valid subclient token for Pillar.
    :returns: the Pillar User ID of the current user.
    """

    profile = blender_id_profile()
@@ -171,13 +195,28 @@ async def check_pillar_credentials():
    if not subclient:
        raise CredentialsNotSyncedError()

    try:
        await get_project_uuid('textures')  # Any query will do.
    except pillarsdk.UnauthorizedAccess:
    pillar_user_id = subclient['subclient_user_id']
    if not pillar_user_id:
        raise CredentialsNotSyncedError()

    try:
        db_user = await pillar_call(pillarsdk.User.me)
    except (pillarsdk.UnauthorizedAccess, pillarsdk.ResourceNotFound, pillarsdk.ForbiddenAccess):
        raise CredentialsNotSyncedError()

async def refresh_pillar_credentials():
    roles = db_user.roles or set()
    log.debug('User has roles %r', roles)
    if required_roles and not required_roles.intersection(set(roles)):
        # Delete the subclient info. This forces a re-check later, which can
        # then pick up on the user's new status.
        del profile.subclients[SUBCLIENT_ID]
        profile.save_json()
        raise NotSubscribedToCloudError()

    return pillar_user_id


async def refresh_pillar_credentials(required_roles: set):
    """Refreshes the authentication token on Pillar.

    :raises blender_id.BlenderIdCommError: when Blender ID refuses to send a token to Pillar.
@@ -193,11 +232,15 @@ async def refresh_pillar_credentials():

    # Create a subclient token and send it to Pillar.
    # May raise a blender_id.BlenderIdCommError
    try:
        blender_id.create_subclient_token(SUBCLIENT_ID, pillar_endpoint)
    except blender_id.communication.BlenderIdCommError as ex:
        log.warning("Unable to create authentication token: %s", ex)
        raise CredentialsNotSyncedError()

    # Test the new URL
    _pillar_api = None
    await get_project_uuid('textures')  # Any query will do.
    return await check_pillar_credentials(required_roles)

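Aside: required_roles is a plain set; holding any one of the roles is enough, and an empty set disables the check entirely (since `required_roles and ...` is then falsy). A hypothetical call, with made-up role names:

async def example():
    # Raises NotSubscribedToCloudError when the user has neither role.
    user_id = await check_pillar_credentials({'subscriber', 'demo'})
    # An empty set skips the role check altogether.
    user_id = await check_pillar_credentials(set())
    return user_id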
async def get_project_uuid(project_url: str) -> str:
@@ -217,7 +260,7 @@ async def get_project_uuid(project_url: str) -> str:


async def get_nodes(project_uuid: str = None, parent_node_uuid: str = None,
                    node_type: str = None) -> list:
                    node_type=None) -> list:
    """Gets nodes for either a project or given a parent node.

    @param project_uuid: the UUID of the project, or None if only querying by parent_node_uuid.
@@ -242,7 +285,10 @@ async def get_nodes(project_uuid: str = None, parent_node_uuid: str = None,
        where['project'] = project_uuid

    if node_type:
        if isinstance(node_type, str):
            where['node_type'] = node_type
        else:
            where['node_type'] = {'$in': node_type}

    children = await pillar_call(pillarsdk.Node.all, {
        'projection': {'name': 1, 'parent': 1, 'node_type': 1,
@@ -250,12 +296,24 @@ async def get_nodes(project_uuid: str = None, parent_node_uuid: str = None,
                       'properties.files': 1,
                       'properties.content_type': 1, 'picture': 1},
        'where': where,
        'sort': 'properties.order',
        'embed': ['parent']})

    return children['_items']


async def get_texture_projects() -> list:
    """Returns project dicts that contain textures."""

    try:
        children = await pillar_call(pillarsdk.Project.all_from_endpoint,
                                     '/bcloud/texture-libraries')
    except pillarsdk.ResourceNotFound as ex:
        log.warning('Unable to find texture projects: %s', ex)
        raise PillarError('Unable to find texture projects: %s' % ex)

    return children['_items']

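Aside: node_type now accepts either a single string (an equality filter) or a sequence (a MongoDB-style $in filter). Hypothetical calls, with placeholder UUIDs:

async def example():
    groups = await get_nodes(project_uuid='0123456789abcdef01234567',
                             parent_node_uuid='',
                             node_type='group_texture')
    mixed = await get_nodes(parent_node_uuid='76543210fedcba9876543210',
                            node_type=('group_texture', 'texture'))
    return groups, mixed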
async def download_to_file(url, filename, *,
                           header_store: str,
                           chunk_size=100 * 1024,
@@ -367,7 +425,7 @@ async def fetch_thumbnail_info(file: pillarsdk.File, directory: str, desired_siz
    finished.
    """

    thumb_link = await pillar_call(file.thumbnail_file, desired_size)
    thumb_link = await pillar_call(file.thumbnail, desired_size)

    if thumb_link is None:
        raise ValueError("File {} has no thumbnail of size {}"
@@ -439,8 +497,22 @@ async def download_texture_thumbnail(texture_node, desired_size: str,

    loop = asyncio.get_event_loop()

    # Find the File that belongs to this texture node
    pic_uuid = texture_node['picture']
    # Find out which file to use for the thumbnail picture.
    pic_uuid = texture_node.picture
    if not pic_uuid:
        # Fall back to the first texture file, if it exists.
        log.debug('Node %r does not have a picture, falling back to first file.',
                  texture_node['_id'])
        files = texture_node.properties and texture_node.properties.files
        if not files:
            log.info('Node %r does not have a picture nor files, skipping.', texture_node['_id'])
            return
        pic_uuid = files[0].file
        if not pic_uuid:
            log.info('Node %r does not have a picture nor files, skipping.', texture_node['_id'])
            return

    # Load the File that belongs to this texture node's picture.
    loop.call_soon_threadsafe(thumbnail_loading, texture_node, texture_node)
    file_desc = await pillar_call(pillarsdk.File.find, pic_uuid, params={
        'projection': {'filename': 1, 'variations': 1, 'width': 1, 'height': 1},
@@ -477,8 +549,9 @@ async def download_file_by_uuid(file_uuid,
                                metadata_directory: str,
                                *,
                                map_type: str = None,
                                file_loading: callable,
                                file_loaded: callable,
                                file_loading: callable = None,
                                file_loaded: callable = None,
                                file_loaded_sync: callable = None,
                                future: asyncio.Future):
    if is_cancelled(future):
        log.debug('download_file_by_uuid(%r) cancelled.', file_uuid)
@@ -495,10 +568,16 @@ async def download_file_by_uuid(file_uuid,
    metadata_file = os.path.join(metadata_directory, 'files', '%s.json' % file_uuid)
    save_as_json(file_desc, metadata_file)

    file_path = os.path.join(target_directory,
                             sanitize_filename('%s-%s' % (map_type, file_desc['filename'])))
    root, ext = os.path.splitext(file_desc['filename'])
    if map_type is None or root.endswith(map_type):
        target_filename = '%s%s' % (root, ext)
    else:
        target_filename = '%s-%s%s' % (root, map_type, ext)

    file_path = os.path.join(target_directory, sanitize_filename(target_filename))
    file_url = file_desc['link']
    # log.debug('Texture %r:\n%s', file_uuid, pprint.pformat(file_desc.to_dict()))
    if file_loading is not None:
        loop.call_soon_threadsafe(file_loading, file_path, file_desc)

    # Cached headers are stored in the project space
@@ -507,7 +586,10 @@ async def download_file_by_uuid(file_uuid,

    await download_to_file(file_url, file_path, header_store=header_store, future=future)

    if file_loaded is not None:
        loop.call_soon_threadsafe(file_loaded, file_path, file_desc)
    if file_loaded_sync is not None:
        await file_loaded_sync(file_path, file_desc)


async def download_texture(texture_node,
@@ -533,7 +615,88 @@ async def download_texture(texture_node,
    return await asyncio.gather(*downloaders, return_exceptions=True)

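Aside: the new filename logic above avoids duplicated map-type suffixes. Restated as a standalone helper (the helper name and the sample filenames are made up for illustration):

import os

def target_filename(filename: str, map_type: str = None) -> str:
    # Only append the map type when the stem doesn't already end with it.
    root, ext = os.path.splitext(filename)
    if map_type is None or root.endswith(map_type):
        return '%s%s' % (root, ext)
    return '%s-%s%s' % (root, map_type, ext)

assert target_filename('bricks.png', 'col') == 'bricks-col.png'
assert target_filename('bricks-col.png', 'col') == 'bricks-col.png'
assert target_filename('bricks.png') == 'bricks.png'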
async def upload_file(project_id: str, file_path: pathlib.Path, *,
                      future: asyncio.Future) -> str:
    """Uploads a file to the Blender Cloud, returning a file document ID."""

    from .blender import PILLAR_SERVER_URL

    loop = asyncio.get_event_loop()
    url = urllib.parse.urljoin(PILLAR_SERVER_URL, '/storage/stream/%s' % project_id)

    # Upload the file in a different thread.
    def upload():
        auth_token = blender_id_subclient()['token']

        with file_path.open(mode='rb') as infile:
            return uncached_session.post(url,
                                         files={'file': infile},
                                         auth=(auth_token, SUBCLIENT_ID))

    # Check for cancellation even before we start our POST request
    if is_cancelled(future):
        log.debug('Uploading was cancelled before doing the POST')
        raise asyncio.CancelledError('Uploading was cancelled')

    log.debug('Performing POST %s', url)
    response = await loop.run_in_executor(None, upload)
    log.debug('Status %i from POST %s', response.status_code, url)
    response.raise_for_status()

    resp = response.json()
    log.debug('Upload response: %s', resp)

    try:
        file_id = resp['file_id']
    except KeyError:
        log.error('No file ID in upload response: %s', resp)
        raise PillarError('No file ID in upload response: %s' % resp)

    log.info('Uploaded %s to file ID %s', file_path, file_id)
    return file_id


def is_cancelled(future: asyncio.Future) -> bool:
    # assert future is not None  # for debugging purposes.
    cancelled = future is not None and future.cancelled()
    return cancelled


class PillarOperatorMixin:
    async def check_credentials(self, context, required_roles) -> bool:
        """Checks credentials with Pillar, and if ok returns the user ID.

        Returns None if the user cannot be found, or if the user is not a Cloud subscriber.
        """

        # self.report({'INFO'}, 'Checking Blender Cloud credentials')

        try:
            user_id = await check_pillar_credentials(required_roles)
        except NotSubscribedToCloudError:
            self._log_subscription_needed()
            raise
        except CredentialsNotSyncedError:
            self.log.info('Credentials not synced, re-syncing automatically.')
        else:
            self.log.info('Credentials okay.')
            return user_id

        try:
            user_id = await refresh_pillar_credentials(required_roles)
        except NotSubscribedToCloudError:
            self._log_subscription_needed()
            raise
        except UserNotLoggedInError:
            self.log.error('User not logged in on Blender ID.')
        else:
            self.log.info('Credentials refreshed and ok.')
            return user_id

        return None

    def _log_subscription_needed(self):
        self.log.warning(
            'Please subscribe to the blender cloud at https://cloud.blender.org/join')
        self.report({'INFO'},
                    'Please subscribe to the blender cloud at https://cloud.blender.org/join')
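Aside: a minimal caller of the new upload_file() coroutine might look like the sketch below. The project ID and path are placeholders; passing future=None simply skips the cancellation check, as is_cancelled() treats None as "not cancelled".

import pathlib

async def example():
    file_id = await upload_file('0123456789abcdef01234567',
                                pathlib.Path('/tmp/startup.blend'),
                                future=None)
    return file_id  # ID of the file document created on the Cloud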
blender_cloud/settings_sync.py (new file, 598 lines)
@@ -0,0 +1,598 @@
"""Synchronises settings & startup file with the Cloud.
Caching is disabled on many PillarSDK calls, as synchronisation can happen
rapidly between multiple machines. This means that information can be outdated
in seconds, rather than the minutes the cache system assumes.
"""
import functools
import logging
import pathlib
import tempfile
import shutil

import bpy

import asyncio

import pillarsdk
from pillarsdk import exceptions as sdk_exceptions
from .pillar import pillar_call
from . import async_loop, pillar, cache, blendfile

SETTINGS_FILES_TO_UPLOAD = ['userpref.blend', 'startup.blend']

# These are RNA keys inside the userpref.blend file, and their
# Python properties names. These settings will not be synced.
LOCAL_SETTINGS_RNA = [
    (b'dpi', 'system.dpi'),
    (b'virtual_pixel', 'system.virtual_pixel_mode'),
    (b'compute_device_id', 'system.compute_device'),
    (b'compute_device_type', 'system.compute_device_type'),
    (b'fontdir', 'filepaths.font_directory'),
    (b'textudir', 'filepaths.texture_directory'),
    (b'renderdir', 'filepaths.render_output_directory'),
    (b'pythondir', 'filepaths.script_directory'),
    (b'sounddir', 'filepaths.sound_directory'),
    (b'tempdir', 'filepaths.temporary_directory'),
    (b'render_cachedir', 'filepaths.render_cache_directory'),
    (b'i18ndir', 'filepaths.i18n_branches_directory'),
    (b'image_editor', 'filepaths.image_editor'),
    (b'anim_player', 'filepaths.animation_player'),
]

REQUIRES_ROLES_FOR_SYNC = set()  # no roles needed.
HOME_PROJECT_ENDPOINT = '/bcloud/home-project'
SYNC_GROUP_NODE_NAME = 'Blender Sync'
SYNC_GROUP_NODE_DESC = 'The [Blender Cloud Addon](https://cloud.blender.org/services' \
                       '#blender-addon) will synchronize your Blender settings here.'
log = logging.getLogger(__name__)


def set_blender_sync_status(set_status: str):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            bss = bpy.context.window_manager.blender_sync_status
            bss.status = set_status
            try:
                return func(*args, **kwargs)
            finally:
                bss.status = 'IDLE'

        return wrapper

    return decorator


def async_set_blender_sync_status(set_status: str):
    def decorator(func):
        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            bss = bpy.context.window_manager.blender_sync_status
            bss.status = set_status
            try:
                return await func(*args, **kwargs)
            finally:
                bss.status = 'IDLE'

        return wrapper

    return decorator

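Aside: these two decorator factories are applied further down on PILLAR_OT_sync.async_execute(). A hypothetical application: the wrapped callable flips window_manager.blender_sync_status.status to the given value for the duration of the call, then back to 'IDLE'.

@async_set_blender_sync_status('SYNCING')
async def push_everything():
    ...  # talk to Pillar here; status is 'SYNCING' until this returns

@set_blender_sync_status('SYNCING')
def push_everything_sync():
    ...  # same idea for a regular (non-async) function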
async def get_home_project(params=None) -> pillarsdk.Project:
    """Returns the home project."""

    log.debug('Getting home project')
    try:
        return await pillar_call(pillarsdk.Project.find_from_endpoint,
                                 HOME_PROJECT_ENDPOINT, params=params)
    except sdk_exceptions.ForbiddenAccess:
        log.warning('Access to the home project was denied. '
                    'Double-check that you are logged in with valid BlenderID credentials.')
        raise
    except sdk_exceptions.ResourceNotFound:
        log.warning('No home project available.')
        raise


async def get_home_project_id():
    home_proj = await get_home_project({'projection': {'_id': 1}})
    home_proj_id = home_proj['_id']
    return home_proj_id


async def find_sync_group_id(home_project_id: str,
                             user_id: str,
                             blender_version: str,
                             *,
                             may_create=True) -> str:
    """Finds the group node in which to store sync assets.

    If the group node doesn't exist and may_create=True, it creates it.
    """

    # Find the top-level sync group node. This should have been
    # created by Pillar while creating the home project.
    try:
        sync_group, created = await find_or_create_node(
            where={'project': home_project_id,
                   'node_type': 'group',
                   'parent': None,
                   'name': SYNC_GROUP_NODE_NAME,
                   'user': user_id},
            projection={'_id': 1},
            may_create=False)
    except pillar.PillarError:
        raise pillar.PillarError('Unable to find sync folder on the Cloud')

    if not may_create and sync_group is None:
        log.info("Sync folder doesn't exist, and not creating it either.")
        return None, None

    # Find/create the sub-group for the requested Blender version
    try:
        sub_sync_group, created = await find_or_create_node(
            where={'project': home_project_id,
                   'node_type': 'group',
                   'parent': sync_group['_id'],
                   'name': blender_version,
                   'user': user_id},
            additional_create_props={
                'description': 'Sync folder for Blender %s' % blender_version,
                'properties': {'status': 'published'},
            },
            projection={'_id': 1},
            may_create=may_create)
    except pillar.PillarError:
        raise pillar.PillarError('Unable to create sync folder on the Cloud')

    if not may_create and sub_sync_group is None:
        log.info("Sync folder for Blender version %s doesn't exist, "
                 "and not creating it either.", blender_version)
        return sync_group['_id'], None

    return sync_group['_id'], sub_sync_group['_id']

async def find_or_create_node(where: dict,
                              additional_create_props: dict = None,
                              projection: dict = None,
                              may_create: bool = True) -> (pillarsdk.Node, bool):
    """Finds a node by the `filter_props`, creates it using the additional props.

    :returns: tuple (node, created), where 'created' is a bool indicating whether
        a new node was created, or an existing one is returned.
    """

    params = {
        'where': where,
    }
    if projection:
        params['projection'] = projection

    found_node = await pillar_call(pillarsdk.Node.find_first, params, caching=False)

    created = False
    if found_node is None:
        if not may_create:
            return None, False

        log.info('Creating new sync group node')

        # Augment the node properties to form a complete node.
        node_props = where.copy()
        if additional_create_props:
            node_props.update(additional_create_props)

        found_node = pillarsdk.Node.new(node_props)
        created_ok = await pillar_call(found_node.create)
        if not created_ok:
            log.error('Blender Cloud addon: unable to create node on the Cloud.')
            raise pillar.PillarError('Unable to create node on the Cloud')
        created = True

    return found_node, created


async def attach_file_to_group(file_path: pathlib.Path,
                               home_project_id: str,
                               group_node_id: str,
                               user_id: str,
                               *,
                               future=None) -> pillarsdk.Node:
    """Creates an Asset node and attaches a file document to it."""

    # First upload the file...
    file_id = await pillar.upload_file(home_project_id, file_path,
                                       future=future)

    # Then attach it to a node.
    node, created = await find_or_create_node(
        where={
            'project': home_project_id,
            'node_type': 'asset',
            'parent': group_node_id,
            'name': file_path.name,
            'user': user_id},
        additional_create_props={
            'properties': {'file': file_id},
        })

    if not created:
        # Update the existing node.
        node.properties = {'file': file_id}
        updated_ok = await pillar_call(node.update)
        if not updated_ok:
            log.error(
                'Blender Cloud addon: unable to update asset node on the Cloud for file %s.',
                file_path)
            raise pillar.PillarError(
                'Unable to update asset node on the Cloud for file %s' % file_path.name)

    return node

@functools.lru_cache()
async def available_blender_versions(home_project_id: str, user_id: str) -> list:
    bss = bpy.context.window_manager.blender_sync_status

    # Get the available Blender versions.
    sync_group = await pillar_call(
        pillarsdk.Node.find_first,
        params={
            'where': {'project': home_project_id,
                      'node_type': 'group',
                      'parent': None,
                      'name': SYNC_GROUP_NODE_NAME,
                      'user': user_id},
            'projection': {'_id': 1},
        },
        caching=False)

    if sync_group is None:
        bss.report({'ERROR'}, 'No synced Blender settings in your Blender Cloud')
        log.debug('-- unable to find sync group for home_project_id=%r and user_id=%r',
                  home_project_id, user_id)
        return []

    sync_nodes = await pillar_call(
        pillarsdk.Node.all,
        params={
            'where': {'project': home_project_id,
                      'node_type': 'group',
                      'parent': sync_group['_id'],
                      'user': user_id},
            'projection': {'_id': 1, 'name': 1},
            'sort': '-name',
        },
        caching=False)

    if not sync_nodes or not sync_nodes._items:
        bss.report({'ERROR'}, 'No synced Blender settings in your Blender Cloud.')
        return []

    versions = [node.name for node in sync_nodes._items]
    log.debug('Versions: %s', versions)

    return versions

# noinspection PyAttributeOutsideInit
class PILLAR_OT_sync(pillar.PillarOperatorMixin,
                     async_loop.AsyncModalOperatorMixin,
                     bpy.types.Operator):
    bl_idname = 'pillar.sync'
    bl_label = 'Synchronise with Blender Cloud'
    bl_description = 'Synchronises Blender settings with Blender Cloud'

    log = logging.getLogger('bpy.ops.%s' % bl_idname)
    home_project_id = None
    sync_group_id = None  # top-level sync group node ID
    sync_group_versioned_id = None  # sync group node ID for the given Blender version.

    action = bpy.props.EnumProperty(
        items=[
            ('PUSH', 'Push', 'Push settings to the Blender Cloud'),
            ('PULL', 'Pull', 'Pull settings from the Blender Cloud'),
            ('REFRESH', 'Refresh', 'Refresh available versions'),
            ('SELECT', 'Select', 'Select version to sync'),
        ],
        name='action')

    CURRENT_BLENDER_VERSION = '%i.%i' % bpy.app.version[:2]
    blender_version = bpy.props.StringProperty(name='blender_version',
                                               description='Blender version to sync for',
                                               default=CURRENT_BLENDER_VERSION)

    def bss_report(self, level, message):
        bss = bpy.context.window_manager.blender_sync_status
        bss.report(level, message)

    def invoke(self, context, event):
        if self.action == 'SELECT':
            # Synchronous action
            return self.action_select(context)

        if self.action in {'PUSH', 'PULL'} and not self.blender_version:
            self.bss_report({'ERROR'}, 'No Blender version to sync for was given.')
            return {'CANCELLED'}

        async_loop.AsyncModalOperatorMixin.invoke(self, context, event)

        self.log.info('Starting synchronisation')
        self._new_async_task(self.async_execute(context))
        return {'RUNNING_MODAL'}

    def action_select(self, context):
        """Allows selection of the Blender version to use.

        This is a synchronous action, as it requires a dialog box.
        """

        self.log.info('Performing action SELECT')

        # Do a refresh before we can show the dropdown.
        fut = asyncio.ensure_future(self.async_execute(context, action_override='REFRESH'))
        loop = asyncio.get_event_loop()
        loop.run_until_complete(fut)

        self._state = 'SELECTING'
        return context.window_manager.invoke_props_dialog(self)

    def draw(self, context):
        bss = bpy.context.window_manager.blender_sync_status
        self.layout.prop(bss, 'version', text='Blender version')

    def execute(self, context):
        if self.action != 'SELECT':
            log.debug('Ignoring execute() for action %r', self.action)
            return {'FINISHED'}

        log.debug('Performing execute() for action %r', self.action)
        # Perform the sync when the user closes the dialog box.
        bss = bpy.context.window_manager.blender_sync_status
        bpy.ops.pillar.sync('INVOKE_DEFAULT',
                            action='PULL',
                            blender_version=bss.version)

        return {'FINISHED'}

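Aside: since action and blender_version are operator properties, the operator can also be driven from scripts; execute() above does exactly this for the PULL case.

import bpy

bpy.ops.pillar.sync('INVOKE_DEFAULT', action='PUSH')  # sync current Blender version
bpy.ops.pillar.sync('INVOKE_DEFAULT', action='PULL', blender_version='2.77')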
    @async_set_blender_sync_status('SYNCING')
    async def async_execute(self, context, *, action_override=None):
        """Entry point of the asynchronous operator."""

        action = action_override or self.action
        self.bss_report({'INFO'}, 'Communicating with Blender Cloud')
        self.log.info('Performing action %s', action)

        try:
            # Refresh credentials
            try:
                self.user_id = await self.check_credentials(context, REQUIRES_ROLES_FOR_SYNC)
                log.debug('Found user ID: %s', self.user_id)
            except pillar.NotSubscribedToCloudError:
                self.log.exception('User not subscribed to cloud.')
                self.bss_report({'SUBSCRIBE'}, 'Please subscribe to the Blender Cloud.')
                self._state = 'QUIT'
                return
            except pillar.CredentialsNotSyncedError:
                self.log.exception('Error checking/refreshing credentials.')
                self.bss_report({'ERROR'}, 'Please log in on Blender ID first.')
                self._state = 'QUIT'
                return

            # Find the home project.
            try:
                self.home_project_id = await get_home_project_id()
            except sdk_exceptions.ForbiddenAccess:
                self.log.exception('Forbidden access to home project.')
                self.bss_report({'ERROR'}, 'Did not get access to home project.')
                self._state = 'QUIT'
                return
            except sdk_exceptions.ResourceNotFound:
                self.bss_report({'ERROR'}, 'Home project not found.')
                self._state = 'QUIT'
                return

            # Only create the folder structure if we're pushing.
            may_create = self.action == 'PUSH'
            try:
                gid, subgid = await find_sync_group_id(self.home_project_id,
                                                       self.user_id,
                                                       self.blender_version,
                                                       may_create=may_create)
                self.sync_group_id = gid
                self.sync_group_versioned_id = subgid
                self.log.debug('Found top-level group node ID: %s', self.sync_group_id)
                self.log.debug('Found group node ID for %s: %s',
                               self.blender_version, self.sync_group_versioned_id)
            except sdk_exceptions.ForbiddenAccess:
                self.log.exception('Unable to find Group ID')
                self.bss_report({'ERROR'}, 'Unable to find sync folder.')
                self._state = 'QUIT'
                return

            # Perform the requested action.
            action_method = {
                'PUSH': self.action_push,
                'PULL': self.action_pull,
                'REFRESH': self.action_refresh,
            }[action]
            await action_method(context)
        except Exception as ex:
            self.log.exception('Unexpected exception caught.')
            self.bss_report({'ERROR'}, 'Unexpected error: %s' % ex)

        self._state = 'QUIT'

    async def action_push(self, context):
        """Sends files to the Pillar server."""

        self.log.info('Saving user preferences to disk before pushing to cloud.')
        bpy.ops.wm.save_userpref()

        config_dir = pathlib.Path(bpy.utils.user_resource('CONFIG'))

        for fname in SETTINGS_FILES_TO_UPLOAD:
            path = config_dir / fname
            if not path.exists():
                self.log.debug('Skipping non-existing %s', path)
                continue

            self.bss_report({'INFO'}, 'Uploading %s' % fname)
            await attach_file_to_group(path,
                                       self.home_project_id,
                                       self.sync_group_versioned_id,
                                       self.user_id,
                                       future=self.signalling_future)

        await self.action_refresh(context)

        # After pushing, change the 'pull' version to the current version of Blender.
        # Or to the latest version, if by some mistake somewhere the current push
        # isn't available after all.
        bss = bpy.context.window_manager.blender_sync_status
        if self.CURRENT_BLENDER_VERSION in bss.available_blender_versions:
            bss.version = self.CURRENT_BLENDER_VERSION
        else:
            bss.version = max(bss.available_blender_versions)

        self.bss_report({'INFO'}, 'Settings pushed to Blender Cloud.')

    async def action_pull(self, context):
        """Loads files from the Pillar server."""

        # If the sync group node doesn't exist, offer a list of groups that do.
        if self.sync_group_id is None:
            self.bss_report({'ERROR'},
                            'There are no synced Blender settings in your Blender Cloud.')
            return

        if self.sync_group_versioned_id is None:
            self.bss_report({'ERROR'}, 'There are no synced Blender settings for version %s' %
                            self.blender_version)
            return

        self.bss_report({'INFO'}, 'Pulling settings from Blender Cloud')
        with tempfile.TemporaryDirectory(prefix='bcloud-sync') as tempdir:
            for fname in SETTINGS_FILES_TO_UPLOAD:
                await self.download_settings_file(fname, tempdir)

        self.bss_report({'WARNING'}, 'Settings pulled from Cloud, restart Blender to load them.')

    async def action_refresh(self, context):
        self.bss_report({'INFO'}, 'Refreshing available Blender versions.')

        # Clear the LRU cache of available_blender_versions so that we can
        # obtain new versions (if someone synced from somewhere else, for example)
        available_blender_versions.cache_clear()

        versions = await available_blender_versions(self.home_project_id, self.user_id)
        bss = bpy.context.window_manager.blender_sync_status
        bss.available_blender_versions = versions

        if versions:
            # There are versions to sync, so we can remove the status message.
            # However, if there aren't any, the status message shows why, and
            # shouldn't be erased.
            self.bss_report({'INFO'}, '')

    async def download_settings_file(self, fname: str, temp_dir: str):
        config_dir = pathlib.Path(bpy.utils.user_resource('CONFIG'))
        meta_path = cache.cache_directory('home-project', 'blender-sync')

        self.bss_report({'INFO'}, 'Downloading %s from Cloud' % fname)

        # Get the asset node
        node_props = {'project': self.home_project_id,
                      'node_type': 'asset',
                      'parent': self.sync_group_versioned_id,
                      'name': fname}
        node = await pillar_call(pillarsdk.Node.find_first, {
            'where': node_props,
            'projection': {'_id': 1, 'properties.file': 1}
        }, caching=False)
        if node is None:
            self.bss_report({'INFO'}, 'Unable to find %s on Blender Cloud' % fname)
            self.log.info('Unable to find node on Blender Cloud for %s', fname)
            return

        async def file_downloaded(file_path: str, file_desc: pillarsdk.File):
            # Allow the caller to adjust the file before we move it into place.

            if fname.lower() == 'userpref.blend':
                await self.update_userpref_blend(file_path)

            # Move the file next to the final location; as it may be on a
            # different filesystem than the temporary directory, this can
            # fail, and we don't want to destroy the existing file.
            local_temp = config_dir / (fname + '~')
            local_final = config_dir / fname

            # Make a backup copy of the file as it was before pulling.
            if local_final.exists():
                local_bak = config_dir / (fname + '-pre-bcloud-pull')
                self.move_file(local_final, local_bak)

            self.move_file(file_path, local_temp)
            self.move_file(local_temp, local_final)

        file_id = node.properties.file
        await pillar.download_file_by_uuid(file_id,
                                           temp_dir,
                                           str(meta_path),
                                           file_loaded_sync=file_downloaded,
                                           future=self.signalling_future)

    def move_file(self, src, dst):
        self.log.info('Moving %s to %s', src, dst)
        shutil.move(str(src), str(dst))

    async def update_userpref_blend(self, file_path: str):
        self.log.info('Overriding machine-local settings in %s', file_path)

        # Remember some settings that should not be overwritten from the Cloud.
        up = bpy.context.user_preferences
        remembered = {}
        for rna_key, python_key in LOCAL_SETTINGS_RNA:
            assert '.' in python_key, 'Sorry, this code assumes there is a dot in the Python key'

            try:
                value = up.path_resolve(python_key)
            except ValueError:
                # Setting doesn't exist. This can happen, for example Cycles
                # settings on a build that doesn't have Cycles enabled.
                continue

            # Map enums from strings (in Python) to ints (in DNA).
            dot_index = python_key.rindex('.')
            parent_key, prop_key = python_key[:dot_index], python_key[dot_index + 1:]
            parent = up.path_resolve(parent_key)
            prop = parent.bl_rna.properties[prop_key]
            if prop.type == 'ENUM':
                log.debug('Rewriting %s from %r to %r',
                          python_key, value, prop.enum_items[value].value)
                value = prop.enum_items[value].value
            else:
                log.debug('Keeping value of %s: %r', python_key, value)

            remembered[rna_key] = value
        log.debug('Overriding values: %s', remembered)

        # Rewrite the userprefs.blend file to override the options.
        with blendfile.open_blend(file_path, 'rb+') as blend:
            prefs = next(block for block in blend.blocks
                         if block.code == b'USER')

            for key, value in remembered.items():
                self.log.debug('prefs[%r] = %r' % (key, prefs[key]))
                self.log.debug('  -> setting prefs[%r] = %r' % (key, value))
                prefs[key] = value


def register():
    bpy.utils.register_class(PILLAR_OT_sync)


def unregister():
    bpy.utils.unregister_class(PILLAR_OT_sync)
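Aside: file_downloaded() above uses a deliberate multi-step move so that a cross-filesystem failure cannot destroy the existing settings file. The same pattern, isolated (function name and paths are illustrative):

import pathlib
import shutil

def pull_into_place(downloaded: pathlib.Path, final: pathlib.Path):
    if final.exists():  # keep a backup of what was there before the pull
        shutil.move(str(final), str(final) + '-pre-bcloud-pull')
    tmp = final.with_name(final.name + '~')
    shutil.move(str(downloaded), str(tmp))  # may fail across filesystems
    shutil.move(str(tmp), str(final))       # same-directory move, a cheap rename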
@@ -18,8 +18,9 @@ def load_wheel(module_name, fname_prefix):

    try:
        module = __import__(module_name)
    except ImportError:
        pass
    except ImportError as ex:
        log.debug('Unable to import %s directly, will try wheel: %s',
                  module_name, ex)
    else:
        log.debug('Was able to load %s from %s, no need to load wheel %s',
                  module_name, module.__file__, fname_prefix)
@@ -30,7 +31,9 @@ def load_wheel(module_name, fname_prefix):
    if not wheels:
        raise RuntimeError('Unable to find wheel at %r' % path_pattern)

    sys.path.append(wheels[0])
    # If there are multiple wheels that match, load the latest one.
    wheels.sort()
    sys.path.append(wheels[-1])
    module = __import__(module_name)
    log.debug('Loaded %s from %s', module_name, module.__file__)

clear_wheels.sh (new executable file, 8 lines)
@@ -0,0 +1,8 @@
#!/bin/bash

git clean -n -d -X blender_cloud/wheels/

echo "Press [ENTER] to actually delete those files."
read dummy

git clean -f -d -X blender_cloud/wheels/
@@ -1,7 +1,7 @@
# Primary requirements:
CacheControl==0.11.6
-e git+https://github.com/sybrenstuvel/cachecontrol.git@sybren-filecache-delete-crash-fix#egg=CacheControl
lockfile==0.12.2
pillarsdk==1.0.0
pillarsdk==1.3.0
wheel==0.29.0

# Secondary requirements:
setup.py (14 lines changed)
@@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env python3

import glob
import sys
import shutil
@@ -14,6 +15,7 @@ from distutils.command.install_egg_info import install_egg_info
from setuptools import setup, find_packages

requirement_re = re.compile('[><=]+')
sys.dont_write_bytecode = True


def set_default_path(var, default):
@@ -83,9 +85,13 @@ class BuildWheels(Command):
        # Build CacheControl.
        if not list(self.wheels_path.glob('CacheControl*.whl')):
            log.info('Building CacheControl in %s', self.cachecontrol_path)
            # self.git_clone(self.cachecontrol_path,
            #                'https://github.com/ionrock/cachecontrol.git',
            #                'v%s' % requirements['CacheControl'][1])
            # FIXME: we need my clone until pull request #125 has been merged & released
            self.git_clone(self.cachecontrol_path,
                           'https://github.com/ionrock/cachecontrol.git',
                           'v%s' % requirements['CacheControl'][1])
                           'https://github.com/sybrenstuvel/cachecontrol.git',
                           'sybren-filecache-delete-crash-fix')
            self.build_copy_wheel(self.cachecontrol_path)

        # Ensure that the wheels are added to the data files.
@@ -173,7 +179,7 @@ setup(
          'wheels': BuildWheels},
    name='blender_cloud',
    description='The Blender Cloud addon allows browsing the Blender Cloud from Blender.',
    version='1.0.0',
    version='1.2.2',
    author='Sybren A. Stüvel',
    author_email='sybren@stuvel.eu',
    packages=find_packages('.'),