Compare commits
262 Commits
version-1.
...
master
Author | SHA1 | Date | |
---|---|---|---|
d940735453 | |||
7d71067b3d | |||
b0b804410d | |||
d55f2dcee1 | |||
2fbb5ac788 | |||
b47b407589 | |||
a136366804 | |||
6718e1646f | |||
9d7f9a979e | |||
326a793de0 | |||
88ccb0f376 | |||
5b8895278a | |||
eb37d20039 | |||
4f49e8ca0b | |||
c931700fec | |||
6285826bfc | |||
25150397c0 | |||
c67b161e3d | |||
f76dcb964e | |||
2d868ec724 | |||
666ae0fa90 | |||
49844e17b2 | |||
06432a3534 | |||
3a2e9bc672 | |||
ce331c7b22 | |||
8b5dc65d84 | |||
3bc7dcfa9e | |||
d9fe24ece7 | |||
dd00bc9cb5 | |||
14778e5c08 | |||
8b49c5505e | |||
883f125722 | |||
2fbe7e1258 | |||
405b823c81 | |||
9e952035d3 | |||
d77acfb9c8 | |||
70de9741df | |||
cc37e73bc6 | |||
e32e75e3db | |||
6fa5ab5481 | |||
379580de86 | |||
db30b3df76 | |||
5de99baaef | |||
2184b39d27 | |||
23b1f7de7d | |||
28f68c6fbf | |||
b00cb233cc | |||
2142e9e7fc | |||
1dea802932 | |||
077bd1abdb | |||
5a2c528681 | |||
53b12376d1 | |||
8495868ea6 | |||
cf810de41b | |||
c457767edf | |||
985b3f6a7d | |||
a45bf3cd5c | |||
3789742cc8 | |||
58f374e175 | |||
99e90e1008 | |||
dd83d3ee60 | |||
e74e014c66 | |||
01541f181e | |||
a69f4d3fd9 | |||
3ffea46f23 | |||
94c5811e42 | |||
676ad1ed14 | |||
79e6fa37f4 | |||
e06fa3ea75 | |||
fb6352dc7d | |||
97ad8bf5ba | |||
b0f7719add | |||
dada275e32 | |||
6bce1ccf90 | |||
bbe524c099 | |||
462da038ec | |||
8d7799655e | |||
cb0393868e | |||
5a61a7a6c4 | |||
60d1fbff50 | |||
352fe239f2 | |||
09c1bf67b4 | |||
23235afe71 | |||
ff9624d4f3 | |||
48c60f73d7 | |||
12eaaa5bae | |||
f7396350db | |||
cc97288018 | |||
26105add9c | |||
ea81cc5769 | |||
25b6053836 | |||
65a05403dc | |||
770b0121fa | |||
2b155eac45 | |||
d36959e91b | |||
9028c38c68 | |||
f04f07eaf1 | |||
6c38a432bc | |||
53fa3e628a | |||
924fb45cb2 | |||
b5619757bc | |||
ae41745743 | |||
ffab83f921 | |||
8bef2f48a5 | |||
74b46ff0db | |||
e1934b20d9 | |||
0caf761863 | |||
bc864737ae | |||
f454a99c4b | |||
40732e0487 | |||
b86bffbdbb | |||
67f9d40fd3 | |||
c4de4e9990 | |||
6d2e6efa13 | |||
ff9ae0117d | |||
974d33e3a3 | |||
8de3a0bba2 | |||
6f705b917f | |||
02b694f5d4 | |||
663ebae572 | |||
cb5a116dff | |||
5821611d89 | |||
8bd1faa575 | |||
8899bff5e4 | |||
4fd4ad7448 | |||
4f32b49ad3 | |||
1f13b4d249 | |||
ef57dba5d3 | |||
419249ee19 | |||
113eb8f7ab | |||
85f911cb59 | |||
564c2589b1 | |||
80155ed4f4 | |||
d8c5c4eecd | |||
3972ce4543 | |||
d75a055149 | |||
649542daad | |||
1d99751d20 | |||
69028e0cfd | |||
dc7ad296bf | |||
3f2479067c | |||
6fefe4ffd8 | |||
62c1c966f6 | |||
57aadc1817 | |||
7204d4a24c | |||
641b51496a | |||
0562d57513 | |||
ac19e48895 | |||
73d96e5c89 | |||
4bfdac223a | |||
5d6777c74b | |||
f4322f1d1f | |||
13a8595cc0 | |||
af413059b0 | |||
4d26ad248e | |||
d019fd0cf0 | |||
fb9ffbbc23 | |||
f6d797512a | |||
8367abeeb9 | |||
2f5f82b1a8 | |||
a04137ec6a | |||
87c90a7f72 | |||
4de8122920 | |||
21d2257be0 | |||
bc4036573c | |||
87cf1e12fa | |||
b35d7bc5f3 | |||
973dafcc3a | |||
62d16fff35 | |||
ed3de414ef | |||
b0a03c81f5 | |||
99f0764986 | |||
f9c2dda9fa | |||
0a7dea568a | |||
40c31e8be2 | |||
394395a7f5 | |||
f1478bf3d9 | |||
2fce27f8cb | |||
59e6491110 | |||
afca7abe18 | |||
4aae107396 | |||
096a5f5803 | |||
79dc5c91f7 | |||
0a99b9e22e | |||
0452fd845b | |||
c0a8602e17 | |||
10c51b3af5 | |||
0be5c16926 | |||
4158c4eed5 | |||
de4a93de98 | |||
331e9e6ca0 | |||
1d81f4bc38 | |||
5f58f8b6f7 | |||
164f65f30c | |||
b82bc14fcf | |||
9e5dcd0b55 | |||
531ddad8f5 | |||
7e0dd0384d | |||
da0c8dd944 | |||
8065ab88a4 | |||
6baf43e53b | |||
f1fa273370 | |||
bf96638c88 | |||
bc8a985228 | |||
ba14c33b6d | |||
0a7e7195a2 | |||
ecab0f6163 | |||
3c91ccced6 | |||
c9ed6c7d23 | |||
5fa01daf9e | |||
77664fb6d7 | |||
45cffc5365 | |||
fb5433d473 | |||
a17fe45712 | |||
1bfba64bdc | |||
cdb4bf4f4f | |||
15254b8951 | |||
3ed5f2c187 | |||
0be3bf7f49 | |||
f207e14664 | |||
9932003400 | |||
e7035e6f0c | |||
014a36d24e | |||
068451a7aa | |||
56fb1ec3df | |||
e93094cb88 | |||
33718a1a35 | |||
db82dbe730 | |||
8d405330ee | |||
66ddc7b47b | |||
2fa8cb4054 | |||
e7b5c75046 | |||
1d93bd9e5e | |||
ac2d0c033c | |||
61fa63eb1d | |||
7022412889 | |||
b4f71745b0 | |||
1d41fce1ae | |||
e636fde4ce | |||
82a9dc5226 | |||
1f40915ac8 | |||
32693c0f64 | |||
c38748eb05 | |||
ac85bea111 | |||
7b5613ce77 | |||
ec5f317dac | |||
a51f61d9b5 | |||
13bc9a89c8 | |||
996b722813 | |||
e7f2567bfc | |||
ff8e71c542 | |||
543da5c8d8 | |||
01ae0f5f54 | |||
1e80446870 | |||
8d5c97931e | |||
1a0c00b87a | |||
32befc51f8 | |||
06126862d4 | |||
7b8713881e | |||
7c65851b75 | |||
ec72091268 | |||
cf7adb065f |
228
CHANGELOG.md
Normal file
228
CHANGELOG.md
Normal file
@ -0,0 +1,228 @@
|
||||
# Blender Cloud changelog
|
||||
|
||||
|
||||
## Version 1.25 (2022-02-25)
|
||||
|
||||
- Compatibility with Blender 3.1 (Python 3.10).
|
||||
- Bump blender-asset-tracer to version 1.11, for UDIM support.
|
||||
|
||||
|
||||
## Version 1.24 (2022-02-04)
|
||||
|
||||
- Bump blender-asset-tracer version 1.8 → 1.10, for fixing a bug where files were doubly-compressed.
|
||||
|
||||
|
||||
## Version 1.23 (2021-11-09)
|
||||
|
||||
- Bump blender-asset-tracer version 1.7 → 1.8, for compatibility with sending read-only blend files to Flamenco.
|
||||
|
||||
|
||||
## Version 1.22 (2021-11-05)
|
||||
|
||||
- Fix Windows incompatibility when using Shaman URLs as job storage path.
|
||||
- Bump blender-asset-tracer version 1.6 → 1.7, for compatibility with files compressed by Blender 3.0.
|
||||
|
||||
|
||||
## Version 1.21 (2021-07-27)
|
||||
|
||||
- Bump blender-asset-tracer version 1.5.1 → 1.6, for better compatibility with Geometry Nodes.
|
||||
|
||||
## Version 1.20 (2021-07-22)
|
||||
|
||||
- Bump blender-asset-tracer version 1.3.1 -> 1.5.1.
|
||||
- Blender-asset-tracer "Strict Pointer Mode" disabled, to avoid issues with
|
||||
not-entirely-synced library overrides.
|
||||
|
||||
## Version 1.19 (2021-02-23)
|
||||
|
||||
- Another Python 3.9+ compatibility fix.
|
||||
|
||||
## Version 1.18 (2021-02-16)
|
||||
|
||||
- Add compatibility with Python 3.9 (as used in Blender 2.93).
|
||||
- Drop compatibility with Blender 2.79 and older. The last version of the
|
||||
Blender Cloud add-on with 2.79 and older is version 1.17.
|
||||
|
||||
## Version 1.17 (2021-02-04)
|
||||
|
||||
- This is the last version compatible with Blender 2.77a - 2.79.
|
||||
- Upgrade BAT to version 1.3.1, which brings compatibility with Geometry Nodes and
|
||||
fixes some issues on Windows.
|
||||
|
||||
## Version 1.16 (2020-03-03)
|
||||
|
||||
- Fixed Windows compatibility issue with the handling of Shaman URLs.
|
||||
|
||||
## Version 1.15 (2019-12-12)
|
||||
|
||||
- Avoid creating BAT pack when the to-be-rendered file is already inside the job storage
|
||||
directory. This assumes that the paths are already correct for the Flamenco Workers.
|
||||
|
||||
## Version 1.14 (2019-10-10)
|
||||
|
||||
- Upgraded BAT to 1.2 for missing smoke caches, compatibility with Blender 2.81, and some
|
||||
Windows-specific fixes.
|
||||
- Removed warnings on the terminal when running Blender 2.80+
|
||||
|
||||
## Version 1.13 (2019-04-18)
|
||||
|
||||
- Upgraded BAT to 1.1.1 for a compatibility fix with Blender 2.79
|
||||
- Flamenco: Support for Flamenco Manager settings versioning + for settings version 2.
|
||||
When using Blender Cloud Add-on 1.12 or older, Flamenco Server will automatically convert the
|
||||
Manager settings to version 1.
|
||||
- More Blender 2.80 compatibility fixes
|
||||
|
||||
## Version 1.12 (2019-03-25)
|
||||
|
||||
- Flamenco: Change how progressive render tasks are created. Instead of the artist setting a fixed
|
||||
number of sample chunks, they can now set a maximum number of samples for each render task.
|
||||
Initial render tasks are created with a low number of samples, and subsequent tasks have an
|
||||
increasing number of samples, up to the set maximum. The total number of samples of the final
|
||||
render is still equal to the number of samples configured in the blend file.
|
||||
Requires Flamenco Server 2.2 or newer.
|
||||
- Flamenco: Added a hidden "Submit & Quit" button. This button can be enabled in the add-on
|
||||
preferences and then be available on the Flamenco Render panel. Pressing the button will
|
||||
silently close Blender after the job has been submitted to Flamenco (for example to click,
|
||||
walk away, and free up memory for when the same machine is part of the render farm).
|
||||
- Flamenco: Name render jobs just 'thefile' instead of 'Render thefile.flamenco.blend'.
|
||||
This makes the job overview on Flamenco Server cleaner.
|
||||
- Flamenco: support Shaman servers. See https://www.flamenco.io/docs/user_manual/shaman/
|
||||
for more info.
|
||||
- Flamenco: The 'blender-video-chunks' job type now also allows MP4 and MOV video containers.
|
||||
|
||||
## Version 1.11.1 (2019-01-04)
|
||||
|
||||
- Bundled missing Texture Browser icons.
|
||||
|
||||
## Version 1.11.0 (2019-01-04)
|
||||
|
||||
- Texture Browser now works on Blender 2.8.
|
||||
- Blender Sync: Fixed compatibility issue with Blender 2.8.
|
||||
|
||||
## Version 1.10.0 (2019-01-02)
|
||||
|
||||
- Bundles Blender-Asset-Tracer 0.8.
|
||||
- Fix crashing Blender when running in background mode (e.g. without GUI).
|
||||
- Flamenco: Include extra job parameters to allow for encoding a video at the end of a render
|
||||
job that produced an image sequence.
|
||||
- Flamenco: Compress all blend files, and not just the one we save from Blender.
|
||||
- Flamenco: Store more info in the `jobinfo.json` file. This is mostly useful for debugging issues
|
||||
on the render farm, as now things like the exclusion filter and Manager settings are logged too.
|
||||
- Flamenco: Allow BAT-packing of only those assets that are referred to by relative path (e.g.
|
||||
a path starting with `//`). Assets with an absolute path are ignored, and assumed to be reachable
|
||||
at the same path by the Workers.
|
||||
- Flamenco: Added 'blender-video-chunks' job type, meant for rendering the edit of a film from the
|
||||
VSE. This job type requires that the file is configured for rendering to Matroska video
|
||||
files.
|
||||
|
||||
Audio is only extracted when there is an audio codec configured. This is a bit arbitrary, but it's
|
||||
at least a way to tell whether the artist is considering that there is audio of any relevance in
|
||||
the current blend file.
|
||||
|
||||
## Version 1.9.4 (2018-11-01)
|
||||
|
||||
- Fixed Python 3.6 and Blender 2.79b incompatibilities accidentally introduced in 1.9.3.
|
||||
|
||||
## Version 1.9.3 (2018-10-30)
|
||||
|
||||
- Fix drawing of Attract strips in the VSE on Blender 2.8.
|
||||
|
||||
## Version 1.9.2 (2018-09-17)
|
||||
|
||||
- No changes, just a different filename to force a refresh on our
|
||||
hosting platform.
|
||||
|
||||
## Version 1.9.1 (2018-09-17)
|
||||
|
||||
- Fix issue with Python 3.7, which is used by current daily builds of Blender.
|
||||
|
||||
## Version 1.9 (2018-09-05)
|
||||
|
||||
- Last version to support Blender versions before 2.80!
|
||||
- Replace BAM with BAT🦇.
|
||||
- Don't crash the texture browser when an invalid texture is seen.
|
||||
- Support colour strips as Attract shots.
|
||||
- Flamenco: allow jobs to be created in 'paused' state.
|
||||
- Flamenco: only show Flamenco Managers that are linked to the currently selected project.
|
||||
|
||||
## Version 1.8 (2018-01-03)
|
||||
|
||||
- Distinguish between 'please subscribe' (to get a new subscription) and 'please renew' (to renew an
|
||||
existing subscription).
|
||||
- When re-opening the Texture Browser it now opens in the same folder as where it was when closed.
|
||||
- In the texture browser, draw the components of the texture (i.e. which map types are available),
|
||||
such as 'bump, normal, specular'.
|
||||
- Use Interface Scale setting from user preferences to draw the Texture Browser text.
|
||||
- Store project-specific settings in the preferences, such as filesystem paths, for each project,
|
||||
and restore those settings when the project is selected again. Does not touch settings that
|
||||
haven't been set for the newly selected project. These settings are only saved when a setting
|
||||
is updated, so to save your current settings you need to update a single setting; this saves all
|
||||
settings for the project.
|
||||
- Added button in the User Preferences to open a Cloud project in your webbrowser.
|
||||
|
||||
## Version 1.7.5 (2017-10-06)
|
||||
|
||||
- Sorting the project list alphabetically.
|
||||
- Renamed 'Job File Path' to 'Job Storage Path' so it's more explicit.
|
||||
- Allow overriding the render output path on a per-scene basis.
|
||||
|
||||
## Version 1.7.4 (2017-09-05)
|
||||
|
||||
- Fix [T52621](https://developer.blender.org/T52621): Fixed class name collision upon add-on
|
||||
registration. This is checked since Blender 2.79.
|
||||
- Fix [T48852](https://developer.blender.org/T48852): Screenshot no longer shows "Communicating with
|
||||
Blender Cloud".
|
||||
|
||||
## Version 1.7.3 (2017-08-08)
|
||||
|
||||
- Default to scene frame range when no frame range is given.
|
||||
- Refuse to render on Flamenco before blend file is saved at least once.
|
||||
- Fixed some Windows-specific issues.
|
||||
|
||||
## Version 1.7.2 (2017-06-22)
|
||||
|
||||
- Fixed compatibility with Blender 2.78c.
|
||||
|
||||
## Version 1.7.1 (2017-06-13)
|
||||
|
||||
- Fixed asyncio issues on Windows
|
||||
|
||||
## Version 1.7.0 (2017-06-09)
|
||||
|
||||
- Fixed reloading after upgrading from 1.4.4 (our last public release).
|
||||
- Fixed bug handling a symlinked project path.
|
||||
- Added support for Manager-defined path replacement variables.
|
||||
|
||||
## Version 1.6.4 (2017-04-21)
|
||||
|
||||
- Added file exclusion filter for Flamenco. A filter like `*.abc;*.mkv;*.mov` can be
|
||||
used to prevent certain files from being copied to the job storage directory.
|
||||
Requires a Blender that is bundled with BAM 1.1.7 or newer.
|
||||
|
||||
## Version 1.6.3 (2017-03-21)
|
||||
|
||||
- Fixed bug where local project path wasn't shown for projects only set up for Flamenco
|
||||
(and not Attract).
|
||||
- Added this CHANGELOG.md file, which will contain user-relevant changes.
|
||||
|
||||
## Version 1.6.2 (2017-03-17)
|
||||
|
||||
- Flamenco: when opening non-existing file path, open parent instead
|
||||
- Fix T50954: Improve Blender Cloud add-on project selector
|
||||
|
||||
## Version 1.6.1 (2017-03-07)
|
||||
|
||||
- Show error in GUI when Blender Cloud is unreachable
|
||||
- Fixed sample count when using branched path tracing
|
||||
|
||||
## Version 1.6.0 (2017-02-14)
|
||||
|
||||
- Default to frame chunk size of 1 (instead of 10).
|
||||
- Turn off "use overwrite" and "use placeholder" for Flamenco blend files.
|
||||
- Fixed bugs when blendfile is outside the project directory
|
||||
|
||||
|
||||
## Older versions
|
||||
|
||||
For the history of older versions, please refer to the
|
||||
[Git history](https://developer.blender.org/diffusion/BCA/)
|
@ -19,22 +19,22 @@
|
||||
# <pep8 compliant>
|
||||
|
||||
bl_info = {
|
||||
'name': 'Blender Cloud',
|
||||
"name": "Blender Cloud",
|
||||
"author": "Sybren A. Stüvel, Francesco Siddi, Inês Almeida, Antony Riakiotakis",
|
||||
'version': (1, 6, 2),
|
||||
'blender': (2, 77, 0),
|
||||
'location': 'Addon Preferences panel, and Ctrl+Shift+Alt+A anywhere for texture browser',
|
||||
'description': 'Texture library browser and Blender Sync. Requires the Blender ID addon '
|
||||
'and Blender 2.77a or newer.',
|
||||
'wiki_url': 'https://wiki.blender.org/index.php/Extensions:2.6/Py/'
|
||||
'Scripts/System/BlenderCloud',
|
||||
'category': 'System',
|
||||
"version": (1, 25),
|
||||
"blender": (2, 80, 0),
|
||||
"location": "Addon Preferences panel, and Ctrl+Shift+Alt+A anywhere for texture browser",
|
||||
"description": "Texture library browser and Blender Sync. Requires the Blender ID addon "
|
||||
"and Blender 2.80 or newer.",
|
||||
"wiki_url": "https://wiki.blender.org/index.php/Extensions:2.6/Py/"
|
||||
"Scripts/System/BlenderCloud",
|
||||
"category": "System",
|
||||
}
|
||||
|
||||
import logging
|
||||
|
||||
# Support reloading
|
||||
if 'pillar' in locals():
|
||||
if "pillar" in locals():
|
||||
import importlib
|
||||
|
||||
wheels = importlib.reload(wheels)
|
||||
@ -60,41 +60,60 @@ def register():
|
||||
_monkey_patch_requests()
|
||||
|
||||
# Support reloading
|
||||
if '%s.blender' % __name__ in sys.modules:
|
||||
if "%s.blender" % __name__ in sys.modules:
|
||||
import importlib
|
||||
|
||||
def reload_mod(name):
|
||||
modname = '%s.%s' % (__name__, name)
|
||||
module = importlib.reload(sys.modules[modname])
|
||||
sys.modules[modname] = module
|
||||
return module
|
||||
|
||||
reload_mod('blendfile')
|
||||
reload_mod('home_project')
|
||||
reload_mod('utils')
|
||||
|
||||
blender = reload_mod('blender')
|
||||
async_loop = reload_mod('async_loop')
|
||||
texture_browser = reload_mod('texture_browser')
|
||||
settings_sync = reload_mod('settings_sync')
|
||||
image_sharing = reload_mod('image_sharing')
|
||||
attract = reload_mod('attract')
|
||||
flamenco = reload_mod('flamenco')
|
||||
modname = "%s.%s" % (__name__, name)
|
||||
try:
|
||||
old_module = sys.modules[modname]
|
||||
except KeyError:
|
||||
# Wasn't loaded before -- can happen after an upgrade.
|
||||
new_module = importlib.import_module(modname)
|
||||
else:
|
||||
from . import (blender, texture_browser, async_loop, settings_sync, blendfile, home_project,
|
||||
image_sharing, attract, flamenco)
|
||||
new_module = importlib.reload(old_module)
|
||||
|
||||
sys.modules[modname] = new_module
|
||||
return new_module
|
||||
|
||||
reload_mod("blendfile")
|
||||
reload_mod("home_project")
|
||||
reload_mod("utils")
|
||||
reload_mod("pillar")
|
||||
|
||||
async_loop = reload_mod("async_loop")
|
||||
flamenco = reload_mod("flamenco")
|
||||
attract = reload_mod("attract")
|
||||
texture_browser = reload_mod("texture_browser")
|
||||
settings_sync = reload_mod("settings_sync")
|
||||
image_sharing = reload_mod("image_sharing")
|
||||
blender = reload_mod("blender")
|
||||
project_specific = reload_mod("project_specific")
|
||||
else:
|
||||
from . import (
|
||||
blender,
|
||||
texture_browser,
|
||||
async_loop,
|
||||
settings_sync,
|
||||
blendfile,
|
||||
home_project,
|
||||
image_sharing,
|
||||
attract,
|
||||
flamenco,
|
||||
project_specific,
|
||||
)
|
||||
|
||||
async_loop.setup_asyncio_executor()
|
||||
async_loop.register()
|
||||
|
||||
flamenco.register()
|
||||
attract.register()
|
||||
texture_browser.register()
|
||||
blender.register()
|
||||
settings_sync.register()
|
||||
image_sharing.register()
|
||||
attract.register()
|
||||
blender.register()
|
||||
|
||||
blender.handle_project_update()
|
||||
project_specific.handle_project_update()
|
||||
|
||||
|
||||
def _monkey_patch_requests():
|
||||
@ -108,15 +127,23 @@ def _monkey_patch_requests():
|
||||
if requests.__build__ >= 0x020601:
|
||||
return
|
||||
|
||||
log.info('Monkey-patching requests version %s', requests.__version__)
|
||||
log.info("Monkey-patching requests version %s", requests.__version__)
|
||||
from requests.packages.urllib3.response import HTTPResponse
|
||||
|
||||
HTTPResponse.chunked = False
|
||||
HTTPResponse.chunk_left = None
|
||||
|
||||
|
||||
def unregister():
|
||||
from . import (blender, texture_browser, async_loop, settings_sync, image_sharing, attract,
|
||||
flamenco)
|
||||
from . import (
|
||||
blender,
|
||||
texture_browser,
|
||||
async_loop,
|
||||
settings_sync,
|
||||
image_sharing,
|
||||
attract,
|
||||
flamenco,
|
||||
)
|
||||
|
||||
image_sharing.unregister()
|
||||
attract.unregister()
|
||||
|
@ -14,7 +14,7 @@ See <http://github.com/ActiveState/appdirs> for details and usage.
|
||||
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
|
||||
|
||||
__version_info__ = (1, 4, 0)
|
||||
__version__ = '.'.join(map(str, __version_info__))
|
||||
__version__ = ".".join(map(str, __version_info__))
|
||||
|
||||
|
||||
import sys
|
||||
@ -25,23 +25,23 @@ PY3 = sys.version_info[0] == 3
|
||||
if PY3:
|
||||
unicode = str
|
||||
|
||||
if sys.platform.startswith('java'):
|
||||
if sys.platform.startswith("java"):
|
||||
import platform
|
||||
|
||||
os_name = platform.java_ver()[3][0]
|
||||
if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc.
|
||||
system = 'win32'
|
||||
elif os_name.startswith('Mac'): # "Mac OS X", etc.
|
||||
system = 'darwin'
|
||||
if os_name.startswith("Windows"): # "Windows XP", "Windows 7", etc.
|
||||
system = "win32"
|
||||
elif os_name.startswith("Mac"): # "Mac OS X", etc.
|
||||
system = "darwin"
|
||||
else: # "Linux", "SunOS", "FreeBSD", etc.
|
||||
# Setting this to "linux2" is not ideal, but only Windows or Mac
|
||||
# are actually checked for and the rest of the module expects
|
||||
# *sys.platform* style strings.
|
||||
system = 'linux2'
|
||||
system = "linux2"
|
||||
else:
|
||||
system = sys.platform
|
||||
|
||||
|
||||
|
||||
def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
|
||||
r"""Return full path to the user-specific data dir for this application.
|
||||
|
||||
@ -84,12 +84,12 @@ def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
|
||||
path = os.path.join(path, appauthor, appname)
|
||||
else:
|
||||
path = os.path.join(path, appname)
|
||||
elif system == 'darwin':
|
||||
path = os.path.expanduser('~/Library/Application Support/')
|
||||
elif system == "darwin":
|
||||
path = os.path.expanduser("~/Library/Application Support/")
|
||||
if appname:
|
||||
path = os.path.join(path, appname)
|
||||
else:
|
||||
path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
|
||||
path = os.getenv("XDG_DATA_HOME", os.path.expanduser("~/.local/share"))
|
||||
if appname:
|
||||
path = os.path.join(path, appname)
|
||||
if appname and version:
|
||||
@ -137,16 +137,19 @@ def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
|
||||
path = os.path.join(path, appauthor, appname)
|
||||
else:
|
||||
path = os.path.join(path, appname)
|
||||
elif system == 'darwin':
|
||||
path = os.path.expanduser('/Library/Application Support')
|
||||
elif system == "darwin":
|
||||
path = os.path.expanduser("/Library/Application Support")
|
||||
if appname:
|
||||
path = os.path.join(path, appname)
|
||||
else:
|
||||
# XDG default for $XDG_DATA_DIRS
|
||||
# only first, if multipath is False
|
||||
path = os.getenv('XDG_DATA_DIRS',
|
||||
os.pathsep.join(['/usr/local/share', '/usr/share']))
|
||||
pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
|
||||
path = os.getenv(
|
||||
"XDG_DATA_DIRS", os.pathsep.join(["/usr/local/share", "/usr/share"])
|
||||
)
|
||||
pathlist = [
|
||||
os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)
|
||||
]
|
||||
if appname:
|
||||
if version:
|
||||
appname = os.path.join(appname, version)
|
||||
@ -195,7 +198,7 @@ def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
|
||||
if system in ["win32", "darwin"]:
|
||||
path = user_data_dir(appname, appauthor, None, roaming)
|
||||
else:
|
||||
path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
|
||||
path = os.getenv("XDG_CONFIG_HOME", os.path.expanduser("~/.config"))
|
||||
if appname:
|
||||
path = os.path.join(path, appname)
|
||||
if appname and version:
|
||||
@ -240,8 +243,10 @@ def site_config_dir(appname=None, appauthor=None, version=None, multipath=False)
|
||||
else:
|
||||
# XDG default for $XDG_CONFIG_DIRS
|
||||
# only first, if multipath is False
|
||||
path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
|
||||
pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
|
||||
path = os.getenv("XDG_CONFIG_DIRS", "/etc/xdg")
|
||||
pathlist = [
|
||||
os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)
|
||||
]
|
||||
if appname:
|
||||
if version:
|
||||
appname = os.path.join(appname, version)
|
||||
@ -298,14 +303,14 @@ def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
|
||||
path = os.path.join(path, appname)
|
||||
if opinion:
|
||||
path = os.path.join(path, "Cache")
|
||||
elif system == 'darwin':
|
||||
path = os.path.expanduser('~/Library/Caches')
|
||||
elif system == "darwin":
|
||||
path = os.path.expanduser("~/Library/Caches")
|
||||
if appname:
|
||||
path = os.path.join(path, appname)
|
||||
else:
|
||||
path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
|
||||
path = os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache"))
|
||||
if appname:
|
||||
path = os.path.join(path, appname.lower().replace(' ', '-'))
|
||||
path = os.path.join(path, appname.lower().replace(" ", "-"))
|
||||
if appname and version:
|
||||
path = os.path.join(path, version)
|
||||
return path
|
||||
@ -344,9 +349,7 @@ def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
|
||||
This can be disabled with the `opinion=False` option.
|
||||
"""
|
||||
if system == "darwin":
|
||||
path = os.path.join(
|
||||
os.path.expanduser('~/Library/Logs'),
|
||||
appname)
|
||||
path = os.path.join(os.path.expanduser("~/Library/Logs"), appname)
|
||||
elif system == "win32":
|
||||
path = user_data_dir(appname, appauthor, version)
|
||||
version = False
|
||||
@ -364,8 +367,10 @@ def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
|
||||
|
||||
class AppDirs(object):
|
||||
"""Convenience wrapper for getting application dirs."""
|
||||
def __init__(self, appname, appauthor=None, version=None, roaming=False,
|
||||
multipath=False):
|
||||
|
||||
def __init__(
|
||||
self, appname, appauthor=None, version=None, roaming=False, multipath=False
|
||||
):
|
||||
self.appname = appname
|
||||
self.appauthor = appauthor
|
||||
self.version = version
|
||||
@ -374,36 +379,39 @@ class AppDirs(object):
|
||||
|
||||
@property
|
||||
def user_data_dir(self):
|
||||
return user_data_dir(self.appname, self.appauthor,
|
||||
version=self.version, roaming=self.roaming)
|
||||
return user_data_dir(
|
||||
self.appname, self.appauthor, version=self.version, roaming=self.roaming
|
||||
)
|
||||
|
||||
@property
|
||||
def site_data_dir(self):
|
||||
return site_data_dir(self.appname, self.appauthor,
|
||||
version=self.version, multipath=self.multipath)
|
||||
return site_data_dir(
|
||||
self.appname, self.appauthor, version=self.version, multipath=self.multipath
|
||||
)
|
||||
|
||||
@property
|
||||
def user_config_dir(self):
|
||||
return user_config_dir(self.appname, self.appauthor,
|
||||
version=self.version, roaming=self.roaming)
|
||||
return user_config_dir(
|
||||
self.appname, self.appauthor, version=self.version, roaming=self.roaming
|
||||
)
|
||||
|
||||
@property
|
||||
def site_config_dir(self):
|
||||
return site_config_dir(self.appname, self.appauthor,
|
||||
version=self.version, multipath=self.multipath)
|
||||
return site_config_dir(
|
||||
self.appname, self.appauthor, version=self.version, multipath=self.multipath
|
||||
)
|
||||
|
||||
@property
|
||||
def user_cache_dir(self):
|
||||
return user_cache_dir(self.appname, self.appauthor,
|
||||
version=self.version)
|
||||
return user_cache_dir(self.appname, self.appauthor, version=self.version)
|
||||
|
||||
@property
|
||||
def user_log_dir(self):
|
||||
return user_log_dir(self.appname, self.appauthor,
|
||||
version=self.version)
|
||||
return user_log_dir(self.appname, self.appauthor, version=self.version)
|
||||
|
||||
|
||||
#---- internal support stuff
|
||||
# ---- internal support stuff
|
||||
|
||||
|
||||
def _get_win_folder_from_registry(csidl_name):
|
||||
"""This is a fallback technique at best. I'm not sure if using the
|
||||
@ -420,7 +428,7 @@ def _get_win_folder_from_registry(csidl_name):
|
||||
|
||||
key = _winreg.OpenKey(
|
||||
_winreg.HKEY_CURRENT_USER,
|
||||
r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
|
||||
r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders",
|
||||
)
|
||||
dir, type = _winreg.QueryValueEx(key, shell_folder_name)
|
||||
return dir
|
||||
@ -428,6 +436,7 @@ def _get_win_folder_from_registry(csidl_name):
|
||||
|
||||
def _get_win_folder_with_pywin32(csidl_name):
|
||||
from win32com.shell import shellcon, shell
|
||||
|
||||
dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
|
||||
# Try to make this a unicode path because SHGetFolderPath does
|
||||
# not return unicode strings when there is unicode data in the
|
||||
@ -445,6 +454,7 @@ def _get_win_folder_with_pywin32(csidl_name):
|
||||
if has_high_char:
|
||||
try:
|
||||
import win32api
|
||||
|
||||
dir = win32api.GetShortPathName(dir)
|
||||
except ImportError:
|
||||
pass
|
||||
@ -479,15 +489,22 @@ def _get_win_folder_with_ctypes(csidl_name):
|
||||
|
||||
return buf.value
|
||||
|
||||
|
||||
def _get_win_folder_with_jna(csidl_name):
|
||||
import array
|
||||
from com.sun import jna
|
||||
from com.sun.jna.platform import win32
|
||||
|
||||
buf_size = win32.WinDef.MAX_PATH * 2
|
||||
buf = array.zeros('c', buf_size)
|
||||
buf = array.zeros("c", buf_size)
|
||||
shell = win32.Shell32.INSTANCE
|
||||
shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
|
||||
shell.SHGetFolderPath(
|
||||
None,
|
||||
getattr(win32.ShlObj, csidl_name),
|
||||
None,
|
||||
win32.ShlObj.SHGFP_TYPE_CURRENT,
|
||||
buf,
|
||||
)
|
||||
dir = jna.Native.toString(buf.tostring()).rstrip("\0")
|
||||
|
||||
# Downgrade to short path name if have highbit chars. See
|
||||
@ -498,38 +515,47 @@ def _get_win_folder_with_jna(csidl_name):
|
||||
has_high_char = True
|
||||
break
|
||||
if has_high_char:
|
||||
buf = array.zeros('c', buf_size)
|
||||
buf = array.zeros("c", buf_size)
|
||||
kernel = win32.Kernel32.INSTANCE
|
||||
if kernal.GetShortPathName(dir, buf, buf_size):
|
||||
dir = jna.Native.toString(buf.tostring()).rstrip("\0")
|
||||
|
||||
return dir
|
||||
|
||||
|
||||
if system == "win32":
|
||||
try:
|
||||
import win32com.shell
|
||||
|
||||
_get_win_folder = _get_win_folder_with_pywin32
|
||||
except ImportError:
|
||||
try:
|
||||
from ctypes import windll
|
||||
from ctypes import windll # type: ignore
|
||||
|
||||
_get_win_folder = _get_win_folder_with_ctypes
|
||||
except ImportError:
|
||||
try:
|
||||
import com.sun.jna
|
||||
|
||||
_get_win_folder = _get_win_folder_with_jna
|
||||
except ImportError:
|
||||
_get_win_folder = _get_win_folder_from_registry
|
||||
|
||||
|
||||
#---- self test code
|
||||
# ---- self test code
|
||||
|
||||
if __name__ == "__main__":
|
||||
appname = "MyApp"
|
||||
appauthor = "MyCompany"
|
||||
|
||||
props = ("user_data_dir", "site_data_dir",
|
||||
"user_config_dir", "site_config_dir",
|
||||
"user_cache_dir", "user_log_dir")
|
||||
props = (
|
||||
"user_data_dir",
|
||||
"site_data_dir",
|
||||
"user_config_dir",
|
||||
"site_config_dir",
|
||||
"user_cache_dir",
|
||||
"user_log_dir",
|
||||
)
|
||||
|
||||
print("-- app dirs (with optional 'version')")
|
||||
dirs = AppDirs(appname, appauthor, version="1.0")
|
||||
|
@ -23,6 +23,7 @@ import traceback
|
||||
import concurrent.futures
|
||||
import logging
|
||||
import gc
|
||||
import typing
|
||||
|
||||
import bpy
|
||||
|
||||
@ -33,27 +34,34 @@ _loop_kicking_operator_running = False
|
||||
|
||||
|
||||
def setup_asyncio_executor():
|
||||
"""Sets up AsyncIO to run on a single thread.
|
||||
"""Sets up AsyncIO to run properly on each platform."""
|
||||
|
||||
This ensures that only one Pillar HTTP call is performed at the same time. Other
|
||||
calls that could be performed in parallel are queued, and thus we can
|
||||
reliably cancel them.
|
||||
"""
|
||||
import sys
|
||||
|
||||
executor = concurrent.futures.ThreadPoolExecutor()
|
||||
|
||||
if sys.platform == 'win32':
|
||||
if sys.platform == "win32":
|
||||
asyncio.get_event_loop().close()
|
||||
# On Windows, the default event loop is SelectorEventLoop, which does
|
||||
# not support subprocesses. ProactorEventLoop should be used instead.
|
||||
# Source: https://docs.python.org/3/library/asyncio-subprocess.html
|
||||
#
|
||||
# NOTE: this is actually the default even loop in Python 3.9+.
|
||||
loop = asyncio.ProactorEventLoop()
|
||||
asyncio.set_event_loop(loop)
|
||||
else:
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
executor = concurrent.futures.ThreadPoolExecutor(max_workers=10)
|
||||
loop.set_default_executor(executor)
|
||||
# loop.set_debug(True)
|
||||
|
||||
from . import pillar
|
||||
|
||||
# Python 3.8 deprecated the 'loop' parameter, 3.10 removed it.
|
||||
kwargs = {"loop": loop} if sys.version_info < (3, 8) else {}
|
||||
|
||||
# No more than this many Pillar calls should be made simultaneously
|
||||
pillar.pillar_semaphore = asyncio.Semaphore(3, **kwargs)
|
||||
|
||||
|
||||
def kick_async_loop(*args) -> bool:
|
||||
"""Performs a single iteration of the asyncio event loop.
|
||||
@ -68,17 +76,23 @@ def kick_async_loop(*args) -> bool:
|
||||
stop_after_this_kick = False
|
||||
|
||||
if loop.is_closed():
|
||||
log.warning('loop closed, stopping immediately.')
|
||||
log.warning("loop closed, stopping immediately.")
|
||||
return True
|
||||
|
||||
all_tasks = asyncio.Task.all_tasks()
|
||||
# Passing an explicit loop is required. Without it, the function uses
|
||||
# asyncio.get_running_loop(), which raises a RuntimeError as the current
|
||||
# loop isn't running.
|
||||
all_tasks = asyncio.all_tasks(loop=loop)
|
||||
|
||||
if not len(all_tasks):
|
||||
log.debug('no more scheduled tasks, stopping after this kick.')
|
||||
log.debug("no more scheduled tasks, stopping after this kick.")
|
||||
stop_after_this_kick = True
|
||||
|
||||
elif all(task.done() for task in all_tasks):
|
||||
log.debug('all %i tasks are done, fetching results and stopping after this kick.',
|
||||
len(all_tasks))
|
||||
log.debug(
|
||||
"all %i tasks are done, fetching results and stopping after this kick.",
|
||||
len(all_tasks),
|
||||
)
|
||||
stop_after_this_kick = True
|
||||
|
||||
# Clean up circular references between tasks.
|
||||
@ -91,12 +105,12 @@ def kick_async_loop(*args) -> bool:
|
||||
# noinspection PyBroadException
|
||||
try:
|
||||
res = task.result()
|
||||
log.debug(' task #%i: result=%r', task_idx, res)
|
||||
log.debug(" task #%i: result=%r", task_idx, res)
|
||||
except asyncio.CancelledError:
|
||||
# No problem, we want to stop anyway.
|
||||
log.debug(' task #%i: cancelled', task_idx)
|
||||
log.debug(" task #%i: cancelled", task_idx)
|
||||
except Exception:
|
||||
print('{}: resulted in exception'.format(task))
|
||||
print("{}: resulted in exception".format(task))
|
||||
traceback.print_exc()
|
||||
|
||||
# for ref in gc.get_referrers(task):
|
||||
@ -109,26 +123,26 @@ def kick_async_loop(*args) -> bool:
|
||||
|
||||
|
||||
def ensure_async_loop():
|
||||
log.debug('Starting asyncio loop')
|
||||
log.debug("Starting asyncio loop")
|
||||
result = bpy.ops.asyncio.loop()
|
||||
log.debug('Result of starting modal operator is %r', result)
|
||||
log.debug("Result of starting modal operator is %r", result)
|
||||
|
||||
|
||||
def erase_async_loop():
|
||||
global _loop_kicking_operator_running
|
||||
|
||||
log.debug('Erasing async loop')
|
||||
log.debug("Erasing async loop")
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.stop()
|
||||
|
||||
|
||||
class AsyncLoopModalOperator(bpy.types.Operator):
|
||||
bl_idname = 'asyncio.loop'
|
||||
bl_label = 'Runs the asyncio main loop'
|
||||
bl_idname = "asyncio.loop"
|
||||
bl_label = "Runs the asyncio main loop"
|
||||
|
||||
timer = None
|
||||
log = logging.getLogger(__name__ + '.AsyncLoopModalOperator')
|
||||
log = logging.getLogger(__name__ + ".AsyncLoopModalOperator")
|
||||
|
||||
def __del__(self):
|
||||
global _loop_kicking_operator_running
|
||||
@ -145,16 +159,16 @@ class AsyncLoopModalOperator(bpy.types.Operator):
|
||||
global _loop_kicking_operator_running
|
||||
|
||||
if _loop_kicking_operator_running:
|
||||
self.log.debug('Another loop-kicking operator is already running.')
|
||||
return {'PASS_THROUGH'}
|
||||
self.log.debug("Another loop-kicking operator is already running.")
|
||||
return {"PASS_THROUGH"}
|
||||
|
||||
context.window_manager.modal_handler_add(self)
|
||||
_loop_kicking_operator_running = True
|
||||
|
||||
wm = context.window_manager
|
||||
self.timer = wm.event_timer_add(0.00001, context.window)
|
||||
self.timer = wm.event_timer_add(0.00001, window=context.window)
|
||||
|
||||
return {'RUNNING_MODAL'}
|
||||
return {"RUNNING_MODAL"}
|
||||
|
||||
def modal(self, context, event):
|
||||
global _loop_kicking_operator_running
|
||||
@ -163,10 +177,10 @@ class AsyncLoopModalOperator(bpy.types.Operator):
|
||||
# erase_async_loop(). This is a signal that we really should stop
|
||||
# running.
|
||||
if not _loop_kicking_operator_running:
|
||||
return {'FINISHED'}
|
||||
return {"FINISHED"}
|
||||
|
||||
if event.type != 'TIMER':
|
||||
return {'PASS_THROUGH'}
|
||||
if event.type != "TIMER":
|
||||
return {"PASS_THROUGH"}
|
||||
|
||||
# self.log.debug('KICKING LOOP')
|
||||
stop_after_this_kick = kick_async_loop()
|
||||
@ -174,29 +188,33 @@ class AsyncLoopModalOperator(bpy.types.Operator):
|
||||
context.window_manager.event_timer_remove(self.timer)
|
||||
_loop_kicking_operator_running = False
|
||||
|
||||
self.log.debug('Stopped asyncio loop kicking')
|
||||
return {'FINISHED'}
|
||||
self.log.debug("Stopped asyncio loop kicking")
|
||||
return {"FINISHED"}
|
||||
|
||||
return {'RUNNING_MODAL'}
|
||||
return {"RUNNING_MODAL"}
|
||||
|
||||
|
||||
# noinspection PyAttributeOutsideInit
|
||||
class AsyncModalOperatorMixin:
|
||||
async_task = None # asyncio task for fetching thumbnails
|
||||
signalling_future = None # asyncio future for signalling that we want to cancel everything.
|
||||
log = logging.getLogger('%s.AsyncModalOperatorMixin' % __name__)
|
||||
signalling_future = (
|
||||
None # asyncio future for signalling that we want to cancel everything.
|
||||
)
|
||||
log = logging.getLogger("%s.AsyncModalOperatorMixin" % __name__)
|
||||
|
||||
_state = 'INITIALIZING'
|
||||
_state = "INITIALIZING"
|
||||
stop_upon_exception = False
|
||||
|
||||
def invoke(self, context, event):
|
||||
context.window_manager.modal_handler_add(self)
|
||||
self.timer = context.window_manager.event_timer_add(1 / 15, context.window)
|
||||
self.timer = context.window_manager.event_timer_add(
|
||||
1 / 15, window=context.window
|
||||
)
|
||||
|
||||
self.log.info('Starting')
|
||||
self.log.info("Starting")
|
||||
self._new_async_task(self.async_execute(context))
|
||||
|
||||
return {'RUNNING_MODAL'}
|
||||
return {"RUNNING_MODAL"}
|
||||
|
||||
async def async_execute(self, context):
|
||||
"""Entry point of the asynchronous operator.
|
||||
@ -207,7 +225,7 @@ class AsyncModalOperatorMixin:
|
||||
|
||||
def quit(self):
|
||||
"""Signals the state machine to stop this operator from running."""
|
||||
self._state = 'QUIT'
|
||||
self._state = "QUIT"
|
||||
|
||||
def execute(self, context):
|
||||
return self.invoke(context, None)
|
||||
@ -215,46 +233,50 @@ class AsyncModalOperatorMixin:
|
||||
def modal(self, context, event):
|
||||
task = self.async_task
|
||||
|
||||
if self._state != 'EXCEPTION' and task and task.done() and not task.cancelled():
|
||||
if self._state != "EXCEPTION" and task and task.done() and not task.cancelled():
|
||||
ex = task.exception()
|
||||
if ex is not None:
|
||||
self._state = 'EXCEPTION'
|
||||
self.log.error('Exception while running task: %s', ex)
|
||||
self._state = "EXCEPTION"
|
||||
self.log.error("Exception while running task: %s", ex)
|
||||
if self.stop_upon_exception:
|
||||
self.quit()
|
||||
self._finish(context)
|
||||
return {'FINISHED'}
|
||||
return {"FINISHED"}
|
||||
|
||||
return {'RUNNING_MODAL'}
|
||||
return {"RUNNING_MODAL"}
|
||||
|
||||
if self._state == 'QUIT':
|
||||
if self._state == "QUIT":
|
||||
self._finish(context)
|
||||
return {'FINISHED'}
|
||||
return {"FINISHED"}
|
||||
|
||||
return {'PASS_THROUGH'}
|
||||
return {"PASS_THROUGH"}
|
||||
|
||||
def _finish(self, context):
|
||||
self._stop_async_task()
|
||||
context.window_manager.event_timer_remove(self.timer)
|
||||
|
||||
def _new_async_task(self, async_task: asyncio.coroutine, future: asyncio.Future = None):
|
||||
def _new_async_task(
|
||||
self, async_task: typing.Coroutine, future: asyncio.Future = None
|
||||
):
|
||||
"""Stops the currently running async task, and starts another one."""
|
||||
|
||||
self.log.debug('Setting up a new task %r, so any existing task must be stopped', async_task)
|
||||
self.log.debug(
|
||||
"Setting up a new task %r, so any existing task must be stopped", async_task
|
||||
)
|
||||
self._stop_async_task()
|
||||
|
||||
# Download the previews asynchronously.
|
||||
self.signalling_future = future or asyncio.Future()
|
||||
self.async_task = asyncio.ensure_future(async_task)
|
||||
self.log.debug('Created new task %r', self.async_task)
|
||||
self.log.debug("Created new task %r", self.async_task)
|
||||
|
||||
# Start the async manager so everything happens.
|
||||
ensure_async_loop()
|
||||
|
||||
def _stop_async_task(self):
|
||||
self.log.debug('Stopping async task')
|
||||
self.log.debug("Stopping async task")
|
||||
if self.async_task is None:
|
||||
self.log.debug('No async task, trivially stopped')
|
||||
self.log.debug("No async task, trivially stopped")
|
||||
return
|
||||
|
||||
# Signal that we want to stop.
|
||||
@ -270,14 +292,14 @@ class AsyncModalOperatorMixin:
|
||||
try:
|
||||
loop.run_until_complete(self.async_task)
|
||||
except asyncio.CancelledError:
|
||||
self.log.info('Asynchronous task was cancelled')
|
||||
self.log.info("Asynchronous task was cancelled")
|
||||
return
|
||||
|
||||
# noinspection PyBroadException
|
||||
try:
|
||||
self.async_task.result() # This re-raises any exception of the task.
|
||||
except asyncio.CancelledError:
|
||||
self.log.info('Asynchronous task was cancelled')
|
||||
self.log.info("Asynchronous task was cancelled")
|
||||
except Exception:
|
||||
self.log.exception("Exception from asynchronous task")
|
||||
|
||||
|
File diff suppressed because it is too large
Load Diff
@ -18,26 +18,86 @@
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
import logging
|
||||
import collections
|
||||
import typing
|
||||
|
||||
import bpy
|
||||
import bgl
|
||||
import gpu
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
strip_status_colour = {
|
||||
None: (0.7, 0.7, 0.7),
|
||||
'approved': (0.6392156862745098, 0.8784313725490196, 0.30196078431372547),
|
||||
'final': (0.9058823529411765, 0.9607843137254902, 0.8274509803921568),
|
||||
'in_progress': (1.0, 0.7450980392156863, 0.0),
|
||||
'on_hold': (0.796078431372549, 0.6196078431372549, 0.08235294117647059),
|
||||
'review': (0.8941176470588236, 0.9607843137254902, 0.9764705882352941),
|
||||
'todo': (1.0, 0.5019607843137255, 0.5019607843137255)
|
||||
"approved": (0.6392156862745098, 0.8784313725490196, 0.30196078431372547),
|
||||
"final": (0.9058823529411765, 0.9607843137254902, 0.8274509803921568),
|
||||
"in_progress": (1.0, 0.7450980392156863, 0.0),
|
||||
"on_hold": (0.796078431372549, 0.6196078431372549, 0.08235294117647059),
|
||||
"review": (0.8941176470588236, 0.9607843137254902, 0.9764705882352941),
|
||||
"todo": (1.0, 0.5019607843137255, 0.5019607843137255),
|
||||
}
|
||||
|
||||
CONFLICT_COLOUR = (0.576, 0.118, 0.035) # RGB tuple
|
||||
CONFLICT_COLOUR = (0.576, 0.118, 0.035, 1.0) # RGBA tuple
|
||||
|
||||
gpu_vertex_shader = """
|
||||
uniform mat4 ModelViewProjectionMatrix;
|
||||
|
||||
layout (location = 0) in vec2 pos;
|
||||
layout (location = 1) in vec4 color;
|
||||
|
||||
out vec4 lineColor; // output to the fragment shader
|
||||
|
||||
void main()
|
||||
{
|
||||
gl_Position = ModelViewProjectionMatrix * vec4(pos.x, pos.y, 0.0, 1.0);
|
||||
lineColor = color;
|
||||
}
|
||||
"""
|
||||
|
||||
gpu_fragment_shader = """
|
||||
out vec4 fragColor;
|
||||
in vec4 lineColor;
|
||||
|
||||
void main()
|
||||
{
|
||||
fragColor = lineColor;
|
||||
}
|
||||
"""
|
||||
|
||||
Float2 = typing.Tuple[float, float]
|
||||
Float3 = typing.Tuple[float, float, float]
|
||||
Float4 = typing.Tuple[float, float, float, float]
|
||||
|
||||
|
||||
def get_strip_rectf(strip):
|
||||
class AttractLineDrawer:
|
||||
def __init__(self):
|
||||
self._format = gpu.types.GPUVertFormat()
|
||||
self._pos_id = self._format.attr_add(
|
||||
id="pos", comp_type="F32", len=2, fetch_mode="FLOAT"
|
||||
)
|
||||
self._color_id = self._format.attr_add(
|
||||
id="color", comp_type="F32", len=4, fetch_mode="FLOAT"
|
||||
)
|
||||
|
||||
self.shader = gpu.types.GPUShader(gpu_vertex_shader, gpu_fragment_shader)
|
||||
|
||||
def draw(self, coords: typing.List[Float2], colors: typing.List[Float4]):
|
||||
if not coords:
|
||||
return
|
||||
|
||||
bgl.glEnable(bgl.GL_BLEND)
|
||||
bgl.glLineWidth(2.0)
|
||||
|
||||
vbo = gpu.types.GPUVertBuf(len=len(coords), format=self._format)
|
||||
vbo.attr_fill(id=self._pos_id, data=coords)
|
||||
vbo.attr_fill(id=self._color_id, data=colors)
|
||||
|
||||
batch = gpu.types.GPUBatch(type="LINES", buf=vbo)
|
||||
batch.program_set(self.shader)
|
||||
batch.draw()
|
||||
|
||||
|
||||
def get_strip_rectf(strip) -> Float4:
|
||||
# Get x and y in terms of the grid's frames and channels
|
||||
x1 = strip.frame_final_start
|
||||
x2 = strip.frame_final_end
|
||||
@ -47,59 +107,60 @@ def get_strip_rectf(strip):
|
||||
return x1, y1, x2, y2
|
||||
|
||||
|
||||
def draw_underline_in_strip(strip_coords, pixel_size_x, color):
|
||||
from bgl import glColor4f, glRectf, glEnable, glDisable, GL_BLEND
|
||||
import bgl
|
||||
|
||||
context = bpy.context
|
||||
|
||||
def underline_in_strip(
|
||||
strip_coords: Float4,
|
||||
pixel_size_x: float,
|
||||
color: Float4,
|
||||
out_coords: typing.List[Float2],
|
||||
out_colors: typing.List[Float4],
|
||||
):
|
||||
# Strip coords
|
||||
s_x1, s_y1, s_x2, s_y2 = strip_coords
|
||||
|
||||
# be careful not to draw over the current frame line
|
||||
cf_x = context.scene.frame_current_final
|
||||
cf_x = bpy.context.scene.frame_current_final
|
||||
|
||||
bgl.glPushAttrib(bgl.GL_COLOR_BUFFER_BIT | bgl.GL_LINE_BIT)
|
||||
# TODO(Sybren): figure out how to pass one colour per line,
|
||||
# instead of one colour per vertex.
|
||||
out_coords.append((s_x1, s_y1))
|
||||
out_colors.append(color)
|
||||
|
||||
glColor4f(*color)
|
||||
glEnable(GL_BLEND)
|
||||
bgl.glLineWidth(2)
|
||||
bgl.glBegin(bgl.GL_LINES)
|
||||
|
||||
bgl.glVertex2f(s_x1, s_y1)
|
||||
if s_x1 < cf_x < s_x2:
|
||||
# Bad luck, the line passes our strip
|
||||
bgl.glVertex2f(cf_x - pixel_size_x, s_y1)
|
||||
bgl.glVertex2f(cf_x + pixel_size_x, s_y1)
|
||||
bgl.glVertex2f(s_x2, s_y1)
|
||||
# Bad luck, the line passes our strip, so draw two lines.
|
||||
out_coords.append((cf_x - pixel_size_x, s_y1))
|
||||
out_colors.append(color)
|
||||
|
||||
bgl.glEnd()
|
||||
bgl.glPopAttrib()
|
||||
out_coords.append((cf_x + pixel_size_x, s_y1))
|
||||
out_colors.append(color)
|
||||
|
||||
out_coords.append((s_x2, s_y1))
|
||||
out_colors.append(color)
|
||||
|
||||
|
||||
def draw_strip_conflict(strip_coords, pixel_size_x):
|
||||
def strip_conflict(
|
||||
strip_coords: Float4,
|
||||
out_coords: typing.List[Float2],
|
||||
out_colors: typing.List[Float4],
|
||||
):
|
||||
"""Draws conflicting states between strips."""
|
||||
|
||||
import bgl
|
||||
|
||||
s_x1, s_y1, s_x2, s_y2 = strip_coords
|
||||
bgl.glPushAttrib(bgl.GL_COLOR_BUFFER_BIT | bgl.GL_LINE_BIT)
|
||||
|
||||
# Always draw the full rectangle, the conflict should be resolved and thus stand out.
|
||||
bgl.glColor3f(*CONFLICT_COLOUR)
|
||||
bgl.glLineWidth(2)
|
||||
# TODO(Sybren): draw a rectangle instead of a line.
|
||||
out_coords.append((s_x1, s_y2))
|
||||
out_colors.append(CONFLICT_COLOUR)
|
||||
|
||||
bgl.glBegin(bgl.GL_LINE_LOOP)
|
||||
bgl.glVertex2f(s_x1, s_y1)
|
||||
bgl.glVertex2f(s_x2, s_y1)
|
||||
bgl.glVertex2f(s_x2, s_y2)
|
||||
bgl.glVertex2f(s_x1, s_y2)
|
||||
bgl.glEnd()
|
||||
out_coords.append((s_x2, s_y1))
|
||||
out_colors.append(CONFLICT_COLOUR)
|
||||
|
||||
bgl.glPopAttrib()
|
||||
out_coords.append((s_x2, s_y2))
|
||||
out_colors.append(CONFLICT_COLOUR)
|
||||
|
||||
out_coords.append((s_x1, s_y1))
|
||||
out_colors.append(CONFLICT_COLOUR)
|
||||
|
||||
|
||||
def draw_callback_px():
|
||||
def draw_callback_px(line_drawer: AttractLineDrawer):
|
||||
context = bpy.context
|
||||
|
||||
if not context.scene.sequence_editor:
|
||||
@ -115,6 +176,10 @@ def draw_callback_px():
|
||||
|
||||
strips = shown_strips(context)
|
||||
|
||||
coords = [] # type: typing.List[Float2]
|
||||
colors = [] # type: typing.List[Float4]
|
||||
|
||||
# Collect all the lines (vertex coords + vertex colours) to draw.
|
||||
for strip in strips:
|
||||
if not strip.atc_object_id:
|
||||
continue
|
||||
@ -123,8 +188,12 @@ def draw_callback_px():
|
||||
strip_coords = get_strip_rectf(strip)
|
||||
|
||||
# check if any of the coordinates are out of bounds
|
||||
if strip_coords[0] > xwin2 or strip_coords[2] < xwin1 or strip_coords[1] > ywin2 or \
|
||||
strip_coords[3] < ywin1:
|
||||
if (
|
||||
strip_coords[0] > xwin2
|
||||
or strip_coords[2] < xwin1
|
||||
or strip_coords[1] > ywin2
|
||||
or strip_coords[3] < ywin1
|
||||
):
|
||||
continue
|
||||
|
||||
# Draw
|
||||
@ -136,9 +205,11 @@ def draw_callback_px():
|
||||
|
||||
alpha = 1.0 if strip.atc_is_synced else 0.5
|
||||
|
||||
draw_underline_in_strip(strip_coords, pixel_size_x, color + (alpha,))
|
||||
underline_in_strip(strip_coords, pixel_size_x, color + (alpha,), coords, colors)
|
||||
if strip.atc_is_synced and strip.atc_object_id_conflict:
|
||||
draw_strip_conflict(strip_coords, pixel_size_x)
|
||||
strip_conflict(strip_coords, coords, colors)
|
||||
|
||||
line_drawer.draw(coords, colors)
|
||||
|
||||
|
||||
def tag_redraw_all_sequencer_editors():
|
||||
@ -147,9 +218,9 @@ def tag_redraw_all_sequencer_editors():
|
||||
# Py cant access notifiers
|
||||
for window in context.window_manager.windows:
|
||||
for area in window.screen.areas:
|
||||
if area.type == 'SEQUENCE_EDITOR':
|
||||
if area.type == "SEQUENCE_EDITOR":
|
||||
for region in area.regions:
|
||||
if region.type == 'WINDOW':
|
||||
if region.type == "WINDOW":
|
||||
region.tag_redraw()
|
||||
|
||||
|
||||
@ -162,8 +233,16 @@ def callback_enable():
|
||||
if cb_handle:
|
||||
return
|
||||
|
||||
cb_handle[:] = bpy.types.SpaceSequenceEditor.draw_handler_add(
|
||||
draw_callback_px, (), 'WINDOW', 'POST_VIEW'),
|
||||
# Doing GPU stuff in the background crashes Blender, so let's not.
|
||||
if bpy.app.background:
|
||||
return
|
||||
|
||||
line_drawer = AttractLineDrawer()
|
||||
cb_handle[:] = (
|
||||
bpy.types.SpaceSequenceEditor.draw_handler_add(
|
||||
draw_callback_px, (line_drawer,), "WINDOW", "POST_VIEW"
|
||||
),
|
||||
)
|
||||
|
||||
tag_redraw_all_sequencer_editors()
|
||||
|
||||
@ -173,7 +252,7 @@ def callback_disable():
|
||||
return
|
||||
|
||||
try:
|
||||
bpy.types.SpaceSequenceEditor.draw_handler_remove(cb_handle[0], 'WINDOW')
|
||||
bpy.types.SpaceSequenceEditor.draw_handler_remove(cb_handle[0], "WINDOW")
|
||||
except ValueError:
|
||||
# Thrown when already removed.
|
||||
pass
|
||||
|
@ -23,65 +23,77 @@ Separated from __init__.py so that we can import & run from non-Blender environm
|
||||
import functools
|
||||
import logging
|
||||
import os.path
|
||||
import tempfile
|
||||
|
||||
import bpy
|
||||
from bpy.types import AddonPreferences, Operator, WindowManager, Scene, PropertyGroup
|
||||
from bpy.props import StringProperty, EnumProperty, PointerProperty, BoolProperty, IntProperty
|
||||
from bpy.props import (
|
||||
StringProperty,
|
||||
EnumProperty,
|
||||
PointerProperty,
|
||||
BoolProperty,
|
||||
IntProperty,
|
||||
)
|
||||
import rna_prop_ui
|
||||
|
||||
from . import pillar, async_loop, flamenco
|
||||
from . import pillar, async_loop, flamenco, project_specific
|
||||
from .utils import pyside_cache, redraw
|
||||
|
||||
PILLAR_WEB_SERVER_URL = 'https://cloud.blender.org/'
|
||||
# PILLAR_WEB_SERVER_URL = 'http://pillar-web:5001/'
|
||||
PILLAR_SERVER_URL = '%sapi/' % PILLAR_WEB_SERVER_URL
|
||||
PILLAR_WEB_SERVER_URL = os.environ.get("BCLOUD_SERVER", "https://cloud.blender.org/")
|
||||
PILLAR_SERVER_URL = "%sapi/" % PILLAR_WEB_SERVER_URL
|
||||
|
||||
ADDON_NAME = 'blender_cloud'
|
||||
ADDON_NAME = "blender_cloud"
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
icons = None
|
||||
|
||||
|
||||
@pyside_cache('version')
|
||||
@pyside_cache
|
||||
def blender_syncable_versions(self, context):
|
||||
"""Returns the list of items used by SyncStatusProperties.version EnumProperty."""
|
||||
|
||||
bss = context.window_manager.blender_sync_status
|
||||
versions = bss.available_blender_versions
|
||||
if not versions:
|
||||
return [('', 'No settings stored in your Blender Cloud', '')]
|
||||
return [(v, v, '') for v in versions]
|
||||
return [("", "No settings stored in your Blender Cloud", "")]
|
||||
return [(v, v, "") for v in versions]
|
||||
|
||||
|
||||
class SyncStatusProperties(PropertyGroup):
|
||||
status = EnumProperty(
|
||||
status: EnumProperty(
|
||||
items=[
|
||||
('NONE', 'NONE', 'We have done nothing at all yet.'),
|
||||
('IDLE', 'IDLE', 'User requested something, which is done, and we are now idle.'),
|
||||
('SYNCING', 'SYNCING', 'Synchronising with Blender Cloud.'),
|
||||
("NONE", "NONE", "We have done nothing at all yet."),
|
||||
(
|
||||
"IDLE",
|
||||
"IDLE",
|
||||
"User requested something, which is done, and we are now idle.",
|
||||
),
|
||||
("SYNCING", "SYNCING", "Synchronising with Blender Cloud."),
|
||||
],
|
||||
name='status',
|
||||
description='Current status of Blender Sync',
|
||||
update=redraw)
|
||||
name="status",
|
||||
description="Current status of Blender Sync",
|
||||
update=redraw,
|
||||
)
|
||||
|
||||
version = EnumProperty(
|
||||
version: EnumProperty(
|
||||
items=blender_syncable_versions,
|
||||
name='Version of Blender from which to pull',
|
||||
description='Version of Blender from which to pull')
|
||||
name="Version of Blender from which to pull",
|
||||
description="Version of Blender from which to pull",
|
||||
)
|
||||
|
||||
message = StringProperty(name='message', update=redraw)
|
||||
level = EnumProperty(
|
||||
message: StringProperty(name="message", update=redraw)
|
||||
level: EnumProperty(
|
||||
items=[
|
||||
('INFO', 'INFO', ''),
|
||||
('WARNING', 'WARNING', ''),
|
||||
('ERROR', 'ERROR', ''),
|
||||
('SUBSCRIBE', 'SUBSCRIBE', ''),
|
||||
("INFO", "INFO", ""),
|
||||
("WARNING", "WARNING", ""),
|
||||
("ERROR", "ERROR", ""),
|
||||
("SUBSCRIBE", "SUBSCRIBE", ""),
|
||||
],
|
||||
name='level',
|
||||
update=redraw)
|
||||
name="level",
|
||||
update=redraw,
|
||||
)
|
||||
|
||||
def report(self, level: set, message: str):
|
||||
assert len(level) == 1, 'level should be a set of one string, not %r' % level
|
||||
assert len(level) == 1, "level should be a set of one string, not %r" % level
|
||||
self.level = level.pop()
|
||||
self.message = message
|
||||
|
||||
@ -98,21 +110,21 @@ class SyncStatusProperties(PropertyGroup):
|
||||
# because I don't know how to store a variable list of strings in a proper RNA property.
|
||||
@property
|
||||
def available_blender_versions(self) -> list:
|
||||
return self.get('available_blender_versions', [])
|
||||
return self.get("available_blender_versions", [])
|
||||
|
||||
@available_blender_versions.setter
|
||||
def available_blender_versions(self, new_versions):
|
||||
self['available_blender_versions'] = new_versions
|
||||
self["available_blender_versions"] = new_versions
|
||||
|
||||
|
||||
@pyside_cache('project')
|
||||
@pyside_cache
|
||||
def bcloud_available_projects(self, context):
|
||||
"""Returns the list of items used by BlenderCloudProjectGroup.project EnumProperty."""
|
||||
|
||||
projs = preferences().project.available_projects
|
||||
if not projs:
|
||||
return [('', 'No projects available in your Blender Cloud', '')]
|
||||
return [(p['_id'], p['name'], '') for p in projs]
|
||||
return [("", "No projects available in your Blender Cloud", "")]
|
||||
return [(p["_id"], p["name"], "") for p in projs]
|
||||
|
||||
|
||||
@functools.lru_cache(1)
|
||||
@ -122,140 +134,146 @@ def project_extensions(project_id) -> set:
|
||||
At the moment of writing these are 'attract' and 'flamenco'.
|
||||
"""
|
||||
|
||||
log.debug('Finding extensions for project %s', project_id)
|
||||
log.debug("Finding extensions for project %s", project_id)
|
||||
|
||||
# We can't use our @property, since the preferences may be loaded from a
|
||||
# preferences blend file, in which case it is not constructed from Python code.
|
||||
available_projects = preferences().project.get('available_projects', [])
|
||||
available_projects = preferences().project.get("available_projects", [])
|
||||
if not available_projects:
|
||||
log.debug('No projects available.')
|
||||
log.debug("No projects available.")
|
||||
return set()
|
||||
|
||||
proj = next((p for p in available_projects
|
||||
if p['_id'] == project_id), None)
|
||||
proj = next((p for p in available_projects if p["_id"] == project_id), None)
|
||||
if proj is None:
|
||||
log.debug('Project %s not found in available projects.', project_id)
|
||||
log.debug("Project %s not found in available projects.", project_id)
|
||||
return set()
|
||||
|
||||
return set(proj.get('enabled_for', ()))
|
||||
|
||||
|
||||
def handle_project_update(_=None, _2=None):
|
||||
"""Handles changing projects, which may cause extensions to be disabled/enabled.
|
||||
|
||||
Ignores arguments so that it can be used as property update callback.
|
||||
"""
|
||||
|
||||
project_id = preferences().project.project
|
||||
log.info('Updating internal state to reflect extensions enabled on current project %s.',
|
||||
project_id)
|
||||
|
||||
project_extensions.cache_clear()
|
||||
|
||||
from blender_cloud import attract, flamenco
|
||||
attract.deactivate()
|
||||
flamenco.deactivate()
|
||||
|
||||
enabled_for = project_extensions(project_id)
|
||||
log.info('Project extensions: %s', enabled_for)
|
||||
if 'attract' in enabled_for:
|
||||
attract.activate()
|
||||
if 'flamenco' in enabled_for:
|
||||
flamenco.activate()
|
||||
return set(proj.get("enabled_for", ()))
|
||||
|
||||
|
||||
class BlenderCloudProjectGroup(PropertyGroup):
|
||||
status = EnumProperty(
|
||||
status: EnumProperty(
|
||||
items=[
|
||||
('NONE', 'NONE', 'We have done nothing at all yet'),
|
||||
('IDLE', 'IDLE', 'User requested something, which is done, and we are now idle'),
|
||||
('FETCHING', 'FETCHING', 'Fetching available projects from Blender Cloud'),
|
||||
("NONE", "NONE", "We have done nothing at all yet"),
|
||||
(
|
||||
"IDLE",
|
||||
"IDLE",
|
||||
"User requested something, which is done, and we are now idle",
|
||||
),
|
||||
("FETCHING", "FETCHING", "Fetching available projects from Blender Cloud"),
|
||||
],
|
||||
name='status',
|
||||
update=redraw)
|
||||
name="status",
|
||||
update=redraw,
|
||||
)
|
||||
|
||||
project = EnumProperty(
|
||||
project: EnumProperty(
|
||||
items=bcloud_available_projects,
|
||||
name='Cloud project',
|
||||
description='Which Blender Cloud project to work with',
|
||||
update=handle_project_update
|
||||
name="Cloud project",
|
||||
description="Which Blender Cloud project to work with",
|
||||
update=project_specific.handle_project_update,
|
||||
)
|
||||
|
||||
# List of projects is stored in 'available_projects' ID property,
|
||||
# because I don't know how to store a variable list of strings in a proper RNA property.
|
||||
@property
|
||||
def available_projects(self) -> list:
|
||||
return self.get('available_projects', [])
|
||||
return self.get("available_projects", [])
|
||||
|
||||
@available_projects.setter
|
||||
def available_projects(self, new_projects):
|
||||
self['available_projects'] = new_projects
|
||||
handle_project_update()
|
||||
self["available_projects"] = new_projects
|
||||
project_specific.handle_project_update()
|
||||
|
||||
|
||||
class BlenderCloudPreferences(AddonPreferences):
|
||||
bl_idname = ADDON_NAME
|
||||
|
||||
# The following two properties are read-only to limit the scope of the
|
||||
# The following property is read-only to limit the scope of the
|
||||
# addon and allow for proper testing within this scope.
|
||||
pillar_server = StringProperty(
|
||||
name='Blender Cloud Server',
|
||||
description='URL of the Blender Cloud backend server',
|
||||
pillar_server: StringProperty(
|
||||
name="Blender Cloud Server",
|
||||
description="URL of the Blender Cloud backend server",
|
||||
default=PILLAR_SERVER_URL,
|
||||
get=lambda self: PILLAR_SERVER_URL
|
||||
get=lambda self: PILLAR_SERVER_URL,
|
||||
)
|
||||
|
||||
local_texture_dir = StringProperty(
|
||||
name='Default Blender Cloud Texture Storage Directory',
|
||||
subtype='DIR_PATH',
|
||||
default='//textures')
|
||||
local_texture_dir: StringProperty(
|
||||
name="Default Blender Cloud Texture Storage Directory",
|
||||
subtype="DIR_PATH",
|
||||
default="//textures",
|
||||
)
|
||||
|
||||
open_browser_after_share = BoolProperty(
|
||||
name='Open Browser after Sharing File',
|
||||
description='When enabled, Blender will open a webbrowser',
|
||||
default=True
|
||||
open_browser_after_share: BoolProperty(
|
||||
name="Open Browser after Sharing File",
|
||||
description="When enabled, Blender will open a webbrowser",
|
||||
default=True,
|
||||
)
|
||||
|
||||
# TODO: store project-dependent properties with the project, so that people
|
||||
# can switch projects and the Attract and Flamenco properties switch with it.
|
||||
project = PointerProperty(type=BlenderCloudProjectGroup)
|
||||
project: PointerProperty(type=BlenderCloudProjectGroup)
|
||||
|
||||
attract_project_local_path = StringProperty(
|
||||
name='Local Project Path',
|
||||
description='Local path of your Attract project, used to search for blend files; '
|
||||
'usually best to set to an absolute path',
|
||||
subtype='DIR_PATH',
|
||||
default='//../')
|
||||
cloud_project_local_path: StringProperty(
|
||||
name="Local Project Path",
|
||||
description="Local path of your Attract project, used to search for blend files; "
|
||||
"usually best to set to an absolute path",
|
||||
subtype="DIR_PATH",
|
||||
default="//../",
|
||||
update=project_specific.store,
|
||||
)
|
||||
|
||||
flamenco_manager = PointerProperty(type=flamenco.FlamencoManagerGroup)
|
||||
# TODO: before making Flamenco public, change the defaults to something less Institute-specific.
|
||||
# NOTE: The assumption is that the workers can also find the files in the same path.
|
||||
# This assumption is true for the Blender Institute.
|
||||
flamenco_job_file_path = StringProperty(
|
||||
name='Job File Path',
|
||||
description='Path where to store job files, should be accesible for Workers too',
|
||||
subtype='DIR_PATH',
|
||||
default='/render/_flamenco/storage')
|
||||
|
||||
# TODO: before making Flamenco public, change the defaults to something less Institute-specific.
|
||||
flamenco_job_output_path = StringProperty(
|
||||
name='Job Output Path',
|
||||
description='Path where to store output files, should be accessible for Workers',
|
||||
subtype='DIR_PATH',
|
||||
default='/render/_flamenco/output')
|
||||
flamenco_job_output_strip_components = IntProperty(
|
||||
name='Job Output Path Strip Components',
|
||||
description='The final output path comprises of the job output path, and the blend file '
|
||||
'path relative to the project with this many path components stripped off '
|
||||
'the front',
|
||||
flamenco_manager: PointerProperty(type=flamenco.FlamencoManagerGroup)
|
||||
flamenco_exclude_filter: StringProperty(
|
||||
name="File Exclude Filter",
|
||||
description='Space-separated list of filename filters, like "*.abc *.mkv", to prevent '
|
||||
"matching files from being packed into the output directory",
|
||||
default="",
|
||||
update=project_specific.store,
|
||||
)
|
||||
flamenco_job_file_path: StringProperty(
|
||||
name="Job Storage Path",
|
||||
description="Path where to store job files, should be accesible for Workers too",
|
||||
subtype="DIR_PATH",
|
||||
default=tempfile.gettempdir(),
|
||||
update=project_specific.store,
|
||||
)
|
||||
flamenco_job_output_path: StringProperty(
|
||||
name="Job Output Path",
|
||||
description="Path where to store output files, should be accessible for Workers",
|
||||
subtype="DIR_PATH",
|
||||
default=tempfile.gettempdir(),
|
||||
update=project_specific.store,
|
||||
)
|
||||
flamenco_job_output_strip_components: IntProperty(
|
||||
name="Job Output Path Strip Components",
|
||||
description="The final output path comprises of the job output path, and the blend file "
|
||||
"path relative to the project with this many path components stripped off "
|
||||
"the front",
|
||||
min=0,
|
||||
default=0,
|
||||
soft_max=4,
|
||||
update=project_specific.store,
|
||||
)
|
||||
flamenco_open_browser_after_submit = BoolProperty(
|
||||
name='Open Browser after Submitting Job',
|
||||
description='When enabled, Blender will open a webbrowser',
|
||||
default=True
|
||||
flamenco_relative_only: BoolProperty(
|
||||
name="Relative Paths Only",
|
||||
description="When enabled, only assets that are referred to with a relative path are "
|
||||
"packed, and assets referred to by an absolute path are excluded from the "
|
||||
"BAT pack. When disabled, all assets are packed",
|
||||
default=False,
|
||||
update=project_specific.store,
|
||||
)
|
||||
|
||||
flamenco_open_browser_after_submit: BoolProperty(
|
||||
name="Open Browser after Submitting Job",
|
||||
description="When enabled, Blender will open a webbrowser",
|
||||
default=True,
|
||||
)
|
||||
flamenco_show_quit_after_submit_button: BoolProperty(
|
||||
name='Show "Submit & Quit" button',
|
||||
description='When enabled, next to the "Render on Flamenco" button there will be a button '
|
||||
'"Submit & Quit" that silently quits Blender after submitting the render job '
|
||||
"to Flamenco",
|
||||
default=False,
|
||||
)
|
||||
|
||||
def draw(self, context):
|
||||
@ -273,24 +291,30 @@ class BlenderCloudPreferences(AddonPreferences):
|
||||
blender_id_profile = blender_id.get_active_profile()
|
||||
if blender_id is None:
|
||||
|
||||
msg_icon = 'ERROR'
|
||||
text = 'This add-on requires Blender ID'
|
||||
help_text = 'Make sure that the Blender ID add-on is installed and activated'
|
||||
msg_icon = "ERROR"
|
||||
text = "This add-on requires Blender ID"
|
||||
help_text = (
|
||||
"Make sure that the Blender ID add-on is installed and activated"
|
||||
)
|
||||
elif not blender_id_profile:
|
||||
msg_icon = 'ERROR'
|
||||
text = 'You are logged out.'
|
||||
help_text = 'To login, go to the Blender ID add-on preferences.'
|
||||
msg_icon = "ERROR"
|
||||
text = "You are logged out."
|
||||
help_text = "To login, go to the Blender ID add-on preferences."
|
||||
elif bpy.app.debug and pillar.SUBCLIENT_ID not in blender_id_profile.subclients:
|
||||
msg_icon = 'QUESTION'
|
||||
text = 'No Blender Cloud credentials.'
|
||||
help_text = ('You are logged in on Blender ID, but your credentials have not '
|
||||
'been synchronized with Blender Cloud yet. Press the Update '
|
||||
'Credentials button.')
|
||||
msg_icon = "QUESTION"
|
||||
text = "No Blender Cloud credentials."
|
||||
help_text = (
|
||||
"You are logged in on Blender ID, but your credentials have not "
|
||||
"been synchronized with Blender Cloud yet. Press the Update "
|
||||
"Credentials button."
|
||||
)
|
||||
else:
|
||||
msg_icon = 'WORLD_DATA'
|
||||
text = 'You are logged in as %s.' % blender_id_profile.username
|
||||
help_text = ('To logout or change profile, '
|
||||
'go to the Blender ID add-on preferences.')
|
||||
msg_icon = "WORLD_DATA"
|
||||
text = "You are logged in as %s." % blender_id_profile.username
|
||||
help_text = (
|
||||
"To logout or change profile, "
|
||||
"go to the Blender ID add-on preferences."
|
||||
)
|
||||
|
||||
# Authentication stuff
|
||||
auth_box = layout.box()
|
||||
@ -304,182 +328,205 @@ class BlenderCloudPreferences(AddonPreferences):
|
||||
|
||||
# Texture browser stuff
|
||||
texture_box = layout.box()
|
||||
texture_box.enabled = msg_icon != 'ERROR'
|
||||
texture_box.enabled = msg_icon != "ERROR"
|
||||
sub = texture_box.column()
|
||||
sub.label(text='Local directory for downloaded textures', icon_value=icon('CLOUD'))
|
||||
sub.prop(self, "local_texture_dir", text='Default')
|
||||
sub.prop(context.scene, "local_texture_dir", text='Current scene')
|
||||
sub.label(
|
||||
text="Local directory for downloaded textures", icon_value=icon("CLOUD")
|
||||
)
|
||||
sub.prop(self, "local_texture_dir", text="Default")
|
||||
sub.prop(context.scene, "local_texture_dir", text="Current scene")
|
||||
|
||||
# Blender Sync stuff
|
||||
bss = context.window_manager.blender_sync_status
|
||||
bsync_box = layout.box()
|
||||
bsync_box.enabled = msg_icon != 'ERROR'
|
||||
row = bsync_box.row().split(percentage=0.33)
|
||||
row.label('Blender Sync with Blender Cloud', icon_value=icon('CLOUD'))
|
||||
bsync_box.enabled = msg_icon != "ERROR"
|
||||
row = bsync_box.row().split(factor=0.33)
|
||||
row.label(text="Blender Sync with Blender Cloud", icon_value=icon("CLOUD"))
|
||||
|
||||
icon_for_level = {
|
||||
'INFO': 'NONE',
|
||||
'WARNING': 'INFO',
|
||||
'ERROR': 'ERROR',
|
||||
'SUBSCRIBE': 'ERROR',
|
||||
"INFO": "NONE",
|
||||
"WARNING": "INFO",
|
||||
"ERROR": "ERROR",
|
||||
"SUBSCRIBE": "ERROR",
|
||||
}
|
||||
msg_icon = icon_for_level[bss.level] if bss.message else 'NONE'
|
||||
msg_icon = icon_for_level[bss.level] if bss.message else "NONE"
|
||||
message_container = row.row()
|
||||
message_container.label(bss.message, icon=msg_icon)
|
||||
message_container.label(text=bss.message, icon=msg_icon)
|
||||
|
||||
sub = bsync_box.column()
|
||||
|
||||
if bss.level == 'SUBSCRIBE':
|
||||
if bss.level == "SUBSCRIBE":
|
||||
self.draw_subscribe_button(sub)
|
||||
self.draw_sync_buttons(sub, bss)
|
||||
|
||||
# Image Share stuff
|
||||
share_box = layout.box()
|
||||
share_box.label('Image Sharing on Blender Cloud', icon_value=icon('CLOUD'))
|
||||
share_box.prop(self, 'open_browser_after_share')
|
||||
share_box.label(text="Image Sharing on Blender Cloud", icon_value=icon("CLOUD"))
|
||||
share_box.prop(self, "open_browser_after_share")
|
||||
|
||||
# Project selector
|
||||
project_box = layout.box()
|
||||
project_box.enabled = self.project.status in {'NONE', 'IDLE'}
|
||||
project_box.enabled = self.project.status in {"NONE", "IDLE"}
|
||||
|
||||
self.draw_project_selector(project_box, self.project)
|
||||
extensions = project_extensions(self.project.project)
|
||||
|
||||
# Attract stuff
|
||||
if 'attract' in extensions:
|
||||
attract_box = project_box.column()
|
||||
self.draw_attract_buttons(attract_box, self.project)
|
||||
|
||||
# Flamenco stuff
|
||||
if 'flamenco' in extensions:
|
||||
if "flamenco" in extensions:
|
||||
flamenco_box = project_box.column()
|
||||
self.draw_flamenco_buttons(flamenco_box, self.flamenco_manager, context)
|
||||
|
||||
def draw_subscribe_button(self, layout):
|
||||
layout.operator('pillar.subscribe', icon='WORLD')
|
||||
layout.operator("pillar.subscribe", icon="WORLD")
|
||||
|
||||
def draw_sync_buttons(self, layout, bss):
|
||||
layout.enabled = bss.status in {'NONE', 'IDLE'}
|
||||
layout.enabled = bss.status in {"NONE", "IDLE"}
|
||||
|
||||
buttons = layout.column()
|
||||
row_buttons = buttons.row().split(percentage=0.5)
|
||||
row_buttons = buttons.row().split(factor=0.5)
|
||||
row_push = row_buttons.row()
|
||||
row_pull = row_buttons.row(align=True)
|
||||
|
||||
row_push.operator('pillar.sync',
|
||||
text='Save %i.%i settings' % bpy.app.version[:2],
|
||||
icon='TRIA_UP').action = 'PUSH'
|
||||
row_push.operator(
|
||||
"pillar.sync",
|
||||
text="Save %i.%i settings" % bpy.app.version[:2],
|
||||
icon="TRIA_UP",
|
||||
).action = "PUSH"
|
||||
|
||||
versions = bss.available_blender_versions
|
||||
version = bss.version
|
||||
if bss.status in {'NONE', 'IDLE'}:
|
||||
if not versions or not version:
|
||||
row_pull.operator('pillar.sync',
|
||||
text='Find version to load',
|
||||
icon='TRIA_DOWN').action = 'REFRESH'
|
||||
if bss.status in {"NONE", "IDLE"}:
|
||||
if not versions:
|
||||
row_pull.operator(
|
||||
"pillar.sync", text="Find version to load", icon="TRIA_DOWN"
|
||||
).action = "REFRESH"
|
||||
else:
|
||||
props = row_pull.operator('pillar.sync',
|
||||
text='Load %s settings' % version,
|
||||
icon='TRIA_DOWN')
|
||||
props.action = 'PULL'
|
||||
props.blender_version = version
|
||||
row_pull.operator('pillar.sync',
|
||||
text='',
|
||||
icon='DOTSDOWN').action = 'SELECT'
|
||||
props = row_pull.operator(
|
||||
"pillar.sync",
|
||||
text="Load %s settings" % bss.version,
|
||||
icon="TRIA_DOWN",
|
||||
)
|
||||
props.action = "PULL"
|
||||
props.blender_version = bss.version
|
||||
row_pull.operator(
|
||||
"pillar.sync", text="", icon="DOWNARROW_HLT"
|
||||
).action = "SELECT"
|
||||
else:
|
||||
row_pull.label('Cloud Sync is running.')
|
||||
row_pull.label(text="Cloud Sync is running.")
|
||||
|
||||
def draw_project_selector(self, project_box, bcp: BlenderCloudProjectGroup):
|
||||
project_row = project_box.row(align=True)
|
||||
project_row.label('Project settings', icon_value=icon('CLOUD'))
|
||||
project_row.label(text="Project settings", icon_value=icon("CLOUD"))
|
||||
|
||||
row_buttons = project_row.row(align=True)
|
||||
|
||||
projects = bcp.available_projects
|
||||
project = bcp.project
|
||||
if bcp.status in {'NONE', 'IDLE'}:
|
||||
if not projects or not project:
|
||||
row_buttons.operator('pillar.projects',
|
||||
text='Find project to load',
|
||||
icon='FILE_REFRESH')
|
||||
if bcp.status in {"NONE", "IDLE"}:
|
||||
if not projects:
|
||||
row_buttons.operator(
|
||||
"pillar.projects", text="Find project to load", icon="FILE_REFRESH"
|
||||
)
|
||||
else:
|
||||
row_buttons.prop(bcp, 'project')
|
||||
row_buttons.operator('pillar.projects',
|
||||
text='',
|
||||
icon='FILE_REFRESH')
|
||||
row_buttons.prop(bcp, "project")
|
||||
row_buttons.operator("pillar.projects", text="", icon="FILE_REFRESH")
|
||||
props = row_buttons.operator(
|
||||
"pillar.project_open_in_browser", text="", icon="WORLD"
|
||||
)
|
||||
props.project_id = project
|
||||
else:
|
||||
row_buttons.label('Fetching available projects.')
|
||||
row_buttons.label(text="Fetching available projects.")
|
||||
|
||||
enabled_for = project_extensions(project)
|
||||
if project:
|
||||
if enabled_for:
|
||||
project_box.label('This project is set up for: %s' %
|
||||
', '.join(sorted(enabled_for)))
|
||||
else:
|
||||
project_box.label('This project is not set up for Attract or Flamenco')
|
||||
if not project:
|
||||
return
|
||||
|
||||
def draw_attract_buttons(self, attract_box, bcp: BlenderCloudProjectGroup):
|
||||
header_row = attract_box.row(align=True)
|
||||
header_row.label('Attract:', icon_value=icon('CLOUD'))
|
||||
attract_box.prop(self, 'attract_project_local_path',
|
||||
text='Local Attract project path')
|
||||
if not enabled_for:
|
||||
project_box.label(text="This project is not set up for Attract or Flamenco")
|
||||
return
|
||||
|
||||
def draw_flamenco_buttons(self, flamenco_box, bcp: flamenco.FlamencoManagerGroup, context):
|
||||
project_box.label(
|
||||
text="This project is set up for: %s" % ", ".join(sorted(enabled_for))
|
||||
)
|
||||
|
||||
# This is only needed when the project is set up for either Attract or Flamenco.
|
||||
project_box.prop(self, "cloud_project_local_path", text="Local Project Path")
|
||||
|
||||
def draw_flamenco_buttons(
|
||||
self, flamenco_box, bcp: flamenco.FlamencoManagerGroup, context
|
||||
):
|
||||
header_row = flamenco_box.row(align=True)
|
||||
header_row.label('Flamenco:', icon_value=icon('CLOUD'))
|
||||
header_row.label(text="Flamenco:", icon_value=icon("CLOUD"))
|
||||
|
||||
manager_box = flamenco_box.row(align=True)
|
||||
manager_split = flamenco_box.split(factor=0.32, align=True)
|
||||
manager_split.label(text="Manager:")
|
||||
manager_box = manager_split.row(align=True)
|
||||
|
||||
if bcp.status in {'NONE', 'IDLE'}:
|
||||
if not bcp.available_managers or not bcp.manager:
|
||||
manager_box.operator('flamenco.managers',
|
||||
text='Find Flamenco Managers',
|
||||
icon='FILE_REFRESH')
|
||||
if bcp.status in {"NONE", "IDLE"}:
|
||||
if not bcp.available_managers:
|
||||
manager_box.operator(
|
||||
"flamenco.managers",
|
||||
text="Find Flamenco Managers",
|
||||
icon="FILE_REFRESH",
|
||||
)
|
||||
else:
|
||||
manager_box.prop(bcp, 'manager', text='Manager')
|
||||
manager_box.operator('flamenco.managers',
|
||||
text='',
|
||||
icon='FILE_REFRESH')
|
||||
manager_box.prop(bcp, "manager", text="")
|
||||
manager_box.operator("flamenco.managers", text="", icon="FILE_REFRESH")
|
||||
else:
|
||||
manager_box.label('Fetching available managers.')
|
||||
manager_box.label(text="Fetching available managers.")
|
||||
|
||||
path_box = flamenco_box.row(align=True)
|
||||
path_box.prop(self, 'flamenco_job_file_path')
|
||||
props = path_box.operator('flamenco.explore_file_path', text='', icon='DISK_DRIVE')
|
||||
path_split = flamenco_box.split(factor=0.32, align=True)
|
||||
path_split.label(text="Job File Path:")
|
||||
path_box = path_split.row(align=True)
|
||||
path_box.prop(self, "flamenco_job_file_path", text="")
|
||||
props = path_box.operator(
|
||||
"flamenco.explore_file_path", text="", icon="DISK_DRIVE"
|
||||
)
|
||||
props.path = self.flamenco_job_file_path
|
||||
|
||||
job_output_box = flamenco_box.column(align=True)
|
||||
path_box = job_output_box.row(align=True)
|
||||
path_box.prop(self, 'flamenco_job_output_path')
|
||||
props = path_box.operator('flamenco.explore_file_path', text='', icon='DISK_DRIVE')
|
||||
path_split = job_output_box.split(factor=0.32, align=True)
|
||||
path_split.label(text="Job Output Path:")
|
||||
path_box = path_split.row(align=True)
|
||||
path_box.prop(self, "flamenco_job_output_path", text="")
|
||||
props = path_box.operator(
|
||||
"flamenco.explore_file_path", text="", icon="DISK_DRIVE"
|
||||
)
|
||||
props.path = self.flamenco_job_output_path
|
||||
job_output_box.prop(self, "flamenco_exclude_filter")
|
||||
|
||||
job_output_box.prop(self, 'flamenco_job_output_strip_components',
|
||||
text='Strip Components')
|
||||
prop_split = job_output_box.split(factor=0.32, align=True)
|
||||
prop_split.label(text="Strip Components:")
|
||||
prop_split.prop(self, "flamenco_job_output_strip_components", text="")
|
||||
|
||||
from .flamenco import render_output_path
|
||||
|
||||
path_box = job_output_box.row(align=True)
|
||||
output_path = render_output_path(context)
|
||||
if output_path:
|
||||
path_box.label(str(output_path))
|
||||
props = path_box.operator('flamenco.explore_file_path', text='', icon='DISK_DRIVE')
|
||||
path_box.label(text=str(output_path))
|
||||
props = path_box.operator(
|
||||
"flamenco.explore_file_path", text="", icon="DISK_DRIVE"
|
||||
)
|
||||
props.path = str(output_path.parent)
|
||||
else:
|
||||
path_box.label('Blend file is not in your project path, '
|
||||
'unable to give output path example.')
|
||||
path_box.label(
|
||||
text="Blend file is not in your project path, "
|
||||
"unable to give output path example."
|
||||
)
|
||||
|
||||
flamenco_box.prop(self, 'flamenco_open_browser_after_submit')
|
||||
flamenco_box.prop(self, "flamenco_relative_only")
|
||||
flamenco_box.prop(self, "flamenco_open_browser_after_submit")
|
||||
flamenco_box.prop(self, "flamenco_show_quit_after_submit_button")
|
||||
|
||||
|
||||
class PillarCredentialsUpdate(pillar.PillarOperatorMixin,
|
||||
Operator):
|
||||
class PillarCredentialsUpdate(pillar.PillarOperatorMixin, Operator):
|
||||
"""Updates the Pillar URL and tests the new URL."""
|
||||
bl_idname = 'pillar.credentials_update'
|
||||
bl_label = 'Update credentials'
|
||||
bl_description = 'Resynchronises your Blender ID login with Blender Cloud'
|
||||
|
||||
log = logging.getLogger('bpy.ops.%s' % bl_idname)
|
||||
bl_idname = "pillar.credentials_update"
|
||||
bl_label = "Update credentials"
|
||||
bl_description = "Resynchronises your Blender ID login with Blender Cloud"
|
||||
|
||||
log = logging.getLogger("bpy.ops.%s" % bl_idname)
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
@ -501,49 +548,86 @@ class PillarCredentialsUpdate(pillar.PillarOperatorMixin,
|
||||
|
||||
# Only allow activation when the user is actually logged in.
|
||||
if not self.is_logged_in(context):
|
||||
self.report({'ERROR'}, 'No active profile found')
|
||||
return {'CANCELLED'}
|
||||
self.report({"ERROR"}, "No active profile found")
|
||||
return {"CANCELLED"}
|
||||
|
||||
try:
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.run_until_complete(self.check_credentials(context, set()))
|
||||
except blender_id.BlenderIdCommError as ex:
|
||||
log.exception('Error sending subclient-specific token to Blender ID')
|
||||
self.report({'ERROR'}, 'Failed to sync Blender ID to Blender Cloud')
|
||||
return {'CANCELLED'}
|
||||
log.exception("Error sending subclient-specific token to Blender ID")
|
||||
self.report({"ERROR"}, "Failed to sync Blender ID to Blender Cloud")
|
||||
return {"CANCELLED"}
|
||||
except Exception as ex:
|
||||
log.exception('Error in test call to Pillar')
|
||||
self.report({'ERROR'}, 'Failed test connection to Blender Cloud')
|
||||
return {'CANCELLED'}
|
||||
log.exception("Error in test call to Pillar")
|
||||
self.report({"ERROR"}, "Failed test connection to Blender Cloud")
|
||||
return {"CANCELLED"}
|
||||
|
||||
self.report({'INFO'}, 'Blender Cloud credentials & endpoint URL updated.')
|
||||
return {'FINISHED'}
|
||||
self.report({"INFO"}, "Blender Cloud credentials & endpoint URL updated.")
|
||||
return {"FINISHED"}
|
||||
|
||||
|
||||
class PILLAR_OT_subscribe(Operator):
|
||||
"""Opens a browser to subscribe the user to the Cloud."""
|
||||
bl_idname = 'pillar.subscribe'
|
||||
bl_label = 'Subscribe to the Cloud'
|
||||
|
||||
bl_idname = "pillar.subscribe"
|
||||
bl_label = "Subscribe to the Cloud"
|
||||
bl_description = "Opens a page in a web browser to subscribe to the Blender Cloud"
|
||||
|
||||
def execute(self, context):
|
||||
import webbrowser
|
||||
|
||||
webbrowser.open_new_tab('https://cloud.blender.org/join')
|
||||
self.report({'INFO'}, 'We just started a browser for you.')
|
||||
webbrowser.open_new_tab("https://cloud.blender.org/join")
|
||||
self.report({"INFO"}, "We just started a browser for you.")
|
||||
|
||||
return {'FINISHED'}
|
||||
return {"FINISHED"}
|
||||
|
||||
|
||||
class PILLAR_OT_projects(async_loop.AsyncModalOperatorMixin,
|
||||
class PILLAR_OT_project_open_in_browser(Operator):
|
||||
bl_idname = "pillar.project_open_in_browser"
|
||||
bl_label = "Open in Browser"
|
||||
bl_description = "Opens a webbrowser to show the project"
|
||||
|
||||
project_id: StringProperty(name="Project ID")
|
||||
|
||||
def execute(self, context):
|
||||
if not self.project_id:
|
||||
return {"CANCELLED"}
|
||||
|
||||
import webbrowser
|
||||
import urllib.parse
|
||||
|
||||
import pillarsdk
|
||||
from .pillar import sync_call
|
||||
|
||||
project = sync_call(
|
||||
pillarsdk.Project.find, self.project_id, {"projection": {"url": True}}
|
||||
)
|
||||
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
import pprint
|
||||
|
||||
log.debug("found project: %s", pprint.pformat(project.to_dict()))
|
||||
|
||||
url = urllib.parse.urljoin(PILLAR_WEB_SERVER_URL, "p/" + project.url)
|
||||
webbrowser.open_new_tab(url)
|
||||
self.report({"INFO"}, "Opened a browser at %s" % url)
|
||||
|
||||
return {"FINISHED"}
|
||||
|
||||
|
||||
class PILLAR_OT_projects(
|
||||
async_loop.AsyncModalOperatorMixin,
|
||||
pillar.AuthenticatedPillarOperatorMixin,
|
||||
Operator):
|
||||
Operator,
|
||||
):
|
||||
"""Fetches the projects available to the user"""
|
||||
bl_idname = 'pillar.projects'
|
||||
bl_label = 'Fetch available projects'
|
||||
|
||||
bl_idname = "pillar.projects"
|
||||
bl_label = "Fetch available projects"
|
||||
|
||||
stop_upon_exception = True
|
||||
_log = logging.getLogger('bpy.ops.%s' % bl_idname)
|
||||
_log = logging.getLogger("bpy.ops.%s" % bl_idname)
|
||||
|
||||
async def async_execute(self, context):
|
||||
if not await self.authenticate(context):
|
||||
@ -552,71 +636,84 @@ class PILLAR_OT_projects(async_loop.AsyncModalOperatorMixin,
|
||||
import pillarsdk
|
||||
from .pillar import pillar_call
|
||||
|
||||
self.log.info('Going to fetch projects for user %s', self.user_id)
|
||||
self.log.info("Going to fetch projects for user %s", self.user_id)
|
||||
|
||||
preferences().project.status = 'FETCHING'
|
||||
preferences().project.status = "FETCHING"
|
||||
|
||||
# Get all projects, except the home project.
|
||||
projects_user = await pillar_call(
|
||||
pillarsdk.Project.all,
|
||||
{'where': {'user': self.user_id,
|
||||
'category': {'$ne': 'home'}},
|
||||
'sort': '-_created',
|
||||
'projection': {'_id': True,
|
||||
'name': True,
|
||||
'extension_props': True},
|
||||
})
|
||||
{
|
||||
"where": {"user": self.user_id, "category": {"$ne": "home"}},
|
||||
"sort": "-name",
|
||||
"projection": {"_id": True, "name": True, "extension_props": True},
|
||||
},
|
||||
)
|
||||
|
||||
projects_shared = await pillar_call(
|
||||
pillarsdk.Project.all,
|
||||
{'where': {'user': {'$ne': self.user_id},
|
||||
'permissions.groups.group': {'$in': self.db_user.groups}},
|
||||
'sort': '-_created',
|
||||
'projection': {'_id': True,
|
||||
'name': True,
|
||||
'extension_props': True},
|
||||
})
|
||||
{
|
||||
"where": {
|
||||
"user": {"$ne": self.user_id},
|
||||
"permissions.groups.group": {"$in": self.db_user.groups},
|
||||
},
|
||||
"sort": "-name",
|
||||
"projection": {"_id": True, "name": True, "extension_props": True},
|
||||
},
|
||||
)
|
||||
|
||||
# We need to convert to regular dicts before storing in ID properties.
|
||||
# Also don't store more properties than we need.
|
||||
def reduce_properties(project_list):
|
||||
for p in project_list:
|
||||
p = p.to_dict()
|
||||
extension_props = p.get('extension_props', {})
|
||||
extension_props = p.get("extension_props", {})
|
||||
enabled_for = list(extension_props.keys())
|
||||
|
||||
self._log.debug('Project %r is enabled for %s', p['name'], enabled_for)
|
||||
self._log.debug("Project %r is enabled for %s", p["name"], enabled_for)
|
||||
yield {
|
||||
'_id': p['_id'],
|
||||
'name': p['name'],
|
||||
'enabled_for': enabled_for,
|
||||
"_id": p["_id"],
|
||||
"name": p["name"],
|
||||
"enabled_for": enabled_for,
|
||||
}
|
||||
|
||||
projects = list(reduce_properties(projects_user['_items'])) + \
|
||||
list(reduce_properties(projects_shared['_items']))
|
||||
projects = list(reduce_properties(projects_user["_items"])) + list(
|
||||
reduce_properties(projects_shared["_items"])
|
||||
)
|
||||
|
||||
preferences().project.available_projects = projects
|
||||
def proj_sort_key(project):
|
||||
return project.get("name")
|
||||
|
||||
preferences().project.available_projects = sorted(projects, key=proj_sort_key)
|
||||
|
||||
self.quit()
|
||||
|
||||
def quit(self):
|
||||
preferences().project.status = 'IDLE'
|
||||
preferences().project.status = "IDLE"
|
||||
super().quit()
|
||||
|
||||
|
||||
class PILLAR_PT_image_custom_properties(rna_prop_ui.PropertyPanel, bpy.types.Panel):
|
||||
"""Shows custom properties in the image editor."""
|
||||
|
||||
bl_space_type = 'IMAGE_EDITOR'
|
||||
bl_region_type = 'UI'
|
||||
bl_label = 'Custom Properties'
|
||||
bl_space_type = "IMAGE_EDITOR"
|
||||
bl_region_type = "UI"
|
||||
bl_label = "Custom Properties"
|
||||
|
||||
_context_path = 'edit_image'
|
||||
_context_path = "edit_image"
|
||||
_property_type = bpy.types.Image
|
||||
|
||||
|
||||
def ctx_preferences():
|
||||
"""Returns bpy.context.preferences in a 2.79-compatible way."""
|
||||
try:
|
||||
return bpy.context.preferences
|
||||
except AttributeError:
|
||||
return bpy.context.user_preferences
|
||||
|
||||
|
||||
def preferences() -> BlenderCloudPreferences:
|
||||
return bpy.context.user_preferences.addons[ADDON_NAME].preferences
|
||||
return ctx_preferences().addons[ADDON_NAME].preferences
|
||||
|
||||
|
||||
def load_custom_icons():
|
||||
@ -627,9 +724,10 @@ def load_custom_icons():
|
||||
return
|
||||
|
||||
import bpy.utils.previews
|
||||
|
||||
icons = bpy.utils.previews.new()
|
||||
my_icons_dir = os.path.join(os.path.dirname(__file__), 'icons')
|
||||
icons.load('CLOUD', os.path.join(my_icons_dir, 'icon-cloud.png'), 'IMAGE')
|
||||
my_icons_dir = os.path.join(os.path.dirname(__file__), "icons")
|
||||
icons.load("CLOUD", os.path.join(my_icons_dir, "icon-cloud.png"), "IMAGE")
|
||||
|
||||
|
||||
def unload_custom_icons():
|
||||
@ -659,13 +757,14 @@ def register():
|
||||
bpy.utils.register_class(SyncStatusProperties)
|
||||
bpy.utils.register_class(PILLAR_OT_subscribe)
|
||||
bpy.utils.register_class(PILLAR_OT_projects)
|
||||
bpy.utils.register_class(PILLAR_OT_project_open_in_browser)
|
||||
bpy.utils.register_class(PILLAR_PT_image_custom_properties)
|
||||
|
||||
addon_prefs = preferences()
|
||||
|
||||
WindowManager.last_blender_cloud_location = StringProperty(
|
||||
name="Last Blender Cloud browser location",
|
||||
default="/")
|
||||
name="Last Blender Cloud browser location", default="/"
|
||||
)
|
||||
|
||||
def default_if_empty(scene, context):
|
||||
"""The scene's local_texture_dir, if empty, reverts to the addon prefs."""
|
||||
@ -674,10 +773,11 @@ def register():
|
||||
scene.local_texture_dir = addon_prefs.local_texture_dir
|
||||
|
||||
Scene.local_texture_dir = StringProperty(
|
||||
name='Blender Cloud texture storage directory for current scene',
|
||||
subtype='DIR_PATH',
|
||||
name="Blender Cloud texture storage directory for current scene",
|
||||
subtype="DIR_PATH",
|
||||
default=addon_prefs.local_texture_dir,
|
||||
update=default_if_empty)
|
||||
update=default_if_empty,
|
||||
)
|
||||
|
||||
WindowManager.blender_sync_status = PointerProperty(type=SyncStatusProperties)
|
||||
|
||||
@ -693,6 +793,7 @@ def unregister():
|
||||
bpy.utils.unregister_class(SyncStatusProperties)
|
||||
bpy.utils.unregister_class(PILLAR_OT_subscribe)
|
||||
bpy.utils.unregister_class(PILLAR_OT_projects)
|
||||
bpy.utils.unregister_class(PILLAR_OT_project_open_in_browser)
|
||||
bpy.utils.unregister_class(PILLAR_PT_image_custom_properties)
|
||||
|
||||
del WindowManager.last_blender_cloud_location
|
||||
|
@ -52,13 +52,13 @@ def open_blend(filename, access="rb"):
|
||||
bfile.is_compressed = False
|
||||
bfile.filepath_orig = filename
|
||||
return bfile
|
||||
elif magic[:2] == b'\x1f\x8b':
|
||||
elif magic[:2] == b"\x1f\x8b":
|
||||
log.debug("gzip blendfile detected")
|
||||
handle.close()
|
||||
log.debug("decompressing started")
|
||||
fs = gzip.open(filename, "rb")
|
||||
data = fs.read(FILE_BUFFER_SIZE)
|
||||
magic = data[:len(magic_test)]
|
||||
magic = data[: len(magic_test)]
|
||||
if magic == magic_test:
|
||||
handle = tempfile.TemporaryFile()
|
||||
while data:
|
||||
@ -90,6 +90,7 @@ class BlendFile:
|
||||
"""
|
||||
Blend file.
|
||||
"""
|
||||
|
||||
__slots__ = (
|
||||
# file (result of open())
|
||||
"handle",
|
||||
@ -125,9 +126,10 @@ class BlendFile:
|
||||
self.code_index = {}
|
||||
|
||||
block = BlendFileBlock(handle, self)
|
||||
while block.code != b'ENDB':
|
||||
if block.code == b'DNA1':
|
||||
(self.structs,
|
||||
while block.code != b"ENDB":
|
||||
if block.code == b"DNA1":
|
||||
(
|
||||
self.structs,
|
||||
self.sdna_index_from_id,
|
||||
) = BlendFile.decode_structs(self.header, block, handle)
|
||||
else:
|
||||
@ -141,7 +143,9 @@ class BlendFile:
|
||||
self.blocks.append(block)
|
||||
|
||||
# cache (could lazy init, incase we never use?)
|
||||
self.block_from_offset = {block.addr_old: block for block in self.blocks if block.code != b'ENDB'}
|
||||
self.block_from_offset = {
|
||||
block.addr_old: block for block in self.blocks if block.code != b"ENDB"
|
||||
}
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
@ -150,7 +154,7 @@ class BlendFile:
|
||||
self.close()
|
||||
|
||||
def find_blocks_from_code(self, code):
|
||||
assert(type(code) == bytes)
|
||||
assert type(code) == bytes
|
||||
if code not in self.code_index:
|
||||
return []
|
||||
return self.code_index[code]
|
||||
@ -158,7 +162,7 @@ class BlendFile:
|
||||
def find_block_from_offset(self, offset):
|
||||
# same as looking looping over all blocks,
|
||||
# then checking ``block.addr_old == offset``
|
||||
assert(type(offset) is int)
|
||||
assert type(offset) is int
|
||||
return self.block_from_offset.get(offset)
|
||||
|
||||
def close(self):
|
||||
@ -185,12 +189,15 @@ class BlendFile:
|
||||
|
||||
def ensure_subtype_smaller(self, sdna_index_curr, sdna_index_next):
|
||||
# never refine to a smaller type
|
||||
if (self.structs[sdna_index_curr].size >
|
||||
self.structs[sdna_index_next].size):
|
||||
if self.structs[sdna_index_curr].size > self.structs[sdna_index_next].size:
|
||||
|
||||
raise RuntimeError("cant refine to smaller type (%s -> %s)" %
|
||||
(self.structs[sdna_index_curr].dna_type_id.decode('ascii'),
|
||||
self.structs[sdna_index_next].dna_type_id.decode('ascii')))
|
||||
raise RuntimeError(
|
||||
"cant refine to smaller type (%s -> %s)"
|
||||
% (
|
||||
self.structs[sdna_index_curr].dna_type_id.decode("ascii"),
|
||||
self.structs[sdna_index_next].dna_type_id.decode("ascii"),
|
||||
)
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def decode_structs(header, block, handle):
|
||||
@ -199,7 +206,7 @@ class BlendFile:
|
||||
"""
|
||||
log.debug("building DNA catalog")
|
||||
shortstruct = DNA_IO.USHORT[header.endian_index]
|
||||
shortstruct2 = struct.Struct(header.endian_str + b'HH')
|
||||
shortstruct2 = struct.Struct(header.endian_str + b"HH")
|
||||
intstruct = DNA_IO.UINT[header.endian_index]
|
||||
|
||||
data = handle.read(block.size)
|
||||
@ -281,6 +288,7 @@ class BlendFileBlock:
|
||||
"""
|
||||
Instance of a struct.
|
||||
"""
|
||||
|
||||
__slots__ = (
|
||||
# BlendFile
|
||||
"file",
|
||||
@ -294,18 +302,22 @@ class BlendFileBlock:
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return ("<%s.%s (%s), size=%d at %s>" %
|
||||
return (
|
||||
"<%s.%s (%s), size=%d at %s>"
|
||||
%
|
||||
# fields=[%s]
|
||||
(self.__class__.__name__,
|
||||
self.dna_type.dna_type_id.decode('ascii'),
|
||||
(
|
||||
self.__class__.__name__,
|
||||
self.dna_type.dna_type_id.decode("ascii"),
|
||||
self.code.decode(),
|
||||
self.size,
|
||||
# b", ".join(f.dna_name.name_only for f in self.dna_type.fields).decode('ascii'),
|
||||
hex(self.addr_old),
|
||||
))
|
||||
)
|
||||
)
|
||||
|
||||
def __init__(self, handle, bfile):
|
||||
OLDBLOCK = struct.Struct(b'4sI')
|
||||
OLDBLOCK = struct.Struct(b"4sI")
|
||||
|
||||
self.file = bfile
|
||||
self.user_data = None
|
||||
@ -318,8 +330,8 @@ class BlendFileBlock:
|
||||
if len(data) > 15:
|
||||
|
||||
blockheader = bfile.block_header_struct.unpack(data)
|
||||
self.code = blockheader[0].partition(b'\0')[0]
|
||||
if self.code != b'ENDB':
|
||||
self.code = blockheader[0].partition(b"\0")[0]
|
||||
if self.code != b"ENDB":
|
||||
self.size = blockheader[1]
|
||||
self.addr_old = blockheader[2]
|
||||
self.sdna_index = blockheader[3]
|
||||
@ -333,7 +345,7 @@ class BlendFileBlock:
|
||||
self.file_offset = 0
|
||||
else:
|
||||
blockheader = OLDBLOCK.unpack(data)
|
||||
self.code = blockheader[0].partition(b'\0')[0]
|
||||
self.code = blockheader[0].partition(b"\0")[0]
|
||||
self.code = DNA_IO.read_data0(blockheader[0])
|
||||
self.size = 0
|
||||
self.addr_old = 0
|
||||
@ -346,16 +358,18 @@ class BlendFileBlock:
|
||||
return self.file.structs[self.sdna_index]
|
||||
|
||||
def refine_type_from_index(self, sdna_index_next):
|
||||
assert(type(sdna_index_next) is int)
|
||||
assert type(sdna_index_next) is int
|
||||
sdna_index_curr = self.sdna_index
|
||||
self.file.ensure_subtype_smaller(sdna_index_curr, sdna_index_next)
|
||||
self.sdna_index = sdna_index_next
|
||||
|
||||
def refine_type(self, dna_type_id):
|
||||
assert(type(dna_type_id) is bytes)
|
||||
assert type(dna_type_id) is bytes
|
||||
self.refine_type_from_index(self.file.sdna_index_from_id[dna_type_id])
|
||||
|
||||
def get_file_offset(self, path,
|
||||
def get_file_offset(
|
||||
self,
|
||||
path,
|
||||
default=...,
|
||||
sdna_index_refine=None,
|
||||
base_index=0,
|
||||
@ -363,11 +377,11 @@ class BlendFileBlock:
|
||||
"""
|
||||
Return (offset, length)
|
||||
"""
|
||||
assert(type(path) is bytes)
|
||||
assert type(path) is bytes
|
||||
|
||||
ofs = self.file_offset
|
||||
if base_index != 0:
|
||||
assert(base_index < self.count)
|
||||
assert base_index < self.count
|
||||
ofs += (self.size // self.count) * base_index
|
||||
self.file.handle.seek(ofs, os.SEEK_SET)
|
||||
|
||||
@ -377,21 +391,23 @@ class BlendFileBlock:
|
||||
self.file.ensure_subtype_smaller(self.sdna_index, sdna_index_refine)
|
||||
|
||||
dna_struct = self.file.structs[sdna_index_refine]
|
||||
field = dna_struct.field_from_path(
|
||||
self.file.header, self.file.handle, path)
|
||||
field = dna_struct.field_from_path(self.file.header, self.file.handle, path)
|
||||
|
||||
return (self.file.handle.tell(), field.dna_name.array_size)
|
||||
|
||||
def get(self, path,
|
||||
def get(
|
||||
self,
|
||||
path,
|
||||
default=...,
|
||||
sdna_index_refine=None,
|
||||
use_nil=True, use_str=True,
|
||||
use_nil=True,
|
||||
use_str=True,
|
||||
base_index=0,
|
||||
):
|
||||
|
||||
ofs = self.file_offset
|
||||
if base_index != 0:
|
||||
assert(base_index < self.count)
|
||||
assert base_index < self.count
|
||||
ofs += (self.size // self.count) * base_index
|
||||
self.file.handle.seek(ofs, os.SEEK_SET)
|
||||
|
||||
@ -402,36 +418,55 @@ class BlendFileBlock:
|
||||
|
||||
dna_struct = self.file.structs[sdna_index_refine]
|
||||
return dna_struct.field_get(
|
||||
self.file.header, self.file.handle, path,
|
||||
self.file.header,
|
||||
self.file.handle,
|
||||
path,
|
||||
default=default,
|
||||
use_nil=use_nil, use_str=use_str,
|
||||
use_nil=use_nil,
|
||||
use_str=use_str,
|
||||
)
|
||||
|
||||
def get_recursive_iter(self, path, path_root=b"",
|
||||
def get_recursive_iter(
|
||||
self,
|
||||
path,
|
||||
path_root=b"",
|
||||
default=...,
|
||||
sdna_index_refine=None,
|
||||
use_nil=True, use_str=True,
|
||||
use_nil=True,
|
||||
use_str=True,
|
||||
base_index=0,
|
||||
):
|
||||
if path_root:
|
||||
path_full = (
|
||||
(path_root if type(path_root) is tuple else (path_root, )) +
|
||||
(path if type(path) is tuple else (path, )))
|
||||
path_full = (path_root if type(path_root) is tuple else (path_root,)) + (
|
||||
path if type(path) is tuple else (path,)
|
||||
)
|
||||
else:
|
||||
path_full = path
|
||||
|
||||
try:
|
||||
yield (path_full, self.get(path_full, default, sdna_index_refine, use_nil, use_str, base_index))
|
||||
yield (
|
||||
path_full,
|
||||
self.get(
|
||||
path_full, default, sdna_index_refine, use_nil, use_str, base_index
|
||||
),
|
||||
)
|
||||
except NotImplementedError as ex:
|
||||
msg, dna_name, dna_type = ex.args
|
||||
struct_index = self.file.sdna_index_from_id.get(dna_type.dna_type_id, None)
|
||||
if struct_index is None:
|
||||
yield (path_full, "<%s>" % dna_type.dna_type_id.decode('ascii'))
|
||||
yield (path_full, "<%s>" % dna_type.dna_type_id.decode("ascii"))
|
||||
else:
|
||||
struct = self.file.structs[struct_index]
|
||||
for f in struct.fields:
|
||||
yield from self.get_recursive_iter(
|
||||
f.dna_name.name_only, path_full, default, None, use_nil, use_str, 0)
|
||||
f.dna_name.name_only,
|
||||
path_full,
|
||||
default,
|
||||
None,
|
||||
use_nil,
|
||||
use_str,
|
||||
0,
|
||||
)
|
||||
|
||||
def items_recursive_iter(self):
|
||||
for k in self.keys():
|
||||
@ -445,9 +480,13 @@ class BlendFileBlock:
|
||||
# TODO This implementation is most likely far from optimal... and CRC32 is not renown as the best hashing
|
||||
# algo either. But for now does the job!
|
||||
import zlib
|
||||
|
||||
def _is_pointer(self, k):
|
||||
return self.file.structs[self.sdna_index].field_from_path(
|
||||
self.file.header, self.file.handle, k).dna_name.is_pointer
|
||||
return (
|
||||
self.file.structs[self.sdna_index]
|
||||
.field_from_path(self.file.header, self.file.handle, k)
|
||||
.dna_name.is_pointer
|
||||
)
|
||||
|
||||
hsh = 1
|
||||
for k, v in self.items_recursive_iter():
|
||||
@ -455,7 +494,10 @@ class BlendFileBlock:
|
||||
hsh = zlib.adler32(str(v).encode(), hsh)
|
||||
return hsh
|
||||
|
||||
def set(self, path, value,
|
||||
def set(
|
||||
self,
|
||||
path,
|
||||
value,
|
||||
sdna_index_refine=None,
|
||||
):
|
||||
|
||||
@ -467,29 +509,34 @@ class BlendFileBlock:
|
||||
dna_struct = self.file.structs[sdna_index_refine]
|
||||
self.file.handle.seek(self.file_offset, os.SEEK_SET)
|
||||
self.file.is_modified = True
|
||||
return dna_struct.field_set(
|
||||
self.file.header, self.file.handle, path, value)
|
||||
return dna_struct.field_set(self.file.header, self.file.handle, path, value)
|
||||
|
||||
# ---------------
|
||||
# Utility get/set
|
||||
#
|
||||
# avoid inline pointer casting
|
||||
def get_pointer(
|
||||
self, path,
|
||||
self,
|
||||
path,
|
||||
default=...,
|
||||
sdna_index_refine=None,
|
||||
base_index=0,
|
||||
):
|
||||
if sdna_index_refine is None:
|
||||
sdna_index_refine = self.sdna_index
|
||||
result = self.get(path, default, sdna_index_refine=sdna_index_refine, base_index=base_index)
|
||||
result = self.get(
|
||||
path, default, sdna_index_refine=sdna_index_refine, base_index=base_index
|
||||
)
|
||||
|
||||
# default
|
||||
if type(result) is not int:
|
||||
return result
|
||||
|
||||
assert(self.file.structs[sdna_index_refine].field_from_path(
|
||||
self.file.header, self.file.handle, path).dna_name.is_pointer)
|
||||
assert (
|
||||
self.file.structs[sdna_index_refine]
|
||||
.field_from_path(self.file.header, self.file.handle, path)
|
||||
.dna_name.is_pointer
|
||||
)
|
||||
if result != 0:
|
||||
# possible (but unlikely)
|
||||
# that this fails and returns None
|
||||
@ -517,7 +564,7 @@ class BlendFileBlock:
|
||||
yield self[k]
|
||||
except NotImplementedError as ex:
|
||||
msg, dna_name, dna_type = ex.args
|
||||
yield "<%s>" % dna_type.dna_type_id.decode('ascii')
|
||||
yield "<%s>" % dna_type.dna_type_id.decode("ascii")
|
||||
|
||||
def items(self):
|
||||
for k in self.keys():
|
||||
@ -525,7 +572,7 @@ class BlendFileBlock:
|
||||
yield (k, self[k])
|
||||
except NotImplementedError as ex:
|
||||
msg, dna_name, dna_type = ex.args
|
||||
yield (k, "<%s>" % dna_type.dna_type_id.decode('ascii'))
|
||||
yield (k, "<%s>" % dna_type.dna_type_id.decode("ascii"))
|
||||
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
@ -542,6 +589,7 @@ class BlendFileHeader:
|
||||
BlendFileHeader allocates the first 12 bytes of a blend file
|
||||
it contains information about the hardware architecture
|
||||
"""
|
||||
|
||||
__slots__ = (
|
||||
# str
|
||||
"magic",
|
||||
@ -558,46 +606,51 @@ class BlendFileHeader:
|
||||
)
|
||||
|
||||
def __init__(self, handle):
|
||||
FILEHEADER = struct.Struct(b'7s1s1s3s')
|
||||
FILEHEADER = struct.Struct(b"7s1s1s3s")
|
||||
|
||||
log.debug("reading blend-file-header")
|
||||
values = FILEHEADER.unpack(handle.read(FILEHEADER.size))
|
||||
self.magic = values[0]
|
||||
pointer_size_id = values[1]
|
||||
if pointer_size_id == b'-':
|
||||
if pointer_size_id == b"-":
|
||||
self.pointer_size = 8
|
||||
elif pointer_size_id == b'_':
|
||||
elif pointer_size_id == b"_":
|
||||
self.pointer_size = 4
|
||||
else:
|
||||
assert(0)
|
||||
assert 0
|
||||
endian_id = values[2]
|
||||
if endian_id == b'v':
|
||||
if endian_id == b"v":
|
||||
self.is_little_endian = True
|
||||
self.endian_str = b'<'
|
||||
self.endian_str = b"<"
|
||||
self.endian_index = 0
|
||||
elif endian_id == b'V':
|
||||
elif endian_id == b"V":
|
||||
self.is_little_endian = False
|
||||
self.endian_index = 1
|
||||
self.endian_str = b'>'
|
||||
self.endian_str = b">"
|
||||
else:
|
||||
assert(0)
|
||||
assert 0
|
||||
|
||||
version_id = values[3]
|
||||
self.version = int(version_id)
|
||||
|
||||
def create_block_header_struct(self):
|
||||
return struct.Struct(b''.join((
|
||||
return struct.Struct(
|
||||
b"".join(
|
||||
(
|
||||
self.endian_str,
|
||||
b'4sI',
|
||||
b'I' if self.pointer_size == 4 else b'Q',
|
||||
b'II',
|
||||
)))
|
||||
b"4sI",
|
||||
b"I" if self.pointer_size == 4 else b"Q",
|
||||
b"II",
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class DNAName:
|
||||
"""
|
||||
DNAName is a C-type name stored in the DNA
|
||||
"""
|
||||
|
||||
__slots__ = (
|
||||
"name_full",
|
||||
"name_only",
|
||||
@ -614,40 +667,40 @@ class DNAName:
|
||||
self.array_size = self.calc_array_size()
|
||||
|
||||
def __repr__(self):
|
||||
return '%s(%r)' % (type(self).__qualname__, self.name_full)
|
||||
return "%s(%r)" % (type(self).__qualname__, self.name_full)
|
||||
|
||||
def as_reference(self, parent):
|
||||
if parent is None:
|
||||
result = b''
|
||||
result = b""
|
||||
else:
|
||||
result = parent + b'.'
|
||||
result = parent + b"."
|
||||
|
||||
result = result + self.name_only
|
||||
return result
|
||||
|
||||
def calc_name_only(self):
|
||||
result = self.name_full.strip(b'*()')
|
||||
index = result.find(b'[')
|
||||
result = self.name_full.strip(b"*()")
|
||||
index = result.find(b"[")
|
||||
if index != -1:
|
||||
result = result[:index]
|
||||
return result
|
||||
|
||||
def calc_is_pointer(self):
|
||||
return (b'*' in self.name_full)
|
||||
return b"*" in self.name_full
|
||||
|
||||
def calc_is_method_pointer(self):
|
||||
return (b'(*' in self.name_full)
|
||||
return b"(*" in self.name_full
|
||||
|
||||
def calc_array_size(self):
|
||||
result = 1
|
||||
temp = self.name_full
|
||||
index = temp.find(b'[')
|
||||
index = temp.find(b"[")
|
||||
|
||||
while index != -1:
|
||||
index_2 = temp.find(b']')
|
||||
result *= int(temp[index + 1:index_2])
|
||||
temp = temp[index_2 + 1:]
|
||||
index = temp.find(b'[')
|
||||
index_2 = temp.find(b"]")
|
||||
result *= int(temp[index + 1 : index_2])
|
||||
temp = temp[index_2 + 1 :]
|
||||
index = temp.find(b"[")
|
||||
|
||||
return result
|
||||
|
||||
@ -657,6 +710,7 @@ class DNAField:
|
||||
DNAField is a coupled DNAStruct and DNAName
|
||||
and cache offset for reuse
|
||||
"""
|
||||
|
||||
__slots__ = (
|
||||
# DNAName
|
||||
"dna_name",
|
||||
@ -680,6 +734,7 @@ class DNAStruct:
|
||||
"""
|
||||
DNAStruct is a C-type structure stored in the DNA
|
||||
"""
|
||||
|
||||
__slots__ = (
|
||||
"dna_type_id",
|
||||
"size",
|
||||
@ -695,7 +750,7 @@ class DNAStruct:
|
||||
self.user_data = None
|
||||
|
||||
def __repr__(self):
|
||||
return '%s(%r)' % (type(self).__qualname__, self.dna_type_id)
|
||||
return "%s(%r)" % (type(self).__qualname__, self.dna_type_id)
|
||||
|
||||
def field_from_path(self, header, handle, path):
|
||||
"""
|
||||
@ -709,7 +764,7 @@ class DNAStruct:
|
||||
if len(path) >= 2 and type(path[1]) is not bytes:
|
||||
name_tail = path[2:]
|
||||
index = path[1]
|
||||
assert(type(index) is int)
|
||||
assert type(index) is int
|
||||
else:
|
||||
name_tail = path[1:]
|
||||
index = 0
|
||||
@ -718,7 +773,7 @@ class DNAStruct:
|
||||
name_tail = None
|
||||
index = 0
|
||||
|
||||
assert(type(name) is bytes)
|
||||
assert type(name) is bytes
|
||||
|
||||
field = self.field_from_name.get(name)
|
||||
|
||||
@ -729,47 +784,69 @@ class DNAStruct:
|
||||
index_offset = header.pointer_size * index
|
||||
else:
|
||||
index_offset = field.dna_type.size * index
|
||||
assert(index_offset < field.dna_size)
|
||||
assert index_offset < field.dna_size
|
||||
handle.seek(index_offset, os.SEEK_CUR)
|
||||
if not name_tail: # None or ()
|
||||
return field
|
||||
else:
|
||||
return field.dna_type.field_from_path(header, handle, name_tail)
|
||||
|
||||
def field_get(self, header, handle, path,
|
||||
def field_get(
|
||||
self,
|
||||
header,
|
||||
handle,
|
||||
path,
|
||||
default=...,
|
||||
use_nil=True, use_str=True,
|
||||
use_nil=True,
|
||||
use_str=True,
|
||||
):
|
||||
field = self.field_from_path(header, handle, path)
|
||||
if field is None:
|
||||
if default is not ...:
|
||||
return default
|
||||
else:
|
||||
raise KeyError("%r not found in %r (%r)" %
|
||||
(path, [f.dna_name.name_only for f in self.fields], self.dna_type_id))
|
||||
raise KeyError(
|
||||
"%r not found in %r (%r)"
|
||||
% (
|
||||
path,
|
||||
[f.dna_name.name_only for f in self.fields],
|
||||
self.dna_type_id,
|
||||
)
|
||||
)
|
||||
|
||||
dna_type = field.dna_type
|
||||
dna_name = field.dna_name
|
||||
|
||||
if dna_name.is_pointer:
|
||||
return DNA_IO.read_pointer(handle, header)
|
||||
elif dna_type.dna_type_id == b'int':
|
||||
elif dna_type.dna_type_id == b"int":
|
||||
if dna_name.array_size > 1:
|
||||
return [DNA_IO.read_int(handle, header) for i in range(dna_name.array_size)]
|
||||
return [
|
||||
DNA_IO.read_int(handle, header) for i in range(dna_name.array_size)
|
||||
]
|
||||
return DNA_IO.read_int(handle, header)
|
||||
elif dna_type.dna_type_id == b'short':
|
||||
elif dna_type.dna_type_id == b"short":
|
||||
if dna_name.array_size > 1:
|
||||
return [DNA_IO.read_short(handle, header) for i in range(dna_name.array_size)]
|
||||
return [
|
||||
DNA_IO.read_short(handle, header)
|
||||
for i in range(dna_name.array_size)
|
||||
]
|
||||
return DNA_IO.read_short(handle, header)
|
||||
elif dna_type.dna_type_id == b'uint64_t':
|
||||
elif dna_type.dna_type_id == b"uint64_t":
|
||||
if dna_name.array_size > 1:
|
||||
return [DNA_IO.read_ulong(handle, header) for i in range(dna_name.array_size)]
|
||||
return [
|
||||
DNA_IO.read_ulong(handle, header)
|
||||
for i in range(dna_name.array_size)
|
||||
]
|
||||
return DNA_IO.read_ulong(handle, header)
|
||||
elif dna_type.dna_type_id == b'float':
|
||||
elif dna_type.dna_type_id == b"float":
|
||||
if dna_name.array_size > 1:
|
||||
return [DNA_IO.read_float(handle, header) for i in range(dna_name.array_size)]
|
||||
return [
|
||||
DNA_IO.read_float(handle, header)
|
||||
for i in range(dna_name.array_size)
|
||||
]
|
||||
return DNA_IO.read_float(handle, header)
|
||||
elif dna_type.dna_type_id == b'char':
|
||||
elif dna_type.dna_type_id == b"char":
|
||||
if use_str:
|
||||
if use_nil:
|
||||
return DNA_IO.read_string0(handle, dna_name.array_size)
|
||||
@ -781,30 +858,39 @@ class DNAStruct:
|
||||
else:
|
||||
return DNA_IO.read_bytes(handle, dna_name.array_size)
|
||||
else:
|
||||
raise NotImplementedError("%r exists but isn't pointer, can't resolve field %r" %
|
||||
(path, dna_name.name_only), dna_name, dna_type)
|
||||
raise NotImplementedError(
|
||||
"%r exists but isn't pointer, can't resolve field %r"
|
||||
% (path, dna_name.name_only),
|
||||
dna_name,
|
||||
dna_type,
|
||||
)
|
||||
|
||||
def field_set(self, header, handle, path, value):
|
||||
assert(type(path) == bytes)
|
||||
assert type(path) == bytes
|
||||
|
||||
field = self.field_from_path(header, handle, path)
|
||||
if field is None:
|
||||
raise KeyError("%r not found in %r" %
|
||||
(path, [f.dna_name.name_only for f in self.fields]))
|
||||
raise KeyError(
|
||||
"%r not found in %r"
|
||||
% (path, [f.dna_name.name_only for f in self.fields])
|
||||
)
|
||||
|
||||
dna_type = field.dna_type
|
||||
dna_name = field.dna_name
|
||||
|
||||
if dna_type.dna_type_id == b'char':
|
||||
if dna_type.dna_type_id == b"char":
|
||||
if type(value) is str:
|
||||
return DNA_IO.write_string(handle, value, dna_name.array_size)
|
||||
else:
|
||||
return DNA_IO.write_bytes(handle, value, dna_name.array_size)
|
||||
elif dna_type.dna_type_id == b'int':
|
||||
elif dna_type.dna_type_id == b"int":
|
||||
DNA_IO.write_int(handle, header, value)
|
||||
else:
|
||||
raise NotImplementedError("Setting %r is not yet supported for %r" %
|
||||
(dna_type, dna_name), dna_name, dna_type)
|
||||
raise NotImplementedError(
|
||||
"Setting %r is not yet supported for %r" % (dna_type, dna_name),
|
||||
dna_name,
|
||||
dna_type,
|
||||
)
|
||||
|
||||
|
||||
class DNA_IO:
|
||||
@ -821,20 +907,20 @@ class DNA_IO:
|
||||
|
||||
@staticmethod
|
||||
def write_string(handle, astring, fieldlen):
|
||||
assert(isinstance(astring, str))
|
||||
assert isinstance(astring, str)
|
||||
if len(astring) >= fieldlen:
|
||||
stringw = astring[0:fieldlen]
|
||||
else:
|
||||
stringw = astring + '\0'
|
||||
handle.write(stringw.encode('utf-8'))
|
||||
stringw = astring + "\0"
|
||||
handle.write(stringw.encode("utf-8"))
|
||||
|
||||
@staticmethod
|
||||
def write_bytes(handle, astring, fieldlen):
|
||||
assert(isinstance(astring, (bytes, bytearray)))
|
||||
assert isinstance(astring, (bytes, bytearray))
|
||||
if len(astring) >= fieldlen:
|
||||
stringw = astring[0:fieldlen]
|
||||
else:
|
||||
stringw = astring + b'\0'
|
||||
stringw = astring + b"\0"
|
||||
|
||||
handle.write(stringw)
|
||||
|
||||
@ -850,44 +936,44 @@ class DNA_IO:
|
||||
|
||||
@staticmethod
|
||||
def read_string(handle, length):
|
||||
return DNA_IO.read_bytes(handle, length).decode('utf-8')
|
||||
return DNA_IO.read_bytes(handle, length).decode("utf-8")
|
||||
|
||||
@staticmethod
|
||||
def read_string0(handle, length):
|
||||
return DNA_IO.read_bytes0(handle, length).decode('utf-8')
|
||||
return DNA_IO.read_bytes0(handle, length).decode("utf-8")
|
||||
|
||||
@staticmethod
|
||||
def read_data0_offset(data, offset):
|
||||
add = data.find(b'\0', offset) - offset
|
||||
return data[offset:offset + add]
|
||||
add = data.find(b"\0", offset) - offset
|
||||
return data[offset : offset + add]
|
||||
|
||||
@staticmethod
|
||||
def read_data0(data):
|
||||
add = data.find(b'\0')
|
||||
add = data.find(b"\0")
|
||||
return data[:add]
|
||||
|
||||
USHORT = struct.Struct(b'<H'), struct.Struct(b'>H')
|
||||
USHORT = struct.Struct(b"<H"), struct.Struct(b">H")
|
||||
|
||||
@staticmethod
|
||||
def read_ushort(handle, fileheader):
|
||||
st = DNA_IO.USHORT[fileheader.endian_index]
|
||||
return st.unpack(handle.read(st.size))[0]
|
||||
|
||||
SSHORT = struct.Struct(b'<h'), struct.Struct(b'>h')
|
||||
SSHORT = struct.Struct(b"<h"), struct.Struct(b">h")
|
||||
|
||||
@staticmethod
|
||||
def read_short(handle, fileheader):
|
||||
st = DNA_IO.SSHORT[fileheader.endian_index]
|
||||
return st.unpack(handle.read(st.size))[0]
|
||||
|
||||
UINT = struct.Struct(b'<I'), struct.Struct(b'>I')
|
||||
UINT = struct.Struct(b"<I"), struct.Struct(b">I")
|
||||
|
||||
@staticmethod
|
||||
def read_uint(handle, fileheader):
|
||||
st = DNA_IO.UINT[fileheader.endian_index]
|
||||
return st.unpack(handle.read(st.size))[0]
|
||||
|
||||
SINT = struct.Struct(b'<i'), struct.Struct(b'>i')
|
||||
SINT = struct.Struct(b"<i"), struct.Struct(b">i")
|
||||
|
||||
@staticmethod
|
||||
def read_int(handle, fileheader):
|
||||
@ -896,19 +982,22 @@ class DNA_IO:
|
||||
|
||||
@staticmethod
|
||||
def write_int(handle, fileheader, value):
|
||||
assert isinstance(value, int), 'value must be int, but is %r: %r' % (type(value), value)
|
||||
assert isinstance(value, int), "value must be int, but is %r: %r" % (
|
||||
type(value),
|
||||
value,
|
||||
)
|
||||
st = DNA_IO.SINT[fileheader.endian_index]
|
||||
to_write = st.pack(value)
|
||||
handle.write(to_write)
|
||||
|
||||
FLOAT = struct.Struct(b'<f'), struct.Struct(b'>f')
|
||||
FLOAT = struct.Struct(b"<f"), struct.Struct(b">f")
|
||||
|
||||
@staticmethod
|
||||
def read_float(handle, fileheader):
|
||||
st = DNA_IO.FLOAT[fileheader.endian_index]
|
||||
return st.unpack(handle.read(st.size))[0]
|
||||
|
||||
ULONG = struct.Struct(b'<Q'), struct.Struct(b'>Q')
|
||||
ULONG = struct.Struct(b"<Q"), struct.Struct(b">Q")
|
||||
|
||||
@staticmethod
|
||||
def read_ulong(handle, fileheader):
|
||||
|
@ -33,7 +33,9 @@ from cachecontrol.caches import FileCache
|
||||
from . import appdirs
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
_session = None # requests.Session object that's set up for caching by requests_session().
|
||||
_session = (
|
||||
None # requests.Session object that's set up for caching by requests_session().
|
||||
)
|
||||
|
||||
|
||||
def cache_directory(*subdirs) -> str:
|
||||
@ -56,12 +58,12 @@ def cache_directory(*subdirs) -> str:
|
||||
if profile:
|
||||
username = profile.username
|
||||
else:
|
||||
username = 'anonymous'
|
||||
username = "anonymous"
|
||||
|
||||
# TODO: use bpy.utils.user_resource('CACHE', ...)
|
||||
# once https://developer.blender.org/T47684 is finished.
|
||||
user_cache_dir = appdirs.user_cache_dir(appname='Blender', appauthor=False)
|
||||
cache_dir = os.path.join(user_cache_dir, 'blender_cloud', username, *subdirs)
|
||||
user_cache_dir = appdirs.user_cache_dir(appname="Blender", appauthor=False)
|
||||
cache_dir = os.path.join(user_cache_dir, "blender_cloud", username, *subdirs)
|
||||
|
||||
os.makedirs(cache_dir, mode=0o700, exist_ok=True)
|
||||
|
||||
@ -76,10 +78,11 @@ def requests_session() -> requests.Session:
|
||||
if _session is not None:
|
||||
return _session
|
||||
|
||||
cache_name = cache_directory('blender_cloud_http')
|
||||
log.info('Storing cache in %s' % cache_name)
|
||||
cache_name = cache_directory("blender_cloud_http")
|
||||
log.info("Storing cache in %s" % cache_name)
|
||||
|
||||
_session = cachecontrol.CacheControl(sess=requests.session(),
|
||||
cache=FileCache(cache_name))
|
||||
_session = cachecontrol.CacheControl(
|
||||
sess=requests.session(), cache=FileCache(cache_name)
|
||||
)
|
||||
|
||||
return _session
|
||||
|
File diff suppressed because it is too large
Load Diff
@ -1,144 +0,0 @@
|
||||
"""BAM packing interface for Flamenco."""
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
import typing
|
||||
|
||||
# Timeout of the BAM subprocess, in seconds.
|
||||
SUBPROC_READLINE_TIMEOUT = 600
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CommandExecutionError(Exception):
|
||||
"""Raised when there was an error executing a BAM command."""
|
||||
pass
|
||||
|
||||
|
||||
async def bam_copy(base_blendfile: Path, target_blendfile: Path) -> typing.List[Path]:
|
||||
"""Uses BAM to copy the given file and dependencies to the target blendfile.
|
||||
|
||||
Due to the way blendfile_pack.py is programmed/structured, we cannot import it
|
||||
and call a function; it has to be run in a subprocess.
|
||||
|
||||
:raises: asyncio.CanceledError if the task was cancelled.
|
||||
:raises: asyncio.TimeoutError if reading a line from the BAM process timed out.
|
||||
:raises: CommandExecutionError if the subprocess failed or output invalid UTF-8.
|
||||
:returns: a list of missing sources; hopefully empty.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import shlex
|
||||
import subprocess
|
||||
|
||||
import bpy
|
||||
import io_blend_utils
|
||||
|
||||
args = [
|
||||
bpy.app.binary_path_python,
|
||||
'-m', 'bam.pack',
|
||||
'--input', str(base_blendfile),
|
||||
'--output', str(target_blendfile),
|
||||
'--mode', 'FILE',
|
||||
]
|
||||
|
||||
cmd_to_log = ' '.join(shlex.quote(s) for s in args)
|
||||
log.info('Executing %s', cmd_to_log)
|
||||
|
||||
proc = await asyncio.create_subprocess_exec(
|
||||
*args,
|
||||
env={'PYTHONPATH': io_blend_utils.pythonpath()},
|
||||
stdin=subprocess.DEVNULL,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
)
|
||||
|
||||
missing_sources = []
|
||||
|
||||
try:
|
||||
while not proc.stdout.at_eof():
|
||||
line = await asyncio.wait_for(proc.stdout.readline(),
|
||||
SUBPROC_READLINE_TIMEOUT)
|
||||
if not line:
|
||||
# EOF received, so let's bail.
|
||||
break
|
||||
|
||||
try:
|
||||
line = line.decode('utf8')
|
||||
except UnicodeDecodeError as ex:
|
||||
raise CommandExecutionError('Command produced non-UTF8 output, '
|
||||
'aborting: %s' % ex)
|
||||
|
||||
line = line.rstrip()
|
||||
if 'source missing:' in line:
|
||||
path = parse_missing_source(line)
|
||||
missing_sources.append(path)
|
||||
log.warning('Source is missing: %s', path)
|
||||
|
||||
log.info(' %s', line)
|
||||
finally:
|
||||
if proc.returncode is None:
|
||||
# Always wait for the process, to avoid zombies.
|
||||
try:
|
||||
proc.kill()
|
||||
except ProcessLookupError:
|
||||
# The process is already stopped, so killing is impossible. That's ok.
|
||||
log.debug("The process was already stopped, aborting is impossible. That's ok.")
|
||||
await proc.wait()
|
||||
log.info('The process stopped with status code %i', proc.returncode)
|
||||
|
||||
if proc.returncode:
|
||||
raise CommandExecutionError('Process stopped with status %i' % proc.returncode)
|
||||
|
||||
return missing_sources
|
||||
|
||||
|
||||
def parse_missing_source(line: str) -> Path:
|
||||
r"""Parses a "missing source" line into a pathlib.Path.
|
||||
|
||||
>>> parse_missing_source(r" source missing: b'D\xc3\xaffficult \xc3\x9cTF-8 filename'")
|
||||
PosixPath('Dïfficult ÜTF-8 filename')
|
||||
>>> parse_missing_source(r" source missing: b'D\xfffficult Win1252 f\xeflen\xe6me'")
|
||||
PosixPath('D<EFBFBD>fficult Win1252 f<>len<65>me')
|
||||
"""
|
||||
|
||||
_, missing_source = line.split(': ', 1)
|
||||
missing_source_as_bytes = parse_byte_literal(missing_source.strip())
|
||||
|
||||
# The file could originate from any platform, so UTF-8 and the current platform's
|
||||
# filesystem encodings are just guesses.
|
||||
try:
|
||||
missing_source = missing_source_as_bytes.decode('utf8')
|
||||
except UnicodeDecodeError:
|
||||
import sys
|
||||
try:
|
||||
missing_source = missing_source_as_bytes.decode(sys.getfilesystemencoding())
|
||||
except UnicodeDecodeError:
|
||||
missing_source = missing_source_as_bytes.decode('ascii', errors='replace')
|
||||
|
||||
path = Path(missing_source)
|
||||
|
||||
return path
|
||||
|
||||
|
||||
def parse_byte_literal(bytes_literal: str) -> bytes:
|
||||
r"""Parses a repr(bytes) output into a bytes object.
|
||||
|
||||
>>> parse_byte_literal(r"b'D\xc3\xaffficult \xc3\x9cTF-8 filename'")
|
||||
b'D\xc3\xaffficult \xc3\x9cTF-8 filename'
|
||||
>>> parse_byte_literal(r"b'D\xeffficult Win1252 f\xeflen\xe6me'")
|
||||
b'D\xeffficult Win1252 f\xeflen\xe6me'
|
||||
"""
|
||||
|
||||
# Some very basic assertions to make sure we have a proper bytes literal.
|
||||
assert bytes_literal[0] == "b"
|
||||
assert bytes_literal[1] in {'"', "'"}
|
||||
assert bytes_literal[-1] == bytes_literal[1]
|
||||
|
||||
import ast
|
||||
return ast.literal_eval(bytes_literal)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
import doctest
|
||||
|
||||
doctest.testmod()
|
199
blender_cloud/flamenco/bat_interface.py
Normal file
199
blender_cloud/flamenco/bat_interface.py
Normal file
@ -0,0 +1,199 @@
|
||||
"""BAT🦇 packing interface for Flamenco."""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import pathlib
|
||||
import re
|
||||
import threading
|
||||
import typing
|
||||
import urllib.parse
|
||||
|
||||
import bpy
|
||||
from blender_asset_tracer import pack
|
||||
from blender_asset_tracer.pack import progress, transfer, shaman
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
_running_packer = None # type: pack.Packer
|
||||
_packer_lock = threading.RLock()
|
||||
|
||||
# For using in other parts of the add-on, so only this file imports BAT.
|
||||
Aborted = pack.Aborted
|
||||
FileTransferError = transfer.FileTransferError
|
||||
parse_shaman_endpoint = shaman.parse_endpoint
|
||||
|
||||
|
||||
class BatProgress(progress.Callback):
|
||||
"""Report progress of BAT Packing to the UI.
|
||||
|
||||
Uses asyncio.run_coroutine_threadsafe() to ensure the UI is only updated
|
||||
from the main thread. This is required since we run the BAT Pack in a
|
||||
background thread.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.loop = asyncio.get_event_loop()
|
||||
|
||||
def _set_attr(self, attr: str, value):
|
||||
async def do_it():
|
||||
setattr(bpy.context.window_manager, attr, value)
|
||||
|
||||
asyncio.run_coroutine_threadsafe(do_it(), loop=self.loop)
|
||||
|
||||
def _txt(self, msg: str):
|
||||
"""Set a text in a thread-safe way."""
|
||||
self._set_attr("flamenco_status_txt", msg)
|
||||
|
||||
def _status(self, status: str):
|
||||
"""Set the flamenco_status property in a thread-safe way."""
|
||||
self._set_attr("flamenco_status", status)
|
||||
|
||||
def _progress(self, progress: int):
|
||||
"""Set the flamenco_progress property in a thread-safe way."""
|
||||
self._set_attr("flamenco_progress", progress)
|
||||
|
||||
def pack_start(self) -> None:
|
||||
self._txt("Starting BAT Pack operation")
|
||||
|
||||
def pack_done(
|
||||
self, output_blendfile: pathlib.Path, missing_files: typing.Set[pathlib.Path]
|
||||
) -> None:
|
||||
if missing_files:
|
||||
self._txt("There were %d missing files" % len(missing_files))
|
||||
else:
|
||||
self._txt("Pack of %s done" % output_blendfile.name)
|
||||
|
||||
def pack_aborted(self, reason: str):
|
||||
self._txt("Aborted: %s" % reason)
|
||||
self._status("ABORTED")
|
||||
|
||||
def trace_blendfile(self, filename: pathlib.Path) -> None:
|
||||
"""Called for every blendfile opened when tracing dependencies."""
|
||||
self._txt("Inspecting %s" % filename.name)
|
||||
|
||||
def trace_asset(self, filename: pathlib.Path) -> None:
|
||||
if filename.stem == ".blend":
|
||||
return
|
||||
self._txt("Found asset %s" % filename.name)
|
||||
|
||||
def rewrite_blendfile(self, orig_filename: pathlib.Path) -> None:
|
||||
self._txt("Rewriting %s" % orig_filename.name)
|
||||
|
||||
def transfer_file(self, src: pathlib.Path, dst: pathlib.Path) -> None:
|
||||
self._txt("Transferring %s" % src.name)
|
||||
|
||||
def transfer_file_skipped(self, src: pathlib.Path, dst: pathlib.Path) -> None:
|
||||
self._txt("Skipped %s" % src.name)
|
||||
|
||||
def transfer_progress(self, total_bytes: int, transferred_bytes: int) -> None:
|
||||
self._progress(round(100 * transferred_bytes / total_bytes))
|
||||
|
||||
def missing_file(self, filename: pathlib.Path) -> None:
|
||||
# TODO(Sybren): report missing files in a nice way
|
||||
pass
|
||||
|
||||
|
||||
class ShamanPacker(shaman.ShamanPacker):
|
||||
"""Packer with support for getting an auth token from Flamenco Server."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
bfile: pathlib.Path,
|
||||
project: pathlib.Path,
|
||||
target: str,
|
||||
endpoint: str,
|
||||
checkout_id: str,
|
||||
*,
|
||||
manager_id: str,
|
||||
**kwargs
|
||||
) -> None:
|
||||
self.manager_id = manager_id
|
||||
super().__init__(bfile, project, target, endpoint, checkout_id, **kwargs)
|
||||
|
||||
def _get_auth_token(self) -> str:
|
||||
"""get a token from Flamenco Server"""
|
||||
|
||||
from ..blender import PILLAR_SERVER_URL
|
||||
from ..pillar import blender_id_subclient, uncached_session, SUBCLIENT_ID
|
||||
|
||||
url = urllib.parse.urljoin(
|
||||
PILLAR_SERVER_URL, "flamenco/jwt/generate-token/%s" % self.manager_id
|
||||
)
|
||||
auth_token = blender_id_subclient()["token"]
|
||||
|
||||
resp = uncached_session.get(url, auth=(auth_token, SUBCLIENT_ID))
|
||||
resp.raise_for_status()
|
||||
return resp.text
|
||||
|
||||
|
||||
async def copy(
|
||||
context,
|
||||
base_blendfile: pathlib.Path,
|
||||
project: pathlib.Path,
|
||||
target: str,
|
||||
exclusion_filter: str,
|
||||
*,
|
||||
relative_only: bool,
|
||||
packer_class=pack.Packer,
|
||||
**packer_args
|
||||
) -> typing.Tuple[pathlib.Path, typing.Set[pathlib.Path]]:
|
||||
"""Use BAT🦇 to copy the given file and dependencies to the target location.
|
||||
|
||||
:raises: FileTransferError if a file couldn't be transferred.
|
||||
:returns: the path of the packed blend file, and a set of missing sources.
|
||||
"""
|
||||
global _running_packer
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
wm = bpy.context.window_manager
|
||||
|
||||
packer = packer_class(
|
||||
base_blendfile,
|
||||
project,
|
||||
target,
|
||||
compress=True,
|
||||
relative_only=relative_only,
|
||||
**packer_args
|
||||
)
|
||||
with packer:
|
||||
with _packer_lock:
|
||||
if exclusion_filter:
|
||||
# There was a mistake in an older version of the property tooltip,
|
||||
# showing semicolon-separated instead of space-separated. We now
|
||||
# just handle both.
|
||||
filter_parts = re.split("[ ;]+", exclusion_filter.strip(" ;"))
|
||||
packer.exclude(*filter_parts)
|
||||
|
||||
packer.progress_cb = BatProgress()
|
||||
_running_packer = packer
|
||||
|
||||
log.debug("awaiting strategise")
|
||||
wm.flamenco_status = "INVESTIGATING"
|
||||
await loop.run_in_executor(None, packer.strategise)
|
||||
|
||||
log.debug("awaiting execute")
|
||||
wm.flamenco_status = "TRANSFERRING"
|
||||
await loop.run_in_executor(None, packer.execute)
|
||||
|
||||
log.debug("done")
|
||||
wm.flamenco_status = "DONE"
|
||||
|
||||
with _packer_lock:
|
||||
_running_packer = None
|
||||
|
||||
return packer.output_path, packer.missing_files
|
||||
|
||||
|
||||
def abort() -> None:
|
||||
"""Abort a running copy() call.
|
||||
|
||||
No-op when there is no running copy(). Can be called from any thread.
|
||||
"""
|
||||
|
||||
with _packer_lock:
|
||||
if _running_packer is None:
|
||||
log.debug("No running packer, ignoring call to bat_abort()")
|
||||
return
|
||||
log.info("Aborting running packer")
|
||||
_running_packer.abort()
|
@ -1,13 +1,116 @@
|
||||
import functools
|
||||
import pathlib
|
||||
import typing
|
||||
|
||||
from pillarsdk.resource import List, Find, Create
|
||||
|
||||
|
||||
class Manager(List, Find):
|
||||
"""Manager class wrapping the REST nodes endpoint"""
|
||||
path = 'flamenco/managers'
|
||||
|
||||
path = "flamenco/managers"
|
||||
PurePlatformPath = pathlib.PurePath
|
||||
|
||||
@functools.lru_cache(maxsize=1)
|
||||
def _path_replacements(self) -> list:
|
||||
"""Defer to _path_replacements_vN() to get path replacement vars.
|
||||
|
||||
Returns a list of tuples (variable name, variable value).
|
||||
"""
|
||||
settings_version = self.settings_version or 1
|
||||
try:
|
||||
settings_func = getattr(self, "_path_replacements_v%d" % settings_version)
|
||||
except AttributeError:
|
||||
raise RuntimeError(
|
||||
"This manager has unsupported settings version %d; "
|
||||
"upgrade Blender Cloud add-on"
|
||||
)
|
||||
|
||||
def longest_value_first(item):
|
||||
var_name, var_value = item
|
||||
return -len(var_value), var_value, var_name
|
||||
|
||||
replacements = settings_func()
|
||||
replacements.sort(key=longest_value_first)
|
||||
return replacements
|
||||
|
||||
def _path_replacements_v1(self) -> typing.List[typing.Tuple[str, str]]:
|
||||
import platform
|
||||
|
||||
if self.path_replacement is None:
|
||||
return []
|
||||
|
||||
items = self.path_replacement.to_dict().items()
|
||||
|
||||
this_platform = platform.system().lower()
|
||||
return [
|
||||
(varname, platform_replacements[this_platform])
|
||||
for varname, platform_replacements in items
|
||||
if this_platform in platform_replacements
|
||||
]
|
||||
|
||||
def _path_replacements_v2(self) -> typing.List[typing.Tuple[str, str]]:
|
||||
import platform
|
||||
|
||||
if not self.variables:
|
||||
return []
|
||||
|
||||
this_platform = platform.system().lower()
|
||||
audiences = {"users", "all"}
|
||||
|
||||
replacements = []
|
||||
for var_name, variable in self.variables.to_dict().items():
|
||||
# Path replacement requires bidirectional variables.
|
||||
if variable.get("direction") != "twoway":
|
||||
continue
|
||||
|
||||
for var_value in variable.get("values", []):
|
||||
if var_value.get("audience") not in audiences:
|
||||
continue
|
||||
if var_value.get("platform", "").lower() != this_platform:
|
||||
continue
|
||||
|
||||
replacements.append((var_name, var_value.get("value")))
|
||||
return replacements
|
||||
|
||||
def replace_path(self, some_path: pathlib.PurePath) -> str:
|
||||
"""Performs path variable replacement.
|
||||
|
||||
Tries to find platform-specific path prefixes, and replaces them with
|
||||
variables.
|
||||
"""
|
||||
assert isinstance(some_path, pathlib.PurePath), (
|
||||
"some_path should be a PurePath, not %r" % some_path
|
||||
)
|
||||
|
||||
for varname, path in self._path_replacements():
|
||||
replacement = self.PurePlatformPath(path)
|
||||
try:
|
||||
relpath = some_path.relative_to(replacement)
|
||||
except ValueError:
|
||||
# Not relative to each other, so no replacement possible
|
||||
continue
|
||||
|
||||
replacement_root = self.PurePlatformPath("{%s}" % varname)
|
||||
return (replacement_root / relpath).as_posix()
|
||||
|
||||
return some_path.as_posix()
|
||||
|
||||
|
||||
class Job(List, Find, Create):
|
||||
"""Job class wrapping the REST nodes endpoint
|
||||
"""
|
||||
path = 'flamenco/jobs'
|
||||
ensure_query_projections = {'project': 1}
|
||||
"""Job class wrapping the REST nodes endpoint"""
|
||||
|
||||
path = "flamenco/jobs"
|
||||
ensure_query_projections = {"project": 1}
|
||||
|
||||
def patch(self, payload: dict, api=None):
|
||||
import pillarsdk.utils
|
||||
|
||||
api = api or self.api
|
||||
|
||||
url = pillarsdk.utils.join_url(self.path, str(self["_id"]))
|
||||
headers = pillarsdk.utils.merge_dict(
|
||||
self.http_headers(), {"Content-Type": "application/json"}
|
||||
)
|
||||
response = api.patch(url, payload, headers=headers)
|
||||
return response
|
||||
|
@ -23,28 +23,31 @@ from pillarsdk import exceptions as sdk_exceptions
|
||||
from .pillar import pillar_call
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
HOME_PROJECT_ENDPOINT = '/bcloud/home-project'
|
||||
HOME_PROJECT_ENDPOINT = "/bcloud/home-project"
|
||||
|
||||
|
||||
async def get_home_project(params=None) -> pillarsdk.Project:
|
||||
"""Returns the home project."""
|
||||
|
||||
log.debug('Getting home project')
|
||||
log.debug("Getting home project")
|
||||
try:
|
||||
return await pillar_call(pillarsdk.Project.find_from_endpoint,
|
||||
HOME_PROJECT_ENDPOINT, params=params)
|
||||
return await pillar_call(
|
||||
pillarsdk.Project.find_from_endpoint, HOME_PROJECT_ENDPOINT, params=params
|
||||
)
|
||||
except sdk_exceptions.ForbiddenAccess:
|
||||
log.warning('Access to the home project was denied. '
|
||||
'Double-check that you are logged in with valid BlenderID credentials.')
|
||||
log.warning(
|
||||
"Access to the home project was denied. "
|
||||
"Double-check that you are logged in with valid BlenderID credentials."
|
||||
)
|
||||
raise
|
||||
except sdk_exceptions.ResourceNotFound:
|
||||
log.warning('No home project available.')
|
||||
log.warning("No home project available.")
|
||||
raise
|
||||
|
||||
|
||||
async def get_home_project_id() -> str:
|
||||
"""Returns just the ID of the home project."""
|
||||
|
||||
home_proj = await get_home_project({'projection': {'_id': 1}})
|
||||
home_proj_id = home_proj['_id']
|
||||
home_proj = await get_home_project({"projection": {"_id": 1}})
|
||||
home_proj_id = home_proj["_id"]
|
||||
return home_proj_id
|
||||
|
@ -27,8 +27,8 @@ from pillarsdk import exceptions as sdk_exceptions
|
||||
from .pillar import pillar_call
|
||||
from . import async_loop, pillar, home_project, blender
|
||||
|
||||
REQUIRES_ROLES_FOR_IMAGE_SHARING = {'subscriber', 'demo'}
|
||||
IMAGE_SHARING_GROUP_NODE_NAME = 'Image sharing'
|
||||
REQUIRES_ROLES_FOR_IMAGE_SHARING = {"subscriber", "demo"}
|
||||
IMAGE_SHARING_GROUP_NODE_NAME = "Image sharing"
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@ -36,154 +36,172 @@ async def find_image_sharing_group_id(home_project_id, user_id):
|
||||
# Find the top-level image sharing group node.
|
||||
try:
|
||||
share_group, created = await pillar.find_or_create_node(
|
||||
where={'project': home_project_id,
|
||||
'node_type': 'group',
|
||||
'parent': None,
|
||||
'name': IMAGE_SHARING_GROUP_NODE_NAME},
|
||||
additional_create_props={
|
||||
'user': user_id,
|
||||
'properties': {},
|
||||
where={
|
||||
"project": home_project_id,
|
||||
"node_type": "group",
|
||||
"parent": None,
|
||||
"name": IMAGE_SHARING_GROUP_NODE_NAME,
|
||||
},
|
||||
projection={'_id': 1},
|
||||
may_create=True)
|
||||
additional_create_props={
|
||||
"user": user_id,
|
||||
"properties": {},
|
||||
},
|
||||
projection={"_id": 1},
|
||||
may_create=True,
|
||||
)
|
||||
except pillar.PillarError:
|
||||
log.exception('Pillar error caught')
|
||||
raise pillar.PillarError('Unable to find image sharing folder on the Cloud')
|
||||
log.exception("Pillar error caught")
|
||||
raise pillar.PillarError("Unable to find image sharing folder on the Cloud")
|
||||
|
||||
return share_group['_id']
|
||||
return share_group["_id"]
|
||||
|
||||
|
||||
class PILLAR_OT_image_share(pillar.PillarOperatorMixin,
|
||||
async_loop.AsyncModalOperatorMixin,
|
||||
bpy.types.Operator):
|
||||
bl_idname = 'pillar.image_share'
|
||||
bl_label = 'Share an image/screenshot via Blender Cloud'
|
||||
bl_description = 'Uploads an image for sharing via Blender Cloud'
|
||||
class PILLAR_OT_image_share(
|
||||
pillar.PillarOperatorMixin, async_loop.AsyncModalOperatorMixin, bpy.types.Operator
|
||||
):
|
||||
bl_idname = "pillar.image_share"
|
||||
bl_label = "Share an image/screenshot via Blender Cloud"
|
||||
bl_description = "Uploads an image for sharing via Blender Cloud"
|
||||
|
||||
log = logging.getLogger('bpy.ops.%s' % bl_idname)
|
||||
log = logging.getLogger("bpy.ops.%s" % bl_idname)
|
||||
|
||||
home_project_id = None
|
||||
home_project_url = 'home'
|
||||
home_project_url = "home"
|
||||
share_group_id = None # top-level share group node ID
|
||||
user_id = None
|
||||
|
||||
target = bpy.props.EnumProperty(
|
||||
target: bpy.props.EnumProperty(
|
||||
items=[
|
||||
('FILE', 'File', 'Share an image file'),
|
||||
('DATABLOCK', 'Datablock', 'Share an image datablock'),
|
||||
('SCREENSHOT', 'Screenshot', 'Share a screenshot'),
|
||||
("FILE", "File", "Share an image file"),
|
||||
("DATABLOCK", "Datablock", "Share an image datablock"),
|
||||
("SCREENSHOT", "Screenshot", "Share a screenshot"),
|
||||
],
|
||||
name='target',
|
||||
default='SCREENSHOT')
|
||||
name="target",
|
||||
default="SCREENSHOT",
|
||||
)
|
||||
|
||||
name = bpy.props.StringProperty(name='name',
|
||||
description='File or datablock name to sync')
|
||||
name: bpy.props.StringProperty(
|
||||
name="name", description="File or datablock name to sync"
|
||||
)
|
||||
|
||||
screenshot_show_multiview = bpy.props.BoolProperty(
|
||||
name='screenshot_show_multiview',
|
||||
description='Enable Multi-View',
|
||||
default=False)
|
||||
screenshot_show_multiview: bpy.props.BoolProperty(
|
||||
name="screenshot_show_multiview", description="Enable Multi-View", default=False
|
||||
)
|
||||
|
||||
screenshot_use_multiview = bpy.props.BoolProperty(
|
||||
name='screenshot_use_multiview',
|
||||
description='Use Multi-View',
|
||||
default=False)
|
||||
screenshot_use_multiview: bpy.props.BoolProperty(
|
||||
name="screenshot_use_multiview", description="Use Multi-View", default=False
|
||||
)
|
||||
|
||||
screenshot_full = bpy.props.BoolProperty(
|
||||
name='screenshot_full',
|
||||
description='Full Screen, Capture the whole window (otherwise only capture the active area)',
|
||||
default=False)
|
||||
screenshot_full: bpy.props.BoolProperty(
|
||||
name="screenshot_full",
|
||||
description="Full Screen, Capture the whole window (otherwise only capture the active area)",
|
||||
default=False,
|
||||
)
|
||||
|
||||
def invoke(self, context, event):
|
||||
# Do a quick test on datablock dirtyness. If it's not packed and dirty,
|
||||
# the user should save it first.
|
||||
if self.target == 'DATABLOCK':
|
||||
if self.target == "DATABLOCK":
|
||||
if not self.name:
|
||||
self.report({'ERROR'}, 'No name given of the datablock to share.')
|
||||
return {'CANCELLED'}
|
||||
self.report({"ERROR"}, "No name given of the datablock to share.")
|
||||
return {"CANCELLED"}
|
||||
|
||||
datablock = bpy.data.images[self.name]
|
||||
if datablock.type == 'IMAGE' and datablock.is_dirty and not datablock.packed_file:
|
||||
self.report({'ERROR'}, 'Datablock is dirty, save it first.')
|
||||
return {'CANCELLED'}
|
||||
if (
|
||||
datablock.type == "IMAGE"
|
||||
and datablock.is_dirty
|
||||
and not datablock.packed_file
|
||||
):
|
||||
self.report({"ERROR"}, "Datablock is dirty, save it first.")
|
||||
return {"CANCELLED"}
|
||||
|
||||
return async_loop.AsyncModalOperatorMixin.invoke(self, context, event)
|
||||
|
||||
async def async_execute(self, context):
|
||||
"""Entry point of the asynchronous operator."""
|
||||
|
||||
self.report({'INFO'}, 'Communicating with Blender Cloud')
|
||||
# We don't want to influence what is included in the screen shot.
|
||||
if self.target == "SCREENSHOT":
|
||||
print("Blender Cloud add-on is communicating with Blender Cloud")
|
||||
else:
|
||||
self.report({"INFO"}, "Communicating with Blender Cloud")
|
||||
|
||||
try:
|
||||
# Refresh credentials
|
||||
try:
|
||||
db_user = await self.check_credentials(context, REQUIRES_ROLES_FOR_IMAGE_SHARING)
|
||||
self.user_id = db_user['_id']
|
||||
self.log.debug('Found user ID: %s', self.user_id)
|
||||
except pillar.NotSubscribedToCloudError:
|
||||
self.log.exception('User not subscribed to cloud.')
|
||||
self.report({'ERROR'}, 'Please subscribe to the Blender Cloud.')
|
||||
self._state = 'QUIT'
|
||||
db_user = await self.check_credentials(
|
||||
context, REQUIRES_ROLES_FOR_IMAGE_SHARING
|
||||
)
|
||||
self.user_id = db_user["_id"]
|
||||
self.log.debug("Found user ID: %s", self.user_id)
|
||||
except pillar.NotSubscribedToCloudError as ex:
|
||||
self._log_subscription_needed(can_renew=ex.can_renew)
|
||||
self._state = "QUIT"
|
||||
return
|
||||
except pillar.UserNotLoggedInError:
|
||||
self.log.exception('Error checking/refreshing credentials.')
|
||||
self.report({'ERROR'}, 'Please log in on Blender ID first.')
|
||||
self._state = 'QUIT'
|
||||
self.log.exception("Error checking/refreshing credentials.")
|
||||
self.report({"ERROR"}, "Please log in on Blender ID first.")
|
||||
self._state = "QUIT"
|
||||
return
|
||||
|
||||
# Find the home project.
|
||||
try:
|
||||
home_proj = await home_project.get_home_project({
|
||||
'projection': {'_id': 1, 'url': 1}
|
||||
})
|
||||
home_proj = await home_project.get_home_project(
|
||||
{"projection": {"_id": 1, "url": 1}}
|
||||
)
|
||||
except sdk_exceptions.ForbiddenAccess:
|
||||
self.log.exception('Forbidden access to home project.')
|
||||
self.report({'ERROR'}, 'Did not get access to home project.')
|
||||
self._state = 'QUIT'
|
||||
self.log.exception("Forbidden access to home project.")
|
||||
self.report({"ERROR"}, "Did not get access to home project.")
|
||||
self._state = "QUIT"
|
||||
return
|
||||
except sdk_exceptions.ResourceNotFound:
|
||||
self.report({'ERROR'}, 'Home project not found.')
|
||||
self._state = 'QUIT'
|
||||
self.report({"ERROR"}, "Home project not found.")
|
||||
self._state = "QUIT"
|
||||
return
|
||||
|
||||
self.home_project_id = home_proj['_id']
|
||||
self.home_project_url = home_proj['url']
|
||||
self.home_project_id = home_proj["_id"]
|
||||
self.home_project_url = home_proj["url"]
|
||||
|
||||
try:
|
||||
gid = await find_image_sharing_group_id(self.home_project_id,
|
||||
self.user_id)
|
||||
gid = await find_image_sharing_group_id(
|
||||
self.home_project_id, self.user_id
|
||||
)
|
||||
self.share_group_id = gid
|
||||
self.log.debug('Found group node ID: %s', self.share_group_id)
|
||||
self.log.debug("Found group node ID: %s", self.share_group_id)
|
||||
except sdk_exceptions.ForbiddenAccess:
|
||||
self.log.exception('Unable to find Group ID')
|
||||
self.report({'ERROR'}, 'Unable to find sync folder.')
|
||||
self._state = 'QUIT'
|
||||
self.log.exception("Unable to find Group ID")
|
||||
self.report({"ERROR"}, "Unable to find sync folder.")
|
||||
self._state = "QUIT"
|
||||
return
|
||||
|
||||
await self.share_image(context)
|
||||
except Exception as ex:
|
||||
self.log.exception('Unexpected exception caught.')
|
||||
self.report({'ERROR'}, 'Unexpected error %s: %s' % (type(ex), ex))
|
||||
self.log.exception("Unexpected exception caught.")
|
||||
self.report({"ERROR"}, "Unexpected error %s: %s" % (type(ex), ex))
|
||||
|
||||
self._state = 'QUIT'
|
||||
self._state = "QUIT"
|
||||
|
||||
async def share_image(self, context):
|
||||
"""Sends files to the Pillar server."""
|
||||
|
||||
if self.target == 'FILE':
|
||||
self.report({'INFO'}, "Uploading %s '%s'" % (self.target.lower(), self.name))
|
||||
if self.target == "FILE":
|
||||
self.report(
|
||||
{"INFO"}, "Uploading %s '%s'" % (self.target.lower(), self.name)
|
||||
)
|
||||
node = await self.upload_file(self.name)
|
||||
elif self.target == 'SCREENSHOT':
|
||||
elif self.target == "SCREENSHOT":
|
||||
node = await self.upload_screenshot(context)
|
||||
else:
|
||||
self.report({'INFO'}, "Uploading %s '%s'" % (self.target.lower(), self.name))
|
||||
self.report(
|
||||
{"INFO"}, "Uploading %s '%s'" % (self.target.lower(), self.name)
|
||||
)
|
||||
node = await self.upload_datablock(context)
|
||||
|
||||
self.report({'INFO'}, 'Upload complete, creating link to share.')
|
||||
self.report({"INFO"}, "Upload complete, creating link to share.")
|
||||
share_info = await pillar_call(node.share)
|
||||
url = share_info.get('short_link')
|
||||
url = share_info.get("short_link")
|
||||
context.window_manager.clipboard = url
|
||||
self.report({'INFO'}, 'The link has been copied to your clipboard: %s' % url)
|
||||
self.report({"INFO"}, "The link has been copied to your clipboard: %s" % url)
|
||||
|
||||
await self.maybe_open_browser(url)
|
||||
|
||||
@ -193,19 +211,21 @@ class PILLAR_OT_image_share(pillar.PillarOperatorMixin,
|
||||
Returns the node.
|
||||
"""
|
||||
|
||||
self.log.info('Uploading file %s', filename)
|
||||
node = await pillar_call(pillarsdk.Node.create_asset_from_file,
|
||||
self.log.info("Uploading file %s", filename)
|
||||
node = await pillar_call(
|
||||
pillarsdk.Node.create_asset_from_file,
|
||||
self.home_project_id,
|
||||
self.share_group_id,
|
||||
'image',
|
||||
"image",
|
||||
filename,
|
||||
extra_where={'user': self.user_id},
|
||||
extra_where={"user": self.user_id},
|
||||
always_create_new_node=True,
|
||||
fileobj=fileobj,
|
||||
caching=False)
|
||||
node_id = node['_id']
|
||||
self.log.info('Created node %s', node_id)
|
||||
self.report({'INFO'}, 'File succesfully uploaded to the cloud!')
|
||||
caching=False,
|
||||
)
|
||||
node_id = node["_id"]
|
||||
self.log.info("Created node %s", node_id)
|
||||
self.report({"INFO"}, "File succesfully uploaded to the cloud!")
|
||||
|
||||
return node
|
||||
|
||||
@ -216,7 +236,7 @@ class PILLAR_OT_image_share(pillar.PillarOperatorMixin,
|
||||
|
||||
import webbrowser
|
||||
|
||||
self.log.info('Opening browser at %s', url)
|
||||
self.log.info("Opening browser at %s", url)
|
||||
webbrowser.open_new_tab(url)
|
||||
|
||||
async def upload_datablock(self, context) -> pillarsdk.Node:
|
||||
@ -228,12 +248,13 @@ class PILLAR_OT_image_share(pillar.PillarOperatorMixin,
|
||||
self.log.info("Uploading datablock '%s'" % self.name)
|
||||
datablock = bpy.data.images[self.name]
|
||||
|
||||
if datablock.type == 'RENDER_RESULT':
|
||||
if datablock.type == "RENDER_RESULT":
|
||||
# Construct a sensible name for this render.
|
||||
filename = '%s-%s-render%s' % (
|
||||
filename = "%s-%s-render%s" % (
|
||||
os.path.splitext(os.path.basename(context.blend_data.filepath))[0],
|
||||
context.scene.name,
|
||||
context.scene.render.file_extension)
|
||||
context.scene.render.file_extension,
|
||||
)
|
||||
return await self.upload_via_tempdir(datablock, filename)
|
||||
|
||||
if datablock.packed_file is not None:
|
||||
@ -262,7 +283,7 @@ class PILLAR_OT_image_share(pillar.PillarOperatorMixin,
|
||||
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
filepath = os.path.join(tmpdir, filename_on_cloud)
|
||||
self.log.debug('Saving %s to %s', datablock, filepath)
|
||||
self.log.debug("Saving %s to %s", datablock, filepath)
|
||||
datablock.save_render(filepath)
|
||||
return await self.upload_file(filepath)
|
||||
|
||||
@ -274,25 +295,27 @@ class PILLAR_OT_image_share(pillar.PillarOperatorMixin,
|
||||
|
||||
import io
|
||||
|
||||
filename = '%s.%s' % (datablock.name, datablock.file_format.lower())
|
||||
filename = "%s.%s" % (datablock.name, datablock.file_format.lower())
|
||||
fileobj = io.BytesIO(datablock.packed_file.data)
|
||||
fileobj.seek(0) # ensure PillarSDK reads the file from the beginning.
|
||||
self.log.info('Uploading packed file directly from memory to %r.', filename)
|
||||
self.log.info("Uploading packed file directly from memory to %r.", filename)
|
||||
return await self.upload_file(filename, fileobj=fileobj)
|
||||
|
||||
async def upload_screenshot(self, context) -> pillarsdk.Node:
|
||||
"""Takes a screenshot, saves it to a temp file, and uploads it."""
|
||||
|
||||
self.name = datetime.datetime.now().strftime('Screenshot-%Y-%m-%d-%H%M%S.png')
|
||||
self.report({'INFO'}, "Uploading %s '%s'" % (self.target.lower(), self.name))
|
||||
self.name = datetime.datetime.now().strftime("Screenshot-%Y-%m-%d-%H%M%S.png")
|
||||
self.report({"INFO"}, "Uploading %s '%s'" % (self.target.lower(), self.name))
|
||||
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
filepath = os.path.join(tmpdir, self.name)
|
||||
self.log.debug('Saving screenshot to %s', filepath)
|
||||
bpy.ops.screen.screenshot(filepath=filepath,
|
||||
self.log.debug("Saving screenshot to %s", filepath)
|
||||
bpy.ops.screen.screenshot(
|
||||
filepath=filepath,
|
||||
show_multiview=self.screenshot_show_multiview,
|
||||
use_multiview=self.screenshot_use_multiview,
|
||||
full=self.screenshot_full)
|
||||
full=self.screenshot_full,
|
||||
)
|
||||
return await self.upload_file(filepath)
|
||||
|
||||
|
||||
@ -301,34 +324,47 @@ def image_editor_menu(self, context):
|
||||
|
||||
box = self.layout.row()
|
||||
if image and image.has_data:
|
||||
text = 'Share on Blender Cloud'
|
||||
if image.type == 'IMAGE' and image.is_dirty and not image.packed_file:
|
||||
text = "Share on Blender Cloud"
|
||||
if image.type == "IMAGE" and image.is_dirty and not image.packed_file:
|
||||
box.enabled = False
|
||||
text = 'Save image before sharing on Blender Cloud'
|
||||
text = "Save image before sharing on Blender Cloud"
|
||||
|
||||
props = box.operator(PILLAR_OT_image_share.bl_idname, text=text,
|
||||
icon_value=blender.icon('CLOUD'))
|
||||
props.target = 'DATABLOCK'
|
||||
props = box.operator(
|
||||
PILLAR_OT_image_share.bl_idname, text=text, icon_value=blender.icon("CLOUD")
|
||||
)
|
||||
props.target = "DATABLOCK"
|
||||
props.name = image.name
|
||||
|
||||
|
||||
def window_menu(self, context):
|
||||
props = self.layout.operator(PILLAR_OT_image_share.bl_idname,
|
||||
text='Share screenshot via Blender Cloud',
|
||||
icon_value=blender.icon('CLOUD'))
|
||||
props.target = 'SCREENSHOT'
|
||||
props = self.layout.operator(
|
||||
PILLAR_OT_image_share.bl_idname,
|
||||
text="Share screenshot via Blender Cloud",
|
||||
icon_value=blender.icon("CLOUD"),
|
||||
)
|
||||
props.target = "SCREENSHOT"
|
||||
props.screenshot_full = True
|
||||
|
||||
|
||||
def get_topbar_menu():
|
||||
"""Return the topbar menu in a Blender 2.79 and 2.80 compatible way."""
|
||||
try:
|
||||
menu = bpy.types.TOPBAR_MT_window
|
||||
except AttributeError:
|
||||
# Blender < 2.80
|
||||
menu = bpy.types.INFO_MT_window
|
||||
return menu
|
||||
|
||||
|
||||
def register():
|
||||
bpy.utils.register_class(PILLAR_OT_image_share)
|
||||
|
||||
bpy.types.IMAGE_MT_image.append(image_editor_menu)
|
||||
bpy.types.INFO_MT_window.append(window_menu)
|
||||
get_topbar_menu().append(window_menu)
|
||||
|
||||
|
||||
def unregister():
|
||||
bpy.utils.unregister_class(PILLAR_OT_image_share)
|
||||
|
||||
bpy.types.IMAGE_MT_image.remove(image_editor_menu)
|
||||
bpy.types.INFO_MT_window.remove(window_menu)
|
||||
get_topbar_menu().remove(window_menu)
|
||||
|
661
blender_cloud/pillar.py
Normal file → Executable file
661
blender_cloud/pillar.py
Normal file → Executable file
File diff suppressed because it is too large
Load Diff
180
blender_cloud/project_specific.py
Normal file
180
blender_cloud/project_specific.py
Normal file
@ -0,0 +1,180 @@
|
||||
"""Handle saving and loading project-specific settings."""
|
||||
|
||||
import contextlib
|
||||
import logging
|
||||
import typing
|
||||
|
||||
# Names of BlenderCloudPreferences properties that are both project-specific
|
||||
# and simple enough to store directly in a dict.
|
||||
PROJECT_SPECIFIC_SIMPLE_PROPS = ("cloud_project_local_path",)
|
||||
|
||||
# Names of BlenderCloudPreferences properties that are project-specific and
|
||||
# Flamenco Manager-specific, and simple enough to store in a dict.
|
||||
FLAMENCO_PER_PROJECT_PER_MANAGER = (
|
||||
"flamenco_exclude_filter",
|
||||
"flamenco_job_file_path",
|
||||
"flamenco_job_output_path",
|
||||
"flamenco_job_output_strip_components",
|
||||
"flamenco_relative_only",
|
||||
)
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
project_settings_loading = 0 # counter, if > 0 then we're loading stuff.
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def mark_as_loading():
|
||||
"""Sets project_settings_loading > 0 while the context is active.
|
||||
|
||||
A counter is used to allow for nested mark_as_loading() contexts.
|
||||
"""
|
||||
global project_settings_loading
|
||||
project_settings_loading += 1
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
project_settings_loading -= 1
|
||||
|
||||
|
||||
def update_preferences(
|
||||
prefs,
|
||||
names_to_update: typing.Iterable[str],
|
||||
new_values: typing.Mapping[str, typing.Any],
|
||||
):
|
||||
for name in names_to_update:
|
||||
if not hasattr(prefs, name):
|
||||
log.debug("not setting %r, property cannot be found", name)
|
||||
continue
|
||||
|
||||
if name in new_values:
|
||||
log.debug("setting %r = %r", name, new_values[name])
|
||||
setattr(prefs, name, new_values[name])
|
||||
else:
|
||||
# The property wasn't stored, so set the default value instead.
|
||||
bl_type, args = getattr(prefs.bl_rna, name)
|
||||
log.debug("finding default value for %r", name)
|
||||
if "default" not in args:
|
||||
log.debug("no default value for %r, not touching", name)
|
||||
continue
|
||||
log.debug("found default value for %r = %r", name, args["default"])
|
||||
setattr(prefs, name, args["default"])
|
||||
|
||||
|
||||
def handle_project_update(_=None, _2=None):
|
||||
"""Handles changing projects, which may cause extensions to be disabled/enabled.
|
||||
|
||||
Ignores arguments so that it can be used as property update callback.
|
||||
"""
|
||||
|
||||
from .blender import preferences, project_extensions
|
||||
|
||||
with mark_as_loading():
|
||||
prefs = preferences()
|
||||
project_id = prefs.project.project
|
||||
log.debug(
|
||||
"Updating internal state to reflect extensions enabled on current project %s.",
|
||||
project_id,
|
||||
)
|
||||
|
||||
project_extensions.cache_clear()
|
||||
|
||||
from blender_cloud import attract, flamenco
|
||||
|
||||
attract.deactivate()
|
||||
flamenco.deactivate()
|
||||
|
||||
enabled_for = project_extensions(project_id)
|
||||
log.info("Project extensions: %s", enabled_for)
|
||||
if "attract" in enabled_for:
|
||||
attract.activate()
|
||||
if "flamenco" in enabled_for:
|
||||
flamenco.activate()
|
||||
|
||||
# Load project-specific settings from the last time we visited this project.
|
||||
ps = prefs.get("project_settings", {}).get(project_id, {})
|
||||
if not ps:
|
||||
log.debug(
|
||||
"no project-specific settings are available, "
|
||||
"only resetting available Flamenco Managers"
|
||||
)
|
||||
# The Flamenco Manager should really be chosen explicitly out of the available
|
||||
# Managers.
|
||||
prefs.flamenco_manager.available_managers = []
|
||||
return
|
||||
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
from pprint import pformat
|
||||
|
||||
log.debug("loading project-specific settings:\n%s", pformat(ps.to_dict()))
|
||||
|
||||
# Restore simple properties.
|
||||
update_preferences(prefs, PROJECT_SPECIFIC_SIMPLE_PROPS, ps)
|
||||
|
||||
# Restore Flamenco settings.
|
||||
prefs.flamenco_manager.available_managers = ps.get(
|
||||
"flamenco_available_managers", []
|
||||
)
|
||||
flamenco_manager_id = ps.get("flamenco_manager_id")
|
||||
if flamenco_manager_id:
|
||||
log.debug("setting flamenco manager to %s", flamenco_manager_id)
|
||||
try:
|
||||
# This will trigger a load of Project+Manager-specfic settings.
|
||||
prefs.flamenco_manager.manager = flamenco_manager_id
|
||||
except TypeError:
|
||||
log.warning(
|
||||
"manager %s for this project could not be found",
|
||||
flamenco_manager_id,
|
||||
)
|
||||
elif prefs.flamenco_manager.available_managers:
|
||||
prefs.flamenco_manager.manager = prefs.flamenco_manager.available_managers[
|
||||
0
|
||||
]["_id"]
|
||||
|
||||
|
||||
def store(_=None, _2=None):
|
||||
"""Remember project-specific settings as soon as one of them changes.
|
||||
|
||||
Ignores arguments so that it can be used as property update callback.
|
||||
|
||||
No-op when project_settings_loading=True, to prevent saving project-
|
||||
specific settings while they are actually being loaded.
|
||||
"""
|
||||
from .blender import preferences
|
||||
|
||||
global project_settings_loading
|
||||
if project_settings_loading:
|
||||
return
|
||||
|
||||
prefs = preferences()
|
||||
project_id = prefs.project.project
|
||||
all_settings = prefs.get("project_settings", {})
|
||||
ps = all_settings.get(project_id, {}) # either a dict or bpy.types.IDPropertyGroup
|
||||
|
||||
for name in PROJECT_SPECIFIC_SIMPLE_PROPS:
|
||||
ps[name] = getattr(prefs, name)
|
||||
|
||||
# Store project-specific Flamenco settings
|
||||
ps["flamenco_manager_id"] = prefs.flamenco_manager.manager
|
||||
ps["flamenco_available_managers"] = prefs.flamenco_manager.available_managers
|
||||
|
||||
# Store per-project, per-manager settings for the current Manager.
|
||||
pppm = ps.get("flamenco_managers_settings", {})
|
||||
pppm[prefs.flamenco_manager.manager] = {
|
||||
name: getattr(prefs, name) for name in FLAMENCO_PER_PROJECT_PER_MANAGER
|
||||
}
|
||||
ps[
|
||||
"flamenco_managers_settings"
|
||||
] = pppm # IDPropertyGroup has no setdefault() method.
|
||||
|
||||
# Store this project's settings in the preferences.
|
||||
all_settings[project_id] = ps
|
||||
prefs["project_settings"] = all_settings
|
||||
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
from pprint import pformat
|
||||
|
||||
if hasattr(all_settings, "to_dict"):
|
||||
to_log = all_settings.to_dict()
|
||||
else:
|
||||
to_log = all_settings
|
||||
log.debug("Saving project-specific settings:\n%s", pformat(to_log))
|
@ -25,6 +25,7 @@ import functools
|
||||
import logging
|
||||
import pathlib
|
||||
import tempfile
|
||||
import typing
|
||||
import shutil
|
||||
|
||||
import bpy
|
||||
@ -34,33 +35,35 @@ import asyncio
|
||||
import pillarsdk
|
||||
from pillarsdk import exceptions as sdk_exceptions
|
||||
from .pillar import pillar_call
|
||||
from . import async_loop, pillar, cache, blendfile, home_project
|
||||
from . import async_loop, blender, pillar, cache, blendfile, home_project
|
||||
|
||||
SETTINGS_FILES_TO_UPLOAD = ['userpref.blend', 'startup.blend']
|
||||
SETTINGS_FILES_TO_UPLOAD = ["userpref.blend", "startup.blend"]
|
||||
|
||||
# These are RNA keys inside the userpref.blend file, and their
|
||||
# Python properties names. These settings will not be synced.
|
||||
LOCAL_SETTINGS_RNA = [
|
||||
(b'dpi', 'system.dpi'),
|
||||
(b'virtual_pixel', 'system.virtual_pixel_mode'),
|
||||
(b'compute_device_id', 'system.compute_device'),
|
||||
(b'compute_device_type', 'system.compute_device_type'),
|
||||
(b'fontdir', 'filepaths.font_directory'),
|
||||
(b'textudir', 'filepaths.texture_directory'),
|
||||
(b'renderdir', 'filepaths.render_output_directory'),
|
||||
(b'pythondir', 'filepaths.script_directory'),
|
||||
(b'sounddir', 'filepaths.sound_directory'),
|
||||
(b'tempdir', 'filepaths.temporary_directory'),
|
||||
(b'render_cachedir', 'filepaths.render_cache_directory'),
|
||||
(b'i18ndir', 'filepaths.i18n_branches_directory'),
|
||||
(b'image_editor', 'filepaths.image_editor'),
|
||||
(b'anim_player', 'filepaths.animation_player'),
|
||||
(b"dpi", "system.dpi"),
|
||||
(b"virtual_pixel", "system.virtual_pixel_mode"),
|
||||
(b"compute_device_id", "system.compute_device"),
|
||||
(b"compute_device_type", "system.compute_device_type"),
|
||||
(b"fontdir", "filepaths.font_directory"),
|
||||
(b"textudir", "filepaths.texture_directory"),
|
||||
(b"renderdir", "filepaths.render_output_directory"),
|
||||
(b"pythondir", "filepaths.script_directory"),
|
||||
(b"sounddir", "filepaths.sound_directory"),
|
||||
(b"tempdir", "filepaths.temporary_directory"),
|
||||
(b"render_cachedir", "filepaths.render_cache_directory"),
|
||||
(b"i18ndir", "filepaths.i18n_branches_directory"),
|
||||
(b"image_editor", "filepaths.image_editor"),
|
||||
(b"anim_player", "filepaths.animation_player"),
|
||||
]
|
||||
|
||||
REQUIRES_ROLES_FOR_SYNC = set() # no roles needed.
|
||||
SYNC_GROUP_NODE_NAME = 'Blender Sync'
|
||||
SYNC_GROUP_NODE_DESC = 'The [Blender Cloud Addon](https://cloud.blender.org/services' \
|
||||
'#blender-addon) will synchronize your Blender settings here.'
|
||||
SYNC_GROUP_NODE_NAME = "Blender Sync"
|
||||
SYNC_GROUP_NODE_DESC = (
|
||||
"The [Blender Cloud Addon](https://cloud.blender.org/services"
|
||||
"#blender-addon) will synchronize your Blender settings here."
|
||||
)
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@ -73,7 +76,7 @@ def set_blender_sync_status(set_status: str):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
finally:
|
||||
bss.status = 'IDLE'
|
||||
bss.status = "IDLE"
|
||||
|
||||
return wrapper
|
||||
|
||||
@ -89,18 +92,16 @@ def async_set_blender_sync_status(set_status: str):
|
||||
try:
|
||||
return await func(*args, **kwargs)
|
||||
finally:
|
||||
bss.status = 'IDLE'
|
||||
bss.status = "IDLE"
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
async def find_sync_group_id(home_project_id: str,
|
||||
user_id: str,
|
||||
blender_version: str,
|
||||
*,
|
||||
may_create=True) -> str:
|
||||
async def find_sync_group_id(
|
||||
home_project_id: str, user_id: str, blender_version: str, *, may_create=True
|
||||
) -> typing.Tuple[str, str]:
|
||||
"""Finds the group node in which to store sync assets.
|
||||
|
||||
If the group node doesn't exist and may_create=True, it creates it.
|
||||
@ -110,43 +111,52 @@ async def find_sync_group_id(home_project_id: str,
|
||||
# created by Pillar while creating the home project.
|
||||
try:
|
||||
sync_group, created = await pillar.find_or_create_node(
|
||||
where={'project': home_project_id,
|
||||
'node_type': 'group',
|
||||
'parent': None,
|
||||
'name': SYNC_GROUP_NODE_NAME,
|
||||
'user': user_id},
|
||||
projection={'_id': 1},
|
||||
may_create=False)
|
||||
where={
|
||||
"project": home_project_id,
|
||||
"node_type": "group",
|
||||
"parent": None,
|
||||
"name": SYNC_GROUP_NODE_NAME,
|
||||
"user": user_id,
|
||||
},
|
||||
projection={"_id": 1},
|
||||
may_create=False,
|
||||
)
|
||||
except pillar.PillarError:
|
||||
raise pillar.PillarError('Unable to find sync folder on the Cloud')
|
||||
raise pillar.PillarError("Unable to find sync folder on the Cloud")
|
||||
|
||||
if not may_create and sync_group is None:
|
||||
log.info("Sync folder doesn't exist, and not creating it either.")
|
||||
return None, None
|
||||
return "", ""
|
||||
|
||||
# Find/create the sub-group for the requested Blender version
|
||||
try:
|
||||
sub_sync_group, created = await pillar.find_or_create_node(
|
||||
where={'project': home_project_id,
|
||||
'node_type': 'group',
|
||||
'parent': sync_group['_id'],
|
||||
'name': blender_version,
|
||||
'user': user_id},
|
||||
additional_create_props={
|
||||
'description': 'Sync folder for Blender %s' % blender_version,
|
||||
'properties': {'status': 'published'},
|
||||
where={
|
||||
"project": home_project_id,
|
||||
"node_type": "group",
|
||||
"parent": sync_group["_id"],
|
||||
"name": blender_version,
|
||||
"user": user_id,
|
||||
},
|
||||
projection={'_id': 1},
|
||||
may_create=may_create)
|
||||
additional_create_props={
|
||||
"description": "Sync folder for Blender %s" % blender_version,
|
||||
"properties": {"status": "published"},
|
||||
},
|
||||
projection={"_id": 1},
|
||||
may_create=may_create,
|
||||
)
|
||||
except pillar.PillarError:
|
||||
raise pillar.PillarError('Unable to create sync folder on the Cloud')
|
||||
raise pillar.PillarError("Unable to create sync folder on the Cloud")
|
||||
|
||||
if not may_create and sub_sync_group is None:
|
||||
log.info("Sync folder for Blender version %s doesn't exist, "
|
||||
"and not creating it either.", blender_version)
|
||||
return sync_group['_id'], None
|
||||
log.info(
|
||||
"Sync folder for Blender version %s doesn't exist, "
|
||||
"and not creating it either.",
|
||||
blender_version,
|
||||
)
|
||||
return sync_group["_id"], ""
|
||||
|
||||
return sync_group['_id'], sub_sync_group['_id']
|
||||
return sync_group["_id"], sub_sync_group["_id"]
|
||||
|
||||
|
||||
@functools.lru_cache()
|
||||
@ -157,82 +167,94 @@ async def available_blender_versions(home_project_id: str, user_id: str) -> list
|
||||
sync_group = await pillar_call(
|
||||
pillarsdk.Node.find_first,
|
||||
params={
|
||||
'where': {'project': home_project_id,
|
||||
'node_type': 'group',
|
||||
'parent': None,
|
||||
'name': SYNC_GROUP_NODE_NAME,
|
||||
'user': user_id},
|
||||
'projection': {'_id': 1},
|
||||
"where": {
|
||||
"project": home_project_id,
|
||||
"node_type": "group",
|
||||
"parent": None,
|
||||
"name": SYNC_GROUP_NODE_NAME,
|
||||
"user": user_id,
|
||||
},
|
||||
caching=False)
|
||||
"projection": {"_id": 1},
|
||||
},
|
||||
caching=False,
|
||||
)
|
||||
|
||||
if sync_group is None:
|
||||
bss.report({'ERROR'}, 'No synced Blender settings in your Blender Cloud')
|
||||
log.debug('-- unable to find sync group for home_project_id=%r and user_id=%r',
|
||||
home_project_id, user_id)
|
||||
bss.report({"ERROR"}, "No synced Blender settings in your Blender Cloud")
|
||||
log.debug(
|
||||
"-- unable to find sync group for home_project_id=%r and user_id=%r",
|
||||
home_project_id,
|
||||
user_id,
|
||||
)
|
||||
return []
|
||||
|
||||
sync_nodes = await pillar_call(
|
||||
pillarsdk.Node.all,
|
||||
params={
|
||||
'where': {'project': home_project_id,
|
||||
'node_type': 'group',
|
||||
'parent': sync_group['_id'],
|
||||
'user': user_id},
|
||||
'projection': {'_id': 1, 'name': 1},
|
||||
'sort': '-name',
|
||||
"where": {
|
||||
"project": home_project_id,
|
||||
"node_type": "group",
|
||||
"parent": sync_group["_id"],
|
||||
"user": user_id,
|
||||
},
|
||||
caching=False)
|
||||
"projection": {"_id": 1, "name": 1},
|
||||
"sort": "-name",
|
||||
},
|
||||
caching=False,
|
||||
)
|
||||
|
||||
if not sync_nodes or not sync_nodes._items:
|
||||
bss.report({'ERROR'}, 'No synced Blender settings in your Blender Cloud.')
|
||||
bss.report({"ERROR"}, "No synced Blender settings in your Blender Cloud.")
|
||||
return []
|
||||
|
||||
versions = [node.name for node in sync_nodes._items]
|
||||
log.debug('Versions: %s', versions)
|
||||
log.debug("Versions: %s", versions)
|
||||
|
||||
return versions
|
||||
|
||||
|
||||
# noinspection PyAttributeOutsideInit
|
||||
class PILLAR_OT_sync(pillar.PillarOperatorMixin,
|
||||
async_loop.AsyncModalOperatorMixin,
|
||||
bpy.types.Operator):
|
||||
bl_idname = 'pillar.sync'
|
||||
bl_label = 'Synchronise with Blender Cloud'
|
||||
bl_description = 'Synchronises Blender settings with Blender Cloud'
|
||||
class PILLAR_OT_sync(
|
||||
pillar.PillarOperatorMixin, async_loop.AsyncModalOperatorMixin, bpy.types.Operator
|
||||
):
|
||||
bl_idname = "pillar.sync"
|
||||
bl_label = "Synchronise with Blender Cloud"
|
||||
bl_description = "Synchronises Blender settings with Blender Cloud"
|
||||
|
||||
log = logging.getLogger('bpy.ops.%s' % bl_idname)
|
||||
home_project_id = None
|
||||
sync_group_id = None # top-level sync group node ID
|
||||
sync_group_versioned_id = None # sync group node ID for the given Blender version.
|
||||
log = logging.getLogger("bpy.ops.%s" % bl_idname)
|
||||
home_project_id = ""
|
||||
sync_group_id = "" # top-level sync group node ID
|
||||
sync_group_versioned_id = "" # sync group node ID for the given Blender version.
|
||||
|
||||
action = bpy.props.EnumProperty(
|
||||
action: bpy.props.EnumProperty(
|
||||
items=[
|
||||
('PUSH', 'Push', 'Push settings to the Blender Cloud'),
|
||||
('PULL', 'Pull', 'Pull settings from the Blender Cloud'),
|
||||
('REFRESH', 'Refresh', 'Refresh available versions'),
|
||||
('SELECT', 'Select', 'Select version to sync'),
|
||||
("PUSH", "Push", "Push settings to the Blender Cloud"),
|
||||
("PULL", "Pull", "Pull settings from the Blender Cloud"),
|
||||
("REFRESH", "Refresh", "Refresh available versions"),
|
||||
("SELECT", "Select", "Select version to sync"),
|
||||
],
|
||||
name='action')
|
||||
name="action",
|
||||
)
|
||||
|
||||
CURRENT_BLENDER_VERSION = '%i.%i' % bpy.app.version[:2]
|
||||
blender_version = bpy.props.StringProperty(name='blender_version',
|
||||
description='Blender version to sync for',
|
||||
default=CURRENT_BLENDER_VERSION)
|
||||
CURRENT_BLENDER_VERSION = "%i.%i" % bpy.app.version[:2]
|
||||
blender_version: bpy.props.StringProperty(
|
||||
name="blender_version",
|
||||
description="Blender version to sync for",
|
||||
default=CURRENT_BLENDER_VERSION,
|
||||
)
|
||||
|
||||
def bss_report(self, level, message):
|
||||
bss = bpy.context.window_manager.blender_sync_status
|
||||
bss.report(level, message)
|
||||
|
||||
def invoke(self, context, event):
|
||||
if self.action == 'SELECT':
|
||||
if self.action == "SELECT":
|
||||
# Synchronous action
|
||||
return self.action_select(context)
|
||||
|
||||
if self.action in {'PUSH', 'PULL'} and not self.blender_version:
|
||||
self.bss_report({'ERROR'}, 'No Blender version to sync for was given.')
|
||||
return {'CANCELLED'}
|
||||
if self.action in {"PUSH", "PULL"} and not self.blender_version:
|
||||
self.bss_report({"ERROR"}, "No Blender version to sync for was given.")
|
||||
return {"CANCELLED"}
|
||||
|
||||
return async_loop.AsyncModalOperatorMixin.invoke(self, context, event)
|
||||
|
||||
@ -242,133 +264,146 @@ class PILLAR_OT_sync(pillar.PillarOperatorMixin,
|
||||
This is a synchronous action, as it requires a dialog box.
|
||||
"""
|
||||
|
||||
self.log.info('Performing action SELECT')
|
||||
self.log.info("Performing action SELECT")
|
||||
|
||||
# Do a refresh before we can show the dropdown.
|
||||
fut = asyncio.ensure_future(self.async_execute(context, action_override='REFRESH'))
|
||||
fut = asyncio.ensure_future(
|
||||
self.async_execute(context, action_override="REFRESH")
|
||||
)
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.run_until_complete(fut)
|
||||
|
||||
self._state = 'SELECTING'
|
||||
self._state = "SELECTING"
|
||||
return context.window_manager.invoke_props_dialog(self)
|
||||
|
||||
def draw(self, context):
|
||||
bss = bpy.context.window_manager.blender_sync_status
|
||||
self.layout.prop(bss, 'version', text='Blender version')
|
||||
self.layout.prop(bss, "version", text="Blender version")
|
||||
|
||||
def execute(self, context):
|
||||
if self.action != 'SELECT':
|
||||
log.debug('Ignoring execute() for action %r', self.action)
|
||||
return {'FINISHED'}
|
||||
if self.action != "SELECT":
|
||||
log.debug("Ignoring execute() for action %r", self.action)
|
||||
return {"FINISHED"}
|
||||
|
||||
log.debug('Performing execute() for action %r', self.action)
|
||||
log.debug("Performing execute() for action %r", self.action)
|
||||
# Perform the sync when the user closes the dialog box.
|
||||
bss = bpy.context.window_manager.blender_sync_status
|
||||
bpy.ops.pillar.sync('INVOKE_DEFAULT',
|
||||
action='PULL',
|
||||
blender_version=bss.version)
|
||||
bpy.ops.pillar.sync(
|
||||
"INVOKE_DEFAULT", action="PULL", blender_version=bss.version
|
||||
)
|
||||
|
||||
return {'FINISHED'}
|
||||
return {"FINISHED"}
|
||||
|
||||
@async_set_blender_sync_status('SYNCING')
|
||||
@async_set_blender_sync_status("SYNCING")
|
||||
async def async_execute(self, context, *, action_override=None):
|
||||
"""Entry point of the asynchronous operator."""
|
||||
|
||||
action = action_override or self.action
|
||||
self.bss_report({'INFO'}, 'Communicating with Blender Cloud')
|
||||
self.log.info('Performing action %s', action)
|
||||
self.bss_report({"INFO"}, "Communicating with Blender Cloud")
|
||||
self.log.info("Performing action %s", action)
|
||||
|
||||
try:
|
||||
# Refresh credentials
|
||||
try:
|
||||
db_user = await self.check_credentials(context, REQUIRES_ROLES_FOR_SYNC)
|
||||
self.user_id = db_user['_id']
|
||||
log.debug('Found user ID: %s', self.user_id)
|
||||
except pillar.NotSubscribedToCloudError:
|
||||
self.log.exception('User not subscribed to cloud.')
|
||||
self.bss_report({'SUBSCRIBE'}, 'Please subscribe to the Blender Cloud.')
|
||||
self._state = 'QUIT'
|
||||
self.user_id = db_user["_id"]
|
||||
log.debug("Found user ID: %s", self.user_id)
|
||||
except pillar.NotSubscribedToCloudError as ex:
|
||||
self._log_subscription_needed(can_renew=ex.can_renew)
|
||||
self._state = "QUIT"
|
||||
return
|
||||
except pillar.UserNotLoggedInError:
|
||||
self.log.exception('Error checking/refreshing credentials.')
|
||||
self.bss_report({'ERROR'}, 'Please log in on Blender ID first.')
|
||||
self._state = 'QUIT'
|
||||
self.log.exception("Error checking/refreshing credentials.")
|
||||
self.bss_report({"ERROR"}, "Please log in on Blender ID first.")
|
||||
self._state = "QUIT"
|
||||
return
|
||||
|
||||
# Find the home project.
|
||||
try:
|
||||
self.home_project_id = await home_project.get_home_project_id()
|
||||
except sdk_exceptions.ForbiddenAccess:
|
||||
self.log.exception('Forbidden access to home project.')
|
||||
self.bss_report({'ERROR'}, 'Did not get access to home project.')
|
||||
self._state = 'QUIT'
|
||||
self.log.exception("Forbidden access to home project.")
|
||||
self.bss_report({"ERROR"}, "Did not get access to home project.")
|
||||
self._state = "QUIT"
|
||||
return
|
||||
except sdk_exceptions.ResourceNotFound:
|
||||
self.bss_report({'ERROR'}, 'Home project not found.')
|
||||
self._state = 'QUIT'
|
||||
self.bss_report({"ERROR"}, "Home project not found.")
|
||||
self._state = "QUIT"
|
||||
return
|
||||
|
||||
# Only create the folder structure if we're pushing.
|
||||
may_create = self.action == 'PUSH'
|
||||
may_create = self.action == "PUSH"
|
||||
try:
|
||||
gid, subgid = await find_sync_group_id(self.home_project_id,
|
||||
gid, subgid = await find_sync_group_id(
|
||||
self.home_project_id,
|
||||
self.user_id,
|
||||
self.blender_version,
|
||||
may_create=may_create)
|
||||
may_create=may_create,
|
||||
)
|
||||
self.sync_group_id = gid
|
||||
self.sync_group_versioned_id = subgid
|
||||
self.log.debug('Found top-level group node ID: %s', self.sync_group_id)
|
||||
self.log.debug('Found group node ID for %s: %s',
|
||||
self.blender_version, self.sync_group_versioned_id)
|
||||
self.log.debug("Found top-level group node ID: %s", self.sync_group_id)
|
||||
self.log.debug(
|
||||
"Found group node ID for %s: %s",
|
||||
self.blender_version,
|
||||
self.sync_group_versioned_id,
|
||||
)
|
||||
except sdk_exceptions.ForbiddenAccess:
|
||||
self.log.exception('Unable to find Group ID')
|
||||
self.bss_report({'ERROR'}, 'Unable to find sync folder.')
|
||||
self._state = 'QUIT'
|
||||
self.log.exception("Unable to find Group ID")
|
||||
self.bss_report({"ERROR"}, "Unable to find sync folder.")
|
||||
self._state = "QUIT"
|
||||
return
|
||||
|
||||
# Perform the requested action.
|
||||
action_method = {
|
||||
'PUSH': self.action_push,
|
||||
'PULL': self.action_pull,
|
||||
'REFRESH': self.action_refresh,
|
||||
"PUSH": self.action_push,
|
||||
"PULL": self.action_pull,
|
||||
"REFRESH": self.action_refresh,
|
||||
}[action]
|
||||
await action_method(context)
|
||||
except Exception as ex:
|
||||
self.log.exception('Unexpected exception caught.')
|
||||
self.bss_report({'ERROR'}, 'Unexpected error: %s' % ex)
|
||||
self.log.exception("Unexpected exception caught.")
|
||||
self.bss_report({"ERROR"}, "Unexpected error: %s" % ex)
|
||||
|
||||
self._state = 'QUIT'
|
||||
self._state = "QUIT"
|
||||
|
||||
async def action_push(self, context):
|
||||
"""Sends files to the Pillar server."""
|
||||
|
||||
self.log.info('Saved user preferences to disk before pushing to cloud.')
|
||||
self.log.info("Saved user preferences to disk before pushing to cloud.")
|
||||
bpy.ops.wm.save_userpref()
|
||||
|
||||
config_dir = pathlib.Path(bpy.utils.user_resource('CONFIG'))
|
||||
config_dir = pathlib.Path(bpy.utils.user_resource("CONFIG"))
|
||||
|
||||
for fname in SETTINGS_FILES_TO_UPLOAD:
|
||||
path = config_dir / fname
|
||||
if not path.exists():
|
||||
self.log.debug('Skipping non-existing %s', path)
|
||||
self.log.debug("Skipping non-existing %s", path)
|
||||
continue
|
||||
|
||||
if self.signalling_future.cancelled():
|
||||
self.bss_report({'WARNING'}, 'Upload aborted.')
|
||||
self.bss_report({"WARNING"}, "Upload aborted.")
|
||||
return
|
||||
|
||||
self.bss_report({'INFO'}, 'Uploading %s' % fname)
|
||||
self.bss_report({"INFO"}, "Uploading %s" % fname)
|
||||
try:
|
||||
await pillar.attach_file_to_group(path,
|
||||
await pillar.attach_file_to_group(
|
||||
path,
|
||||
self.home_project_id,
|
||||
self.sync_group_versioned_id,
|
||||
self.user_id)
|
||||
self.user_id,
|
||||
)
|
||||
except sdk_exceptions.RequestEntityTooLarge as ex:
|
||||
self.log.error('File too big to upload: %s' % ex)
|
||||
self.log.error('To upload larger files, please subscribe to Blender Cloud.')
|
||||
self.bss_report({'SUBSCRIBE'}, 'File %s too big to upload. '
|
||||
'Subscribe for unlimited space.' % fname)
|
||||
self._state = 'QUIT'
|
||||
self.log.error("File too big to upload: %s" % ex)
|
||||
self.log.error(
|
||||
"To upload larger files, please subscribe to Blender Cloud."
|
||||
)
|
||||
self.bss_report(
|
||||
{"SUBSCRIBE"},
|
||||
"File %s too big to upload. "
|
||||
"Subscribe for unlimited space." % fname,
|
||||
)
|
||||
self._state = "QUIT"
|
||||
return
|
||||
|
||||
await self.action_refresh(context)
|
||||
@ -382,31 +417,37 @@ class PILLAR_OT_sync(pillar.PillarOperatorMixin,
|
||||
else:
|
||||
bss.version = max(bss.available_blender_versions)
|
||||
|
||||
self.bss_report({'INFO'}, 'Settings pushed to Blender Cloud.')
|
||||
self.bss_report({"INFO"}, "Settings pushed to Blender Cloud.")
|
||||
|
||||
async def action_pull(self, context):
|
||||
"""Loads files from the Pillar server."""
|
||||
|
||||
# If the sync group node doesn't exist, offer a list of groups that do.
|
||||
if self.sync_group_id is None:
|
||||
self.bss_report({'ERROR'},
|
||||
'There are no synced Blender settings in your Blender Cloud.')
|
||||
if not self.sync_group_id:
|
||||
self.bss_report(
|
||||
{"ERROR"}, "There are no synced Blender settings in your Blender Cloud."
|
||||
)
|
||||
return
|
||||
|
||||
if self.sync_group_versioned_id is None:
|
||||
self.bss_report({'ERROR'}, 'Therre are no synced Blender settings for version %s' %
|
||||
self.blender_version)
|
||||
if not self.sync_group_versioned_id:
|
||||
self.bss_report(
|
||||
{"ERROR"},
|
||||
"Therre are no synced Blender settings for version %s"
|
||||
% self.blender_version,
|
||||
)
|
||||
return
|
||||
|
||||
self.bss_report({'INFO'}, 'Pulling settings from Blender Cloud')
|
||||
with tempfile.TemporaryDirectory(prefix='bcloud-sync') as tempdir:
|
||||
self.bss_report({"INFO"}, "Pulling settings from Blender Cloud")
|
||||
with tempfile.TemporaryDirectory(prefix="bcloud-sync") as tempdir:
|
||||
for fname in SETTINGS_FILES_TO_UPLOAD:
|
||||
await self.download_settings_file(fname, tempdir)
|
||||
|
||||
self.bss_report({'WARNING'}, 'Settings pulled from Cloud, restart Blender to load them.')
|
||||
self.bss_report(
|
||||
{"WARNING"}, "Settings pulled from Cloud, restart Blender to load them."
|
||||
)
|
||||
|
||||
async def action_refresh(self, context):
|
||||
self.bss_report({'INFO'}, 'Refreshing available Blender versions.')
|
||||
self.bss_report({"INFO"}, "Refreshing available Blender versions.")
|
||||
|
||||
# Clear the LRU cache of available_blender_versions so that we can
|
||||
# obtain new versions (if someone synced from somewhere else, for example)
|
||||
@ -416,102 +457,123 @@ class PILLAR_OT_sync(pillar.PillarOperatorMixin,
|
||||
bss = bpy.context.window_manager.blender_sync_status
|
||||
bss.available_blender_versions = versions
|
||||
|
||||
if versions:
|
||||
if not versions:
|
||||
# There are versions to sync, so we can remove the status message.
|
||||
# However, if there aren't any, the status message shows why, and
|
||||
# shouldn't be erased.
|
||||
self.bss_report({'INFO'}, '')
|
||||
|
||||
async def download_settings_file(self, fname: str, temp_dir: str):
|
||||
config_dir = pathlib.Path(bpy.utils.user_resource('CONFIG'))
|
||||
meta_path = cache.cache_directory('home-project', 'blender-sync')
|
||||
|
||||
self.bss_report({'INFO'}, 'Downloading %s from Cloud' % fname)
|
||||
|
||||
# Get the asset node
|
||||
node_props = {'project': self.home_project_id,
|
||||
'node_type': 'asset',
|
||||
'parent': self.sync_group_versioned_id,
|
||||
'name': fname}
|
||||
node = await pillar_call(pillarsdk.Node.find_first, {
|
||||
'where': node_props,
|
||||
'projection': {'_id': 1, 'properties.file': 1}
|
||||
}, caching=False)
|
||||
if node is None:
|
||||
self.bss_report({'INFO'}, 'Unable to find %s on Blender Cloud' % fname)
|
||||
self.log.info('Unable to find node on Blender Cloud for %s', fname)
|
||||
return
|
||||
|
||||
async def file_downloaded(file_path: str, file_desc: pillarsdk.File, map_type: str):
|
||||
# Prevent warnings that the current value of the EnumProperty isn't valid.
|
||||
current_version = "%d.%d" % bpy.app.version[:2]
|
||||
if current_version in versions:
|
||||
bss.version = current_version
|
||||
else:
|
||||
bss.version = versions[0]
|
||||
|
||||
self.bss_report({"INFO"}, "")
|
||||
|
||||
async def download_settings_file(self, fname: str, temp_dir: str):
|
||||
config_dir = pathlib.Path(bpy.utils.user_resource("CONFIG"))
|
||||
meta_path = cache.cache_directory("home-project", "blender-sync")
|
||||
|
||||
self.bss_report({"INFO"}, "Downloading %s from Cloud" % fname)
|
||||
|
||||
# Get the asset node
|
||||
node_props = {
|
||||
"project": self.home_project_id,
|
||||
"node_type": "asset",
|
||||
"parent": self.sync_group_versioned_id,
|
||||
"name": fname,
|
||||
}
|
||||
node = await pillar_call(
|
||||
pillarsdk.Node.find_first,
|
||||
{"where": node_props, "projection": {"_id": 1, "properties.file": 1}},
|
||||
caching=False,
|
||||
)
|
||||
if node is None:
|
||||
self.bss_report({"INFO"}, "Unable to find %s on Blender Cloud" % fname)
|
||||
self.log.info("Unable to find node on Blender Cloud for %s", fname)
|
||||
return
|
||||
|
||||
async def file_downloaded(
|
||||
file_path: str, file_desc: pillarsdk.File, map_type: str
|
||||
):
|
||||
# Allow the caller to adjust the file before we move it into place.
|
||||
|
||||
if fname.lower() == 'userpref.blend':
|
||||
if fname.lower() == "userpref.blend":
|
||||
await self.update_userpref_blend(file_path)
|
||||
|
||||
# Move the file next to the final location; as it may be on a
|
||||
# different filesystem than the temporary directory, this can
|
||||
# fail, and we don't want to destroy the existing file.
|
||||
local_temp = config_dir / (fname + '~')
|
||||
local_temp = config_dir / (fname + "~")
|
||||
local_final = config_dir / fname
|
||||
|
||||
# Make a backup copy of the file as it was before pulling.
|
||||
if local_final.exists():
|
||||
local_bak = config_dir / (fname + '-pre-bcloud-pull')
|
||||
local_bak = config_dir / (fname + "-pre-bcloud-pull")
|
||||
self.move_file(local_final, local_bak)
|
||||
|
||||
self.move_file(file_path, local_temp)
|
||||
self.move_file(local_temp, local_final)
|
||||
|
||||
file_id = node.properties.file
|
||||
await pillar.download_file_by_uuid(file_id,
|
||||
await pillar.download_file_by_uuid(
|
||||
file_id,
|
||||
temp_dir,
|
||||
str(meta_path),
|
||||
file_loaded_sync=file_downloaded,
|
||||
future=self.signalling_future)
|
||||
future=self.signalling_future,
|
||||
)
|
||||
|
||||
def move_file(self, src, dst):
|
||||
self.log.info('Moving %s to %s', src, dst)
|
||||
self.log.info("Moving %s to %s", src, dst)
|
||||
shutil.move(str(src), str(dst))
|
||||
|
||||
async def update_userpref_blend(self, file_path: str):
|
||||
self.log.info('Overriding machine-local settings in %s', file_path)
|
||||
self.log.info("Overriding machine-local settings in %s", file_path)
|
||||
|
||||
# Remember some settings that should not be overwritten from the Cloud.
|
||||
up = bpy.context.user_preferences
|
||||
prefs = blender.ctx_preferences()
|
||||
remembered = {}
|
||||
for rna_key, python_key in LOCAL_SETTINGS_RNA:
|
||||
assert '.' in python_key, 'Sorry, this code assumes there is a dot in the Python key'
|
||||
assert (
|
||||
"." in python_key
|
||||
), "Sorry, this code assumes there is a dot in the Python key"
|
||||
|
||||
try:
|
||||
value = up.path_resolve(python_key)
|
||||
value = prefs.path_resolve(python_key)
|
||||
except ValueError:
|
||||
# Setting doesn't exist. This can happen, for example Cycles
|
||||
# settings on a build that doesn't have Cycles enabled.
|
||||
continue
|
||||
|
||||
# Map enums from strings (in Python) to ints (in DNA).
|
||||
dot_index = python_key.rindex('.')
|
||||
parent_key, prop_key = python_key[:dot_index], python_key[dot_index + 1:]
|
||||
parent = up.path_resolve(parent_key)
|
||||
dot_index = python_key.rindex(".")
|
||||
parent_key, prop_key = python_key[:dot_index], python_key[dot_index + 1 :]
|
||||
parent = prefs.path_resolve(parent_key)
|
||||
prop = parent.bl_rna.properties[prop_key]
|
||||
if prop.type == 'ENUM':
|
||||
log.debug('Rewriting %s from %r to %r',
|
||||
python_key, value, prop.enum_items[value].value)
|
||||
if prop.type == "ENUM":
|
||||
log.debug(
|
||||
"Rewriting %s from %r to %r",
|
||||
python_key,
|
||||
value,
|
||||
prop.enum_items[value].value,
|
||||
)
|
||||
value = prop.enum_items[value].value
|
||||
else:
|
||||
log.debug('Keeping value of %s: %r', python_key, value)
|
||||
log.debug("Keeping value of %s: %r", python_key, value)
|
||||
|
||||
remembered[rna_key] = value
|
||||
log.debug('Overriding values: %s', remembered)
|
||||
log.debug("Overriding values: %s", remembered)
|
||||
|
||||
# Rewrite the userprefs.blend file to override the options.
|
||||
with blendfile.open_blend(file_path, 'rb+') as blend:
|
||||
prefs = next(block for block in blend.blocks
|
||||
if block.code == b'USER')
|
||||
with blendfile.open_blend(file_path, "rb+") as blend:
|
||||
prefs = next(block for block in blend.blocks if block.code == b"USER")
|
||||
|
||||
for key, value in remembered.items():
|
||||
self.log.debug('prefs[%r] = %r' % (key, prefs[key]))
|
||||
self.log.debug(' -> setting prefs[%r] = %r' % (key, value))
|
||||
self.log.debug("prefs[%r] = %r" % (key, prefs[key]))
|
||||
self.log.debug(" -> setting prefs[%r] = %r" % (key, value))
|
||||
prefs[key] = value
|
||||
|
||||
|
||||
|
File diff suppressed because it is too large
Load Diff
910
blender_cloud/texture_browser/__init__.py
Normal file
910
blender_cloud/texture_browser/__init__.py
Normal file
@ -0,0 +1,910 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
import typing
|
||||
|
||||
import bpy
|
||||
import bgl
|
||||
|
||||
import pillarsdk
|
||||
from .. import async_loop, pillar, cache, blender, utils
|
||||
from . import (
|
||||
menu_item as menu_item_mod,
|
||||
) # so that we can have menu items called 'menu_item'
|
||||
from . import draw, nodes
|
||||
|
||||
REQUIRED_ROLES_FOR_TEXTURE_BROWSER = {"subscriber", "demo"}
|
||||
MOUSE_SCROLL_PIXELS_PER_TICK = 50
|
||||
|
||||
TARGET_ITEM_WIDTH = 400
|
||||
TARGET_ITEM_HEIGHT = 128
|
||||
ITEM_MARGIN_X = 5
|
||||
ITEM_MARGIN_Y = 5
|
||||
ITEM_PADDING_X = 5
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BlenderCloudBrowser(
|
||||
pillar.PillarOperatorMixin, async_loop.AsyncModalOperatorMixin, bpy.types.Operator
|
||||
):
|
||||
bl_idname = "pillar.browser"
|
||||
bl_label = "Blender Cloud Texture Browser"
|
||||
|
||||
_draw_handle = None
|
||||
|
||||
current_path = pillar.CloudPath("/")
|
||||
project_name = ""
|
||||
|
||||
# This contains a stack of Node objects that lead up to the currently browsed node.
|
||||
path_stack = [] # type: typing.List[pillarsdk.Node]
|
||||
|
||||
# This contains a stack of MenuItem objects that lead up to the currently browsed node.
|
||||
menu_item_stack = [] # type: typing.List[menu_item_mod.MenuItem]
|
||||
|
||||
timer = None
|
||||
log = logging.getLogger("%s.BlenderCloudBrowser" % __name__)
|
||||
|
||||
_menu_item_lock = threading.Lock()
|
||||
current_display_content = [] # type: typing.List[menu_item_mod.MenuItem]
|
||||
loaded_images = set() # type: typing.Set[str]
|
||||
thumbnails_cache = ""
|
||||
maximized_area = False
|
||||
|
||||
mouse_x = 0
|
||||
mouse_y = 0
|
||||
scroll_offset = 0
|
||||
scroll_offset_target = 0
|
||||
scroll_offset_max = 0
|
||||
scroll_offset_space_left = 0
|
||||
|
||||
def invoke(self, context, event):
|
||||
# Refuse to start if the file hasn't been saved. It's okay if
|
||||
# it's dirty, we just need to know where '//' points to.
|
||||
if not os.path.exists(context.blend_data.filepath):
|
||||
self.report(
|
||||
{"ERROR"},
|
||||
"Please save your Blend file before using " "the Blender Cloud addon.",
|
||||
)
|
||||
return {"CANCELLED"}
|
||||
|
||||
wm = context.window_manager
|
||||
|
||||
self.current_path = pillar.CloudPath(wm.last_blender_cloud_location)
|
||||
self.path_stack = [] # list of nodes that make up the current path.
|
||||
|
||||
self.thumbnails_cache = cache.cache_directory("thumbnails")
|
||||
self.mouse_x = event.mouse_x
|
||||
self.mouse_y = event.mouse_y
|
||||
|
||||
# See if we have to maximize the current area
|
||||
if not context.screen.show_fullscreen:
|
||||
self.maximized_area = True
|
||||
bpy.ops.screen.screen_full_area(use_hide_panels=True)
|
||||
|
||||
# Add the region OpenGL drawing callback
|
||||
# draw in view space with 'POST_VIEW' and 'PRE_VIEW'
|
||||
self._draw_handle = context.space_data.draw_handler_add(
|
||||
self.draw_menu, (context,), "WINDOW", "POST_PIXEL"
|
||||
)
|
||||
|
||||
self.current_display_content = []
|
||||
self.loaded_images = set()
|
||||
self._scroll_reset()
|
||||
|
||||
context.window.cursor_modal_set("DEFAULT")
|
||||
return async_loop.AsyncModalOperatorMixin.invoke(self, context, event)
|
||||
|
||||
def modal(self, context, event):
|
||||
result = async_loop.AsyncModalOperatorMixin.modal(self, context, event)
|
||||
if not {"PASS_THROUGH", "RUNNING_MODAL"}.intersection(result):
|
||||
return result
|
||||
|
||||
if event.type == "TAB" and event.value == "RELEASE":
|
||||
self.log.info("Ensuring async loop is running")
|
||||
async_loop.ensure_async_loop()
|
||||
|
||||
if event.type == "TIMER":
|
||||
self._scroll_smooth()
|
||||
context.area.tag_redraw()
|
||||
return {"RUNNING_MODAL"}
|
||||
|
||||
if "MOUSE" in event.type:
|
||||
context.area.tag_redraw()
|
||||
self.mouse_x = event.mouse_x
|
||||
self.mouse_y = event.mouse_y
|
||||
|
||||
left_mouse_release = event.type == "LEFTMOUSE" and event.value == "RELEASE"
|
||||
if left_mouse_release and self._state in {"PLEASE_SUBSCRIBE", "PLEASE_RENEW"}:
|
||||
self.open_browser_subscribe(renew=self._state == "PLEASE_RENEW")
|
||||
self._finish(context)
|
||||
return {"FINISHED"}
|
||||
|
||||
if self._state == "BROWSING":
|
||||
selected = self.get_clicked()
|
||||
|
||||
if selected:
|
||||
if selected.is_spinning:
|
||||
context.window.cursor_set("WAIT")
|
||||
else:
|
||||
context.window.cursor_set("HAND")
|
||||
else:
|
||||
context.window.cursor_set("DEFAULT")
|
||||
|
||||
# Scrolling
|
||||
if event.type == "WHEELUPMOUSE":
|
||||
self._scroll_by(MOUSE_SCROLL_PIXELS_PER_TICK)
|
||||
context.area.tag_redraw()
|
||||
elif event.type == "WHEELDOWNMOUSE":
|
||||
self._scroll_by(-MOUSE_SCROLL_PIXELS_PER_TICK)
|
||||
context.area.tag_redraw()
|
||||
elif event.type == "TRACKPADPAN":
|
||||
self._scroll_by(event.mouse_prev_y - event.mouse_y, smooth=False)
|
||||
context.area.tag_redraw()
|
||||
|
||||
if left_mouse_release:
|
||||
if selected is None:
|
||||
# No item clicked, ignore it.
|
||||
return {"RUNNING_MODAL"}
|
||||
|
||||
if selected.is_spinning:
|
||||
# This can happen when the thumbnail information isn't loaded yet.
|
||||
return {"RUNNING_MODAL"}
|
||||
|
||||
if selected.is_folder:
|
||||
self.descend_node(selected)
|
||||
else:
|
||||
self.handle_item_selection(context, selected)
|
||||
|
||||
if event.type in {"RIGHTMOUSE", "ESC"}:
|
||||
self._finish(context)
|
||||
return {"CANCELLED"}
|
||||
|
||||
return {"RUNNING_MODAL"}
|
||||
|
||||
async def async_execute(self, context):
    """Entry point of the async operator: verify credentials, then start browsing.

    Switches to the subscribe/renew screen when the user has no valid
    Blender Cloud subscription instead of raising.
    """
    self._state = "CHECKING_CREDENTIALS"
    self.log.debug("Checking credentials")

    try:
        db_user = await self.check_credentials(
            context, REQUIRED_ROLES_FOR_TEXTURE_BROWSER
        )
    except pillar.NotSubscribedToCloudError as ex:
        # Not an error for us: show the subscribe/renew screen instead.
        self._log_subscription_needed(can_renew=ex.can_renew, level="INFO")
        self._show_subscribe_screen(can_renew=ex.can_renew)
        return None

    if db_user is None:
        raise pillar.UserNotLoggedInError()

    await self.async_download_previews()
|
||||
|
||||
def _show_subscribe_screen(self, *, can_renew: bool):
    """Shows the "You need to subscribe" screen."""

    self._state = "PLEASE_RENEW" if can_renew else "PLEASE_SUBSCRIBE"

    # The hand cursor hints that a click will open the subscribe/renew page.
    bpy.context.window.cursor_set("HAND")
|
||||
|
||||
def descend_node(self, menu_item: menu_item_mod.MenuItem):
    """Descends the node hierarchy by visiting this menu item's node.

    Also keeps track of the current node, so that we know where the "up" button should go.
    """

    node = menu_item.node
    assert isinstance(node, pillarsdk.Node), "Wrong type %s" % node

    if isinstance(node, nodes.UpNode):
        # Going up. Fix: update the path first so the log message shows the
        # destination (it used to log the path we were leaving).
        self.current_path = self.current_path.parent
        self.log.debug("Going up to %r", self.current_path)
        if self.path_stack:
            self.path_stack.pop()
        if self.menu_item_stack:
            self.menu_item_stack.pop()
        if not self.path_stack:
            # Back at the project list: no project is selected any more.
            self.project_name = ""
    else:
        # Going down, keep track of where we were
        if isinstance(node, nodes.ProjectNode):
            self.project_name = node["name"]

        self.current_path /= node["_id"]
        self.log.debug("Going down to %r", self.current_path)
        self.path_stack.append(node)
        self.menu_item_stack.append(menu_item)

    self.browse_assets()
|
||||
|
||||
@property
def node(self):
    """The node currently being browsed, or None at the project-list root."""
    return self.path_stack[-1] if self.path_stack else None
|
||||
|
||||
def _finish(self, context):
    """Tears down the modal operator: async machinery, GL handler, and UI state."""
    self.log.debug("Finishing the modal operator")
    # Let the mixin stop its timer and cancel the async task first.
    async_loop.AsyncModalOperatorMixin._finish(self, context)
    self.clear_images()

    context.space_data.draw_handler_remove(self._draw_handle, "WINDOW")
    context.window.cursor_modal_restore()

    # If we maximized the area when the browser started, restore the layout.
    if self.maximized_area:
        bpy.ops.screen.screen_full_area(use_hide_panels=True)

    context.area.tag_redraw()
    self.log.debug("Modal operator finished")
|
||||
|
||||
def clear_images(self):
    """Removes all images we loaded from Blender's memory."""

    # Iterate over a snapshot: removing datablocks while iterating
    # bpy.data.images directly can skip entries or invalidate the iterator.
    for image in list(bpy.data.images):
        if image.filepath_raw not in self.loaded_images:
            continue

        image.user_clear()
        bpy.data.images.remove(image)

    self.loaded_images.clear()
    self.current_display_content.clear()
|
||||
|
||||
def add_menu_item(self, *args) -> menu_item_mod.MenuItem:
    """Creates a MenuItem from *args and appends it to the displayed content.

    Also registers the item's icon path (if any) so clear_images() can
    unload it later. Returns the created MenuItem.
    """
    menu_item = menu_item_mod.MenuItem(*args)

    # Just make this thread-safe to be on the safe side.
    with self._menu_item_lock:
        self.current_display_content.append(menu_item)
        if menu_item.icon is not None:
            self.loaded_images.add(menu_item.icon.filepath_raw)

    self.sort_menu()

    return menu_item
|
||||
|
||||
def update_menu_item(self, node, *args):
    """Updates the MenuItem that represents the given node.

    :raises ValueError: when no MenuItem represents the node.
    """
    node_uuid = node["_id"]

    # Just make this thread-safe to be on the safe side.
    with self._menu_item_lock:
        for menu_item in self.current_display_content:
            if menu_item.represents(node):
                menu_item.update(node, *args)
                # Guard against icon-less items; add_menu_item() has the same
                # check, but it was missing here and crashed on icon=None.
                if menu_item.icon is not None:
                    self.loaded_images.add(menu_item.icon.filepath_raw)
                break
        else:
            raise ValueError("Unable to find MenuItem(node_uuid=%r)" % node_uuid)

    self.sort_menu()
|
||||
|
||||
def sort_menu(self):
    """Sorts the self.current_display_content list."""

    if not self.current_display_content:
        return

    # Loader callbacks may mutate the list from another thread; lock it.
    with self._menu_item_lock:
        self.current_display_content.sort(key=menu_item_mod.MenuItem.sort_key)
|
||||
|
||||
async def async_download_previews(self):
    """Populates the menu for the current path and downloads thumbnails.

    Depending on self.current_path this lists available projects, folder
    sub-nodes, or texture/HDRi nodes whose thumbnails are fetched
    asynchronously (spinner icon shown until each one arrives).
    """
    self._state = "BROWSING"

    thumbnails_directory = self.thumbnails_cache
    self.log.info("Asynchronously downloading previews to %r", thumbnails_directory)
    self.log.info("Current BCloud path is %r", self.current_path)
    self.clear_images()
    self._scroll_reset()

    project_uuid = self.current_path.project_uuid
    node_uuid = self.current_path.node_uuid

    if node_uuid:
        # Query for sub-nodes of this node.
        self.log.debug("Getting subnodes for parent node %r", node_uuid)
        children = await pillar.get_nodes(
            parent_node_uuid=node_uuid, node_type={"group_texture", "group_hdri"}
        )
    elif project_uuid:
        # Query for top-level nodes.
        self.log.debug("Getting subnodes for project node %r", project_uuid)
        children = await pillar.get_nodes(
            project_uuid=project_uuid,
            parent_node_uuid="",
            node_type={"group_texture", "group_hdri"},
        )
    else:
        # Query for projects
        self.log.debug(
            "No node UUID and no project UUID, listing available projects"
        )
        children = await pillar.get_texture_projects()
        for proj_dict in children:
            self.add_menu_item(
                nodes.ProjectNode(proj_dict), None, "FOLDER", proj_dict["name"]
            )
        # The project list has no thumbnails to fetch; we're done.
        return

    # Make sure we can go up again.
    self.add_menu_item(nodes.UpNode(), None, "FOLDER", ".. up ..")

    # Download all child nodes
    self.log.debug("Iterating over child nodes of %r", self.current_path)
    for child in children:
        if child["node_type"] not in menu_item_mod.MenuItem.SUPPORTED_NODE_TYPES:
            self.log.debug("Skipping node of type %r", child["node_type"])
            continue
        self.add_menu_item(child, None, "FOLDER", child["name"])

    # There are only sub-nodes at the project level, no texture nodes,
    # so we won't have to bother looking for textures.
    if not node_uuid:
        return

    directory = os.path.join(thumbnails_directory, project_uuid, node_uuid)
    os.makedirs(directory, exist_ok=True)

    self.log.debug("Fetching texture thumbnails for node %r", node_uuid)

    def thumbnail_loading(node, texture_node):
        # Show a spinner icon until the real thumbnail arrives.
        self.add_menu_item(node, None, "SPINNER", texture_node["name"])

    def thumbnail_loaded(node, file_desc, thumb_path):
        self.log.debug("Node %s thumbnail loaded", node["_id"])
        self.update_menu_item(node, file_desc, thumb_path)

    await pillar.fetch_texture_thumbs(
        node_uuid,
        "s",
        directory,
        thumbnail_loading=thumbnail_loading,
        thumbnail_loaded=thumbnail_loaded,
        future=self.signalling_future,
    )
|
||||
|
||||
def browse_assets(self):
    """Starts an async task that (re)populates the menu for the current path."""
    self.log.debug("Browsing assets at %r", self.current_path)
    # Remember where we are, so the browser can reopen at the same location.
    bpy.context.window_manager.last_blender_cloud_location = str(self.current_path)
    self._new_async_task(self.async_download_previews())
|
||||
|
||||
def draw_menu(self, context):
    """Draws the GUI with OpenGL."""

    drawers = {
        "INITIALIZING": self._draw_initializing,
        "CHECKING_CREDENTIALS": self._draw_checking_credentials,
        "BROWSING": self._draw_browser,
        "DOWNLOADING_TEXTURE": self._draw_downloading,
        "EXCEPTION": self._draw_exception,
        "PLEASE_SUBSCRIBE": self._draw_subscribe,
        "PLEASE_RENEW": self._draw_renew,
    }

    # States without a drawer (e.g. QUIT) only get the debug text below.
    drawer = drawers.get(self._state)
    if drawer is not None:
        drawer(context)

    # For debugging: draw the state
    draw.text(
        (5, 5),
        "%s %s" % (self._state, self.project_name),
        rgba=(1.0, 1.0, 1.0, 1.0),
        fsize=12,
    )
|
||||
|
||||
@staticmethod
|
||||
def _window_region(context):
|
||||
window_regions = [
|
||||
region for region in context.area.regions if region.type == "WINDOW"
|
||||
]
|
||||
return window_regions[0]
|
||||
|
||||
def _draw_browser(self, context):
    """OpenGL drawing code for the BROWSING state."""
    from . import draw

    if not self.current_display_content:
        # Nothing loaded yet; show a waiting message instead of an empty grid.
        self._draw_text_on_colour(
            context, "Communicating with Blender Cloud", (0.0, 0.0, 0.0, 0.6)
        )
        return

    window_region = self._window_region(context)
    content_width = window_region.width - ITEM_MARGIN_X * 2
    content_height = window_region.height - ITEM_MARGIN_Y * 2

    content_x = ITEM_MARGIN_X
    content_y = context.area.height - ITEM_MARGIN_Y - TARGET_ITEM_HEIGHT

    # Grid layout: as many columns of the target width as fit.
    col_count = content_width // TARGET_ITEM_WIDTH

    item_width = (content_width - (col_count * ITEM_PADDING_X)) / col_count
    item_height = TARGET_ITEM_HEIGHT

    block_width = item_width + ITEM_PADDING_X
    block_height = item_height + ITEM_MARGIN_Y

    bgl.glEnable(bgl.GL_BLEND)
    # Darken the whole region so the items stand out from the scene.
    draw.aabox(
        (0, 0), (window_region.width, window_region.height), (0.0, 0.0, 0.0, 0.6)
    )

    bottom_y = float("inf")

    # The -1 / +2 are for extra rows that are drawn only half at the top/bottom.
    first_item_idx = max(
        0, int(-self.scroll_offset // block_height - 1) * col_count
    )
    items_per_page = int(content_height // item_height + 2) * col_count
    last_item_idx = first_item_idx + items_per_page

    for item_idx, item in enumerate(self.current_display_content):
        x = content_x + (item_idx % col_count) * block_width
        y = content_y - (item_idx // col_count) * block_height - self.scroll_offset

        # Placement is updated for every item (even off-screen ones) so
        # mouse-hit testing stays correct while scrolling.
        item.update_placement(x, y, item_width, item_height)

        if first_item_idx <= item_idx < last_item_idx:
            # Only draw if the item is actually on screen.
            item.draw(highlighted=item.hits(self.mouse_x, self.mouse_y))

        bottom_y = min(y, bottom_y)
    # Used by _scroll_by() to clamp scrolling to the content's extent.
    self.scroll_offset_space_left = window_region.height - bottom_y
    self.scroll_offset_max = (
        self.scroll_offset - self.scroll_offset_space_left + 0.25 * block_height
    )

    bgl.glDisable(bgl.GL_BLEND)
|
||||
|
||||
def _draw_downloading(self, context):
|
||||
"""OpenGL drawing code for the DOWNLOADING_TEXTURE state."""
|
||||
|
||||
self._draw_text_on_colour(
|
||||
context, "Downloading texture from Blender Cloud", (0.0, 0.0, 0.2, 0.6)
|
||||
)
|
||||
|
||||
def _draw_checking_credentials(self, context):
|
||||
"""OpenGL drawing code for the CHECKING_CREDENTIALS state."""
|
||||
|
||||
self._draw_text_on_colour(
|
||||
context, "Checking login credentials", (0.0, 0.0, 0.2, 0.6)
|
||||
)
|
||||
|
||||
def _draw_initializing(self, context):
|
||||
"""OpenGL drawing code for the INITIALIZING state."""
|
||||
|
||||
self._draw_text_on_colour(context, "Initializing", (0.0, 0.0, 0.2, 0.6))
|
||||
|
||||
def _draw_text_on_colour(self, context, text: str, bgcolour):
    """Fills the window region with bgcolour and draws centred text over it."""
    content_height, content_width = self._window_size(context)

    # Blending is needed for the semi-transparent background colour.
    bgl.glEnable(bgl.GL_BLEND)

    draw.aabox((0, 0), (content_width, content_height), bgcolour)
    draw.text(
        (content_width * 0.5, content_height * 0.7), text, fsize=20, align="C"
    )

    bgl.glDisable(bgl.GL_BLEND)
|
||||
|
||||
def _window_size(self, context):
|
||||
window_region = self._window_region(context)
|
||||
content_width = window_region.width
|
||||
content_height = window_region.height
|
||||
return content_height, content_width
|
||||
|
||||
def _draw_exception(self, context):
    """OpenGL drawing code for the EXCEPTION state."""

    import textwrap

    content_height, content_width = self._window_size(context)

    bgl.glEnable(bgl.GL_BLEND)
    # Reddish overlay to signal an error state.
    draw.aabox((0, 0), (content_width, content_height), (0.2, 0.0, 0.0, 0.6))

    ex = self.async_task.exception()
    if isinstance(ex, pillar.UserNotLoggedInError):
        ex_msg = (
            "You are not logged in on Blender ID. Please log in at User Preferences, "
            "Add-ons, Blender ID Authentication."
        )
    else:
        ex_msg = str(ex)
        if not ex_msg:
            # Some exceptions stringify to ''; fall back to the type name.
            ex_msg = str(type(ex))
    text = "An error occurred:\n%s" % ex_msg
    lines = textwrap.wrap(text, width=100)

    draw.text((content_width * 0.1, content_height * 0.9), lines, fsize=16)

    bgl.glDisable(bgl.GL_BLEND)
|
||||
|
||||
def _draw_subscribe(self, context):
|
||||
self._draw_text_on_colour(
|
||||
context, "Click to subscribe to the Blender Cloud", (0.0, 0.0, 0.2, 0.6)
|
||||
)
|
||||
|
||||
def _draw_renew(self, context):
|
||||
self._draw_text_on_colour(
|
||||
context,
|
||||
"Click to renew your Blender Cloud subscription",
|
||||
(0.0, 0.0, 0.2, 0.6),
|
||||
)
|
||||
|
||||
def get_clicked(self) -> typing.Optional[menu_item_mod.MenuItem]:
    """Returns the menu item under the mouse cursor, or None when nothing is hit."""

    mx, my = self.mouse_x, self.mouse_y
    return next(
        (item for item in self.current_display_content if item.hits(mx, my)),
        None,
    )
|
||||
|
||||
def handle_item_selection(self, context, item: menu_item_mod.MenuItem):
    """Called when the user clicks on a menu item that doesn't represent a folder.

    Starts an async task that downloads the texture/HDRi files of the
    clicked node and loads them as image datablocks tagged with the
    Blender Cloud node/file metadata.
    """

    from pillarsdk.utils import sanitize_filename

    self.clear_images()
    self._state = "DOWNLOADING_TEXTURE"

    # Mirror the Cloud folder structure on disk, with sanitised names.
    node_path_components = (
        node["name"] for node in self.path_stack if node is not None
    )
    local_path_components = [
        sanitize_filename(comp) for comp in node_path_components
    ]

    top_texture_directory = bpy.path.abspath(context.scene.local_texture_dir)
    local_path = os.path.join(top_texture_directory, *local_path_components)
    meta_path = os.path.join(top_texture_directory, ".blender_cloud")

    self.log.info("Downloading texture %r to %s", item.node_uuid, local_path)
    self.log.debug("Metadata will be stored at %s", meta_path)

    file_paths = []
    select_dblock = None
    node = item.node

    def texture_downloading(file_path, *_):
        self.log.info("Texture downloading to %s", file_path)

    def texture_downloaded(file_path, file_desc, map_type):
        nonlocal select_dblock

        self.log.info("Texture downloaded to %r.", file_path)

        # Keep paths relative when the texture dir itself is blend-relative.
        if context.scene.local_texture_dir.startswith("//"):
            file_path = bpy.path.relpath(file_path)

        image_dblock = bpy.data.images.load(filepath=file_path)
        image_dblock["bcloud_file_uuid"] = file_desc["_id"]
        image_dblock["bcloud_node_uuid"] = node["_id"]
        image_dblock["bcloud_node_type"] = node["node_type"]
        image_dblock["bcloud_node"] = pillar.node_to_id(node)

        if node["node_type"] == "hdri":
            # All HDRi variations should use the same image datablock, hence one name.
            image_dblock.name = node["name"]
        else:
            # All texture variations are loaded at once, and thus need the map type in the name.
            image_dblock.name = "%s-%s" % (node["name"], map_type)

        # Select the image in the image editor (if the context is right).
        # Just set the first image we download, preferring the colour map.
        if context.area.type == "IMAGE_EDITOR":
            if select_dblock is None or file_desc.map_type == "color":
                select_dblock = image_dblock
                context.space_data.image = select_dblock

        file_paths.append(file_path)

    def texture_download_completed(_):
        self.log.info(
            "Texture download complete, inspect:\n%s", "\n".join(file_paths)
        )
        self._state = "QUIT"

    # For HDRi nodes: only download the first file.
    download_node = pillarsdk.Node.new(node)
    if node["node_type"] == "hdri":
        download_node.properties.files = [download_node.properties.files[0]]

    signalling_future = asyncio.Future()
    self._new_async_task(
        pillar.download_texture(
            download_node,
            local_path,
            metadata_directory=meta_path,
            texture_loading=texture_downloading,
            texture_loaded=texture_downloaded,
            future=signalling_future,
        )
    )
    self.async_task.add_done_callback(texture_download_completed)
|
||||
|
||||
def open_browser_subscribe(self, *, renew: bool):
    """Opens the Blender Cloud join/renew page in the user's web browser."""
    import webbrowser

    page = "renew" if renew else "join"
    webbrowser.open_new_tab("https://cloud.blender.org/%s" % page)
    self.report({"INFO"}, "We just started a browser for you.")
|
||||
|
||||
def _scroll_smooth(self):
|
||||
diff = self.scroll_offset_target - self.scroll_offset
|
||||
if diff == 0:
|
||||
return
|
||||
|
||||
if abs(round(diff)) < 1:
|
||||
self.scroll_offset = self.scroll_offset_target
|
||||
return
|
||||
|
||||
self.scroll_offset += diff * 0.5
|
||||
|
||||
def _scroll_by(self, amount, *, smooth=True):
|
||||
# Slow down scrolling up
|
||||
if smooth and amount < 0 and -amount > self.scroll_offset_space_left / 4:
|
||||
amount = -self.scroll_offset_space_left / 4
|
||||
|
||||
self.scroll_offset_target = min(
|
||||
0, max(self.scroll_offset_max, self.scroll_offset_target + amount)
|
||||
)
|
||||
|
||||
if not smooth:
|
||||
self._scroll_offset = self.scroll_offset_target
|
||||
|
||||
def _scroll_reset(self):
|
||||
self.scroll_offset_target = self.scroll_offset = 0
|
||||
|
||||
|
||||
class PILLAR_OT_switch_hdri(
    pillar.PillarOperatorMixin, async_loop.AsyncModalOperatorMixin, bpy.types.Operator
):
    """Replaces an HDRi image datablock's file with another resolution variation."""

    bl_idname = "pillar.switch_hdri"
    bl_label = "Switch with another variation"
    bl_description = (
        "Downloads the selected variation of an HDRi, " "replacing the current image"
    )

    log = logging.getLogger("bpy.ops.%s" % bl_idname)

    # Name of the bpy.data.images datablock whose file will be replaced.
    image_name: bpy.props.StringProperty(
        name="image_name", description="Name of the image block to replace"
    )

    # Pillar file UUID of the variation to download.
    file_uuid: bpy.props.StringProperty(
        name="file_uuid", description="File ID to download"
    )

    async def async_execute(self, context):
        """Entry point of the asynchronous operator."""

        self.report({"INFO"}, "Communicating with Blender Cloud")

        try:
            try:
                db_user = await self.check_credentials(
                    context, REQUIRED_ROLES_FOR_TEXTURE_BROWSER
                )
                user_id = db_user["_id"]
            except pillar.NotSubscribedToCloudError as ex:
                self._log_subscription_needed(can_renew=ex.can_renew)
                self._state = "QUIT"
                return
            except pillar.UserNotLoggedInError:
                self.log.exception("Error checking/refreshing credentials.")
                self.report({"ERROR"}, "Please log in on Blender ID first.")
                self._state = "QUIT"
                return

            if not user_id:
                raise pillar.UserNotLoggedInError()

            await self.download_and_replace(context)
        except Exception as ex:
            # Catch-all so the modal operator always reaches the QUIT state
            # instead of hanging on an unexpected error.
            self.log.exception("Unexpected exception caught.")
            self.report({"ERROR"}, "Unexpected error %s: %s" % (type(ex), ex))

        self._state = "QUIT"

    async def download_and_replace(self, context):
        """Downloads the chosen variation and repoints the image datablock at it."""
        self._state = "DOWNLOADING_TEXTURE"

        current_image = bpy.data.images[self.image_name]
        node = current_image["bcloud_node"]
        filename = "%s.taken_from_file" % pillar.sanitize_filename(node["name"])

        local_path = os.path.dirname(bpy.path.abspath(current_image.filepath))
        top_texture_directory = bpy.path.abspath(context.scene.local_texture_dir)
        meta_path = os.path.join(top_texture_directory, ".blender_cloud")

        file_uuid = self.file_uuid
        # Look up the resolution label of the variation we're switching to.
        resolution = next(
            file_ref["resolution"]
            for file_ref in node["properties"]["files"]
            if file_ref["file"] == file_uuid
        )

        my_log = self.log
        my_log.info("Downloading file %r-%s to %s", file_uuid, resolution, local_path)
        my_log.debug("Metadata will be stored at %s", meta_path)

        def file_loading(file_path, file_desc, map_type):
            my_log.info(
                "Texture downloading to %s (%s)",
                file_path,
                utils.sizeof_fmt(file_desc["length"]),
            )

        async def file_loaded(file_path, file_desc, map_type):
            # Keep paths relative when the texture dir is blend-relative.
            if context.scene.local_texture_dir.startswith("//"):
                file_path = bpy.path.relpath(file_path)

            my_log.info("Texture downloaded to %s", file_path)
            current_image["bcloud_file_uuid"] = file_uuid
            current_image.filepath = (
                file_path  # This automatically reloads the image from disk.
            )

            # This forces users of the image to update.
            for datablocks in bpy.data.user_map({current_image}).values():
                for datablock in datablocks:
                    datablock.update_tag()

        await pillar.download_file_by_uuid(
            file_uuid,
            local_path,
            meta_path,
            filename=filename,
            map_type=resolution,
            file_loading=file_loading,
            file_loaded_sync=file_loaded,
            future=self.signalling_future,
        )

        self.report({"INFO"}, "Image download complete")
|
||||
|
||||
|
||||
# Store (keymap, keymap_item) pairs here so unregister() can remove them again.
addon_keymaps = []
|
||||
|
||||
|
||||
def image_editor_menu(self, context):
    """Draws the 'Get image from Blender Cloud' entry in the Image menu."""
    self.layout.operator(
        BlenderCloudBrowser.bl_idname,
        text="Get image from Blender Cloud",
        icon_value=blender.icon("CLOUD"),
    )
|
||||
|
||||
|
||||
def hdri_download_panel__image_editor(self, context):
    """Draws the HDRi variation switcher in the image editor's properties panel."""
    _hdri_download_panel(self, context.edit_image)
|
||||
|
||||
|
||||
def hdri_download_panel__node_editor(self, context):
    """Draws the HDRi variation switcher in the node editor, for image nodes."""
    active_node = context.active_node
    # Guard: context.active_node can be None (no node selected), which would
    # raise AttributeError on '.type'.
    if active_node is None or active_node.type not in {"TEX_ENVIRONMENT", "TEX_IMAGE"}:
        return

    _hdri_download_panel(self, active_node.image)
|
||||
|
||||
|
||||
def _hdri_download_panel(self, current_image):
    """Draws the HDRi variation selector + 'Replace' button for the given image.

    Only draws for images that were downloaded from Blender Cloud as HDRi
    (identified by their custom 'bcloud_*' properties).
    """
    if not current_image:
        return
    if "bcloud_node_type" not in current_image:
        return
    if current_image["bcloud_node_type"] != "hdri":
        return
    try:
        current_variation = current_image["bcloud_file_uuid"]
    except KeyError:
        log.warning(
            "Image %r has a bcloud_node_type but no bcloud_file_uuid property.",
            current_image.name,
        )
        return

    row = self.layout.row(align=True).split(factor=0.3)
    row.label(text="HDRi", icon_value=blender.icon("CLOUD"))
    row.prop(current_image, "hdri_variation", text="")

    # Only offer 'Replace' when a different variation than the current one is chosen.
    if current_image.hdri_variation != current_variation:
        props = row.operator(
            PILLAR_OT_switch_hdri.bl_idname, text="Replace", icon="FILE_REFRESH"
        )
        props.image_name = current_image.name
        props.file_uuid = current_image.hdri_variation
|
||||
|
||||
|
||||
# Storage for variation labels, as the strings in EnumProperty items
# MUST be kept in Python memory; otherwise Blender may read freed strings.
variation_label_storage = {}
|
||||
|
||||
|
||||
def hdri_variation_choices(self, context):
    """EnumProperty items callback: lists the HDRi variations of the shown image."""
    area_type = context.area.type
    if area_type == "IMAGE_EDITOR":
        image = context.edit_image
    elif area_type == "NODE_EDITOR":
        image = context.active_node.image
    else:
        return []

    if "bcloud_node" not in image:
        return []

    file_docs = image["bcloud_node"]["properties"]["files"]
    # Keep the label strings referenced from Python; Blender does not copy
    # EnumProperty item strings.
    for file_doc in file_docs:
        variation_label_storage[file_doc["resolution"]] = file_doc["resolution"]

    return [(fd["file"], fd["resolution"], "") for fd in file_docs]
|
||||
|
||||
|
||||
def register():
    """Registers operators, menu entries, panels, the Image property and hotkeys."""
    bpy.utils.register_class(BlenderCloudBrowser)
    bpy.utils.register_class(PILLAR_OT_switch_hdri)
    bpy.types.IMAGE_MT_image.prepend(image_editor_menu)
    bpy.types.IMAGE_PT_image_properties.append(hdri_download_panel__image_editor)
    bpy.types.NODE_PT_active_node_properties.append(hdri_download_panel__node_editor)

    # HDRi resolution switcher/chooser.
    # TODO: when an image is selected, switch this property to its current resolution.
    bpy.types.Image.hdri_variation = bpy.props.EnumProperty(
        name="HDRi variations",
        items=hdri_variation_choices,
        description="Select a variation with which to replace this image",
    )

    # handle the keymap
    wm = bpy.context.window_manager
    kc = wm.keyconfigs.addon
    if not kc:
        # Happens when Blender runs in background mode; hotkeys are useless then.
        print("No addon key configuration space found, so no custom hotkeys added.")
        return

    km = kc.keymaps.new(name="Screen")
    kmi = km.keymap_items.new(
        "pillar.browser", "A", "PRESS", ctrl=True, shift=True, alt=True
    )
    addon_keymaps.append((km, kmi))
|
||||
|
||||
|
||||
def unregister():
    """Removes everything register() added."""
    # handle the keymap
    for km, kmi in addon_keymaps:
        km.keymap_items.remove(kmi)
    addon_keymaps.clear()

    if hasattr(bpy.types.Image, "hdri_variation"):
        del bpy.types.Image.hdri_variation

    bpy.types.IMAGE_MT_image.remove(image_editor_menu)
    bpy.types.IMAGE_PT_image_properties.remove(hdri_download_panel__image_editor)
    bpy.types.NODE_PT_active_node_properties.remove(hdri_download_panel__node_editor)
    bpy.utils.unregister_class(BlenderCloudBrowser)
    bpy.utils.unregister_class(PILLAR_OT_switch_hdri)
|
119
blender_cloud/texture_browser/draw.py
Normal file
119
blender_cloud/texture_browser/draw.py
Normal file
@ -0,0 +1,119 @@
|
||||
"""OpenGL drawing code for the texture browser.
|
||||
|
||||
Requires Blender 2.80 or newer.
|
||||
"""
|
||||
|
||||
import typing
|
||||
|
||||
import bgl
|
||||
import blf
|
||||
import bpy
|
||||
import gpu
|
||||
from gpu_extras.batch import batch_for_shader
|
||||
|
||||
# In background mode there is no OpenGL context, so shaders cannot be built.
if bpy.app.background:
    shader = None
    texture_shader = None
else:
    shader = gpu.shader.from_builtin("2D_UNIFORM_COLOR")
    texture_shader = gpu.shader.from_builtin("2D_IMAGE")
|
||||
|
||||
# Type aliases for 2-, 3- and 4-component float tuples (positions, colours).
Float2 = typing.Tuple[float, float]
Float3 = typing.Tuple[float, float, float]
Float4 = typing.Tuple[float, float, float, float]
|
||||
|
||||
|
||||
def text(
    pos2d: Float2,
    display_text: typing.Union[str, typing.List[str]],
    rgba: Float4 = (1.0, 1.0, 1.0, 1.0),
    fsize=12,
    align="L",
):
    """Draw text with the top-left corner at 'pos2d'.

    :param display_text: a string (split on newlines) or a list of lines.
    :param align: 'L' = left of pos2d, 'C' = centred, 'R' = right.
    :return: (maxwidth, maxheight) of the drawn block of text.
    """

    dpi = bpy.context.preferences.system.dpi
    gap = 12
    x_pos, y_pos = pos2d
    font_id = 0
    blf.size(font_id, fsize, dpi)

    # Compute the height of one line.
    mwidth, mheight = blf.dimensions(font_id, "Tp")  # Use high and low letters.
    mheight *= 1.5

    # Split text into lines.
    if isinstance(display_text, str):
        mylines = display_text.split("\n")
    else:
        mylines = display_text
    maxwidth = 0
    maxheight = len(mylines) * mheight

    for idx, line in enumerate(mylines):
        text_width, text_height = blf.dimensions(font_id, line)
        if align == "C":
            newx = x_pos - text_width / 2
        elif align == "R":
            newx = x_pos - text_width - gap
        else:
            newx = x_pos

        # Draw, stepping one line height down per line.
        blf.position(font_id, newx, y_pos - mheight * idx, 0)
        blf.color(font_id, rgba[0], rgba[1], rgba[2], rgba[3])
        blf.draw(font_id, " " + line)

        # saves max width
        if maxwidth < text_width:
            maxwidth = text_width

    return maxwidth, maxheight
|
||||
|
||||
|
||||
def aabox(v1: Float2, v2: Float2, rgba: Float4):
    """Draw an axis-aligned box between corners v1 and v2 in the given colour."""
    # Corners in triangle-fan order: BL, TL, TR, BR.
    coords = [
        (v1[0], v1[1]),
        (v1[0], v2[1]),
        (v2[0], v2[1]),
        (v2[0], v1[1]),
    ]
    shader.bind()
    shader.uniform_float("color", rgba)

    batch = batch_for_shader(shader, "TRI_FAN", {"pos": coords})
    batch.draw(shader)
|
||||
|
||||
|
||||
def aabox_with_texture(v1: Float2, v2: Float2):
    """Draw an axis-aligned box with the texture bound to GL texture slot 0."""
    # Corners in triangle-fan order: BL, TL, TR, BR.
    coords = [
        (v1[0], v1[1]),
        (v1[0], v2[1]),
        (v2[0], v2[1]),
        (v2[0], v1[1]),
    ]
    texture_shader.bind()
    # Sample from texture unit 0; see bind_texture().
    texture_shader.uniform_int("image", 0)

    batch = batch_for_shader(
        texture_shader,
        "TRI_FAN",
        {
            "pos": coords,
            "texCoord": ((0, 0), (0, 1), (1, 1), (1, 0)),
        },
    )
    batch.draw(texture_shader)
|
||||
|
||||
|
||||
def bind_texture(texture: bpy.types.Image):
    """Bind a Blender image to GL texture unit 0 for aabox_with_texture()."""
    bgl.glActiveTexture(bgl.GL_TEXTURE0)
    bgl.glBindTexture(bgl.GL_TEXTURE_2D, texture.bindcode)
|
||||
|
||||
|
||||
def load_texture(texture: bpy.types.Image) -> int:
    """Load the image into OpenGL; returns the GL error code (0 on success)."""
    return texture.gl_load()
|
BIN
blender_cloud/texture_browser/icons/error.png
Normal file
BIN
blender_cloud/texture_browser/icons/error.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 11 KiB |
Before Width: | Height: | Size: 16 KiB After Width: | Height: | Size: 16 KiB |
Before Width: | Height: | Size: 2.4 KiB After Width: | Height: | Size: 2.4 KiB |
209
blender_cloud/texture_browser/menu_item.py
Normal file
209
blender_cloud/texture_browser/menu_item.py
Normal file
@ -0,0 +1,209 @@
|
||||
import logging
|
||||
import os.path
|
||||
|
||||
import bpy
|
||||
import bgl
|
||||
|
||||
import pillarsdk
|
||||
from . import nodes
|
||||
|
||||
# Blender 2.79 and older need the legacy (glBegin-style) drawing module.
if bpy.app.version < (2, 80):
    from . import draw_27 as draw
else:
    from . import draw
|
||||
|
||||
|
||||
# Directory with the bundled menu icons (folder, spinner, error).
library_icons_path = os.path.join(os.path.dirname(__file__), "icons")

# Pixel dimensions of the thumbnails drawn in the browser grid.
ICON_WIDTH = 128
ICON_HEIGHT = 128
|
||||
|
||||
|
||||
class MenuItem:
    """GUI menu item for the 3D View GUI."""

    # Pixel spacing between the item's bounding box and its icon/text.
    icon_margin_x = 4
    icon_margin_y = 4
    text_margin_x = 6

    text_size = 12
    text_size_small = 10

    # Placeholder icons used when an item has no (loaded) thumbnail.
    DEFAULT_ICONS = {
        "FOLDER": os.path.join(library_icons_path, "folder.png"),
        "SPINNER": os.path.join(library_icons_path, "spinner.png"),
        "ERROR": os.path.join(library_icons_path, "error.png"),
    }

    # Node types that are drawn and behave as folders (clicking descends).
    FOLDER_NODE_TYPES = {
        "group_texture",
        "group_hdri",
        nodes.UpNode.NODE_TYPE,
        nodes.ProjectNode.NODE_TYPE,
    }
    SUPPORTED_NODE_TYPES = {"texture", "hdri"}.union(FOLDER_NODE_TYPES)
|
||||
|
||||
def __init__(self, node, file_desc, thumb_path: str, label_text):
    """Create a menu item for the given pillar node.

    :param node: pillarsdk.Node; its 'node_type' key determines the kind of item.
    :param file_desc: pillarsdk.File object, or None for folder-like nodes.
    :param thumb_path: image file path, or one of the DEFAULT_ICONS keys
        ('FOLDER'/'SPINNER'/'ERROR').
    :param label_text: text drawn with the thumbnail.
    :raises TypeError: when the node's type is not in SUPPORTED_NODE_TYPES.
    """
    self.log = logging.getLogger("%s.MenuItem" % __name__)
    if node["node_type"] not in self.SUPPORTED_NODE_TYPES:
        self.log.info("Invalid node type in node: %s", node)
        raise TypeError(
            "Node of type %r not supported; supported are %r."
            % (node["node_type"], self.SUPPORTED_NODE_TYPES)
        )

    assert isinstance(node, pillarsdk.Node), "wrong type for node: %r" % type(node)
    assert isinstance(node["_id"], str), 'wrong type for node["_id"]: %r' % type(
        node["_id"]
    )
    self.node = node  # pillarsdk.Node, contains 'node_type' key to indicate type
    self.file_desc = file_desc  # pillarsdk.File object, or None if a 'folder' node.
    self.label_text = label_text
    self.small_text = self._small_text_from_node()
    self._thumb_path = ""
    self.icon = None
    self._is_folder = node["node_type"] in self.FOLDER_NODE_TYPES
    self._is_spinning = False

    # Determine sorting order.
    # by default, sort all the way at the end and folders first.
    self._order = 0 if self._is_folder else 10000
    if node and node.properties and node.properties.order is not None:
        self._order = node.properties.order

    # Goes through the thumb_path setter, which also loads the icon image.
    self.thumb_path = thumb_path

    # Updated when drawing the image
    self.x = 0
    self.y = 0
    self.width = 0
    self.height = 0
|
||||
|
||||
def _small_text_from_node(self) -> str:
|
||||
"""Return the components of the texture (i.e. which map types are available)."""
|
||||
|
||||
if not self.node:
|
||||
return ""
|
||||
|
||||
try:
|
||||
node_files = self.node.properties.files
|
||||
except AttributeError:
|
||||
# Happens for nodes that don't have .properties.files.
|
||||
return ""
|
||||
if not node_files:
|
||||
return ""
|
||||
|
||||
map_types = {f.map_type for f in node_files if f.map_type}
|
||||
map_types.discard("color") # all textures have colour
|
||||
if not map_types:
|
||||
return ""
|
||||
return ", ".join(sorted(map_types))
|
||||
|
||||
def sort_key(self):
    """Key for sorting lists of MenuItems.

    Sorts primarily on the explicit order (folders default before files),
    with the label text as alphabetical tie-breaker.
    """
    primary = self._order
    secondary = self.label_text
    return primary, secondary
|
||||
|
||||
@property
def thumb_path(self) -> str:
    # Path of the currently shown thumbnail image ("" when there is none).
    return self._thumb_path

@thumb_path.setter
def thumb_path(self, new_thumb_path: str):
    # "SPINNER" is the magic value for "still loading"; remember that state
    # so is_spinning can report it to the UI.
    self._is_spinning = new_thumb_path == "SPINNER"

    # Magic values ("FOLDER", "SPINNER", "ERROR") map to bundled icons;
    # any other value is taken as a literal file path.
    self._thumb_path = self.DEFAULT_ICONS.get(new_thumb_path, new_thumb_path)
    if self._thumb_path:
        # Load as a Blender image datablock so it can be drawn on screen.
        self.icon = bpy.data.images.load(filepath=self._thumb_path)
    else:
        self.icon = None
|
||||
|
||||
@property
def node_uuid(self) -> str:
    # The Pillar ID of the node this menu item represents.
    return self.node["_id"]
|
||||
|
||||
def represents(self, node) -> bool:
    """Return True iff this MenuItem stands for the given node (same ID)."""
    return node["_id"] == self.node_uuid
|
||||
|
||||
def update(self, node, file_desc, thumb_path: str, label_text=None):
    """Refresh this MenuItem with updated information about its node.

    :param node: updated pillarsdk.Node; must carry the same "_id" as the
        node this MenuItem already represents.
    :param file_desc: pillarsdk.File object, or None if a 'folder' node.
    :param thumb_path: new thumbnail path, or a DEFAULT_ICONS key.
    :param label_text: new label text, or None to keep the current label.
    :raises ValueError: when the given node has a different ID.
    """
    # We can get updated information about our Node, but a MenuItem should
    # always represent one node, and it shouldn't be shared between nodes.
    if self.node_uuid != node["_id"]:
        raise ValueError(
            "Don't change the node ID this MenuItem reflects, "
            "just create a new one."
        )
    self.node = node
    self.file_desc = file_desc  # pillarsdk.File object, or None if a 'folder' node.
    self.thumb_path = thumb_path

    if label_text is not None:
        self.label_text = label_text

    if thumb_path == "ERROR":
        # Fixed user-visible typo: previously read "This open is broken".
        self.small_text = "This item is broken"
    else:
        self.small_text = self._small_text_from_node()
|
||||
|
||||
@property
def is_folder(self) -> bool:
    # True when this item navigates into a folder-type node (see FOLDER_NODE_TYPES).
    return self._is_folder
|
||||
|
||||
@property
def is_spinning(self) -> bool:
    # True while the "loading" spinner thumbnail is being shown.
    return self._is_spinning
|
||||
|
||||
def update_placement(self, x, y, width, height):
    """Store the screen-space rectangle this menu item occupies.

    Called before drawing; draw() and hits() read these values.
    (Previous docstring wrongly described this as an OpenGL draw call.)
    """
    self.x = x
    self.y = y
    self.width = width
    self.height = height
|
||||
|
||||
def draw(self, highlighted: bool):
    """Draw this menu item (background box, thumbnail, and labels) with OpenGL."""
    bgl.glEnable(bgl.GL_BLEND)
    # Slightly lighter background when the mouse hovers this item.
    if highlighted:
        color = (0.555, 0.555, 0.555, 0.8)
    else:
        color = (0.447, 0.447, 0.447, 0.8)

    draw.aabox((self.x, self.y), (self.x + self.width, self.y + self.height), color)

    texture = self.icon
    if texture:
        err = draw.load_texture(texture)
        assert not err, "OpenGL error: %i" % err

    # ------ TEXTURE ---------#
    if texture:
        draw.bind_texture(texture)
        bgl.glBlendFunc(bgl.GL_SRC_ALPHA, bgl.GL_ONE_MINUS_SRC_ALPHA)

        draw.aabox_with_texture(
            (self.x + self.icon_margin_x, self.y),
            (self.x + self.icon_margin_x + ICON_WIDTH, self.y + ICON_HEIGHT),
        )
    bgl.glDisable(bgl.GL_BLEND)

    if texture:
        # Release the GL texture; it is re-loaded on the next draw.
        texture.gl_free()

    # draw some text
    text_x = self.x + self.icon_margin_x + ICON_WIDTH + self.text_margin_x
    # Vertically centre the main label on the icon.
    text_y = self.y + ICON_HEIGHT * 0.5 - 0.25 * self.text_size
    draw.text((text_x, text_y), self.label_text, fsize=self.text_size)
    draw.text(
        (text_x, self.y + 0.5 * self.text_size_small),
        self.small_text,
        fsize=self.text_size_small,
        rgba=(1.0, 1.0, 1.0, 0.5),
    )
|
||||
|
||||
def hits(self, mouse_x: int, mouse_y: int) -> bool:
    """Whether the mouse position lies strictly inside this item's rectangle."""
    inside_horizontally = self.x < mouse_x < self.x + self.width
    inside_vertically = self.y < mouse_y < self.y + self.height
    return inside_horizontally and inside_vertically
|
28
blender_cloud/texture_browser/nodes.py
Normal file
28
blender_cloud/texture_browser/nodes.py
Normal file
@ -0,0 +1,28 @@
|
||||
import pillarsdk
|
||||
|
||||
|
||||
class SpecialFolderNode(pillarsdk.Node):
    """Base class for synthetic folder-like nodes created purely for the UI."""

    # Overridden by concrete subclasses (UpNode, ProjectNode).
    NODE_TYPE = "SPECIAL"
|
||||
|
||||
|
||||
class UpNode(SpecialFolderNode):
    """Synthetic node for the "go up one level" entry in the browser."""

    NODE_TYPE = "UP"

    def __init__(self):
        super().__init__()
        # Fixed sentinel ID and type so the UI can recognise this entry.
        self["_id"] = "UP"
        self["node_type"] = self.NODE_TYPE
|
||||
|
||||
|
||||
class ProjectNode(SpecialFolderNode):
    """Synthetic node wrapping a pillarsdk.Project so it can be browsed as a folder."""

    NODE_TYPE = "PROJECT"

    def __init__(self, project):
        """Build the node from a Project document.

        :param project: pillarsdk.Project whose fields are copied onto this node.
        """
        super().__init__()

        assert isinstance(
            project, pillarsdk.Project
        ), "wrong type for project: %r" % type(project)

        # Copy all project fields onto this node, then mark it as a project.
        self.merge(project.to_dict())
        self["node_type"] = self.NODE_TYPE
|
@ -16,24 +16,27 @@
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
import json
|
||||
import pathlib
|
||||
import typing
|
||||
from typing import Any, Dict, Optional, Tuple
|
||||
|
||||
|
||||
def sizeof_fmt(num: int, suffix='B') -> str:
|
||||
def sizeof_fmt(num: int, suffix="B") -> str:
|
||||
"""Returns a human-readable size.
|
||||
|
||||
Source: http://stackoverflow.com/a/1094933/875379
|
||||
"""
|
||||
|
||||
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
|
||||
for unit in ["", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"]:
|
||||
if abs(num) < 1024:
|
||||
return '%.1f %s%s' % (num, unit, suffix)
|
||||
num /= 1024
|
||||
return "%.1f %s%s" % (num, unit, suffix)
|
||||
num //= 1024
|
||||
|
||||
return '%.1f Yi%s' % (num, suffix)
|
||||
return "%.1f Yi%s" % (num, suffix)
|
||||
|
||||
|
||||
def find_in_path(path: pathlib.Path, filename: str) -> pathlib.Path:
|
||||
def find_in_path(path: pathlib.Path, filename: str) -> Optional[pathlib.Path]:
|
||||
"""Performs a breadth-first search for the filename.
|
||||
|
||||
Returns the path that contains the file, or None if not found.
|
||||
@ -64,39 +67,43 @@ def find_in_path(path: pathlib.Path, filename: str) -> pathlib.Path:
|
||||
return None
|
||||
|
||||
|
||||
def pyside_cache(propname):
|
||||
# Mapping from (module name, function name) to the last value returned by that function.
|
||||
_pyside_cache: Dict[Tuple[str, str], Any] = {}
|
||||
|
||||
|
||||
def pyside_cache(wrapped):
|
||||
"""Decorator, stores the result of the decorated callable in Python-managed memory.
|
||||
|
||||
This is to work around the warning at
|
||||
https://www.blender.org/api/blender_python_api_master/bpy.props.html#bpy.props.EnumProperty
|
||||
"""
|
||||
|
||||
if callable(propname):
|
||||
raise TypeError('Usage: pyside_cache("property_name")')
|
||||
|
||||
def decorator(wrapped):
|
||||
"""Stores the result of the callable in Python-managed memory.
|
||||
|
||||
This is to work around the warning at
|
||||
https://www.blender.org/api/blender_python_api_master/bpy.props.html#bpy.props.EnumProperty
|
||||
"""
|
||||
|
||||
import functools
|
||||
|
||||
@functools.wraps(wrapped)
|
||||
# We can't use (*args, **kwargs), because EnumProperty explicitly checks
|
||||
# for the number of fixed positional arguments.
|
||||
def wrapper(self, context):
|
||||
def decorator(self, context):
|
||||
result = None
|
||||
try:
|
||||
result = wrapped(self, context)
|
||||
return result
|
||||
finally:
|
||||
rna_type, rna_info = getattr(self.bl_rna, propname)
|
||||
rna_info['_cached_result'] = result
|
||||
return wrapper
|
||||
_pyside_cache[wrapped.__module__, wrapped.__name__] = result
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def redraw(self, context):
|
||||
if context.area is None:
|
||||
return
|
||||
context.area.tag_redraw()
|
||||
|
||||
|
||||
class JSONEncoder(json.JSONEncoder):
|
||||
"""JSON encoder with support for some Blender types."""
|
||||
|
||||
def default(self, o):
|
||||
if o.__class__.__name__ == "IDPropertyGroup" and hasattr(o, "to_dict"):
|
||||
return o.to_dict()
|
||||
return super().default(o)
|
||||
|
@ -37,26 +37,43 @@ def load_wheel(module_name, fname_prefix):
|
||||
try:
|
||||
module = __import__(module_name)
|
||||
except ImportError as ex:
|
||||
log.debug('Unable to import %s directly, will try wheel: %s',
|
||||
module_name, ex)
|
||||
log.debug("Unable to import %s directly, will try wheel: %s", module_name, ex)
|
||||
else:
|
||||
log.debug('Was able to load %s from %s, no need to load wheel %s',
|
||||
module_name, module.__file__, fname_prefix)
|
||||
log.debug(
|
||||
"Was able to load %s from %s, no need to load wheel %s",
|
||||
module_name,
|
||||
module.__file__,
|
||||
fname_prefix,
|
||||
)
|
||||
return
|
||||
|
||||
path_pattern = os.path.join(my_dir, '%s*.whl' % fname_prefix)
|
||||
sys.path.append(wheel_filename(fname_prefix))
|
||||
module = __import__(module_name)
|
||||
log.debug("Loaded %s from %s", module_name, module.__file__)
|
||||
|
||||
|
||||
def wheel_filename(fname_prefix: str) -> str:
|
||||
path_pattern = os.path.join(my_dir, "%s*.whl" % fname_prefix)
|
||||
wheels = glob.glob(path_pattern)
|
||||
if not wheels:
|
||||
raise RuntimeError('Unable to find wheel at %r' % path_pattern)
|
||||
raise RuntimeError("Unable to find wheel at %r" % path_pattern)
|
||||
|
||||
# If there are multiple wheels that match, load the latest one.
|
||||
wheels.sort()
|
||||
sys.path.append(wheels[-1])
|
||||
module = __import__(module_name)
|
||||
log.debug('Loaded %s from %s', module_name, module.__file__)
|
||||
# If there are multiple wheels that match, load the last-modified one.
|
||||
# Alphabetical sorting isn't going to cut it since BAT 1.10 was released.
|
||||
def modtime(filename: str) -> int:
|
||||
return os.stat(filename).st_mtime
|
||||
|
||||
wheels.sort(key=modtime)
|
||||
return wheels[-1]
|
||||
|
||||
|
||||
def load_wheels():
|
||||
load_wheel('lockfile', 'lockfile')
|
||||
load_wheel('cachecontrol', 'CacheControl')
|
||||
load_wheel('pillarsdk', 'pillarsdk')
|
||||
load_wheel("blender_asset_tracer", "blender_asset_tracer")
|
||||
load_wheel("lockfile", "lockfile")
|
||||
load_wheel("cachecontrol", "CacheControl")
|
||||
load_wheel("pillarsdk", "pillarsdk")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
wheel = wheel_filename("blender_asset_tracer")
|
||||
print(f"Wheel: {wheel}")
|
||||
|
13
deploy-to-shared.sh
Executable file
13
deploy-to-shared.sh
Executable file
@ -0,0 +1,13 @@
|
||||
#!/bin/bash -e
# Build the add-on distribution ZIP and unpack it into /shared/software/addons.
# Runs with -e so any failing step (build, cd, unzip) aborts the deployment.

FULLNAME="$(python3 setup.py --fullname)"
# -r keeps backslashes in the (discarded) input literal; -p prints the prompt.
read -r -p "Press [ENTER] to deploy $FULLNAME to /shared" dummy

./clear_wheels.sh
python3 setup.py wheels bdist

DISTDIR="$(pwd)/dist"
cd /shared/software/addons
rm -vf blender_cloud/wheels/*.whl # remove obsolete wheel files
# Quote the archive path so spaces in the path or name cannot word-split it.
unzip -o "$DISTDIR/$FULLNAME.addon.zip"
|
@ -1,15 +1,17 @@
|
||||
# Primary requirements:
|
||||
-e git+https://github.com/sybrenstuvel/cachecontrol.git@sybren-filecache-delete-crash-fix#egg=CacheControl
|
||||
lockfile==0.12.2
|
||||
pillarsdk==1.6.1
|
||||
pillarsdk==1.8.0
|
||||
wheel==0.29.0
|
||||
blender-asset-tracer==1.11
|
||||
|
||||
# Secondary requirements:
|
||||
cffi==1.6.0
|
||||
cryptography==1.3.1
|
||||
idna==2.1
|
||||
asn1crypto==0.24.0
|
||||
cffi==1.11.2
|
||||
cryptography==2.1.4
|
||||
idna==2.6
|
||||
pyasn1==0.1.9
|
||||
pycparser==2.14
|
||||
pyOpenSSL==16.0.0
|
||||
pycparser==2.18
|
||||
pyOpenSSL==17.5.0
|
||||
requests==2.10.0
|
||||
six==1.10.0
|
||||
six==1.11.0
|
||||
|
179
setup.py
179
setup.py
@ -18,7 +18,6 @@
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
import glob
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
import subprocess
|
||||
@ -29,13 +28,20 @@ import zipfile
|
||||
from distutils import log
|
||||
from distutils.core import Command
|
||||
from distutils.command.bdist import bdist
|
||||
from distutils.command.install import install
|
||||
from distutils.command.install import install, INSTALL_SCHEMES
|
||||
from distutils.command.install_egg_info import install_egg_info
|
||||
from setuptools import setup, find_packages
|
||||
|
||||
requirement_re = re.compile('[><=]+')
|
||||
requirement_re = re.compile("[><=]+")
|
||||
sys.dont_write_bytecode = True
|
||||
|
||||
# Download wheels from pypi. The specific versions are taken from requirements.txt
|
||||
wheels = [
|
||||
"lockfile",
|
||||
"pillarsdk",
|
||||
"blender-asset-tracer",
|
||||
]
|
||||
|
||||
|
||||
def set_default_path(var, default):
|
||||
"""convert CLI-arguments (string) to Paths"""
|
||||
@ -51,35 +57,38 @@ class BuildWheels(Command):
|
||||
|
||||
description = "builds/downloads the dependencies as wheel files"
|
||||
user_options = [
|
||||
('wheels-path=', None, "wheel file installation path"),
|
||||
('deps-path=', None, "path in which dependencies are built"),
|
||||
('cachecontrol-path=', None, "subdir of deps-path containing CacheControl"),
|
||||
("wheels-path=", None, "wheel file installation path"),
|
||||
("deps-path=", None, "path in which dependencies are built"),
|
||||
("cachecontrol-path=", None, "subdir of deps-path containing CacheControl"),
|
||||
]
|
||||
|
||||
def initialize_options(self):
|
||||
self.wheels_path = None # path that will contain the installed wheels.
|
||||
self.deps_path = None # path in which dependencies are built.
|
||||
self.cachecontrol_path = None # subdir of deps_path containing CacheControl
|
||||
self.bat_path = None # subdir of deps_path containing Blender-Asset-Tracer
|
||||
|
||||
def finalize_options(self):
|
||||
self.my_path = pathlib.Path(__file__).resolve().parent
|
||||
package_path = self.my_path / self.distribution.get_name()
|
||||
|
||||
self.wheels_path = set_default_path(self.wheels_path, package_path / 'wheels')
|
||||
self.deps_path = set_default_path(self.deps_path, self.my_path / 'build/deps')
|
||||
self.cachecontrol_path = set_default_path(self.cachecontrol_path,
|
||||
self.deps_path / 'cachecontrol')
|
||||
self.wheels_path = set_default_path(self.wheels_path, package_path / "wheels")
|
||||
self.deps_path = set_default_path(self.deps_path, self.my_path / "build/deps")
|
||||
self.cachecontrol_path = set_default_path(
|
||||
self.cachecontrol_path, self.deps_path / "cachecontrol"
|
||||
)
|
||||
self.bat_path = self.deps_path / "bat"
|
||||
|
||||
def run(self):
|
||||
log.info('Storing wheels in %s', self.wheels_path)
|
||||
log.info("Storing wheels in %s", self.wheels_path)
|
||||
|
||||
# Parse the requirements.txt file
|
||||
requirements = {}
|
||||
with open(str(self.my_path / 'requirements.txt')) as reqfile:
|
||||
with open(str(self.my_path / "requirements.txt")) as reqfile:
|
||||
for line in reqfile.readlines():
|
||||
line = line.strip()
|
||||
|
||||
if not line or line.startswith('#'):
|
||||
if not line or line.startswith("#"):
|
||||
# comments are lines that start with # only
|
||||
continue
|
||||
|
||||
@ -90,43 +99,46 @@ class BuildWheels(Command):
|
||||
# log.info(' - %s = %s / %s', package, line, line_req[-1])
|
||||
|
||||
self.wheels_path.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Download lockfile, as there is a suitable wheel on pypi.
|
||||
if not list(self.wheels_path.glob('lockfile*.whl')):
|
||||
log.info('Downloading lockfile wheel')
|
||||
self.download_wheel(requirements['lockfile'])
|
||||
|
||||
# Download Pillar Python SDK from pypi.
|
||||
if not list(self.wheels_path.glob('pillarsdk*.whl')):
|
||||
log.info('Downloading Pillar Python SDK wheel')
|
||||
self.download_wheel(requirements['pillarsdk'])
|
||||
for package in wheels:
|
||||
pattern = package.replace("-", "_") + "*.whl"
|
||||
if list(self.wheels_path.glob(pattern)):
|
||||
continue
|
||||
self.download_wheel(requirements[package])
|
||||
|
||||
# Build CacheControl.
|
||||
if not list(self.wheels_path.glob('CacheControl*.whl')):
|
||||
log.info('Building CacheControl in %s', self.cachecontrol_path)
|
||||
if not list(self.wheels_path.glob("CacheControl*.whl")):
|
||||
log.info("Building CacheControl in %s", self.cachecontrol_path)
|
||||
# self.git_clone(self.cachecontrol_path,
|
||||
# 'https://github.com/ionrock/cachecontrol.git',
|
||||
# 'v%s' % requirements['CacheControl'][1])
|
||||
# FIXME: we need my clone until pull request #125 has been merged & released
|
||||
self.git_clone(self.cachecontrol_path,
|
||||
'https://github.com/sybrenstuvel/cachecontrol.git',
|
||||
'sybren-filecache-delete-crash-fix')
|
||||
self.git_clone(
|
||||
self.cachecontrol_path,
|
||||
"https://github.com/sybrenstuvel/cachecontrol.git",
|
||||
"sybren-filecache-delete-crash-fix",
|
||||
)
|
||||
self.build_copy_wheel(self.cachecontrol_path)
|
||||
|
||||
# Ensure that the wheels are added to the data files.
|
||||
self.distribution.data_files.append(
|
||||
('blender_cloud/wheels', (str(p) for p in self.wheels_path.glob('*.whl')))
|
||||
("blender_cloud/wheels", (str(p) for p in self.wheels_path.glob("*.whl")))
|
||||
)
|
||||
|
||||
def download_wheel(self, requirement):
|
||||
"""Downloads a wheel from PyPI and saves it in self.wheels_path."""
|
||||
|
||||
subprocess.check_call([
|
||||
'pip', 'download',
|
||||
'--no-deps',
|
||||
'--dest', str(self.wheels_path),
|
||||
requirement[0]
|
||||
])
|
||||
subprocess.check_call(
|
||||
[
|
||||
sys.executable,
|
||||
"-m",
|
||||
"pip",
|
||||
"download",
|
||||
"--no-deps",
|
||||
"--dest",
|
||||
str(self.wheels_path),
|
||||
requirement[0],
|
||||
]
|
||||
)
|
||||
|
||||
def git_clone(self, workdir: pathlib.Path, git_url: str, checkout: str = None):
|
||||
if workdir.exists():
|
||||
@ -135,24 +147,25 @@ class BuildWheels(Command):
|
||||
|
||||
workdir.mkdir(parents=True)
|
||||
|
||||
subprocess.check_call(['git', 'clone', git_url, str(workdir)],
|
||||
cwd=str(workdir.parent))
|
||||
subprocess.check_call(
|
||||
["git", "clone", git_url, str(workdir)], cwd=str(workdir.parent)
|
||||
)
|
||||
|
||||
if checkout:
|
||||
subprocess.check_call(['git', 'checkout', checkout],
|
||||
cwd=str(workdir))
|
||||
subprocess.check_call(["git", "checkout", checkout], cwd=str(workdir))
|
||||
|
||||
def build_copy_wheel(self, package_path: pathlib.Path):
|
||||
# Make sure no wheels exist yet, so that we know which one to copy later.
|
||||
to_remove = list((package_path / 'dist').glob('*.whl'))
|
||||
to_remove = list((package_path / "dist").glob("*.whl"))
|
||||
for fname in to_remove:
|
||||
fname.unlink()
|
||||
|
||||
subprocess.check_call([sys.executable, 'setup.py', 'bdist_wheel'],
|
||||
cwd=str(package_path))
|
||||
subprocess.check_call(
|
||||
[sys.executable, "setup.py", "bdist_wheel"], cwd=str(package_path)
|
||||
)
|
||||
|
||||
wheel = next((package_path / 'dist').glob('*.whl'))
|
||||
log.info('copying %s to %s', wheel, self.wheels_path)
|
||||
wheel = next((package_path / "dist").glob("*.whl"))
|
||||
log.info("copying %s to %s", wheel, self.wheels_path)
|
||||
shutil.copy(str(wheel), str(self.wheels_path))
|
||||
|
||||
|
||||
@ -162,11 +175,19 @@ class BlenderAddonBdist(bdist):
|
||||
|
||||
def initialize_options(self):
|
||||
super().initialize_options()
|
||||
self.formats = ['zip']
|
||||
self.plat_name = 'addon' # use this instead of 'linux-x86_64' or similar.
|
||||
self.formats = ["zip"]
|
||||
self.plat_name = "addon" # use this instead of 'linux-x86_64' or similar.
|
||||
self.fix_local_prefix()
|
||||
|
||||
def fix_local_prefix(self):
|
||||
"""Place data files in blender_cloud instead of local/blender_cloud."""
|
||||
for key in INSTALL_SCHEMES:
|
||||
if "data" not in INSTALL_SCHEMES[key]:
|
||||
continue
|
||||
INSTALL_SCHEMES[key]["data"] = "$base"
|
||||
|
||||
def run(self):
|
||||
self.run_command('wheels')
|
||||
self.run_command("wheels")
|
||||
super().run()
|
||||
|
||||
|
||||
@ -175,7 +196,7 @@ class BlenderAddonFdist(BlenderAddonBdist):
|
||||
"""Ensures that 'python setup.py fdist' creates a plain folder structure."""
|
||||
|
||||
user_options = [
|
||||
('dest-path=', None, 'addon installation path'),
|
||||
("dest-path=", None, "addon installation path"),
|
||||
]
|
||||
|
||||
def initialize_options(self):
|
||||
@ -189,12 +210,12 @@ class BlenderAddonFdist(BlenderAddonBdist):
|
||||
filepath = self.distribution.dist_files[0][2]
|
||||
|
||||
# if dest_path is not specified use the filename as the dest_path (minus the .zip)
|
||||
assert filepath.endswith('.zip')
|
||||
assert filepath.endswith(".zip")
|
||||
target_folder = self.dest_path or filepath[:-4]
|
||||
|
||||
print('Unzipping the package on {}.'.format(target_folder))
|
||||
print("Unzipping the package on {}.".format(target_folder))
|
||||
|
||||
with zipfile.ZipFile(filepath, 'r') as zip_ref:
|
||||
with zipfile.ZipFile(filepath, "r") as zip_ref:
|
||||
zip_ref.extractall(target_folder)
|
||||
|
||||
|
||||
@ -204,8 +225,8 @@ class BlenderAddonInstall(install):
|
||||
|
||||
def initialize_options(self):
|
||||
super().initialize_options()
|
||||
self.prefix = ''
|
||||
self.install_lib = ''
|
||||
self.prefix = ""
|
||||
self.install_lib = ""
|
||||
|
||||
|
||||
class AvoidEggInfo(install_egg_info):
|
||||
@ -220,30 +241,38 @@ class AvoidEggInfo(install_egg_info):
|
||||
|
||||
|
||||
setup(
|
||||
cmdclass={'bdist': BlenderAddonBdist,
|
||||
'fdist': BlenderAddonFdist,
|
||||
'install': BlenderAddonInstall,
|
||||
'install_egg_info': AvoidEggInfo,
|
||||
'wheels': BuildWheels},
|
||||
name='blender_cloud',
|
||||
description='The Blender Cloud addon allows browsing the Blender Cloud from Blender.',
|
||||
version='1.6.2',
|
||||
author='Sybren A. Stüvel',
|
||||
author_email='sybren@stuvel.eu',
|
||||
packages=find_packages('.'),
|
||||
data_files=[('blender_cloud', ['README.md', 'README-flamenco.md']),
|
||||
('blender_cloud/icons', glob.glob('blender_cloud/icons/*'))],
|
||||
cmdclass={
|
||||
"bdist": BlenderAddonBdist,
|
||||
"fdist": BlenderAddonFdist,
|
||||
"install": BlenderAddonInstall,
|
||||
"install_egg_info": AvoidEggInfo,
|
||||
"wheels": BuildWheels,
|
||||
},
|
||||
name="blender_cloud",
|
||||
description="The Blender Cloud addon allows browsing the Blender Cloud from Blender.",
|
||||
version="1.25",
|
||||
author="Sybren A. Stüvel",
|
||||
author_email="sybren@stuvel.eu",
|
||||
packages=find_packages("."),
|
||||
data_files=[
|
||||
("blender_cloud", ["README.md", "README-flamenco.md", "CHANGELOG.md"]),
|
||||
("blender_cloud/icons", glob.glob("blender_cloud/icons/*")),
|
||||
(
|
||||
"blender_cloud/texture_browser/icons",
|
||||
glob.glob("blender_cloud/texture_browser/icons/*"),
|
||||
),
|
||||
],
|
||||
scripts=[],
|
||||
url='https://developer.blender.org/diffusion/BCA/',
|
||||
license='GNU General Public License v2 or later (GPLv2+)',
|
||||
platforms='',
|
||||
url="https://developer.blender.org/diffusion/BCA/",
|
||||
license="GNU General Public License v2 or later (GPLv2+)",
|
||||
platforms="",
|
||||
classifiers=[
|
||||
'Intended Audience :: End Users/Desktop',
|
||||
'Operating System :: OS Independent',
|
||||
'Environment :: Plugins',
|
||||
'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 3.5',
|
||||
"Intended Audience :: End Users/Desktop",
|
||||
"Operating System :: OS Independent",
|
||||
"Environment :: Plugins",
|
||||
"License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 3.5",
|
||||
],
|
||||
zip_safe=False,
|
||||
)
|
||||
|
120
tests/test_path_replacement.py
Normal file
120
tests/test_path_replacement.py
Normal file
@ -0,0 +1,120 @@
|
||||
"""Unittests for blender_cloud.utils.
|
||||
|
||||
This unittest requires bpy to be importable, so build Blender as a module and install it
|
||||
into your virtualenv. See https://stuvel.eu/files/bconf2016/#/10 for notes how.
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import pathlib
|
||||
import unittest.mock
|
||||
|
||||
import pillarsdk.utils
|
||||
|
||||
from blender_cloud.flamenco import sdk
|
||||
|
||||
|
||||
class PathReplacementTest(unittest.TestCase):
    """Tests for Manager.replace_path() on each supported platform."""

    def setUp(self):
        # Manager document as the Flamenco server would return it, with
        # path_replacement variables defined for three platforms.
        self.test_manager = sdk.Manager(
            {
                "_created": datetime.datetime(
                    2017, 5, 31, 15, 12, 32, tzinfo=pillarsdk.utils.utc
                ),
                "_etag": "c39942ee4bcc4658adcc21e4bcdfb0ae",
                "_id": "592edd609837732a2a272c62",
                "_updated": datetime.datetime(
                    2017, 6, 8, 14, 51, 3, tzinfo=pillarsdk.utils.utc
                ),
                "description": 'Manager formerly known as "testman"',
                "job_types": {"sleep": {"vars": {}}},
                "name": '<script>alert("this is a manager")</script>',
                "owner": "592edd609837732a2a272c63",
                "path_replacement": {
                    "job_storage": {
                        "darwin": "/Volume/shared",
                        "linux": "/shared",
                        "windows": "s:/",
                    },
                    "render": {
                        "darwin": "/Volume/render/",
                        "linux": "/render/",
                        "windows": "r:/",
                    },
                    "longrender": {
                        "darwin": "/Volume/render/long",
                        "linux": "/render/long",
                        "windows": "r:/long",
                    },
                },
                "projects": ["58cbdd5698377322d95eb55e"],
                "service_account": "592edd609837732a2a272c60",
                "stats": {"nr_of_workers": 3},
                "url": "http://192.168.3.101:8083/",
                "user_groups": ["58cbdd5698377322d95eb55f"],
                "variables": {
                    "blender": {
                        "darwin": "/opt/myblenderbuild/blender",
                        "linux": "/home/sybren/workspace/build_linux/bin/blender "
                        "--enable-new-depsgraph --factory-startup",
                        "windows": "c:/temp/blender.exe",
                    }
                },
            }
        )

    def test_linux(self):
        # (expected result, input)
        test_paths = [
            ("/doesnotexistreally", "/doesnotexistreally"),
            ("{render}/agent327/scenes/A_01_03_B", "/render/agent327/scenes/A_01_03_B"),
            ("{job_storage}/render/agent327/scenes", "/shared/render/agent327/scenes"),
            ("{longrender}/agent327/scenes", "/render/long/agent327/scenes"),
        ]

        self._do_test(test_paths, "linux", pathlib.PurePosixPath)

    def test_windows(self):
        # (expected result, input)
        # Covers backslash paths and case-insensitive drive letters.
        test_paths = [
            ("c:/doesnotexistreally", "c:/doesnotexistreally"),
            ("c:/some/path", r"c:\some\path"),
            ("{render}/agent327/scenes/A_01_03_B", r"R:\agent327\scenes\A_01_03_B"),
            ("{render}/agent327/scenes/A_01_03_B", r"r:\agent327\scenes\A_01_03_B"),
            ("{render}/agent327/scenes/A_01_03_B", r"r:/agent327/scenes/A_01_03_B"),
            ("{job_storage}/render/agent327/scenes", "s:/render/agent327/scenes"),
            ("{longrender}/agent327/scenes", "r:/long/agent327/scenes"),
        ]

        self._do_test(test_paths, "windows", pathlib.PureWindowsPath)

    def test_darwin(self):
        # (expected result, input)
        test_paths = [
            ("/Volume/doesnotexistreally", "/Volume/doesnotexistreally"),
            (
                "{render}/agent327/scenes/A_01_03_B",
                r"/Volume/render/agent327/scenes/A_01_03_B",
            ),
            (
                "{job_storage}/render/agent327/scenes",
                "/Volume/shared/render/agent327/scenes",
            ),
            ("{longrender}/agent327/scenes", "/Volume/render/long/agent327/scenes"),
        ]

        self._do_test(test_paths, "darwin", pathlib.PurePosixPath)

    def _do_test(self, test_paths, platform, pathclass):
        """Run replace_path() on each input and compare against the expectation."""
        self.test_manager.PurePlatformPath = pathclass

        def mocked_system():
            return platform

        # Make the manager believe it is running on the platform under test.
        with unittest.mock.patch("platform.system", mocked_system):
            for expected_result, input_path in test_paths:
                as_path_instance = pathclass(input_path)
                self.assertEqual(
                    expected_result,
                    self.test_manager.replace_path(as_path_instance),
                    "for input %r on platform %s" % (as_path_instance, platform),
                )
|
@ -8,18 +8,18 @@ from blender_cloud import utils
|
||||
|
||||
class FindInPathTest(unittest.TestCase):
|
||||
def test_nonexistant_path(self):
|
||||
path = pathlib.Path('/doesnotexistreally')
|
||||
path = pathlib.Path("/doesnotexistreally")
|
||||
self.assertFalse(path.exists())
|
||||
self.assertIsNone(utils.find_in_path(path, 'jemoeder.blend'))
|
||||
self.assertIsNone(utils.find_in_path(path, "jemoeder.blend"))
|
||||
|
||||
def test_really_breadth_first(self):
|
||||
"""A depth-first test might find dir_a1/dir_a2/dir_a3/find_me.txt first."""
|
||||
|
||||
path = pathlib.Path(__file__).parent / 'test_really_breadth_first'
|
||||
found = utils.find_in_path(path, 'find_me.txt')
|
||||
self.assertEqual(path / 'dir_b1' / 'dir_b2' / 'find_me.txt', found)
|
||||
path = pathlib.Path(__file__).parent / "test_really_breadth_first"
|
||||
found = utils.find_in_path(path, "find_me.txt")
|
||||
self.assertEqual(path / "dir_b1" / "dir_b2" / "find_me.txt", found)
|
||||
|
||||
def test_nonexistant_file(self):
|
||||
path = pathlib.Path(__file__).parent / 'test_really_breadth_first'
|
||||
found = utils.find_in_path(path, 'do_not_find_me.txt')
|
||||
path = pathlib.Path(__file__).parent / "test_really_breadth_first"
|
||||
found = utils.find_in_path(path, "do_not_find_me.txt")
|
||||
self.assertEqual(None, found)
|
||||
|
@ -1,15 +1,22 @@
|
||||
#!/bin/bash
|
||||
|
||||
if [ -z "$1" ]; then
|
||||
VERSION="${1/version-}"
|
||||
|
||||
if [ -z "$VERSION" ]; then
|
||||
echo "Usage: $0 new-version" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
BL_INFO_VER=$(echo "$1" | sed 's/\./, /g')
|
||||
BL_INFO_VER=$(echo "$VERSION" | sed 's/\./, /g')
|
||||
|
||||
sed "s/version='[^']*'/version='$1'/" -i setup.py
|
||||
sed "s/'version': ([^)]*)/'version': ($BL_INFO_VER)/" -i blender_cloud/__init__.py
|
||||
sed "s/version=\"[^\"]*\"/version=\"$VERSION\"/" -i setup.py
|
||||
sed "s/\"version\": ([^)]*)/\"version\": ($BL_INFO_VER)/" -i blender_cloud/__init__.py
|
||||
|
||||
git diff
|
||||
echo
|
||||
echo "Don't forget to commit!"
|
||||
echo "Don't forget to commit and tag:"
|
||||
echo git commit -m \'Bumped version to $VERSION\' setup.py blender_cloud/__init__.py
|
||||
echo git tag -a version-$VERSION -m \'Tagged version $VERSION\'
|
||||
echo
|
||||
echo "To build a distribution ZIP:"
|
||||
echo python3 setup.py bdist
|
||||
|
Reference in New Issue
Block a user