Compare commits
324 Commits
version-1.
...
version-1.
Author | SHA1 | Date | |
---|---|---|---|
56fb1ec3df | |||
e93094cb88 | |||
33718a1a35 | |||
db82dbe730 | |||
8d405330ee | |||
66ddc7b47b | |||
2fa8cb4054 | |||
e7b5c75046 | |||
1d93bd9e5e | |||
ac2d0c033c | |||
61fa63eb1d | |||
7022412889 | |||
b4f71745b0 | |||
1d41fce1ae | |||
e636fde4ce | |||
82a9dc5226 | |||
1f40915ac8 | |||
32693c0f64 | |||
c38748eb05 | |||
ac85bea111 | |||
7b5613ce77 | |||
ec5f317dac | |||
a51f61d9b5 | |||
13bc9a89c8 | |||
996b722813 | |||
e7f2567bfc | |||
ff8e71c542 | |||
543da5c8d8 | |||
01ae0f5f54 | |||
1e80446870 | |||
8d5c97931e | |||
1a0c00b87a | |||
32befc51f8 | |||
06126862d4 | |||
7b8713881e | |||
7c65851b75 | |||
ec72091268 | |||
cf7adb065f | |||
74220e4fc4 | |||
0ebd4435e5 | |||
c24501661e | |||
5b77ae50a1 | |||
74958cf217 | |||
5026dfc441 | |||
843667e612 | |||
cf3f7234eb | |||
4647175a7e | |||
33da5195f3 | |||
3814fb2683 | |||
15484a65cd | |||
d9e2b36204 | |||
cc690ec8c9 | |||
0422070d55 | |||
8cefb4fb07 | |||
23549fa676 | |||
cb73030e6a | |||
fbf02c3625 | |||
95699aca36 | |||
60018cd78c | |||
5f73837d3c | |||
9272e22129 | |||
e14a0aa53c | |||
51cf097c8f | |||
4608204f1d | |||
3f95249196 | |||
9df016da09 | |||
64e29e695b | |||
d5f285a381 | |||
e39429272d | |||
bdb00eeaaa | |||
3ef2ca0c07 | |||
35d4f85010 | |||
8151b952b9 | |||
2d2585b8d7 | |||
65204db228 | |||
570b1d4bfe | |||
68b046c714 | |||
cb20d6ee03 | |||
![]() |
645bdd950f | ||
![]() |
74a5830dae | ||
da4d4df5fb | |||
9c3098cc0d | |||
98beaf7fb7 | |||
3364371ac6 | |||
2723b07fa2 | |||
c2a037ca89 | |||
d3451d4de3 | |||
5094977614 | |||
![]() |
a11a55be22 | ||
68d2fc8e42 | |||
2de4a8e87c | |||
39b2bacdcc | |||
a1416f99dd | |||
0fa7d60028 | |||
c1b6480f9a | |||
56353d4177 | |||
469a9318af | |||
e265081131 | |||
115eea82c6 | |||
900068a6f5 | |||
c8229500d1 | |||
65ff9da428 | |||
fcba8a2e0f | |||
7ef5e522f8 | |||
ae570e5907 | |||
16b90d2ea8 | |||
875c92ee9d | |||
cbfc75a89c | |||
54e676b36f | |||
c94b0f5f2d | |||
a58bfe9a76 | |||
d332e1e50a | |||
![]() |
dd66d5ce93 | ||
965b02cec4 | |||
fd67675c12 | |||
06a661126b | |||
191b150280 | |||
feb62ddae0 | |||
603159f0d1 | |||
d2ae3f9cb7 | |||
079f8ff4c3 | |||
c97859ef33 | |||
f1ebea8948 | |||
1b82977c6e | |||
a7307bf7b5 | |||
11fd12e125 | |||
54dccb20ba | |||
61a8db3f96 | |||
0b2f0a3ec1 | |||
5117ec7cde | |||
74f61fa83a | |||
f73671c4f0 | |||
6f970a41e5 | |||
ccedb7cbb1 | |||
abcd8b0168 | |||
534a5a6ac4 | |||
6b5faa423e | |||
232e8f6167 | |||
af0dee0c9d | |||
baac86f59b | |||
19d54b7fd6 | |||
0067157251 | |||
4c84fbf339 | |||
6a5c392b5b | |||
cbaccaed49 | |||
cfc53e007c | |||
2768f0a59f | |||
b6c7ec1546 | |||
417b6e80f5 | |||
90259297ca | |||
3d9f4e893a | |||
4be497ed27 | |||
28fe6e8f96 | |||
22e4f2dc5e | |||
537dcf846a | |||
8ca4159fe8 | |||
d7bf001ffe | |||
6ea15d2bfe | |||
6fda496652 | |||
8dab01138e | |||
3da76ddb24 | |||
c57da7ab2b | |||
63b976cb44 | |||
73a62da8da | |||
2c70ceb489 | |||
38ccb54b50 | |||
1df113ca01 | |||
887a9cc697 | |||
143456ae1d | |||
f41ea8c5a3 | |||
7d90a92e24 | |||
2388f800dc | |||
38a3bcba71 | |||
2cf400a74c | |||
54ebb0bf5d | |||
9e84d2a416 | |||
772e6b0b1b | |||
b6232c8c13 | |||
6d4ba51c6c | |||
b9caecfce9 | |||
4ce8db88c6 | |||
dfff0cb55b | |||
1a515bfbda | |||
a47dfa8f32 | |||
8890ad5421 | |||
f0d42ed2cc | |||
76ca59251b | |||
b33ec74347 | |||
b5e33c52c1 | |||
8b56918989 | |||
99257bd88b | |||
c2363d248b | |||
3776246d70 | |||
9bc8c30443 | |||
56b622a723 | |||
8edf9c7428 | |||
10bf3e62ec | |||
3ec1a3d26d | |||
3ce89ad5f4 | |||
7cf858855e | |||
a10b4a804c | |||
514968de40 | |||
c73dce169f | |||
369e082880 | |||
6cd9cb1713 | |||
37f701edaf | |||
b04f9adb40 | |||
70a0aba10a | |||
f6d05c4c84 | |||
8e9d62b5c5 | |||
e300c32d64 | |||
63eaaf7dc9 | |||
6fcea9469f | |||
61f86d63e0 | |||
0d69b1d7ec | |||
d5139c767e | |||
f0d829da49 | |||
a4817259c8 | |||
f899f6d1ab | |||
9a0873eea4 | |||
388a059400 | |||
80d2b5b2e7 | |||
53ab2fc6df | |||
1e2c74e82d | |||
ecb8f8575f | |||
acd62b4917 | |||
65faeba7b0 | |||
8f8e14b66e | |||
250939dc32 | |||
2e617287fd | |||
36bbead1e1 | |||
89a9055aa4 | |||
6339f75406 | |||
a9aa961b92 | |||
4da601be0c | |||
3c9e4e2873 | |||
4762f0292d | |||
959e83229b | |||
662b6cf221 | |||
96616dbdff | |||
dbbffcc28e | |||
0a1f1972da | |||
c9a92dd5d1 | |||
1c2def3b84 | |||
e29b61b649 | |||
1d1c8cf3d6 | |||
fc01e32f0d | |||
7577b348a5 | |||
be99bcb250 | |||
2190bd795e | |||
76d1f88c4e | |||
f0b7a0451d | |||
6eab5ba0af | |||
d457c77b19 | |||
ef70d20a77 | |||
db10495e7f | |||
586905a183 | |||
822c8daf07 | |||
e044607d91 | |||
e484d6496c | |||
78d567793e | |||
7e105167c0 | |||
d53938e03b | |||
0f26551368 | |||
645529bf35 | |||
4d2314e08f | |||
a5df609d95 | |||
e9a08c11b3 | |||
7bdfa28a3f | |||
e73e9d3df7 | |||
671e9f31fa | |||
6de026c8e2 | |||
6470feac7c | |||
6462561f2d | |||
2080f92558 | |||
a6f5a16583 | |||
6f376027e5 | |||
2ee9d1ebfa | |||
ed02816872 | |||
d100232428 | |||
9044bfadb9 | |||
4cdf2cee9c | |||
9c527520a9 | |||
56137c485f | |||
eb77461ca0 | |||
884d68ebe8 | |||
36d62082f3 | |||
af53d61cf2 | |||
332c32ca9c | |||
988dc72ba1 | |||
82c7560c7b | |||
73e2fd77e2 | |||
483e847ffe | |||
ef822208c8 | |||
791b3f480c | |||
efb1456596 | |||
58785977e7 | |||
8a5efc18db | |||
b970530f44 | |||
ded05b6ca9 | |||
5f5f0d8db9 | |||
30f71ac9fc | |||
bdef942b0b | |||
2a0ef39b12 | |||
c57a3bc902 | |||
b94998d12e | |||
1cd42e246e | |||
079689a532 | |||
597ba6de1c | |||
7b59391872 | |||
8201ba7691 | |||
8f2b0f8faa | |||
33b52cc8a9 | |||
be46b9cf81 | |||
ba4c951d32 | |||
5c7343f8c9 | |||
64d36818fe | |||
07f28d3072 | |||
48ca91a364 | |||
7ee052f71b | |||
2bb859efd9 | |||
ac3943fe6c | |||
5eaee872bf | |||
6ce4399407 |
5
.gitignore
vendored
5
.gitignore
vendored
@@ -1,9 +1,10 @@
|
||||
*.pyc
|
||||
*.swp
|
||||
*.blend
|
||||
*.blend[1-9]
|
||||
/*.blend*
|
||||
blender_cloud/wheels/*.whl
|
||||
/textures*/
|
||||
/test_*.py
|
||||
/dist/
|
||||
/build/
|
||||
/addon-bundle/*.zip
|
||||
__pycache__
|
||||
|
63
CHANGELOG.md
Normal file
63
CHANGELOG.md
Normal file
@@ -0,0 +1,63 @@
|
||||
# Blender Cloud changelog
|
||||
|
||||
## Version 1.7.3 (in development)
|
||||
|
||||
- Default to scene frame range when no frame range is given.
|
||||
- Refuse to render on Flamenco before blend file is saved at least once.
|
||||
- Fixed some Windows-specific issues.
|
||||
|
||||
|
||||
## Version 1.7.2 (2017-06-22)
|
||||
|
||||
- Fixed compatibility with Blender 2.78c.
|
||||
|
||||
|
||||
## Version 1.7.1 (2017-06-13)
|
||||
|
||||
- Fixed asyncio issues on Windows
|
||||
|
||||
|
||||
## Version 1.7.0 (2017-06-09)
|
||||
|
||||
- Fixed reloading after upgrading from 1.4.4 (our last public release).
|
||||
- Fixed bug handling a symlinked project path.
|
||||
- Added support for Manager-defined path replacement variables.
|
||||
|
||||
|
||||
## Version 1.6.4 (2017-04-21)
|
||||
|
||||
- Added file exclusion filter for Flamenco. A filter like `*.abc;*.mkv;*.mov` can be
|
||||
used to prevent certain files from being copied to the job storage directory.
|
||||
Requires a Blender that is bundled with BAM 1.1.7 or newer.
|
||||
|
||||
|
||||
## Version 1.6.3 (2017-03-21)
|
||||
|
||||
- Fixed bug where local project path wasn't shown for projects only set up for Flamenco
|
||||
(and not Attract).
|
||||
- Added this CHANGELOG.md file, which will contain user-relevant changes.
|
||||
|
||||
|
||||
## Version 1.6.2 (2017-03-17)
|
||||
|
||||
- Flamenco: when opening non-existing file path, open parent instead
|
||||
- Fix T50954: Improve Blender Cloud add-on project selector
|
||||
|
||||
|
||||
## Version 1.6.1 (2017-03-07)
|
||||
|
||||
- Show error in GUI when Blender Cloud is unreachable
|
||||
- Fixed sample count when using branched path tracing
|
||||
|
||||
|
||||
## Version 1.6.0 (2017-02-14)
|
||||
|
||||
- Default to frame chunk size of 1 (instead of 10).
|
||||
- Turn off "use overwrite" and "use placeholder" for Flamenco blend files.
|
||||
- Fixed bugs when blendfile is outside the project directory
|
||||
|
||||
|
||||
## Older versions
|
||||
|
||||
For the history of older versions, please refer to the
|
||||
[Git history](https://developer.blender.org/diffusion/BCA/)
|
57
README-flamenco.md
Normal file
57
README-flamenco.md
Normal file
@@ -0,0 +1,57 @@
|
||||
# Flamenco
|
||||
|
||||
The Blender Cloud add-on has preliminary support for [Flamenco](https://flamenco.io/).
|
||||
It requires a project on the [Blender Cloud](https://cloud.blender.org/) that is set up for
|
||||
Flamenco, and it requires you to be logged in as a user with rights to use Flamenco.
|
||||
|
||||
|
||||
## Quirks
|
||||
|
||||
Flamenco support is unpolished, so it has some quirks.
|
||||
|
||||
- Project selection happens through the Attract project selector. As a result, you can only
|
||||
select Attract-enabled projects (even when they are not set up for Flamenco). Be careful
|
||||
which project you select.
|
||||
- The top level directory of the project is also set through the Attract properties.
|
||||
- Defaults are heavily biased for our use in the Blender Institute.
|
||||
- Settings that should be project-specific are not, i.e. are regular add-on preferences.
|
||||
- Sending a project to Flamenco will check the "File Extensions" setting in the Output panel,
|
||||
and save the blend file to the current filename.
|
||||
|
||||
## Render job file locations
|
||||
|
||||
Rendering via Flamenco roughly comprises of two steps:
|
||||
|
||||
1. Packing the file to render with its dependencies, and placing them in the "job file path".
|
||||
2. Rendering, and placing the output files in the "job output path".
|
||||
|
||||
### Job file path
|
||||
|
||||
The "job file path" consists of the following path components:
|
||||
|
||||
1. The add-on preference "job file path", e.g. `/render/_flamenco/storage`
|
||||
2. The current date and time, your Blender Cloud username, and the name of the current blend file.
|
||||
3. The name of the current blend file.
|
||||
|
||||
For example:
|
||||
|
||||
`/render/_flamenco/storage/2017-01-18-104841.931387-sybren-03_02_A.layout/03_02_A.layout.blend`
|
||||
|
||||
### Job output path
|
||||
|
||||
The file path of output files consists of the following path components:
|
||||
|
||||
1. The add-on preference "job file path", e.g. `/render/agent327/frames`
|
||||
2. The path of the current blend file, relative to the project directory. The first N components
|
||||
of this path can be stripped; when N=1 it turns `scenes/03-searching/03_02_A-snooping/` into
|
||||
`03-searching/03_02_A-snooping/`.
|
||||
3. The name of the current blend file, without `.blend`.
|
||||
4. The file name depends on the type of output:
|
||||
- When rendering to image files: A 5-digit frame number with the required file extension.
|
||||
- When rendering to a video file: The frame range with the required file extension.
|
||||
|
||||
For example:
|
||||
|
||||
`/render/agent327/frames/03-searching/03_02_A-snooping/03_02_A.layout/00441.exr`
|
||||
|
||||
`/render/agent327/frames/03-searching/03_02_A-snooping/03_02_A.layout/14-51,60-133.mkv`
|
@@ -24,6 +24,8 @@ Installing the addon
|
||||
|
||||
* If you don't have one already, sign up for an account at
|
||||
the [Blender ID site](https://www.blender.org/id/).
|
||||
* If you had a previous version of the addon installed, deactivate it
|
||||
and restart Blender.
|
||||
* Install and log in with the
|
||||
[Blender ID addon](https://developer.blender.org/diffusion/BIA/).
|
||||
* Install the Blender Cloud addon in Blender (User Preferences →
|
||||
|
52
addon-bundle/README.txt
Normal file
52
addon-bundle/README.txt
Normal file
@@ -0,0 +1,52 @@
|
||||
Blender Cloud Addon
|
||||
===================
|
||||
|
||||
Congratulations on downloading the Blender Cloud addon. For your
|
||||
convenience, we have bundled it with the Blender ID addon.
|
||||
|
||||
To use the Blender Cloud addon, perform the following steps:
|
||||
|
||||
- Use Blender (File, User Preferences, Addons, Install from file)
|
||||
to install blender_id-x.x.x.addon.zip
|
||||
|
||||
- If you had a previous version of the Blender Cloud addon installed,
|
||||
restart Blender now.
|
||||
|
||||
- Log in with your Blender ID.
|
||||
|
||||
- Use Blender to install blender_cloud-x.x.x.addon.zip
|
||||
|
||||
If you don't see the addon in the list, enable the Testing
|
||||
category.
|
||||
|
||||
- Press Ctrl+Alt+Shift+A to start the texture browser.
|
||||
|
||||
- Visit the User Preferences, Addons panel, to use the Blender Sync
|
||||
feature.
|
||||
|
||||
|
||||
Support for Blenders not from blender.org
|
||||
-----------------------------------------
|
||||
|
||||
Maybe you use Blender from another source than blender.org, such as an
|
||||
Ubuntu package. If that is the case, you have to make sure that the
|
||||
Python package "requests" is installed. On Ubuntu Linux this can be
|
||||
done with the command
|
||||
|
||||
sudo apt-get install python3-requests
|
||||
|
||||
On other platforms & distributions this might be different.
|
||||
|
||||
Blender uses Python 3.5, so make sure you install the package for the
|
||||
correct version of Python.
|
||||
|
||||
|
||||
Subscribing to the Blender Cloud
|
||||
--------------------------------
|
||||
|
||||
The Blender Sync feature is free to use for everybody with a Blender
|
||||
ID account. In order to use the Texture Browser you need to have a
|
||||
Blender Cloud subscription. If you didn't subscribe yet, go to:
|
||||
|
||||
https://cloud.blender.org/join
|
||||
|
36
addon-bundle/bundle.sh
Executable file
36
addon-bundle/bundle.sh
Executable file
@@ -0,0 +1,36 @@
|
||||
#!/bin/bash
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
cd $(dirname $(readlink -f $0))
|
||||
|
||||
BCLOUD=$(ls ../dist/blender_cloud-*.addon.zip | tail -n 1)
|
||||
BID=$(ls ../../../blender-id-addon/dist/blender_id-*.addon.zip | tail -n 1)
|
||||
|
||||
[ -z "$BCLOUD" ] && echo "BCloud addon not found" >&2
|
||||
[ -z "$BID" ] && echo "B'ID addon not found" >&2
|
||||
|
||||
cp -va $BCLOUD $BID .
|
||||
|
||||
BUNDLE=$(basename $BCLOUD)
|
||||
BUNDLE=${BUNDLE/.addon.zip/-bundle-UNZIP_ME_FIRST.zip}
|
||||
|
||||
zip -9 $BUNDLE $(basename $BCLOUD) $(basename $BID) README.txt
|
||||
|
||||
dolphin --select $BUNDLE 2>/dev/null >/dev/null & disown
|
||||
echo "CREATED: $BUNDLE"
|
@@ -19,19 +19,20 @@
|
||||
# <pep8 compliant>
|
||||
|
||||
bl_info = {
|
||||
'name': 'Blender Cloud Texture Browser',
|
||||
'author': 'Sybren A. Stüvel and Francesco Siddi',
|
||||
'version': (0, 2, 0),
|
||||
'name': 'Blender Cloud',
|
||||
"author": "Sybren A. Stüvel, Francesco Siddi, Inês Almeida, Antony Riakiotakis",
|
||||
'version': (1, 7, 3),
|
||||
'blender': (2, 77, 0),
|
||||
'location': 'Ctrl+Shift+Alt+A anywhere',
|
||||
'description': 'Allows downloading of textures from the Blender Cloud. Requires '
|
||||
'the Blender ID addon and Blender 2.77a or newer.',
|
||||
'wiki_url': 'http://wiki.blender.org/index.php/Extensions:2.6/Py/'
|
||||
'location': 'Addon Preferences panel, and Ctrl+Shift+Alt+A anywhere for texture browser',
|
||||
'description': 'Texture library browser and Blender Sync. Requires the Blender ID addon '
|
||||
'and Blender 2.77a or newer.',
|
||||
'wiki_url': 'https://wiki.blender.org/index.php/Extensions:2.6/Py/'
|
||||
'Scripts/System/BlenderCloud',
|
||||
'category': 'System',
|
||||
'support': 'TESTING'
|
||||
}
|
||||
|
||||
import logging
|
||||
|
||||
# Support reloading
|
||||
if 'pillar' in locals():
|
||||
import importlib
|
||||
@@ -43,43 +44,91 @@ if 'pillar' in locals():
|
||||
cache = importlib.reload(cache)
|
||||
else:
|
||||
from . import wheels
|
||||
|
||||
wheels.load_wheels()
|
||||
|
||||
from . import pillar, cache
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def register():
|
||||
"""Late-loads and registers the Blender-dependent submodules."""
|
||||
|
||||
import sys
|
||||
|
||||
_monkey_patch_requests()
|
||||
|
||||
# Support reloading
|
||||
if '%s.blender' % __name__ in sys.modules:
|
||||
import importlib
|
||||
|
||||
def reload_mod(name):
|
||||
modname = '%s.%s' % (__name__, name)
|
||||
module = importlib.reload(sys.modules[modname])
|
||||
sys.modules[modname] = module
|
||||
return module
|
||||
try:
|
||||
old_module = sys.modules[modname]
|
||||
except KeyError:
|
||||
# Wasn't loaded before -- can happen after an upgrade.
|
||||
new_module = importlib.import_module(modname)
|
||||
else:
|
||||
new_module = importlib.reload(old_module)
|
||||
|
||||
sys.modules[modname] = new_module
|
||||
return new_module
|
||||
|
||||
reload_mod('blendfile')
|
||||
reload_mod('home_project')
|
||||
reload_mod('utils')
|
||||
|
||||
blender = reload_mod('blender')
|
||||
gui = reload_mod('gui')
|
||||
async_loop = reload_mod('async_loop')
|
||||
flamenco = reload_mod('flamenco')
|
||||
attract = reload_mod('attract')
|
||||
texture_browser = reload_mod('texture_browser')
|
||||
settings_sync = reload_mod('settings_sync')
|
||||
image_sharing = reload_mod('image_sharing')
|
||||
blender = reload_mod('blender')
|
||||
else:
|
||||
from . import blender, gui, async_loop
|
||||
from . import (blender, texture_browser, async_loop, settings_sync, blendfile, home_project,
|
||||
image_sharing, attract, flamenco)
|
||||
|
||||
async_loop.setup_asyncio_executor()
|
||||
async_loop.register()
|
||||
|
||||
flamenco.register()
|
||||
attract.register()
|
||||
texture_browser.register()
|
||||
settings_sync.register()
|
||||
image_sharing.register()
|
||||
blender.register()
|
||||
gui.register()
|
||||
|
||||
blender.handle_project_update()
|
||||
|
||||
|
||||
def _monkey_patch_requests():
|
||||
"""Monkey-patch old versions of Requests.
|
||||
|
||||
This is required for the Mac version of Blender 2.77a.
|
||||
"""
|
||||
|
||||
import requests
|
||||
|
||||
if requests.__build__ >= 0x020601:
|
||||
return
|
||||
|
||||
log.info('Monkey-patching requests version %s', requests.__version__)
|
||||
from requests.packages.urllib3.response import HTTPResponse
|
||||
HTTPResponse.chunked = False
|
||||
HTTPResponse.chunk_left = None
|
||||
|
||||
|
||||
def unregister():
|
||||
from . import blender, gui, async_loop
|
||||
from . import (blender, texture_browser, async_loop, settings_sync, image_sharing, attract,
|
||||
flamenco)
|
||||
|
||||
gui.unregister()
|
||||
image_sharing.unregister()
|
||||
attract.unregister()
|
||||
settings_sync.unregister()
|
||||
blender.unregister()
|
||||
texture_browser.unregister()
|
||||
async_loop.unregister()
|
||||
|
||||
flamenco.unregister()
|
||||
|
@@ -1,3 +1,21 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
"""Manages the asyncio loop."""
|
||||
|
||||
import asyncio
|
||||
@@ -15,18 +33,28 @@ _loop_kicking_operator_running = False
|
||||
|
||||
|
||||
def setup_asyncio_executor():
|
||||
"""Sets up AsyncIO to run on a single thread.
|
||||
"""Sets up AsyncIO to run properly on each platform."""
|
||||
|
||||
This ensures that only one Pillar HTTP call is performed at the same time. Other
|
||||
calls that could be performed in parallel are queued, and thus we can
|
||||
reliably cancel them.
|
||||
"""
|
||||
import sys
|
||||
|
||||
executor = concurrent.futures.ThreadPoolExecutor()
|
||||
loop = asyncio.get_event_loop()
|
||||
if sys.platform == 'win32':
|
||||
asyncio.get_event_loop().close()
|
||||
# On Windows, the default event loop is SelectorEventLoop, which does
|
||||
# not support subprocesses. ProactorEventLoop should be used instead.
|
||||
# Source: https://docs.python.org/3/library/asyncio-subprocess.html
|
||||
loop = asyncio.ProactorEventLoop()
|
||||
asyncio.set_event_loop(loop)
|
||||
else:
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
executor = concurrent.futures.ThreadPoolExecutor(max_workers=10)
|
||||
loop.set_default_executor(executor)
|
||||
# loop.set_debug(True)
|
||||
|
||||
from . import pillar
|
||||
# No more than this many Pillar calls should be made simultaneously
|
||||
pillar.pillar_semaphore = asyncio.Semaphore(3, loop=loop)
|
||||
|
||||
|
||||
def kick_async_loop(*args) -> bool:
|
||||
"""Performs a single iteration of the asyncio event loop.
|
||||
@@ -87,6 +115,15 @@ def ensure_async_loop():
|
||||
log.debug('Result of starting modal operator is %r', result)
|
||||
|
||||
|
||||
def erase_async_loop():
|
||||
global _loop_kicking_operator_running
|
||||
|
||||
log.debug('Erasing async loop')
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.stop()
|
||||
|
||||
|
||||
class AsyncLoopModalOperator(bpy.types.Operator):
|
||||
bl_idname = 'asyncio.loop'
|
||||
bl_label = 'Runs the asyncio main loop'
|
||||
@@ -94,6 +131,14 @@ class AsyncLoopModalOperator(bpy.types.Operator):
|
||||
timer = None
|
||||
log = logging.getLogger(__name__ + '.AsyncLoopModalOperator')
|
||||
|
||||
def __del__(self):
|
||||
global _loop_kicking_operator_running
|
||||
|
||||
# This can be required when the operator is running while Blender
|
||||
# (re)loads a file. The operator then doesn't get the chance to
|
||||
# finish the async tasks, hence stop_after_this_kick is never True.
|
||||
_loop_kicking_operator_running = False
|
||||
|
||||
def execute(self, context):
|
||||
return self.invoke(context, None)
|
||||
|
||||
@@ -115,6 +160,12 @@ class AsyncLoopModalOperator(bpy.types.Operator):
|
||||
def modal(self, context, event):
|
||||
global _loop_kicking_operator_running
|
||||
|
||||
# If _loop_kicking_operator_running is set to False, someone called
|
||||
# erase_async_loop(). This is a signal that we really should stop
|
||||
# running.
|
||||
if not _loop_kicking_operator_running:
|
||||
return {'FINISHED'}
|
||||
|
||||
if event.type != 'TIMER':
|
||||
return {'PASS_THROUGH'}
|
||||
|
||||
@@ -130,6 +181,108 @@ class AsyncLoopModalOperator(bpy.types.Operator):
|
||||
return {'RUNNING_MODAL'}
|
||||
|
||||
|
||||
# noinspection PyAttributeOutsideInit
|
||||
class AsyncModalOperatorMixin:
|
||||
async_task = None # asyncio task for fetching thumbnails
|
||||
signalling_future = None # asyncio future for signalling that we want to cancel everything.
|
||||
log = logging.getLogger('%s.AsyncModalOperatorMixin' % __name__)
|
||||
|
||||
_state = 'INITIALIZING'
|
||||
stop_upon_exception = False
|
||||
|
||||
def invoke(self, context, event):
|
||||
context.window_manager.modal_handler_add(self)
|
||||
self.timer = context.window_manager.event_timer_add(1 / 15, context.window)
|
||||
|
||||
self.log.info('Starting')
|
||||
self._new_async_task(self.async_execute(context))
|
||||
|
||||
return {'RUNNING_MODAL'}
|
||||
|
||||
async def async_execute(self, context):
|
||||
"""Entry point of the asynchronous operator.
|
||||
|
||||
Implement in a subclass.
|
||||
"""
|
||||
return
|
||||
|
||||
def quit(self):
|
||||
"""Signals the state machine to stop this operator from running."""
|
||||
self._state = 'QUIT'
|
||||
|
||||
def execute(self, context):
|
||||
return self.invoke(context, None)
|
||||
|
||||
def modal(self, context, event):
|
||||
task = self.async_task
|
||||
|
||||
if self._state != 'EXCEPTION' and task and task.done() and not task.cancelled():
|
||||
ex = task.exception()
|
||||
if ex is not None:
|
||||
self._state = 'EXCEPTION'
|
||||
self.log.error('Exception while running task: %s', ex)
|
||||
if self.stop_upon_exception:
|
||||
self.quit()
|
||||
self._finish(context)
|
||||
return {'FINISHED'}
|
||||
|
||||
return {'RUNNING_MODAL'}
|
||||
|
||||
if self._state == 'QUIT':
|
||||
self._finish(context)
|
||||
return {'FINISHED'}
|
||||
|
||||
return {'PASS_THROUGH'}
|
||||
|
||||
def _finish(self, context):
|
||||
self._stop_async_task()
|
||||
context.window_manager.event_timer_remove(self.timer)
|
||||
|
||||
def _new_async_task(self, async_task: asyncio.coroutine, future: asyncio.Future = None):
|
||||
"""Stops the currently running async task, and starts another one."""
|
||||
|
||||
self.log.debug('Setting up a new task %r, so any existing task must be stopped', async_task)
|
||||
self._stop_async_task()
|
||||
|
||||
# Download the previews asynchronously.
|
||||
self.signalling_future = future or asyncio.Future()
|
||||
self.async_task = asyncio.ensure_future(async_task)
|
||||
self.log.debug('Created new task %r', self.async_task)
|
||||
|
||||
# Start the async manager so everything happens.
|
||||
ensure_async_loop()
|
||||
|
||||
def _stop_async_task(self):
|
||||
self.log.debug('Stopping async task')
|
||||
if self.async_task is None:
|
||||
self.log.debug('No async task, trivially stopped')
|
||||
return
|
||||
|
||||
# Signal that we want to stop.
|
||||
self.async_task.cancel()
|
||||
if not self.signalling_future.done():
|
||||
self.log.info("Signalling that we want to cancel anything that's running.")
|
||||
self.signalling_future.cancel()
|
||||
|
||||
# Wait until the asynchronous task is done.
|
||||
if not self.async_task.done():
|
||||
self.log.info("blocking until async task is done.")
|
||||
loop = asyncio.get_event_loop()
|
||||
try:
|
||||
loop.run_until_complete(self.async_task)
|
||||
except asyncio.CancelledError:
|
||||
self.log.info('Asynchronous task was cancelled')
|
||||
return
|
||||
|
||||
# noinspection PyBroadException
|
||||
try:
|
||||
self.async_task.result() # This re-raises any exception of the task.
|
||||
except asyncio.CancelledError:
|
||||
self.log.info('Asynchronous task was cancelled')
|
||||
except Exception:
|
||||
self.log.exception("Exception from asynchronous task")
|
||||
|
||||
|
||||
def register():
|
||||
bpy.utils.register_class(AsyncLoopModalOperator)
|
||||
|
||||
|
1000
blender_cloud/attract/__init__.py
Normal file
1000
blender_cloud/attract/__init__.py
Normal file
File diff suppressed because it is too large
Load Diff
182
blender_cloud/attract/draw.py
Normal file
182
blender_cloud/attract/draw.py
Normal file
@@ -0,0 +1,182 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
import logging
|
||||
import collections
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
strip_status_colour = {
|
||||
None: (0.7, 0.7, 0.7),
|
||||
'approved': (0.6392156862745098, 0.8784313725490196, 0.30196078431372547),
|
||||
'final': (0.9058823529411765, 0.9607843137254902, 0.8274509803921568),
|
||||
'in_progress': (1.0, 0.7450980392156863, 0.0),
|
||||
'on_hold': (0.796078431372549, 0.6196078431372549, 0.08235294117647059),
|
||||
'review': (0.8941176470588236, 0.9607843137254902, 0.9764705882352941),
|
||||
'todo': (1.0, 0.5019607843137255, 0.5019607843137255)
|
||||
}
|
||||
|
||||
CONFLICT_COLOUR = (0.576, 0.118, 0.035) # RGB tuple
|
||||
|
||||
|
||||
def get_strip_rectf(strip):
|
||||
# Get x and y in terms of the grid's frames and channels
|
||||
x1 = strip.frame_final_start
|
||||
x2 = strip.frame_final_end
|
||||
y1 = strip.channel + 0.2
|
||||
y2 = strip.channel - 0.2 + 1
|
||||
|
||||
return x1, y1, x2, y2
|
||||
|
||||
|
||||
def draw_underline_in_strip(strip_coords, pixel_size_x, color):
|
||||
from bgl import glColor4f, glRectf, glEnable, glDisable, GL_BLEND
|
||||
import bgl
|
||||
|
||||
context = bpy.context
|
||||
|
||||
# Strip coords
|
||||
s_x1, s_y1, s_x2, s_y2 = strip_coords
|
||||
|
||||
# be careful not to draw over the current frame line
|
||||
cf_x = context.scene.frame_current_final
|
||||
|
||||
bgl.glPushAttrib(bgl.GL_COLOR_BUFFER_BIT | bgl.GL_LINE_BIT)
|
||||
|
||||
glColor4f(*color)
|
||||
glEnable(GL_BLEND)
|
||||
bgl.glLineWidth(2)
|
||||
bgl.glBegin(bgl.GL_LINES)
|
||||
|
||||
bgl.glVertex2f(s_x1, s_y1)
|
||||
if s_x1 < cf_x < s_x2:
|
||||
# Bad luck, the line passes our strip
|
||||
bgl.glVertex2f(cf_x - pixel_size_x, s_y1)
|
||||
bgl.glVertex2f(cf_x + pixel_size_x, s_y1)
|
||||
bgl.glVertex2f(s_x2, s_y1)
|
||||
|
||||
bgl.glEnd()
|
||||
bgl.glPopAttrib()
|
||||
|
||||
|
||||
def draw_strip_conflict(strip_coords, pixel_size_x):
|
||||
"""Draws conflicting states between strips."""
|
||||
|
||||
import bgl
|
||||
|
||||
s_x1, s_y1, s_x2, s_y2 = strip_coords
|
||||
bgl.glPushAttrib(bgl.GL_COLOR_BUFFER_BIT | bgl.GL_LINE_BIT)
|
||||
|
||||
# Always draw the full rectangle, the conflict should be resolved and thus stand out.
|
||||
bgl.glColor3f(*CONFLICT_COLOUR)
|
||||
bgl.glLineWidth(2)
|
||||
|
||||
bgl.glBegin(bgl.GL_LINE_LOOP)
|
||||
bgl.glVertex2f(s_x1, s_y1)
|
||||
bgl.glVertex2f(s_x2, s_y1)
|
||||
bgl.glVertex2f(s_x2, s_y2)
|
||||
bgl.glVertex2f(s_x1, s_y2)
|
||||
bgl.glEnd()
|
||||
|
||||
bgl.glPopAttrib()
|
||||
|
||||
|
||||
def draw_callback_px():
|
||||
context = bpy.context
|
||||
|
||||
if not context.scene.sequence_editor:
|
||||
return
|
||||
|
||||
from . import shown_strips
|
||||
|
||||
region = context.region
|
||||
xwin1, ywin1 = region.view2d.region_to_view(0, 0)
|
||||
xwin2, ywin2 = region.view2d.region_to_view(region.width, region.height)
|
||||
one_pixel_further_x, one_pixel_further_y = region.view2d.region_to_view(1, 1)
|
||||
pixel_size_x = one_pixel_further_x - xwin1
|
||||
|
||||
strips = shown_strips(context)
|
||||
|
||||
for strip in strips:
|
||||
if not strip.atc_object_id:
|
||||
continue
|
||||
|
||||
# Get corners (x1, y1), (x2, y2) of the strip rectangle in px region coords
|
||||
strip_coords = get_strip_rectf(strip)
|
||||
|
||||
# check if any of the coordinates are out of bounds
|
||||
if strip_coords[0] > xwin2 or strip_coords[2] < xwin1 or strip_coords[1] > ywin2 or \
|
||||
strip_coords[3] < ywin1:
|
||||
continue
|
||||
|
||||
# Draw
|
||||
status = strip.atc_status
|
||||
if status in strip_status_colour:
|
||||
color = strip_status_colour[status]
|
||||
else:
|
||||
color = strip_status_colour[None]
|
||||
|
||||
alpha = 1.0 if strip.atc_is_synced else 0.5
|
||||
|
||||
draw_underline_in_strip(strip_coords, pixel_size_x, color + (alpha,))
|
||||
if strip.atc_is_synced and strip.atc_object_id_conflict:
|
||||
draw_strip_conflict(strip_coords, pixel_size_x)
|
||||
|
||||
|
||||
def tag_redraw_all_sequencer_editors():
|
||||
context = bpy.context
|
||||
|
||||
# Py cant access notifiers
|
||||
for window in context.window_manager.windows:
|
||||
for area in window.screen.areas:
|
||||
if area.type == 'SEQUENCE_EDITOR':
|
||||
for region in area.regions:
|
||||
if region.type == 'WINDOW':
|
||||
region.tag_redraw()
|
||||
|
||||
|
||||
# This is a list so it can be changed instead of set
|
||||
# if it is only changed, it does not have to be declared as a global everywhere
|
||||
cb_handle = []
|
||||
|
||||
|
||||
def callback_enable():
|
||||
if cb_handle:
|
||||
return
|
||||
|
||||
cb_handle[:] = bpy.types.SpaceSequenceEditor.draw_handler_add(
|
||||
draw_callback_px, (), 'WINDOW', 'POST_VIEW'),
|
||||
|
||||
tag_redraw_all_sequencer_editors()
|
||||
|
||||
|
||||
def callback_disable():
|
||||
if not cb_handle:
|
||||
return
|
||||
|
||||
try:
|
||||
bpy.types.SpaceSequenceEditor.draw_handler_remove(cb_handle[0], 'WINDOW')
|
||||
except ValueError:
|
||||
# Thrown when already removed.
|
||||
pass
|
||||
cb_handle.clear()
|
||||
|
||||
tag_redraw_all_sequencer_editors()
|
@@ -1,48 +1,267 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
"""Blender-specific code.
|
||||
|
||||
Separated from __init__.py so that we can import & run from non-Blender environments.
|
||||
"""
|
||||
|
||||
import functools
|
||||
import logging
|
||||
import os.path
|
||||
|
||||
import bpy
|
||||
from bpy.types import AddonPreferences, Operator, WindowManager, Scene
|
||||
from bpy.props import StringProperty
|
||||
from bpy.types import AddonPreferences, Operator, WindowManager, Scene, PropertyGroup
|
||||
from bpy.props import StringProperty, EnumProperty, PointerProperty, BoolProperty, IntProperty
|
||||
import rna_prop_ui
|
||||
|
||||
from . import pillar, gui
|
||||
from . import pillar, async_loop, flamenco
|
||||
from .utils import pyside_cache, redraw
|
||||
|
||||
PILLAR_SERVER_URL = 'https://cloudapi.blender.org/'
|
||||
# PILLAR_SERVER_URL = 'http://localhost:5000/'
|
||||
PILLAR_WEB_SERVER_URL = os.environ.get('BCLOUD_SERVER', 'https://cloud.blender.org/')
|
||||
PILLAR_SERVER_URL = '%sapi/' % PILLAR_WEB_SERVER_URL
|
||||
|
||||
ADDON_NAME = 'blender_cloud'
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
icons = None
|
||||
|
||||
|
||||
@pyside_cache('version')
|
||||
def blender_syncable_versions(self, context):
|
||||
"""Returns the list of items used by SyncStatusProperties.version EnumProperty."""
|
||||
|
||||
bss = context.window_manager.blender_sync_status
|
||||
versions = bss.available_blender_versions
|
||||
if not versions:
|
||||
return [('', 'No settings stored in your Blender Cloud', '')]
|
||||
return [(v, v, '') for v in versions]
|
||||
|
||||
|
||||
class SyncStatusProperties(PropertyGroup):
|
||||
status = EnumProperty(
|
||||
items=[
|
||||
('NONE', 'NONE', 'We have done nothing at all yet.'),
|
||||
('IDLE', 'IDLE', 'User requested something, which is done, and we are now idle.'),
|
||||
('SYNCING', 'SYNCING', 'Synchronising with Blender Cloud.'),
|
||||
],
|
||||
name='status',
|
||||
description='Current status of Blender Sync',
|
||||
update=redraw)
|
||||
|
||||
version = EnumProperty(
|
||||
items=blender_syncable_versions,
|
||||
name='Version of Blender from which to pull',
|
||||
description='Version of Blender from which to pull')
|
||||
|
||||
message = StringProperty(name='message', update=redraw)
|
||||
level = EnumProperty(
|
||||
items=[
|
||||
('INFO', 'INFO', ''),
|
||||
('WARNING', 'WARNING', ''),
|
||||
('ERROR', 'ERROR', ''),
|
||||
('SUBSCRIBE', 'SUBSCRIBE', ''),
|
||||
],
|
||||
name='level',
|
||||
update=redraw)
|
||||
|
||||
def report(self, level: set, message: str):
|
||||
assert len(level) == 1, 'level should be a set of one string, not %r' % level
|
||||
self.level = level.pop()
|
||||
self.message = message
|
||||
|
||||
# Message can also be empty, just to erase it from the GUI.
|
||||
# No need to actually log those.
|
||||
if message:
|
||||
try:
|
||||
loglevel = logging._nameToLevel[self.level]
|
||||
except KeyError:
|
||||
loglevel = logging.WARNING
|
||||
log.log(loglevel, message)
|
||||
|
||||
# List of syncable versions is stored in 'available_blender_versions' ID property,
|
||||
# because I don't know how to store a variable list of strings in a proper RNA property.
|
||||
@property
|
||||
def available_blender_versions(self) -> list:
|
||||
return self.get('available_blender_versions', [])
|
||||
|
||||
@available_blender_versions.setter
|
||||
def available_blender_versions(self, new_versions):
|
||||
self['available_blender_versions'] = new_versions
|
||||
|
||||
|
||||
@pyside_cache('project')
|
||||
def bcloud_available_projects(self, context):
|
||||
"""Returns the list of items used by BlenderCloudProjectGroup.project EnumProperty."""
|
||||
|
||||
projs = preferences().project.available_projects
|
||||
if not projs:
|
||||
return [('', 'No projects available in your Blender Cloud', '')]
|
||||
return [(p['_id'], p['name'], '') for p in projs]
|
||||
|
||||
|
||||
@functools.lru_cache(1)
|
||||
def project_extensions(project_id) -> set:
|
||||
"""Returns the extensions the project is enabled for.
|
||||
|
||||
At the moment of writing these are 'attract' and 'flamenco'.
|
||||
"""
|
||||
|
||||
log.debug('Finding extensions for project %s', project_id)
|
||||
|
||||
# We can't use our @property, since the preferences may be loaded from a
|
||||
# preferences blend file, in which case it is not constructed from Python code.
|
||||
available_projects = preferences().project.get('available_projects', [])
|
||||
if not available_projects:
|
||||
log.debug('No projects available.')
|
||||
return set()
|
||||
|
||||
proj = next((p for p in available_projects
|
||||
if p['_id'] == project_id), None)
|
||||
if proj is None:
|
||||
log.debug('Project %s not found in available projects.', project_id)
|
||||
return set()
|
||||
|
||||
return set(proj.get('enabled_for', ()))
|
||||
|
||||
|
||||
def handle_project_update(_=None, _2=None):
|
||||
"""Handles changing projects, which may cause extensions to be disabled/enabled.
|
||||
|
||||
Ignores arguments so that it can be used as property update callback.
|
||||
"""
|
||||
|
||||
project_id = preferences().project.project
|
||||
log.info('Updating internal state to reflect extensions enabled on current project %s.',
|
||||
project_id)
|
||||
|
||||
project_extensions.cache_clear()
|
||||
|
||||
from blender_cloud import attract, flamenco
|
||||
attract.deactivate()
|
||||
flamenco.deactivate()
|
||||
|
||||
enabled_for = project_extensions(project_id)
|
||||
log.info('Project extensions: %s', enabled_for)
|
||||
if 'attract' in enabled_for:
|
||||
attract.activate()
|
||||
if 'flamenco' in enabled_for:
|
||||
flamenco.activate()
|
||||
|
||||
|
||||
class BlenderCloudProjectGroup(PropertyGroup):
|
||||
status = EnumProperty(
|
||||
items=[
|
||||
('NONE', 'NONE', 'We have done nothing at all yet'),
|
||||
('IDLE', 'IDLE', 'User requested something, which is done, and we are now idle'),
|
||||
('FETCHING', 'FETCHING', 'Fetching available projects from Blender Cloud'),
|
||||
],
|
||||
name='status',
|
||||
update=redraw)
|
||||
|
||||
project = EnumProperty(
|
||||
items=bcloud_available_projects,
|
||||
name='Cloud project',
|
||||
description='Which Blender Cloud project to work with',
|
||||
update=handle_project_update
|
||||
)
|
||||
|
||||
# List of projects is stored in 'available_projects' ID property,
|
||||
# because I don't know how to store a variable list of strings in a proper RNA property.
|
||||
@property
|
||||
def available_projects(self) -> list:
|
||||
return self.get('available_projects', [])
|
||||
|
||||
@available_projects.setter
|
||||
def available_projects(self, new_projects):
|
||||
self['available_projects'] = new_projects
|
||||
handle_project_update()
|
||||
|
||||
|
||||
class BlenderCloudPreferences(AddonPreferences):
|
||||
bl_idname = ADDON_NAME
|
||||
|
||||
# The following two properties are read-only to limit the scope of the
|
||||
# addon and allow for proper testing within this scope.
|
||||
pillar_server = bpy.props.StringProperty(
|
||||
pillar_server = StringProperty(
|
||||
name='Blender Cloud Server',
|
||||
description='URL of the Blender Cloud backend server',
|
||||
default=PILLAR_SERVER_URL,
|
||||
get=lambda self: PILLAR_SERVER_URL
|
||||
)
|
||||
|
||||
# TODO: Move to the Scene properties?
|
||||
project_uuid = bpy.props.StringProperty(
|
||||
name='Project UUID',
|
||||
description='UUID of the current Blender Cloud project',
|
||||
default='5672beecc0261b2005ed1a33',
|
||||
get=lambda self: '5672beecc0261b2005ed1a33'
|
||||
)
|
||||
|
||||
local_texture_dir = StringProperty(
|
||||
name='Default Blender Cloud texture storage directory',
|
||||
name='Default Blender Cloud Texture Storage Directory',
|
||||
subtype='DIR_PATH',
|
||||
default='//textures')
|
||||
|
||||
open_browser_after_share = BoolProperty(
|
||||
name='Open Browser after Sharing File',
|
||||
description='When enabled, Blender will open a webbrowser',
|
||||
default=True
|
||||
)
|
||||
|
||||
# TODO: store project-dependent properties with the project, so that people
|
||||
# can switch projects and the Attract and Flamenco properties switch with it.
|
||||
project = PointerProperty(type=BlenderCloudProjectGroup)
|
||||
|
||||
cloud_project_local_path = StringProperty(
|
||||
name='Local Project Path',
|
||||
description='Local path of your Attract project, used to search for blend files; '
|
||||
'usually best to set to an absolute path',
|
||||
subtype='DIR_PATH',
|
||||
default='//../')
|
||||
|
||||
flamenco_manager = PointerProperty(type=flamenco.FlamencoManagerGroup)
|
||||
flamenco_exclude_filter = StringProperty(
|
||||
name='File Exclude Filter',
|
||||
description='Filter like "*.abc;*.mkv" to prevent certain files to be packed '
|
||||
'into the output directory',
|
||||
default='')
|
||||
# TODO: before making Flamenco public, change the defaults to something less Institute-specific.
|
||||
# NOTE: The assumption is that the workers can also find the files in the same path.
|
||||
# This assumption is true for the Blender Institute.
|
||||
flamenco_job_file_path = StringProperty(
|
||||
name='Job File Path',
|
||||
description='Path where to store job files, should be accesible for Workers too',
|
||||
subtype='DIR_PATH',
|
||||
default='/render/_flamenco/storage')
|
||||
|
||||
# TODO: before making Flamenco public, change the defaults to something less Institute-specific.
|
||||
flamenco_job_output_path = StringProperty(
|
||||
name='Job Output Path',
|
||||
description='Path where to store output files, should be accessible for Workers',
|
||||
subtype='DIR_PATH',
|
||||
default='/render/_flamenco/output')
|
||||
flamenco_job_output_strip_components = IntProperty(
|
||||
name='Job Output Path Strip Components',
|
||||
description='The final output path comprises of the job output path, and the blend file '
|
||||
'path relative to the project with this many path components stripped off '
|
||||
'the front',
|
||||
min=0,
|
||||
default=0,
|
||||
soft_max=4,
|
||||
)
|
||||
flamenco_open_browser_after_submit = BoolProperty(
|
||||
name='Open Browser after Submitting Job',
|
||||
description='When enabled, Blender will open a webbrowser',
|
||||
default=True
|
||||
)
|
||||
|
||||
def draw(self, context):
|
||||
import textwrap
|
||||
|
||||
@@ -56,54 +275,221 @@ class BlenderCloudPreferences(AddonPreferences):
|
||||
blender_id_profile = None
|
||||
else:
|
||||
blender_id_profile = blender_id.get_active_profile()
|
||||
|
||||
if blender_id is None:
|
||||
icon = 'ERROR'
|
||||
|
||||
msg_icon = 'ERROR'
|
||||
text = 'This add-on requires Blender ID'
|
||||
help_text = 'Make sure that the Blender ID add-on is installed and activated'
|
||||
elif not blender_id_profile:
|
||||
icon = 'ERROR'
|
||||
msg_icon = 'ERROR'
|
||||
text = 'You are logged out.'
|
||||
help_text = 'To login, go to the Blender ID add-on preferences.'
|
||||
elif pillar.SUBCLIENT_ID not in blender_id_profile.subclients:
|
||||
icon = 'QUESTION'
|
||||
elif bpy.app.debug and pillar.SUBCLIENT_ID not in blender_id_profile.subclients:
|
||||
msg_icon = 'QUESTION'
|
||||
text = 'No Blender Cloud credentials.'
|
||||
help_text = ('You are logged in on Blender ID, but your credentials have not '
|
||||
'been synchronized with Blender Cloud yet. Press the Update '
|
||||
'Credentials button.')
|
||||
else:
|
||||
icon = 'WORLD_DATA'
|
||||
msg_icon = 'WORLD_DATA'
|
||||
text = 'You are logged in as %s.' % blender_id_profile.username
|
||||
help_text = ('To logout or change profile, '
|
||||
'go to the Blender ID add-on preferences.')
|
||||
|
||||
sub = layout.column(align=True)
|
||||
sub.label(text=text, icon=icon)
|
||||
# Authentication stuff
|
||||
auth_box = layout.box()
|
||||
auth_box.label(text=text, icon=msg_icon)
|
||||
|
||||
help_lines = textwrap.wrap(help_text, 80)
|
||||
for line in help_lines:
|
||||
sub.label(text=line)
|
||||
auth_box.label(text=line)
|
||||
if bpy.app.debug:
|
||||
auth_box.operator("pillar.credentials_update")
|
||||
|
||||
sub = layout.column()
|
||||
sub.label(text='Local directory for downloaded textures')
|
||||
# Texture browser stuff
|
||||
texture_box = layout.box()
|
||||
texture_box.enabled = msg_icon != 'ERROR'
|
||||
sub = texture_box.column()
|
||||
sub.label(text='Local directory for downloaded textures', icon_value=icon('CLOUD'))
|
||||
sub.prop(self, "local_texture_dir", text='Default')
|
||||
sub.prop(context.scene, "local_texture_dir", text='Current scene')
|
||||
|
||||
# options for Pillar
|
||||
sub = layout.column()
|
||||
sub.enabled = icon != 'ERROR'
|
||||
# Blender Sync stuff
|
||||
bss = context.window_manager.blender_sync_status
|
||||
bsync_box = layout.box()
|
||||
bsync_box.enabled = msg_icon != 'ERROR'
|
||||
row = bsync_box.row().split(percentage=0.33)
|
||||
row.label('Blender Sync with Blender Cloud', icon_value=icon('CLOUD'))
|
||||
|
||||
# TODO: let users easily pick a project. For now, we just use the
|
||||
# hard-coded server URL and UUID of the textures project.
|
||||
# sub.prop(self, "pillar_server")
|
||||
# sub.prop(self, "project_uuid")
|
||||
sub.operator("pillar.credentials_update")
|
||||
icon_for_level = {
|
||||
'INFO': 'NONE',
|
||||
'WARNING': 'INFO',
|
||||
'ERROR': 'ERROR',
|
||||
'SUBSCRIBE': 'ERROR',
|
||||
}
|
||||
msg_icon = icon_for_level[bss.level] if bss.message else 'NONE'
|
||||
message_container = row.row()
|
||||
message_container.label(bss.message, icon=msg_icon)
|
||||
|
||||
sub = bsync_box.column()
|
||||
|
||||
if bss.level == 'SUBSCRIBE':
|
||||
self.draw_subscribe_button(sub)
|
||||
self.draw_sync_buttons(sub, bss)
|
||||
|
||||
# Image Share stuff
|
||||
share_box = layout.box()
|
||||
share_box.label('Image Sharing on Blender Cloud', icon_value=icon('CLOUD'))
|
||||
share_box.prop(self, 'open_browser_after_share')
|
||||
|
||||
# Project selector
|
||||
project_box = layout.box()
|
||||
project_box.enabled = self.project.status in {'NONE', 'IDLE'}
|
||||
|
||||
self.draw_project_selector(project_box, self.project)
|
||||
extensions = project_extensions(self.project.project)
|
||||
|
||||
# Flamenco stuff
|
||||
if 'flamenco' in extensions:
|
||||
flamenco_box = project_box.column()
|
||||
self.draw_flamenco_buttons(flamenco_box, self.flamenco_manager, context)
|
||||
|
||||
def draw_subscribe_button(self, layout):
|
||||
layout.operator('pillar.subscribe', icon='WORLD')
|
||||
|
||||
def draw_sync_buttons(self, layout, bss):
|
||||
layout.enabled = bss.status in {'NONE', 'IDLE'}
|
||||
|
||||
buttons = layout.column()
|
||||
row_buttons = buttons.row().split(percentage=0.5)
|
||||
row_push = row_buttons.row()
|
||||
row_pull = row_buttons.row(align=True)
|
||||
|
||||
row_push.operator('pillar.sync',
|
||||
text='Save %i.%i settings' % bpy.app.version[:2],
|
||||
icon='TRIA_UP').action = 'PUSH'
|
||||
|
||||
versions = bss.available_blender_versions
|
||||
version = bss.version
|
||||
if bss.status in {'NONE', 'IDLE'}:
|
||||
if not versions or not version:
|
||||
row_pull.operator('pillar.sync',
|
||||
text='Find version to load',
|
||||
icon='TRIA_DOWN').action = 'REFRESH'
|
||||
else:
|
||||
props = row_pull.operator('pillar.sync',
|
||||
text='Load %s settings' % version,
|
||||
icon='TRIA_DOWN')
|
||||
props.action = 'PULL'
|
||||
props.blender_version = version
|
||||
row_pull.operator('pillar.sync',
|
||||
text='',
|
||||
icon='DOTSDOWN').action = 'SELECT'
|
||||
else:
|
||||
row_pull.label('Cloud Sync is running.')
|
||||
|
||||
def draw_project_selector(self, project_box, bcp: BlenderCloudProjectGroup):
|
||||
project_row = project_box.row(align=True)
|
||||
project_row.label('Project settings', icon_value=icon('CLOUD'))
|
||||
|
||||
row_buttons = project_row.row(align=True)
|
||||
|
||||
projects = bcp.available_projects
|
||||
project = bcp.project
|
||||
if bcp.status in {'NONE', 'IDLE'}:
|
||||
if not projects or not project:
|
||||
row_buttons.operator('pillar.projects',
|
||||
text='Find project to load',
|
||||
icon='FILE_REFRESH')
|
||||
else:
|
||||
row_buttons.prop(bcp, 'project')
|
||||
row_buttons.operator('pillar.projects',
|
||||
text='',
|
||||
icon='FILE_REFRESH')
|
||||
else:
|
||||
row_buttons.label('Fetching available projects.')
|
||||
|
||||
enabled_for = project_extensions(project)
|
||||
if not project:
|
||||
return
|
||||
|
||||
if not enabled_for:
|
||||
project_box.label('This project is not set up for Attract or Flamenco')
|
||||
return
|
||||
|
||||
project_box.label('This project is set up for: %s' %
|
||||
', '.join(sorted(enabled_for)))
|
||||
|
||||
# This is only needed when the project is set up for either Attract or Flamenco.
|
||||
project_box.prop(self, 'cloud_project_local_path',
|
||||
text='Local Cloud Project Path')
|
||||
|
||||
def draw_flamenco_buttons(self, flamenco_box, bcp: flamenco.FlamencoManagerGroup, context):
|
||||
from .flamenco import bam_interface
|
||||
|
||||
header_row = flamenco_box.row(align=True)
|
||||
header_row.label('Flamenco:', icon_value=icon('CLOUD'))
|
||||
|
||||
manager_split = flamenco_box.split(0.32, align=True)
|
||||
manager_split.label('Manager:')
|
||||
manager_box = manager_split.row(align=True)
|
||||
|
||||
if bcp.status in {'NONE', 'IDLE'}:
|
||||
if not bcp.available_managers or not bcp.manager:
|
||||
manager_box.operator('flamenco.managers',
|
||||
text='Find Flamenco Managers',
|
||||
icon='FILE_REFRESH')
|
||||
else:
|
||||
manager_box.prop(bcp, 'manager', text='')
|
||||
manager_box.operator('flamenco.managers',
|
||||
text='',
|
||||
icon='FILE_REFRESH')
|
||||
else:
|
||||
manager_box.label('Fetching available managers.')
|
||||
|
||||
path_split = flamenco_box.split(0.32, align=True)
|
||||
path_split.label(text='Job File Path:')
|
||||
path_box = path_split.row(align=True)
|
||||
path_box.prop(self, 'flamenco_job_file_path', text='')
|
||||
props = path_box.operator('flamenco.explore_file_path', text='', icon='DISK_DRIVE')
|
||||
props.path = self.flamenco_job_file_path
|
||||
|
||||
job_output_box = flamenco_box.column(align=True)
|
||||
path_split = job_output_box.split(0.32, align=True)
|
||||
path_split.label(text='Job Output Path:')
|
||||
path_box = path_split.row(align=True)
|
||||
path_box.prop(self, 'flamenco_job_output_path', text='')
|
||||
props = path_box.operator('flamenco.explore_file_path', text='', icon='DISK_DRIVE')
|
||||
props.path = self.flamenco_job_output_path
|
||||
job_output_box.prop(self, 'flamenco_exclude_filter')
|
||||
|
||||
prop_split = job_output_box.split(0.32, align=True)
|
||||
prop_split.label('Strip Components:')
|
||||
prop_split.prop(self, 'flamenco_job_output_strip_components', text='')
|
||||
|
||||
from .flamenco import render_output_path
|
||||
|
||||
path_box = job_output_box.row(align=True)
|
||||
output_path = render_output_path(context)
|
||||
if output_path:
|
||||
path_box.label(str(output_path))
|
||||
props = path_box.operator('flamenco.explore_file_path', text='', icon='DISK_DRIVE')
|
||||
props.path = str(output_path.parent)
|
||||
else:
|
||||
path_box.label('Blend file is not in your project path, '
|
||||
'unable to give output path example.')
|
||||
|
||||
flamenco_box.prop(self, 'flamenco_open_browser_after_submit')
|
||||
|
||||
|
||||
class PillarCredentialsUpdate(Operator):
|
||||
class PillarCredentialsUpdate(pillar.PillarOperatorMixin,
|
||||
Operator):
|
||||
"""Updates the Pillar URL and tests the new URL."""
|
||||
bl_idname = 'pillar.credentials_update'
|
||||
bl_label = 'Update credentials'
|
||||
bl_description = 'Resynchronises your Blender ID login with Blender Cloud'
|
||||
|
||||
log = logging.getLogger('bpy.ops.%s' % bl_idname)
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
@@ -130,7 +516,7 @@ class PillarCredentialsUpdate(Operator):
|
||||
|
||||
try:
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.run_until_complete(pillar.refresh_pillar_credentials())
|
||||
loop.run_until_complete(self.check_credentials(context, set()))
|
||||
except blender_id.BlenderIdCommError as ex:
|
||||
log.exception('Error sending subclient-specific token to Blender ID')
|
||||
self.report({'ERROR'}, 'Failed to sync Blender ID to Blender Cloud')
|
||||
@@ -144,24 +530,153 @@ class PillarCredentialsUpdate(Operator):
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
class PILLAR_OT_subscribe(Operator):
|
||||
"""Opens a browser to subscribe the user to the Cloud."""
|
||||
bl_idname = 'pillar.subscribe'
|
||||
bl_label = 'Subscribe to the Cloud'
|
||||
bl_description = "Opens a page in a web browser to subscribe to the Blender Cloud"
|
||||
|
||||
def execute(self, context):
|
||||
import webbrowser
|
||||
|
||||
webbrowser.open_new_tab('https://cloud.blender.org/join')
|
||||
self.report({'INFO'}, 'We just started a browser for you.')
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
class PILLAR_OT_projects(async_loop.AsyncModalOperatorMixin,
|
||||
pillar.AuthenticatedPillarOperatorMixin,
|
||||
Operator):
|
||||
"""Fetches the projects available to the user"""
|
||||
bl_idname = 'pillar.projects'
|
||||
bl_label = 'Fetch available projects'
|
||||
|
||||
stop_upon_exception = True
|
||||
_log = logging.getLogger('bpy.ops.%s' % bl_idname)
|
||||
|
||||
async def async_execute(self, context):
|
||||
if not await self.authenticate(context):
|
||||
return
|
||||
|
||||
import pillarsdk
|
||||
from .pillar import pillar_call
|
||||
|
||||
self.log.info('Going to fetch projects for user %s', self.user_id)
|
||||
|
||||
preferences().project.status = 'FETCHING'
|
||||
|
||||
# Get all projects, except the home project.
|
||||
projects_user = await pillar_call(
|
||||
pillarsdk.Project.all,
|
||||
{'where': {'user': self.user_id,
|
||||
'category': {'$ne': 'home'}},
|
||||
'sort': '-_created',
|
||||
'projection': {'_id': True,
|
||||
'name': True,
|
||||
'extension_props': True},
|
||||
})
|
||||
|
||||
projects_shared = await pillar_call(
|
||||
pillarsdk.Project.all,
|
||||
{'where': {'user': {'$ne': self.user_id},
|
||||
'permissions.groups.group': {'$in': self.db_user.groups}},
|
||||
'sort': '-_created',
|
||||
'projection': {'_id': True,
|
||||
'name': True,
|
||||
'extension_props': True},
|
||||
})
|
||||
|
||||
# We need to convert to regular dicts before storing in ID properties.
|
||||
# Also don't store more properties than we need.
|
||||
def reduce_properties(project_list):
|
||||
for p in project_list:
|
||||
p = p.to_dict()
|
||||
extension_props = p.get('extension_props', {})
|
||||
enabled_for = list(extension_props.keys())
|
||||
|
||||
self._log.debug('Project %r is enabled for %s', p['name'], enabled_for)
|
||||
yield {
|
||||
'_id': p['_id'],
|
||||
'name': p['name'],
|
||||
'enabled_for': enabled_for,
|
||||
}
|
||||
|
||||
projects = list(reduce_properties(projects_user['_items'])) + \
|
||||
list(reduce_properties(projects_shared['_items']))
|
||||
|
||||
preferences().project.available_projects = projects
|
||||
|
||||
self.quit()
|
||||
|
||||
def quit(self):
|
||||
preferences().project.status = 'IDLE'
|
||||
super().quit()
|
||||
|
||||
|
||||
class PILLAR_PT_image_custom_properties(rna_prop_ui.PropertyPanel, bpy.types.Panel):
|
||||
"""Shows custom properties in the image editor."""
|
||||
|
||||
bl_space_type = 'IMAGE_EDITOR'
|
||||
bl_region_type = 'UI'
|
||||
bl_label = 'Custom Properties'
|
||||
|
||||
_context_path = 'edit_image'
|
||||
_property_type = bpy.types.Image
|
||||
|
||||
|
||||
def preferences() -> BlenderCloudPreferences:
|
||||
return bpy.context.user_preferences.addons[ADDON_NAME].preferences
|
||||
|
||||
|
||||
def load_custom_icons():
|
||||
global icons
|
||||
|
||||
if icons is not None:
|
||||
# Already loaded
|
||||
return
|
||||
|
||||
import bpy.utils.previews
|
||||
icons = bpy.utils.previews.new()
|
||||
my_icons_dir = os.path.join(os.path.dirname(__file__), 'icons')
|
||||
icons.load('CLOUD', os.path.join(my_icons_dir, 'icon-cloud.png'), 'IMAGE')
|
||||
|
||||
|
||||
def unload_custom_icons():
|
||||
global icons
|
||||
|
||||
if icons is None:
|
||||
# Already unloaded
|
||||
return
|
||||
|
||||
bpy.utils.previews.remove(icons)
|
||||
icons = None
|
||||
|
||||
|
||||
def icon(icon_name: str) -> int:
|
||||
"""Returns the icon ID for the named icon.
|
||||
|
||||
Use with layout.operator('pillar.image_share', icon_value=icon('CLOUD'))
|
||||
"""
|
||||
|
||||
return icons[icon_name].icon_id
|
||||
|
||||
|
||||
def register():
|
||||
bpy.utils.register_class(BlenderCloudProjectGroup)
|
||||
bpy.utils.register_class(BlenderCloudPreferences)
|
||||
bpy.utils.register_class(PillarCredentialsUpdate)
|
||||
|
||||
WindowManager.blender_cloud_project = StringProperty(
|
||||
name="Blender Cloud project UUID",
|
||||
default='5672beecc0261b2005ed1a33') # TODO: don't hard-code this
|
||||
|
||||
WindowManager.blender_cloud_node = StringProperty(
|
||||
name="Blender Cloud node UUID",
|
||||
default='') # empty == top-level of project
|
||||
bpy.utils.register_class(SyncStatusProperties)
|
||||
bpy.utils.register_class(PILLAR_OT_subscribe)
|
||||
bpy.utils.register_class(PILLAR_OT_projects)
|
||||
bpy.utils.register_class(PILLAR_PT_image_custom_properties)
|
||||
|
||||
addon_prefs = preferences()
|
||||
|
||||
WindowManager.last_blender_cloud_location = StringProperty(
|
||||
name="Last Blender Cloud browser location",
|
||||
default="/")
|
||||
|
||||
def default_if_empty(scene, context):
|
||||
"""The scene's local_texture_dir, if empty, reverts to the addon prefs."""
|
||||
|
||||
@@ -174,13 +689,21 @@ def register():
|
||||
default=addon_prefs.local_texture_dir,
|
||||
update=default_if_empty)
|
||||
|
||||
WindowManager.blender_sync_status = PointerProperty(type=SyncStatusProperties)
|
||||
|
||||
load_custom_icons()
|
||||
|
||||
|
||||
def unregister():
|
||||
gui.unregister()
|
||||
unload_custom_icons()
|
||||
|
||||
bpy.utils.unregister_class(BlenderCloudProjectGroup)
|
||||
bpy.utils.unregister_class(PillarCredentialsUpdate)
|
||||
bpy.utils.unregister_class(BlenderCloudPreferences)
|
||||
bpy.utils.unregister_class(SyncStatusProperties)
|
||||
bpy.utils.unregister_class(PILLAR_OT_subscribe)
|
||||
bpy.utils.unregister_class(PILLAR_OT_projects)
|
||||
bpy.utils.unregister_class(PILLAR_PT_image_custom_properties)
|
||||
|
||||
del WindowManager.blender_cloud_project
|
||||
del WindowManager.blender_cloud_node
|
||||
del WindowManager.blender_cloud_thumbnails
|
||||
del WindowManager.last_blender_cloud_location
|
||||
del WindowManager.blender_sync_status
|
||||
|
929
blender_cloud/blendfile.py
Normal file
929
blender_cloud/blendfile.py
Normal file
@@ -0,0 +1,929 @@
|
||||
# ***** BEGIN GPL LICENSE BLOCK *****
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
#
|
||||
# ***** END GPL LICENCE BLOCK *****
|
||||
#
|
||||
# (c) 2009, At Mind B.V. - Jeroen Bakker
|
||||
# (c) 2014, Blender Foundation - Campbell Barton
|
||||
|
||||
import gzip
|
||||
import logging
|
||||
import os
|
||||
import struct
|
||||
import tempfile
|
||||
|
||||
log = logging.getLogger("blendfile")
|
||||
|
||||
FILE_BUFFER_SIZE = 1024 * 1024
|
||||
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# module global routines
|
||||
#
|
||||
# read routines
|
||||
# open a filename
|
||||
# determine if the file is compressed
|
||||
# and returns a handle
|
||||
def open_blend(filename, access="rb"):
|
||||
"""Opens a blend file for reading or writing pending on the access
|
||||
supports 2 kind of blend files. Uncompressed and compressed.
|
||||
Known issue: does not support packaged blend files
|
||||
"""
|
||||
handle = open(filename, access)
|
||||
magic_test = b"BLENDER"
|
||||
magic = handle.read(len(magic_test))
|
||||
if magic == magic_test:
|
||||
log.debug("normal blendfile detected")
|
||||
handle.seek(0, os.SEEK_SET)
|
||||
bfile = BlendFile(handle)
|
||||
bfile.is_compressed = False
|
||||
bfile.filepath_orig = filename
|
||||
return bfile
|
||||
elif magic[:2] == b'\x1f\x8b':
|
||||
log.debug("gzip blendfile detected")
|
||||
handle.close()
|
||||
log.debug("decompressing started")
|
||||
fs = gzip.open(filename, "rb")
|
||||
data = fs.read(FILE_BUFFER_SIZE)
|
||||
magic = data[:len(magic_test)]
|
||||
if magic == magic_test:
|
||||
handle = tempfile.TemporaryFile()
|
||||
while data:
|
||||
handle.write(data)
|
||||
data = fs.read(FILE_BUFFER_SIZE)
|
||||
log.debug("decompressing finished")
|
||||
fs.close()
|
||||
log.debug("resetting decompressed file")
|
||||
handle.seek(os.SEEK_SET, 0)
|
||||
bfile = BlendFile(handle)
|
||||
bfile.is_compressed = True
|
||||
bfile.filepath_orig = filename
|
||||
return bfile
|
||||
else:
|
||||
raise Exception("filetype inside gzip not a blend")
|
||||
else:
|
||||
raise Exception("filetype not a blend or a gzip blend")
|
||||
|
||||
|
||||
def pad_up_4(offset):
|
||||
return (offset + 3) & ~3
|
||||
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# module classes
|
||||
|
||||
|
||||
class BlendFile:
    """
    Blend file.

    Parses the file header and all file blocks on construction, and keeps
    the open file handle for lazy reads of block bodies. Supports use as a
    context manager; :meth:`close` re-compresses gzipped files when the
    file was modified.
    """
    __slots__ = (
        # file (result of open())
        "handle",
        # str (original name of the file path)
        "filepath_orig",
        # BlendFileHeader
        "header",
        # struct.Struct
        "block_header_struct",
        # list of BlendFileBlock
        "blocks",
        # [DNAStruct, ...]
        "structs",
        # dict {b'StructName': sdna_index}
        # (where the index is an index into 'structs')
        "sdna_index_from_id",
        # dict {addr_old: block}
        "block_from_offset",
        # dict {block code (bytes): [BlendFileBlock, ...]}
        "code_index",
        # bool (did we make a change)
        "is_modified",
        # bool (is file gzipped)
        "is_compressed",
    )

    def __init__(self, handle):
        """Read the header, all block headers, and the DNA1 catalog.

        :param handle: seekable binary file object positioned at offset 0.
        """
        log.debug("initializing reading blend-file")
        self.handle = handle
        self.header = BlendFileHeader(handle)
        self.block_header_struct = self.header.create_block_header_struct()
        self.blocks = []
        self.code_index = {}

        # Walk the block list; only the DNA1 block's body is read here,
        # other bodies are skipped and read on demand via file_offset.
        block = BlendFileBlock(handle, self)
        while block.code != b'ENDB':
            if block.code == b'DNA1':
                (self.structs,
                 self.sdna_index_from_id,
                 ) = BlendFile.decode_structs(self.header, block, handle)
            else:
                handle.seek(block.size, os.SEEK_CUR)

            self.blocks.append(block)
            self.code_index.setdefault(block.code, []).append(block)

            block = BlendFileBlock(handle, self)
        self.is_modified = False
        # keep the terminating ENDB block too
        self.blocks.append(block)

        # cache (could lazy init, incase we never use?)
        self.block_from_offset = {block.addr_old: block for block in self.blocks if block.code != b'ENDB'}

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def find_blocks_from_code(self, code):
        """Return all blocks with the given 4-char code (may be empty)."""
        assert(type(code) == bytes)
        if code not in self.code_index:
            return []
        return self.code_index[code]

    def find_block_from_offset(self, offset):
        """Return the block whose saved address equals *offset*, or None."""
        # same as looking looping over all blocks,
        # then checking ``block.addr_old == offset``
        assert(type(offset) is int)
        return self.block_from_offset.get(offset)

    def close(self):
        """
        Close the blend file

        writes the blend file to disk if changes has happened
        """
        handle = self.handle

        if self.is_modified:
            if self.is_compressed:
                # The decompressed temp file holds the edits; re-compress
                # it back over the original path.
                log.debug("close compressed blend file")
                handle.seek(os.SEEK_SET, 0)
                log.debug("compressing started")
                fs = gzip.open(self.filepath_orig, "wb")
                data = handle.read(FILE_BUFFER_SIZE)
                while data:
                    fs.write(data)
                    data = handle.read(FILE_BUFFER_SIZE)
                fs.close()
                log.debug("compressing finished")

        handle.close()

    def ensure_subtype_smaller(self, sdna_index_curr, sdna_index_next):
        """Raise if refining from *sdna_index_curr* to a larger struct."""
        # never refine to a smaller type
        if (self.structs[sdna_index_curr].size >
                self.structs[sdna_index_next].size):

            raise RuntimeError("cant refine to smaller type (%s -> %s)" %
                               (self.structs[sdna_index_curr].dna_type_id.decode('ascii'),
                                self.structs[sdna_index_next].dna_type_id.decode('ascii')))

    @staticmethod
    def decode_structs(header, block, handle):
        """
        DNACatalog is a catalog of all information in the DNA1 file-block

        Parses the DNA1 block body into (structs, sdna_index_from_id).
        The body is a sequence of tagged sub-sections: names, type names,
        type lengths, and struct definitions, each 4-byte aligned.
        """
        log.debug("building DNA catalog")
        shortstruct = DNA_IO.USHORT[header.endian_index]
        shortstruct2 = struct.Struct(header.endian_str + b'HH')
        intstruct = DNA_IO.UINT[header.endian_index]

        data = handle.read(block.size)
        types = []
        names = []

        structs = []
        sdna_index_from_id = {}

        # skip the first 8 bytes (presumably the b'SDNA' + b'NAME' tags —
        # confirm against the .blend DNA spec)
        offset = 8
        names_len = intstruct.unpack_from(data, offset)[0]
        offset += 4

        log.debug("building #%d names" % names_len)
        for i in range(names_len):
            # names are NUL-terminated strings packed back to back
            tName = DNA_IO.read_data0_offset(data, offset)
            offset = offset + len(tName) + 1
            names.append(DNAName(tName))
        del names_len

        # re-align, then skip a 4-byte section tag
        offset = pad_up_4(offset)
        offset += 4
        types_len = intstruct.unpack_from(data, offset)[0]
        offset += 4
        log.debug("building #%d types" % types_len)
        for i in range(types_len):
            dna_type_id = DNA_IO.read_data0_offset(data, offset)
            # None will be replaced by the DNAStruct, below
            types.append(DNAStruct(dna_type_id))
            offset += len(dna_type_id) + 1

        # re-align, then skip a 4-byte section tag
        offset = pad_up_4(offset)
        offset += 4
        log.debug("building #%d type-lengths" % types_len)
        for i in range(types_len):
            # one uint16 byte-size per type, in the same order as 'types'
            tLen = shortstruct.unpack_from(data, offset)[0]
            offset = offset + 2
            types[i].size = tLen
        del types_len

        # re-align, then skip a 4-byte section tag
        offset = pad_up_4(offset)
        offset += 4

        structs_len = intstruct.unpack_from(data, offset)[0]
        offset += 4
        log.debug("building #%d structures" % structs_len)
        for sdna_index in range(structs_len):
            # each struct record: uint16 type index, uint16 field count
            d = shortstruct2.unpack_from(data, offset)
            struct_type_index = d[0]
            offset += 4
            dna_struct = types[struct_type_index]
            sdna_index_from_id[dna_struct.dna_type_id] = sdna_index
            structs.append(dna_struct)

            fields_len = d[1]
            dna_offset = 0

            for field_index in range(fields_len):
                # each field record: uint16 type index, uint16 name index
                d2 = shortstruct2.unpack_from(data, offset)
                field_type_index = d2[0]
                field_name_index = d2[1]
                offset += 4
                dna_type = types[field_type_index]
                dna_name = names[field_name_index]
                if dna_name.is_pointer or dna_name.is_method_pointer:
                    # pointers occupy the file's pointer size, not the
                    # pointed-to type's size
                    dna_size = header.pointer_size * dna_name.array_size
                else:
                    dna_size = dna_type.size * dna_name.array_size

                field = DNAField(dna_type, dna_name, dna_size, dna_offset)
                dna_struct.fields.append(field)
                dna_struct.field_from_name[dna_name.name_only] = field
                dna_offset += dna_size

        return structs, sdna_index_from_id
|
||||
|
||||
|
||||
class BlendFileBlock:
    """
    Instance of a struct.

    Represents one file block: its header fields plus the file offset of
    its body. Field values are read lazily from the blend file's handle,
    so the owning :class:`BlendFile` must stay open while accessing data.
    """
    __slots__ = (
        # BlendFile
        "file",
        # bytes, 4-character block code (e.g. b'DNA1', b'ENDB')
        "code",
        # int, byte size of the block body
        "size",
        # int, memory address the data had when the file was saved
        "addr_old",
        # int, index into file.structs describing this block's type
        "sdna_index",
        # int, number of struct instances stored in the body
        "count",
        # int, absolute file offset of the block body
        "file_offset",
        # arbitrary per-block data attached by users of this module
        "user_data",
    )

    def __str__(self):
        return ("<%s.%s (%s), size=%d at %s>" %
                # fields=[%s]
                (self.__class__.__name__,
                 self.dna_type.dna_type_id.decode('ascii'),
                 self.code.decode(),
                 self.size,
                 # b", ".join(f.dna_name.name_only for f in self.dna_type.fields).decode('ascii'),
                 hex(self.addr_old),
                 ))

    def __init__(self, handle, bfile):
        """Read one block header from *handle*'s current position."""
        OLDBLOCK = struct.Struct(b'4sI')

        self.file = bfile
        self.user_data = None

        data = handle.read(bfile.block_header_struct.size)
        # header size can be 8, 20, or 24 bytes long
        # 8: old blend files ENDB block (exception)
        # 20: normal headers 32 bit platform
        # 24: normal headers 64 bit platform
        if len(data) > 15:

            blockheader = bfile.block_header_struct.unpack(data)
            self.code = blockheader[0].partition(b'\0')[0]
            if self.code != b'ENDB':
                self.size = blockheader[1]
                self.addr_old = blockheader[2]
                self.sdna_index = blockheader[3]
                self.count = blockheader[4]
                self.file_offset = handle.tell()
            else:
                self.size = 0
                self.addr_old = 0
                self.sdna_index = 0
                self.count = 0
                self.file_offset = 0
        else:
            blockheader = OLDBLOCK.unpack(data)
            self.code = blockheader[0].partition(b'\0')[0]
            # NOTE(review): the next line immediately overwrites the
            # assignment above; one of the two is redundant.
            self.code = DNA_IO.read_data0(blockheader[0])
            self.size = 0
            self.addr_old = 0
            self.sdna_index = 0
            self.count = 0
            self.file_offset = 0

    @property
    def dna_type(self):
        # DNAStruct describing this block's contents
        return self.file.structs[self.sdna_index]

    def refine_type_from_index(self, sdna_index_next):
        """Re-type this block to the struct at *sdna_index_next* (must not be larger)."""
        assert(type(sdna_index_next) is int)
        sdna_index_curr = self.sdna_index
        self.file.ensure_subtype_smaller(sdna_index_curr, sdna_index_next)
        self.sdna_index = sdna_index_next

    def refine_type(self, dna_type_id):
        """Re-type this block by struct name (bytes), e.g. b'Object'."""
        assert(type(dna_type_id) is bytes)
        self.refine_type_from_index(self.file.sdna_index_from_id[dna_type_id])

    def get_file_offset(self, path,
                        default=...,
                        sdna_index_refine=None,
                        base_index=0,
                        ):
        """
        Return (offset, length)

        Offset is the absolute file position of the field at *path*;
        length is the field's array size.
        """
        assert(type(path) is bytes)

        ofs = self.file_offset
        if base_index != 0:
            # step over preceding struct instances within this block
            assert(base_index < self.count)
            ofs += (self.size // self.count) * base_index
        self.file.handle.seek(ofs, os.SEEK_SET)

        if sdna_index_refine is None:
            sdna_index_refine = self.sdna_index
        else:
            self.file.ensure_subtype_smaller(self.sdna_index, sdna_index_refine)

        dna_struct = self.file.structs[sdna_index_refine]
        field = dna_struct.field_from_path(
            self.file.header, self.file.handle, path)

        return (self.file.handle.tell(), field.dna_name.array_size)

    def get(self, path,
            default=...,
            sdna_index_refine=None,
            use_nil=True, use_str=True,
            base_index=0,
            ):
        """Read and return the value of the field at *path*.

        :param path: bytes name, or tuple of names/indices (see
            DNAStruct.field_from_path).
        :param default: returned when the field does not exist; when
            omitted, a missing field raises KeyError.
        :param sdna_index_refine: optional struct index to view this
            block as (must not be larger than the current type).
        :param use_nil: truncate char fields at the first NUL.
        :param use_str: decode char fields to str instead of bytes.
        :param base_index: which struct instance to read when this block
            stores an array of structs.
        """

        ofs = self.file_offset
        if base_index != 0:
            assert(base_index < self.count)
            ofs += (self.size // self.count) * base_index
        self.file.handle.seek(ofs, os.SEEK_SET)

        if sdna_index_refine is None:
            sdna_index_refine = self.sdna_index
        else:
            self.file.ensure_subtype_smaller(self.sdna_index, sdna_index_refine)

        dna_struct = self.file.structs[sdna_index_refine]
        return dna_struct.field_get(
            self.file.header, self.file.handle, path,
            default=default,
            use_nil=use_nil, use_str=use_str,
        )

    def get_recursive_iter(self, path, path_root=b"",
                           default=...,
                           sdna_index_refine=None,
                           use_nil=True, use_str=True,
                           base_index=0,
                           ):
        """Yield (full_path, value) pairs, descending into sub-structs.

        When a field is itself a struct (get() raises NotImplementedError),
        recurse into its fields instead of yielding a single value.
        """
        if path_root:
            # normalize both parts to tuples before concatenating
            path_full = (
                (path_root if type(path_root) is tuple else (path_root, )) +
                (path if type(path) is tuple else (path, )))
        else:
            path_full = path

        try:
            yield (path_full, self.get(path_full, default, sdna_index_refine, use_nil, use_str, base_index))
        except NotImplementedError as ex:
            msg, dna_name, dna_type = ex.args
            struct_index = self.file.sdna_index_from_id.get(dna_type.dna_type_id, None)
            if struct_index is None:
                # unknown type: yield a placeholder instead of a value
                yield (path_full, "<%s>" % dna_type.dna_type_id.decode('ascii'))
            else:
                struct = self.file.structs[struct_index]
                for f in struct.fields:
                    yield from self.get_recursive_iter(
                        f.dna_name.name_only, path_full, default, None, use_nil, use_str, 0)

    def items_recursive_iter(self):
        """Yield (path, value) for every leaf field, recursively."""
        for k in self.keys():
            yield from self.get_recursive_iter(k, use_str=False)

    def get_data_hash(self):
        """
        Generates a 'hash' that can be used instead of addr_old as block id, and that should be 'stable' across .blend
        file load & save (i.e. it does not changes due to pointer addresses variations).
        """
        # TODO This implementation is most likely far from optimal... and CRC32 is not renown as the best hashing
        # algo either. But for now does the job!
        import zlib

        def _is_pointer(self, k):
            # pointer values change between saves, so they are excluded
            return self.file.structs[self.sdna_index].field_from_path(
                self.file.header, self.file.handle, k).dna_name.is_pointer

        hsh = 1
        for k, v in self.items_recursive_iter():
            if not _is_pointer(self, k):
                hsh = zlib.adler32(str(v).encode(), hsh)
        return hsh

    def set(self, path, value,
            sdna_index_refine=None,
            ):
        """Write *value* into the field at *path* and mark the file modified."""

        if sdna_index_refine is None:
            sdna_index_refine = self.sdna_index
        else:
            self.file.ensure_subtype_smaller(self.sdna_index, sdna_index_refine)

        dna_struct = self.file.structs[sdna_index_refine]
        self.file.handle.seek(self.file_offset, os.SEEK_SET)
        self.file.is_modified = True
        return dna_struct.field_set(
            self.file.header, self.file.handle, path, value)

    # ---------------
    # Utility get/set
    #
    # avoid inline pointer casting
    def get_pointer(
            self, path,
            default=...,
            sdna_index_refine=None,
            base_index=0,
    ):
        """Read a pointer field and resolve it to the pointed-to block.

        Returns None for a NULL pointer, or the *default* unchanged when
        the field lookup fell back to it (non-int result).
        """
        if sdna_index_refine is None:
            sdna_index_refine = self.sdna_index
        result = self.get(path, default, sdna_index_refine=sdna_index_refine, base_index=base_index)

        # default
        if type(result) is not int:
            return result

        assert(self.file.structs[sdna_index_refine].field_from_path(
            self.file.header, self.file.handle, path).dna_name.is_pointer)
        if result != 0:
            # possible (but unlikely)
            # that this fails and returns None
            # maybe we want to raise some exception in this case
            return self.file.find_block_from_offset(result)
        else:
            return None

    # ----------------------
    # Python convenience API

    # dict like access
    def __getitem__(self, item):
        return self.get(item, use_str=False)

    def __setitem__(self, item, value):
        self.set(item, value)

    def keys(self):
        """Yield the top-level field names (bytes) of this block's struct."""
        return (f.dna_name.name_only for f in self.dna_type.fields)

    def values(self):
        """Yield field values; sub-struct fields yield a '<TypeName>' placeholder."""
        for k in self.keys():
            try:
                yield self[k]
            except NotImplementedError as ex:
                msg, dna_name, dna_type = ex.args
                yield "<%s>" % dna_type.dna_type_id.decode('ascii')

    def items(self):
        """Yield (name, value) pairs; sub-struct fields yield a placeholder value."""
        for k in self.keys():
            try:
                yield (k, self[k])
            except NotImplementedError as ex:
                msg, dna_name, dna_type = ex.args
                yield (k, "<%s>" % dna_type.dna_type_id.decode('ascii'))
|
||||
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Read Magic
|
||||
#
|
||||
# magic = str
|
||||
# pointer_size = int
|
||||
# is_little_endian = bool
|
||||
# version = int
|
||||
|
||||
|
||||
class BlendFileHeader:
    """
    BlendFileHeader allocates the first 12 bytes of a blend file

    it contains information about the hardware architecture
    (pointer size, endianness) and the Blender version that wrote it.
    """
    __slots__ = (
        # bytes, the b'BLENDER' magic
        "magic",
        # int 4/8
        "pointer_size",
        # bool
        "is_little_endian",
        # int
        "version",
        # bytes, endianness prefix used to build 'struct' format strings
        "endian_str",
        # int, used to index endian-paired constant tuples (0=little, 1=big)
        "endian_index",
    )

    def __init__(self, handle):
        header_struct = struct.Struct(b'7s1s1s3s')

        log.debug("reading blend-file-header")
        fields = header_struct.unpack(handle.read(header_struct.size))
        self.magic = fields[0]

        # '-' marks a 64-bit file, '_' a 32-bit one
        ptr_code = fields[1]
        if ptr_code == b'-':
            self.pointer_size = 8
        elif ptr_code == b'_':
            self.pointer_size = 4
        else:
            assert(0)

        # 'v' = little endian, 'V' = big endian
        endian_code = fields[2]
        if endian_code == b'v':
            self.is_little_endian = True
            self.endian_str = b'<'
            self.endian_index = 0
        elif endian_code == b'V':
            self.is_little_endian = False
            self.endian_index = 1
            self.endian_str = b'>'
        else:
            assert(0)

        self.version = int(fields[3])

    def create_block_header_struct(self):
        """Build the Struct matching this file's block headers (20 or 24 bytes)."""
        pointer_fmt = b'I' if self.pointer_size == 4 else b'Q'
        return struct.Struct(self.endian_str + b'4sI' + pointer_fmt + b'II')
|
||||
|
||||
|
||||
class DNAName:
    """
    DNAName is a C-type name stored in the DNA

    e.g. b'*vertex[3]' — pointer/function decoration and array suffixes
    are parsed once at construction and cached on the instance.
    """
    __slots__ = (
        "name_full",
        "name_only",
        "is_pointer",
        "is_method_pointer",
        "array_size",
    )

    def __init__(self, name_full):
        self.name_full = name_full
        self.name_only = self.calc_name_only()
        self.is_pointer = self.calc_is_pointer()
        self.is_method_pointer = self.calc_is_method_pointer()
        self.array_size = self.calc_array_size()

    def __repr__(self):
        return '%s(%r)' % (type(self).__qualname__, self.name_full)

    def as_reference(self, parent):
        """Return this name joined onto *parent* with a b'.' separator."""
        if parent is None:
            return self.name_only
        return parent + b'.' + self.name_only

    def calc_name_only(self):
        """Strip pointer/function decoration and any [N] array suffix."""
        stripped = self.name_full.strip(b'*()')
        bracket = stripped.find(b'[')
        if bracket == -1:
            return stripped
        return stripped[:bracket]

    def calc_is_pointer(self):
        return (b'*' in self.name_full)

    def calc_is_method_pointer(self):
        return (b'(*' in self.name_full)

    def calc_array_size(self):
        """Multiply out every [N] suffix; 1 when the name is not an array."""
        size = 1
        remainder = self.name_full
        open_idx = remainder.find(b'[')

        while open_idx != -1:
            close_idx = remainder.find(b']')
            size *= int(remainder[open_idx + 1:close_idx])
            remainder = remainder[close_idx + 1:]
            open_idx = remainder.find(b'[')

        return size
|
||||
|
||||
|
||||
class DNAField:
    """
    DNAField is a coupled DNAStruct and DNAName

    The on-disk size and byte offset within the parent struct are cached
    here so lookups do not have to re-sum preceding fields every time.
    """
    __slots__ = (
        # DNAName
        "dna_name",
        # DNAStruct describing the field's type
        "dna_type",
        # int, size on-disk in bytes
        "dna_size",
        # int, cached byte offset inside the parent struct
        "dna_offset",
    )

    def __init__(self, dna_type, dna_name, dna_size, dna_offset):
        self.dna_name = dna_name
        self.dna_type = dna_type
        self.dna_offset = dna_offset
        self.dna_size = dna_size
|
||||
|
||||
|
||||
class DNAStruct:
    """
    DNAStruct is a C-type structure stored in the DNA

    Holds the ordered field list plus a by-name lookup table; field access
    methods seek the file handle to the field's position as a side effect.
    """
    __slots__ = (
        # bytes, the struct's type name (e.g. b'Object')
        "dna_type_id",
        # int, byte size on disk (assigned while decoding the DNA catalog)
        "size",
        # list of DNAField, in declaration order
        "fields",
        # dict {name_only (bytes): DNAField}
        "field_from_name",
        # arbitrary data attached by users of this module
        "user_data",
    )

    def __init__(self, dna_type_id):
        self.dna_type_id = dna_type_id
        self.fields = []
        self.field_from_name = {}
        self.user_data = None

    def __repr__(self):
        return '%s(%r)' % (type(self).__qualname__, self.dna_type_id)

    def field_from_path(self, header, handle, path):
        """
        Support lookups as bytes or a tuple of bytes and optional index.

        C style 'id.name' --> (b'id', b'name')
        C style 'array[4]' --> ('array', 4)

        Seeks *handle* forward to the field's position (relative seeks
        from the current position) and returns the DNAField, or None
        when the name is unknown.
        """
        if type(path) is tuple:
            name = path[0]
            if len(path) >= 2 and type(path[1]) is not bytes:
                # second element is an array index, not a sub-field name
                name_tail = path[2:]
                index = path[1]
                assert(type(index) is int)
            else:
                name_tail = path[1:]
                index = 0
        else:
            name = path
            name_tail = None
            index = 0

        assert(type(name) is bytes)

        field = self.field_from_name.get(name)

        if field is not None:
            handle.seek(field.dna_offset, os.SEEK_CUR)
            if index != 0:
                # step into the requested array element
                if field.dna_name.is_pointer:
                    index_offset = header.pointer_size * index
                else:
                    index_offset = field.dna_type.size * index
                assert(index_offset < field.dna_size)
                handle.seek(index_offset, os.SEEK_CUR)
            if not name_tail:  # None or ()
                return field
            else:
                # recurse into the sub-struct for the rest of the path
                return field.dna_type.field_from_path(header, handle, name_tail)

    def field_get(self, header, handle, path,
                  default=...,
                  use_nil=True, use_str=True,
                  ):
        """Read and return the value of the field at *path*.

        :param default: returned when the field is missing; when omitted,
            a missing field raises KeyError.
        :param use_nil: truncate char fields at the first NUL.
        :param use_str: decode char fields as str instead of bytes.
        :raises NotImplementedError: for non-pointer fields of types this
            reader does not handle (e.g. nested structs); the exception
            args carry (message, dna_name, dna_type) for callers to unpack.
        """
        field = self.field_from_path(header, handle, path)
        if field is None:
            if default is not ...:
                return default
            else:
                raise KeyError("%r not found in %r (%r)" %
                               (path, [f.dna_name.name_only for f in self.fields], self.dna_type_id))

        dna_type = field.dna_type
        dna_name = field.dna_name

        if dna_name.is_pointer:
            return DNA_IO.read_pointer(handle, header)
        elif dna_type.dna_type_id == b'int':
            if dna_name.array_size > 1:
                return [DNA_IO.read_int(handle, header) for i in range(dna_name.array_size)]
            return DNA_IO.read_int(handle, header)
        elif dna_type.dna_type_id == b'short':
            if dna_name.array_size > 1:
                return [DNA_IO.read_short(handle, header) for i in range(dna_name.array_size)]
            return DNA_IO.read_short(handle, header)
        elif dna_type.dna_type_id == b'uint64_t':
            if dna_name.array_size > 1:
                return [DNA_IO.read_ulong(handle, header) for i in range(dna_name.array_size)]
            return DNA_IO.read_ulong(handle, header)
        elif dna_type.dna_type_id == b'float':
            if dna_name.array_size > 1:
                return [DNA_IO.read_float(handle, header) for i in range(dna_name.array_size)]
            return DNA_IO.read_float(handle, header)
        elif dna_type.dna_type_id == b'char':
            # char arrays are strings; decoding/NUL handling is configurable
            if use_str:
                if use_nil:
                    return DNA_IO.read_string0(handle, dna_name.array_size)
                else:
                    return DNA_IO.read_string(handle, dna_name.array_size)
            else:
                if use_nil:
                    return DNA_IO.read_bytes0(handle, dna_name.array_size)
                else:
                    return DNA_IO.read_bytes(handle, dna_name.array_size)
        else:
            raise NotImplementedError("%r exists but isn't pointer, can't resolve field %r" %
                                      (path, dna_name.name_only), dna_name, dna_type)

    def field_set(self, header, handle, path, value):
        """Write *value* into the field at *path*.

        Only char (str/bytes) and int fields are supported.

        :raises KeyError: when the field does not exist.
        :raises NotImplementedError: for unsupported field types; the
            exception args carry (message, dna_name, dna_type).
        """
        assert(type(path) == bytes)

        field = self.field_from_path(header, handle, path)
        if field is None:
            raise KeyError("%r not found in %r" %
                           (path, [f.dna_name.name_only for f in self.fields]))

        dna_type = field.dna_type
        dna_name = field.dna_name

        if dna_type.dna_type_id == b'char':
            if type(value) is str:
                return DNA_IO.write_string(handle, value, dna_name.array_size)
            else:
                return DNA_IO.write_bytes(handle, value, dna_name.array_size)
        elif dna_type.dna_type_id == b'int':
            DNA_IO.write_int(handle, header, value)
        else:
            raise NotImplementedError("Setting %r is not yet supported for %r" %
                                      (dna_type, dna_name), dna_name, dna_type)
|
||||
|
||||
|
||||
class DNA_IO:
    """
    Module like class, for read-write utility functions.

    Only stores static methods & constants.
    """

    __slots__ = ()

    def __new__(cls, *args, **kwargs):
        raise RuntimeError("%s should not be instantiated" % cls)

    @staticmethod
    def write_string(handle, astring, fieldlen):
        """Write a str into a fixed-size char field.

        Over-long strings are truncated to *fieldlen*; shorter ones get a
        single NUL terminator appended.

        NOTE(review): truncation counts characters before UTF-8 encoding,
        so multi-byte characters could overflow fieldlen — confirm callers
        only pass ASCII.
        """
        assert(isinstance(astring, str))
        if len(astring) >= fieldlen:
            stringw = astring[0:fieldlen]
        else:
            stringw = astring + '\0'
        handle.write(stringw.encode('utf-8'))

    @staticmethod
    def write_bytes(handle, astring, fieldlen):
        """Write bytes into a fixed-size field, truncating or NUL-terminating."""
        assert(isinstance(astring, (bytes, bytearray)))
        if len(astring) >= fieldlen:
            stringw = astring[0:fieldlen]
        else:
            stringw = astring + b'\0'

        handle.write(stringw)

    @staticmethod
    def read_bytes(handle, length):
        """Read *length* raw bytes from *handle*."""
        data = handle.read(length)
        return data

    @staticmethod
    def read_bytes0(handle, length):
        """Read *length* bytes and strip everything from the first NUL on."""
        data = handle.read(length)
        return DNA_IO.read_data0(data)

    @staticmethod
    def read_string(handle, length):
        """Read *length* bytes and decode them as UTF-8."""
        return DNA_IO.read_bytes(handle, length).decode('utf-8')

    @staticmethod
    def read_string0(handle, length):
        """Read *length* bytes, strip at the first NUL, decode as UTF-8."""
        return DNA_IO.read_bytes0(handle, length).decode('utf-8')

    @staticmethod
    def read_data0_offset(data, offset):
        """Return the bytes of *data* from *offset* up to the next NUL.

        Bug fix: when no NUL terminator is present, return the remainder
        unchanged. The old code computed ``find(...) - offset`` (== a
        negative length on failure), silently producing a wrong slice.
        """
        index = data.find(b'\0', offset)
        if index == -1:
            return data[offset:]
        return data[offset:index]

    @staticmethod
    def read_data0(data):
        """Return *data* truncated at the first NUL byte.

        Bug fix: when no NUL is present, return *data* unchanged. The old
        code used ``data[:find_result]`` with find_result == -1, which
        silently dropped the last byte.
        """
        index = data.find(b'\0')
        if index == -1:
            return data
        return data[:index]

    # (little-endian, big-endian) Struct pairs, indexed by
    # BlendFileHeader.endian_index
    USHORT = struct.Struct(b'<H'), struct.Struct(b'>H')

    @staticmethod
    def read_ushort(handle, fileheader):
        """Read an unsigned 16-bit int using the file's endianness."""
        st = DNA_IO.USHORT[fileheader.endian_index]
        return st.unpack(handle.read(st.size))[0]

    SSHORT = struct.Struct(b'<h'), struct.Struct(b'>h')

    @staticmethod
    def read_short(handle, fileheader):
        """Read a signed 16-bit int using the file's endianness."""
        st = DNA_IO.SSHORT[fileheader.endian_index]
        return st.unpack(handle.read(st.size))[0]

    UINT = struct.Struct(b'<I'), struct.Struct(b'>I')

    @staticmethod
    def read_uint(handle, fileheader):
        """Read an unsigned 32-bit int using the file's endianness."""
        st = DNA_IO.UINT[fileheader.endian_index]
        return st.unpack(handle.read(st.size))[0]

    SINT = struct.Struct(b'<i'), struct.Struct(b'>i')

    @staticmethod
    def read_int(handle, fileheader):
        """Read a signed 32-bit int using the file's endianness."""
        st = DNA_IO.SINT[fileheader.endian_index]
        return st.unpack(handle.read(st.size))[0]

    @staticmethod
    def write_int(handle, fileheader, value):
        """Write a signed 32-bit int using the file's endianness."""
        assert isinstance(value, int), 'value must be int, but is %r: %r' % (type(value), value)
        st = DNA_IO.SINT[fileheader.endian_index]
        to_write = st.pack(value)
        handle.write(to_write)

    FLOAT = struct.Struct(b'<f'), struct.Struct(b'>f')

    @staticmethod
    def read_float(handle, fileheader):
        """Read a 32-bit float using the file's endianness."""
        st = DNA_IO.FLOAT[fileheader.endian_index]
        return st.unpack(handle.read(st.size))[0]

    ULONG = struct.Struct(b'<Q'), struct.Struct(b'>Q')

    @staticmethod
    def read_ulong(handle, fileheader):
        """Read an unsigned 64-bit int using the file's endianness."""
        st = DNA_IO.ULONG[fileheader.endian_index]
        return st.unpack(handle.read(st.size))[0]

    @staticmethod
    def read_pointer(handle, header):
        """
        reads an pointer from a file handle
        the pointer size is given by the header (BlendFileHeader)
        """
        if header.pointer_size == 4:
            st = DNA_IO.UINT[header.endian_index]
            return st.unpack(handle.read(st.size))[0]
        if header.pointer_size == 8:
            st = DNA_IO.ULONG[header.endian_index]
            return st.unpack(handle.read(st.size))[0]
|
@@ -1,3 +1,21 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
"""HTTP Cache management.
|
||||
|
||||
This module configures a cached session for the Requests package.
|
||||
|
748
blender_cloud/flamenco/__init__.py
Normal file
748
blender_cloud/flamenco/__init__.py
Normal file
@@ -0,0 +1,748 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
"""Flamenco interface.
|
||||
|
||||
The preferences are managed blender.py, the rest of the Flamenco-specific stuff is here.
|
||||
"""
|
||||
|
||||
import functools
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path, PurePath
|
||||
import typing
|
||||
|
||||
|
||||
if "bpy" in locals():
|
||||
import importlib
|
||||
|
||||
try:
|
||||
bam_interface = importlib.reload(bam_interface)
|
||||
sdk = importlib.reload(sdk)
|
||||
except NameError:
|
||||
from . import bam_interface, sdk
|
||||
else:
|
||||
from . import bam_interface, sdk
|
||||
|
||||
|
||||
import bpy
|
||||
from bpy.types import AddonPreferences, Operator, WindowManager, Scene, PropertyGroup
|
||||
from bpy.props import StringProperty, EnumProperty, PointerProperty, BoolProperty, IntProperty
|
||||
|
||||
from .. import async_loop, pillar
|
||||
from ..utils import pyside_cache, redraw
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Global flag used to determine whether panels etc. can be drawn.
|
||||
flamenco_is_active = False
|
||||
|
||||
|
||||
@pyside_cache('manager')
def available_managers(self, context):
    """Returns the list of items used by a manager-selector EnumProperty."""

    from ..blender import preferences

    known = preferences().flamenco_manager.available_managers
    if not known:
        return [('', 'No managers available in your Blender Cloud', '')]
    return [(mngr['_id'], mngr['name'], '') for mngr in known]
|
||||
|
||||
|
||||
class FlamencoManagerGroup(PropertyGroup):
    """Add-on preferences for selecting a Flamenco Manager."""

    # EnumProperty whose items are fetched lazily via available_managers().
    manager = EnumProperty(
        items=available_managers,
        name='Flamenco Manager',
        description='Which Flamenco Manager to use for jobs')

    # Tracks the fetch state so the UI can show progress; update callback
    # forces a redraw when the state changes.
    status = EnumProperty(
        items=[
            ('NONE', 'NONE', 'We have done nothing at all yet'),
            ('IDLE', 'IDLE', 'User requested something, which is done, and we are now idle'),
            ('FETCHING', 'FETCHING', 'Fetching available Flamenco managers from Blender Cloud'),
        ],
        name='status',
        update=redraw)

    # List of managers is stored in 'available_managers' ID property,
    # because I don't know how to store a variable list of strings in a proper RNA property.
    @property
    def available_managers(self) -> list:
        # Returns [] until a fetch has stored something in the ID property.
        return self.get('available_managers', [])

    @available_managers.setter
    def available_managers(self, new_managers):
        self['available_managers'] = new_managers
|
||||
|
||||
|
||||
class FlamencoPollMixin:
    """Mixin restricting operator/panel availability to when Flamenco is active."""

    @classmethod
    def poll(cls, context):
        # Only available while the module-level flag is set.
        return flamenco_is_active
|
||||
|
||||
|
||||
class FLAMENCO_OT_fmanagers(async_loop.AsyncModalOperatorMixin,
                            pillar.AuthenticatedPillarOperatorMixin,
                            FlamencoPollMixin,
                            Operator):
    """Fetches the Flamenco Managers available to the user"""
    bl_idname = 'flamenco.managers'
    bl_label = 'Fetch available Flamenco Managers'

    # Abort the modal operator instead of looping on exceptions.
    stop_upon_exception = True
    log = logging.getLogger('%s.FLAMENCO_OT_fmanagers' % __name__)

    @property
    def mypref(self) -> FlamencoManagerGroup:
        """The add-on's Flamenco Manager preference group."""
        from ..blender import preferences

        return preferences().flamenco_manager

    async def async_execute(self, context):
        """Fetch the manager list from Pillar and store it in the preferences."""
        if not await self.authenticate(context):
            return

        from .sdk import Manager
        from ..pillar import pillar_call

        self.log.info('Going to fetch managers for user %s', self.user_id)

        self.mypref.status = 'FETCHING'
        managers = await pillar_call(Manager.all)

        # We need to convert to regular dicts before storing in ID properties.
        # Also don't store more properties than we need.
        as_list = [{'_id': p['_id'], 'name': p['name']} for p in managers['_items']]

        self.mypref.available_managers = as_list
        self.quit()

    def quit(self):
        """Mark the preference group idle before stopping the modal operator."""
        self.mypref.status = 'IDLE'
        super().quit()
|
||||
|
||||
|
||||
class FLAMENCO_OT_render(async_loop.AsyncModalOperatorMixin,
                         pillar.AuthenticatedPillarOperatorMixin,
                         FlamencoPollMixin,
                         Operator):
    """Performs a Blender render on Flamenco."""
    bl_idname = 'flamenco.render'
    bl_label = 'Render on Flamenco'
    bl_description = __doc__.rstrip('.')

    stop_upon_exception = True
    log = logging.getLogger('%s.FLAMENCO_OT_render' % __name__)

    async def async_execute(self, context):
        """Packs the current blend file and submits it as a Flamenco render job.

        Steps: save a Flamenco-specific copy of the file, BAM-pack it and its
        dependencies to the job storage directory, create the job at Flamenco
        Server, write jobinfo.json next to the packed file, then clean up.
        """
        # Refuse to start if the file hasn't been saved. It's okay if
        # it's dirty, but we do need a filename and a location.
        if not os.path.exists(context.blend_data.filepath):
            self.report({'ERROR'}, 'Please save your Blend file before using '
                                   'the Blender Cloud addon.')
            self.quit()
            return

        if not await self.authenticate(context):
            return

        import pillarsdk.exceptions
        from .sdk import Manager
        from ..pillar import pillar_call
        from ..blender import preferences

        scene = context.scene

        # Save to a different file, specifically for Flamenco.
        context.window_manager.flamenco_status = 'PACKING'
        filepath = await self._save_blendfile(context)

        # Determine where the render output will be stored.
        render_output = render_output_path(context, filepath)
        if render_output is None:
            self.report({'ERROR'}, 'Current file is outside of project path.')
            self.quit()
            return
        self.log.info('Will output render files to %s', render_output)

        # BAM-pack the files to the destination directory.
        outfile, missing_sources = await self.bam_pack(filepath)
        if not outfile:
            # bam_pack() already reported the error and quit the operator.
            return

        # Fetch Manager for doing path replacement.
        self.log.info('Going to fetch manager %s', self.user_id)
        prefs = preferences()

        manager_id = prefs.flamenco_manager.manager
        try:
            manager = await pillar_call(Manager.find, manager_id)
        except pillarsdk.exceptions.ResourceNotFound:
            self.report({'ERROR'}, 'Manager %s not found, refresh your managers in '
                                   'the Blender Cloud add-on settings.' % manager_id)
            self.quit()
            return

        # Create the job at Flamenco Server.
        context.window_manager.flamenco_status = 'COMMUNICATING'

        frame_range = scene.flamenco_render_frame_range.strip() or scene_frame_range(context)
        settings = {'blender_cmd': '{blender}',
                    'chunk_size': scene.flamenco_render_fchunk_size,
                    'filepath': manager.replace_path(outfile),
                    'frames': frame_range,
                    'render_output': manager.replace_path(render_output),
                    }

        # Add extra settings specific to the job type
        if scene.flamenco_render_job_type == 'blender-render-progressive':
            if scene.cycles.progressive == 'BRANCHED_PATH':
                samples = scene.cycles.aa_samples
            else:
                samples = scene.cycles.samples

            # With "square samples" enabled, Cycles stores the square root
            # of the effective sample count.
            if scene.cycles.use_square_samples:
                samples **= 2

            settings['cycles_num_chunks'] = scene.flamenco_render_schunk_count
            settings['cycles_sample_count'] = samples
            settings['format'] = 'EXR'

        try:
            job_info = await create_job(self.user_id,
                                        prefs.project.project,
                                        manager_id,
                                        scene.flamenco_render_job_type,
                                        settings,
                                        'Render %s' % filepath.name,
                                        priority=scene.flamenco_render_job_priority)
        except Exception as ex:
            self.report({'ERROR'}, 'Error creating Flamenco job: %s' % ex)
            self.quit()
            return

        # Store the job ID in a file in the output dir.
        # NOTE(review): 'outfile' (a Path) is shadowed by the open file object
        # here; the Path is no longer accessible after this with-block.
        with open(str(outfile.parent / 'jobinfo.json'), 'w', encoding='utf8') as outfile:
            import json

            job_info['missing_files'] = [str(mf) for mf in missing_sources]
            json.dump(job_info, outfile, sort_keys=True, indent=4)

        # We can now remove the local copy we made with bpy.ops.wm.save_as_mainfile().
        # Strictly speaking we can already remove it after the BAM-pack, but it may come in
        # handy in case of failures.
        try:
            self.log.info('Removing temporary file %s', filepath)
            filepath.unlink()
        except Exception as ex:
            self.report({'ERROR'}, 'Unable to remove file: %s' % ex)
            self.quit()
            return

        if prefs.flamenco_open_browser_after_submit:
            import webbrowser
            from urllib.parse import urljoin
            from ..blender import PILLAR_WEB_SERVER_URL

            url = urljoin(PILLAR_WEB_SERVER_URL, '/flamenco/jobs/%s/redir' % job_info['_id'])
            webbrowser.open_new_tab(url)

        # Do a final report.
        if missing_sources:
            names = (ms.name for ms in missing_sources)
            self.report({'WARNING'}, 'Flamenco job created with missing files: %s' %
                        '; '.join(names))
        else:
            self.report({'INFO'}, 'Flamenco job created.')

        self.quit()

    async def _save_blendfile(self, context):
        """Save to a different file, specifically for Flamenco.

        We shouldn't overwrite the artist's file.
        We can compress, since this file won't be managed by SVN and doesn't need diffability.

        :returns: the Path of the saved 'xxx.flamenco.blend' copy.
        """

        render = context.scene.render

        # Remember settings we need to restore after saving.
        old_use_file_extension = render.use_file_extension
        old_use_overwrite = render.use_overwrite
        old_use_placeholder = render.use_placeholder

        try:

            # The file extension should be determined by the render settings, not necessarily
            # by the setttings in the output panel.
            render.use_file_extension = True

            # Rescheduling should not overwrite existing frames.
            render.use_overwrite = False
            render.use_placeholder = False

            filepath = Path(context.blend_data.filepath).with_suffix('.flamenco.blend')
            self.log.info('Saving copy to temporary file %s', filepath)
            bpy.ops.wm.save_as_mainfile(filepath=str(filepath),
                                        compress=True,
                                        copy=True)
        finally:
            # Restore the settings we changed, even after an exception.
            render.use_file_extension = old_use_file_extension
            render.use_overwrite = old_use_overwrite
            render.use_placeholder = old_use_placeholder

        return filepath

    def quit(self):
        # Reset the UI status so the render button becomes available again.
        super().quit()
        bpy.context.window_manager.flamenco_status = 'IDLE'

    async def bam_pack(self, filepath: Path) -> typing.Tuple[typing.Optional[Path],
                                                             typing.List[Path]]:
        """BAM-packs the blendfile to the destination directory.

        Returns the path of the destination blend file.

        :param filepath: the blend file to pack (i.e. the current blend file)
        :returns: the destination blend file, or None if there were errors BAM-packing,
            and a list of missing paths.
        """

        from datetime import datetime
        from ..blender import preferences

        prefs = preferences()

        # Create a unique directory that is still more or less identifyable.
        # This should work better than a random ID.
        # BAM doesn't like output directories that end in '.blend'.
        unique_dir = '%s-%s-%s' % (datetime.now().isoformat('-').replace(':', ''),
                                   self.db_user['username'],
                                   filepath.stem)
        outdir = Path(prefs.flamenco_job_file_path) / unique_dir
        outfile = outdir / filepath.name

        # An empty exclusion filter means 'no exclusions'.
        exclusion_filter = prefs.flamenco_exclude_filter or None

        try:
            outdir.mkdir(parents=True)
        except Exception as ex:
            self.log.exception('Unable to create output path %s', outdir)
            self.report({'ERROR'}, 'Unable to create output path: %s' % ex)
            self.quit()
            return None, []

        try:
            missing_sources = await bam_interface.bam_copy(filepath, outfile, exclusion_filter)
        except bam_interface.CommandExecutionError as ex:
            self.log.exception('Unable to execute BAM pack')
            self.report({'ERROR'}, 'Unable to execute BAM pack: %s' % ex)
            self.quit()
            return None, []

        return outfile, missing_sources
|
||||
|
||||
|
||||
def scene_frame_range(context) -> str:
    """Returns the frame range string for the current scene."""

    scene = context.scene
    start, end = scene.frame_start, scene.frame_end
    return '%i-%i' % (start, end)
|
||||
|
||||
|
||||
class FLAMENCO_OT_scene_to_frame_range(FlamencoPollMixin, Operator):
    """Sets the scene frame range as the Flamenco render frame range."""
    bl_idname = 'flamenco.scene_to_frame_range'
    bl_label = 'Sets the scene frame range as the Flamenco render frame range'
    bl_description = __doc__.rstrip('.')

    def execute(self, context):
        # Copy the scene's start/end frames into the Flamenco frame-range property.
        context.scene.flamenco_render_frame_range = scene_frame_range(context)
        return {'FINISHED'}
|
||||
|
||||
|
||||
class FLAMENCO_OT_copy_files(Operator,
                             FlamencoPollMixin,
                             async_loop.AsyncModalOperatorMixin):
    """Uses BAM to copy the current blendfile + dependencies to the target directory."""
    bl_idname = 'flamenco.copy_files'
    bl_label = 'Copy files to target'
    bl_description = __doc__.rstrip('.')

    stop_upon_exception = True

    async def async_execute(self, context):
        """BAM-copies the current blend file to the Flamenco job file path.

        Reports missing source files as an error, then quits the operator.
        """
        from pathlib import Path
        from ..blender import preferences

        context.window_manager.flamenco_status = 'PACKING'
        # An empty exclusion filter means 'no exclusions'.
        exclusion_filter = preferences().flamenco_exclude_filter or None

        missing_sources = await bam_interface.bam_copy(
            Path(context.blend_data.filepath),
            Path(preferences().flamenco_job_file_path),
            exclusion_filter
        )

        if missing_sources:
            names = (ms.name for ms in missing_sources)
            self.report({'ERROR'}, 'Missing source files: %s' % '; '.join(names))

        self.quit()

    def quit(self):
        # Reset the UI status so the operator can be run again.
        super().quit()
        bpy.context.window_manager.flamenco_status = 'IDLE'
|
||||
|
||||
|
||||
class FLAMENCO_OT_explore_file_path(FlamencoPollMixin,
                                    Operator):
    """Opens the Flamenco job storage path in a file explorer.

    If the path cannot be found, this operator tries to open its parent.
    """

    bl_idname = 'flamenco.explore_file_path'
    bl_label = 'Open in file explorer'
    bl_description = __doc__.rstrip('.')

    # The directory to open in the platform's file explorer.
    path = StringProperty(name='Path', description='Path to explore', subtype='DIR_PATH')

    def execute(self, context):
        import platform
        import pathlib

        # Possibly open a parent of the path
        to_open = pathlib.Path(self.path)
        while to_open.parent != to_open:  # while we're not at the root
            if to_open.exists():
                break
            to_open = to_open.parent
        else:
            # The loop ran to completion: not even the filesystem root exists.
            self.report({'ERROR'}, 'Unable to open %s or any of its parents.' % self.path)
            return {'CANCELLED'}
        to_open = str(to_open)

        # Use each platform's native way to open a directory in the explorer.
        if platform.system() == "Windows":
            import os
            os.startfile(to_open)

        elif platform.system() == "Darwin":
            import subprocess
            subprocess.Popen(["open", to_open])

        else:
            import subprocess
            subprocess.Popen(["xdg-open", to_open])

        return {'FINISHED'}
|
||||
|
||||
|
||||
async def create_job(user_id: str,
                     project_id: str,
                     manager_id: str,
                     job_type: str,
                     job_settings: dict,
                     job_name: typing.Optional[str] = None,
                     *,
                     priority: int = 50,
                     job_description: typing.Optional[str] = None) -> dict:
    """Creates a render job at Flamenco Server, returning the job object as dictionary.

    :param user_id: ObjectID of the user creating the job.
    :param project_id: ObjectID of the project the job belongs to.
    :param manager_id: ObjectID of the Flamenco Manager that should run the job.
    :param job_type: job type name, e.g. 'blender-render'.
    :param job_settings: job-type-specific settings dict.
    :param job_name: optional human-readable job name.
    :param priority: job priority, higher numbers mean higher priority.
    :param job_description: optional longer description, only sent when non-empty.
    """

    import json
    from .sdk import Job
    from ..pillar import pillar_call

    job_attrs = {
        'status': 'queued',
        'priority': priority,
        'name': job_name,
        'settings': job_settings,
        'job_type': job_type,
        'user': user_id,
        'manager': manager_id,
        'project': project_id,
    }
    if job_description:
        job_attrs['description'] = job_description

    log.info('Going to create Flamenco job:\n%s',
             json.dumps(job_attrs, indent=4, sort_keys=True))

    job = Job(job_attrs)
    await pillar_call(job.create)

    # Fixed typo in the log message ('succesfully' -> 'successfully').
    log.info('Job created successfully: %s', job._id)
    return job.to_dict()
|
||||
|
||||
|
||||
def is_image_type(render_output_type: str) -> bool:
    """Determines whether the render output type is an image (True) or video (False)."""

    # Video formats; taken from rna_scene.c:273, rna_enum_image_type_items.
    # Anything not listed here is considered a still-image format.
    if render_output_type in {'AVI_JPEG', 'AVI_RAW', 'FRAMESERVER', 'FFMPEG', 'QUICKTIME'}:
        return False
    return True
|
||||
|
||||
|
||||
@functools.lru_cache(1)
def _render_output_path(
        local_project_path: str,
        blend_filepath: Path,
        flamenco_job_output_strip_components: int,
        flamenco_job_output_path: str,
        render_image_format: str,
        flamenco_render_frame_range: str,
) -> typing.Optional[PurePath]:
    """Cached version of render_output_path()

    This ensures that redraws of the Flamenco Render and Add-on preferences panels
    is fast.

    Only the single most recent call is cached (maxsize=1); repeated UI redraws
    pass identical arguments, so one slot suffices.

    :returns: the job output path, or None when it cannot be determined (project
        path or blend file missing, or blend file outside the project).
    """

    try:
        project_path = Path(bpy.path.abspath(local_project_path)).resolve()
    except FileNotFoundError:
        # Path.resolve() will raise a FileNotFoundError if the project path doesn't exist.
        return None

    try:
        blend_abspath = blend_filepath.resolve().absolute()
    except FileNotFoundError:
        # Path.resolve() will raise a FileNotFoundError if the path doesn't exist.
        return None

    try:
        proj_rel = blend_abspath.parent.relative_to(project_path)
    except ValueError:
        # The blend file lives outside the project directory.
        return None

    # Drop the configured number of leading directories from the
    # project-relative path before mapping it under the output top.
    rel_parts = proj_rel.parts[flamenco_job_output_strip_components:]
    output_top = Path(flamenco_job_output_path)

    # Strip off '.flamenco' too; we use 'xxx.flamenco.blend' as job file, but
    # don't want to have all the output paths ending in '.flamenco'.
    stem = blend_filepath.stem
    if stem.endswith('.flamenco'):
        stem = stem[:-9]

    dir_components = output_top.joinpath(*rel_parts) / stem

    # Blender will have to append the file extensions by itself.
    if is_image_type(render_image_format):
        return dir_components / '######'
    return dir_components / flamenco_render_frame_range
|
||||
|
||||
|
||||
def render_output_path(context, filepath: Path = None) -> typing.Optional[PurePath]:
    """Returns the render output path to be sent to Flamenco.

    :param context: the Blender context (used to find Flamenco preferences etc.)
    :param filepath: the Path of the blend file to render, or None for the current file.

    Returns None when the current blend file is outside the project path.
    """

    from ..blender import preferences

    scene = context.scene
    prefs = preferences()

    if filepath is None:
        filepath = Path(context.blend_data.filepath)

    # Delegate to the lru-cached helper so repeated UI redraws stay cheap.
    return _render_output_path(
        prefs.cloud_project_local_path,
        filepath,
        prefs.flamenco_job_output_strip_components,
        prefs.flamenco_job_output_path,
        scene.render.image_settings.file_format,
        scene.flamenco_render_frame_range,
    )
|
||||
|
||||
|
||||
class FLAMENCO_PT_render(bpy.types.Panel, FlamencoPollMixin):
    """Render-properties panel showing Flamenco job settings and the submit button."""

    bl_label = "Flamenco Render"
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = "render"
    bl_options = {'DEFAULT_CLOSED'}

    def draw(self, context):
        """Draws job settings, storage/output paths and a status-dependent footer."""
        layout = self.layout

        from ..blender import preferences

        prefs = preferences()

        labeled_row = layout.split(0.25, align=True)
        labeled_row.label('Job Type:')
        labeled_row.prop(context.scene, 'flamenco_render_job_type', text='')

        labeled_row = layout.split(0.25, align=True)
        labeled_row.label('Frame Range:')
        prop_btn_row = labeled_row.row(align=True)
        prop_btn_row.prop(context.scene, 'flamenco_render_frame_range', text='')
        prop_btn_row.operator('flamenco.scene_to_frame_range', text='', icon='ARROW_LEFTRIGHT')

        layout.prop(context.scene, 'flamenco_render_job_priority')
        layout.prop(context.scene, 'flamenco_render_fchunk_size')

        # Sample chunks only apply to the progressive-render job type.
        if getattr(context.scene, 'flamenco_render_job_type', None) == 'blender-render-progressive':
            layout.prop(context.scene, 'flamenco_render_schunk_count')

        # Read-only display of the storage and output paths, each with an
        # 'open in file explorer' button.
        readonly_stuff = layout.column(align=True)
        labeled_row = readonly_stuff.split(0.25, align=True)
        labeled_row.label('Storage:')
        prop_btn_row = labeled_row.row(align=True)
        prop_btn_row.label(prefs.flamenco_job_file_path)
        props = prop_btn_row.operator(FLAMENCO_OT_explore_file_path.bl_idname,
                                      text='', icon='DISK_DRIVE')
        props.path = prefs.flamenco_job_file_path

        labeled_row = readonly_stuff.split(0.25, align=True)
        labeled_row.label('Output:')
        prop_btn_row = labeled_row.row(align=True)
        render_output = render_output_path(context)

        if render_output is None:
            prop_btn_row.label('Unable to render with Flamenco, outside of project directory.')
        else:
            prop_btn_row.label(str(render_output))
            props = prop_btn_row.operator(FLAMENCO_OT_explore_file_path.bl_idname,
                                          text='', icon='DISK_DRIVE')
            props.path = str(render_output.parent)

        # Footer depends on the window manager's flamenco_status property.
        flamenco_status = context.window_manager.flamenco_status
        if flamenco_status == 'IDLE':
            layout.operator(FLAMENCO_OT_render.bl_idname,
                            text='Render on Flamenco',
                            icon='RENDER_ANIMATION')
        elif flamenco_status == 'PACKING':
            layout.label('Flamenco is packing your file + dependencies')
        elif flamenco_status == 'COMMUNICATING':
            layout.label('Communicating with Flamenco Server')
        else:
            layout.label('Unknown Flamenco status %s' % flamenco_status)
|
||||
|
||||
|
||||
def activate():
    """Activates draw callbacks, menu items etc. for Flamenco."""

    global flamenco_is_active
    log.info('Activating Flamenco')
    flamenco_is_active = True
    # Drop any stale cached output path so it is recomputed with fresh settings.
    _render_output_path.cache_clear()
|
||||
|
||||
|
||||
def deactivate():
    """Deactivates draw callbacks, menu items etc. for Flamenco."""

    global flamenco_is_active
    log.info('Deactivating Flamenco')
    flamenco_is_active = False
    # Drop the cached output path; it may reference deactivated settings.
    _render_output_path.cache_clear()
|
||||
|
||||
|
||||
def register():
    """Registers Flamenco classes and the scene/window-manager properties."""
    from ..utils import redraw

    bpy.utils.register_class(FlamencoManagerGroup)
    bpy.utils.register_class(FLAMENCO_OT_fmanagers)
    bpy.utils.register_class(FLAMENCO_OT_render)
    bpy.utils.register_class(FLAMENCO_OT_scene_to_frame_range)
    bpy.utils.register_class(FLAMENCO_OT_copy_files)
    bpy.utils.register_class(FLAMENCO_OT_explore_file_path)
    bpy.utils.register_class(FLAMENCO_PT_render)

    # Per-scene render job settings.
    scene = bpy.types.Scene
    scene.flamenco_render_fchunk_size = IntProperty(
        name='Frame Chunk Size',
        description='Maximum number of frames to render per task',
        min=1,
        default=1,
    )
    scene.flamenco_render_schunk_count = IntProperty(
        name='Number of Sample Chunks',
        description='Number of Cycles samples chunks to use per frame',
        min=2,
        default=3,
        soft_max=10,
    )
    scene.flamenco_render_frame_range = StringProperty(
        name='Frame Range',
        description='Frames to render, in "printer range" notation'
    )
    scene.flamenco_render_job_type = EnumProperty(
        name='Job Type',
        items=[
            ('blender-render', 'Simple Render', 'Simple frame-by-frame render'),
            ('blender-render-progressive', 'Progressive Render',
             'Each frame is rendered multiple times with different Cycles sample chunks, then combined'),
        ]
    )

    scene.flamenco_render_job_priority = IntProperty(
        name='Job Priority',
        min=0,
        default=50,
        max=100,
        description='Higher numbers mean higher priority'
    )

    # Global status used to drive the panel UI; redraws the UI on change.
    bpy.types.WindowManager.flamenco_status = EnumProperty(
        items=[
            ('IDLE', 'IDLE', 'Not doing anything.'),
            ('PACKING', 'PACKING', 'BAM-packing all dependencies.'),
            ('COMMUNICATING', 'COMMUNICATING', 'Communicating with Flamenco Server.'),
        ],
        name='flamenco_status',
        default='IDLE',
        description='Current status of the Flamenco add-on',
        update=redraw)
|
||||
|
||||
|
||||
def unregister():
    """Unregisters all Flamenco classes and deletes the registered properties."""
    import contextlib

    deactivate()
    bpy.utils.unregister_module(__name__)

    # The properties may not exist when register() never ran (or only ran
    # partially), so simply ignore AttributeError while deleting them.
    # This replaces six copy-pasted try/except blocks with one loop.
    for prop_name in ('flamenco_render_fchunk_size',
                      'flamenco_render_schunk_count',
                      'flamenco_render_frame_range',
                      'flamenco_render_job_type',
                      'flamenco_render_job_priority'):
        with contextlib.suppress(AttributeError):
            delattr(bpy.types.Scene, prop_name)

    with contextlib.suppress(AttributeError):
        del bpy.types.WindowManager.flamenco_status
|
185
blender_cloud/flamenco/bam_interface.py
Normal file
185
blender_cloud/flamenco/bam_interface.py
Normal file
@@ -0,0 +1,185 @@
|
||||
"""BAM packing interface for Flamenco."""
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
import typing
|
||||
|
||||
# Timeout of the BAM subprocess, in seconds.
|
||||
SUBPROC_READLINE_TIMEOUT = 600
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CommandExecutionError(Exception):
    """Raised when there was an error executing a BAM command."""
|
||||
|
||||
|
||||
def wheel_pythonpath_278() -> str:
    """Returns the value of a PYTHONPATH environment variable needed to run BAM from its wheel file.

    Workaround for Blender 2.78c not having io_blend_utils.pythonpath()
    """

    import os
    from ..wheels import wheel_filename

    # Locate the BAM wheel that ships with the add-on.
    bam_wheel = wheel_filename('blender_bam')
    log.info('Using wheel %s to run BAM-Pack', bam_wheel)

    # Keep whatever PYTHONPATH already exists, appending the wheel to it.
    current_pypath = os.environ.get('PYTHONPATH', '')
    if not current_pypath:
        return bam_wheel
    return os.pathsep.join((current_pypath, bam_wheel))
|
||||
|
||||
|
||||
async def bam_copy(base_blendfile: Path, target_blendfile: Path,
                   exclusion_filter: typing.Optional[str]) -> typing.List[Path]:
    """Uses BAM to copy the given file and dependencies to the target blendfile.

    Due to the way blendfile_pack.py is programmed/structured, we cannot import it
    and call a function; it has to be run in a subprocess.

    :param base_blendfile: the blend file to pack.
    :param target_blendfile: destination for the packed blend file.
    :param exclusion_filter: BAM '--exclude' pattern, or None for no exclusions.
    :raises: asyncio.CanceledError if the task was cancelled.
    :raises: asyncio.TimeoutError if reading a line from the BAM process timed out.
    :raises: CommandExecutionError if the subprocess failed or output invalid UTF-8.
    :returns: a list of missing sources; hopefully empty.
    """

    import asyncio
    import os
    import shlex
    import subprocess

    import bpy
    import io_blend_utils

    # Run 'bam.pack' with Blender's bundled Python interpreter.
    args = [
        bpy.app.binary_path_python,
        '-m', 'bam.pack',
        '--input', str(base_blendfile),
        '--output', str(target_blendfile),
        '--mode', 'FILE',
    ]

    if exclusion_filter:
        args.extend(['--exclude', exclusion_filter])

    cmd_to_log = ' '.join(shlex.quote(s) for s in args)
    log.info('Executing %s', cmd_to_log)

    # Workaround for Blender 2.78c not having io_blend_utils.pythonpath()
    if hasattr(io_blend_utils, 'pythonpath'):
        pythonpath = io_blend_utils.pythonpath()
    else:
        pythonpath = wheel_pythonpath_278()

    env = {
        'PYTHONPATH': pythonpath,
        # Needed on Windows because http://bugs.python.org/issue8557
        'PATH': os.environ['PATH'],
    }
    if 'SYSTEMROOT' in os.environ:  # Windows http://bugs.python.org/issue20614
        env['SYSTEMROOT'] = os.environ['SYSTEMROOT']

    proc = await asyncio.create_subprocess_exec(
        *args,
        env=env,
        stdin=subprocess.DEVNULL,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )

    missing_sources = []

    try:
        # Read the subprocess output line by line (with a per-line timeout),
        # collecting any 'source missing' reports along the way.
        while not proc.stdout.at_eof():
            line = await asyncio.wait_for(proc.stdout.readline(),
                                          SUBPROC_READLINE_TIMEOUT)
            if not line:
                # EOF received, so let's bail.
                break

            try:
                line = line.decode('utf8')
            except UnicodeDecodeError as ex:
                raise CommandExecutionError('Command produced non-UTF8 output, '
                                            'aborting: %s' % ex)

            line = line.rstrip()
            if 'source missing:' in line:
                path = parse_missing_source(line)
                missing_sources.append(path)
                log.warning('Source is missing: %s', path)

            log.info('  %s', line)
    finally:
        if proc.returncode is None:
            # Always wait for the process, to avoid zombies.
            try:
                proc.kill()
            except ProcessLookupError:
                # The process is already stopped, so killing is impossible. That's ok.
                log.debug("The process was already stopped, aborting is impossible. That's ok.")
            await proc.wait()
        log.info('The process stopped with status code %i', proc.returncode)

    if proc.returncode:
        raise CommandExecutionError('Process stopped with status %i' % proc.returncode)

    return missing_sources
|
||||
|
||||
|
||||
def parse_missing_source(line: str) -> Path:
    r"""Parses a "missing source" line into a pathlib.Path.

    >>> parse_missing_source(r"  source missing: b'D\xc3\xaffficult \xc3\x9cTF-8 filename'")
    PosixPath('Dïfficult ÜTF-8 filename')
    >>> parse_missing_source(r"  source missing: b'D\xfffficult Win1252 f\xeflen\xe6me'")
    PosixPath('D�fficult Win1252 f�len�me')
    """

    _, missing_source = line.split(': ', 1)
    missing_source_as_bytes = parse_byte_literal(missing_source.strip())

    # The file could originate from any platform, so UTF-8 and the current platform's
    # filesystem encodings are just guesses.
    try:
        missing_source = missing_source_as_bytes.decode('utf8')
    except UnicodeDecodeError:
        import sys
        try:
            missing_source = missing_source_as_bytes.decode(sys.getfilesystemencoding())
        except UnicodeDecodeError:
            # Last resort: ASCII with U+FFFD replacement for anything non-ASCII.
            missing_source = missing_source_as_bytes.decode('ascii', errors='replace')

    path = Path(missing_source)

    return path
|
||||
|
||||
|
||||
def parse_byte_literal(bytes_literal: str) -> bytes:
    r"""Parses a repr(bytes) output into a bytes object.

    >>> parse_byte_literal(r"b'D\xc3\xaffficult \xc3\x9cTF-8 filename'")
    b'D\xc3\xaffficult \xc3\x9cTF-8 filename'
    >>> parse_byte_literal(r"b'D\xeffficult Win1252 f\xeflen\xe6me'")
    b'D\xeffficult Win1252 f\xeflen\xe6me'

    :raises ValueError: when the string is not a proper bytes literal.
    """

    import ast

    # Validate explicitly instead of with 'assert': asserts are stripped when
    # Python runs with -O, which would pass malformed input straight through.
    # The length check also guards against IndexError on short/empty input.
    if (len(bytes_literal) < 3
            or bytes_literal[0] != 'b'
            or bytes_literal[1] not in {'"', "'"}
            or bytes_literal[-1] != bytes_literal[1]):
        raise ValueError('not a bytes literal: %r' % bytes_literal)

    return ast.literal_eval(bytes_literal)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Run the doctests embedded in this module when executed directly.
    import doctest

    doctest.testmod()
|
54
blender_cloud/flamenco/sdk.py
Normal file
54
blender_cloud/flamenco/sdk.py
Normal file
@@ -0,0 +1,54 @@
|
||||
import functools
|
||||
import pathlib
|
||||
|
||||
from pillarsdk.resource import List, Find, Create
|
||||
|
||||
|
||||
class Manager(List, Find):
    """Manager class wrapping the REST nodes endpoint"""
    path = 'flamenco/managers'
    # Path flavour used for replacement targets; PurePath follows the local OS.
    PurePlatformPath = pathlib.PurePath

    # NOTE(review): functools.lru_cache on an instance method keys on 'self',
    # which keeps every cached Manager instance alive for the interpreter's
    # lifetime (ruff B019). Presumably only a handful of Managers exist per
    # session, so the leak is negligible — verify if that assumption changes.
    @functools.lru_cache()
    def _sorted_path_replacements(self) -> list:
        """Returns (variable name, replacement path) pairs for this platform.

        Sorted longest variable name first, so the most specific replacement
        is tried before shorter ones.
        """
        import platform

        if self.path_replacement is None:
            return []

        items = self.path_replacement.to_dict().items()

        def by_length(item):
            # Longest variable name first; ties broken alphabetically.
            return -len(item[0]), item[0]

        this_platform = platform.system().lower()
        return [(varname, platform_replacements[this_platform])
                for varname, platform_replacements in sorted(items, key=by_length)
                if this_platform in platform_replacements]

    def replace_path(self, some_path: pathlib.PurePath) -> str:
        """Performs path variable replacement.

        Tries to find platform-specific path prefixes, and replaces them with
        variables.
        """

        for varname, path in self._sorted_path_replacements():
            replacement = self.PurePlatformPath(path)
            try:
                relpath = some_path.relative_to(replacement)
            except ValueError:
                # Not relative to each other, so no replacement possible
                continue

            # Splice '{varname}' in front of the remaining relative path.
            replacement_root = self.PurePlatformPath('{%s}' % varname)
            return (replacement_root / relpath).as_posix()

        # No prefix matched; return the path unmodified (POSIX-style).
        return some_path.as_posix()
|
||||
|
||||
|
||||
class Job(List, Find, Create):
    """Job class wrapping the REST nodes endpoint
    """
    path = 'flamenco/jobs'
    # Always include the 'project' field in query projections.
    ensure_query_projections = {'project': 1}
|
@@ -1,729 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# Copyright (C) 2014 Blender Aid
|
||||
# http://www.blendearaid.com
|
||||
# blenderaid@gmail.com
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
import asyncio
|
||||
import logging
|
||||
import threading
|
||||
|
||||
import bpy
|
||||
import bgl
|
||||
import blf
|
||||
import os
|
||||
|
||||
from bpy.types import AddonPreferences
|
||||
from bpy.props import (BoolProperty, EnumProperty,
|
||||
FloatProperty, FloatVectorProperty,
|
||||
IntProperty, StringProperty)
|
||||
|
||||
import pillarsdk
|
||||
from . import async_loop, pillar, cache
|
||||
|
||||
icon_width = 128
|
||||
icon_height = 128
|
||||
target_item_width = 400
|
||||
target_item_height = 128
|
||||
|
||||
library_path = '/tmp'
|
||||
library_icons_path = os.path.join(os.path.dirname(__file__), "icons")
|
||||
|
||||
|
||||
class UpNode(pillarsdk.Node):
    """Sentinel node representing the '.. up ..' entry in the browser."""

    def __init__(self):
        super().__init__()
        # Fixed fake ID/type so this node is recognisable everywhere.
        self['_id'] = 'UP'
        self['node_type'] = 'UP'
|
||||
|
||||
|
||||
class MenuItem:
    """GUI menu item for the 3D View GUI."""

    # Pixel margins used when drawing icon and label.
    icon_margin_x = 4
    icon_margin_y = 4
    text_margin_x = 6

    # Font metrics passed to blf.size() when drawing the label.
    text_height = 16
    text_width = 72

    # Symbolic thumbnail names mapped to bundled icon files.
    DEFAULT_ICONS = {
        'FOLDER': os.path.join(library_icons_path, 'folder.png'),
        'SPINNER': os.path.join(library_icons_path, 'spinner.png'),
    }

    # Node types a MenuItem is allowed to represent.
    SUPPORTED_NODE_TYPES = {'UP', 'group_texture', 'texture'}
|
||||
|
||||
def __init__(self, node, file_desc, thumb_path: str, label_text):
|
||||
if node['node_type'] not in self.SUPPORTED_NODE_TYPES:
|
||||
raise TypeError('Node of type %r not supported; supported are %r.' % (
|
||||
node.group_texture, self.SUPPORTED_NODE_TYPES))
|
||||
|
||||
self.node = node # pillarsdk.Node, contains 'node_type' key to indicate type
|
||||
self.file_desc = file_desc # pillarsdk.File object, or None if a 'folder' node.
|
||||
self.label_text = label_text
|
||||
self._thumb_path = ''
|
||||
self.icon = None
|
||||
self._is_folder = node['node_type'] == 'group_texture' or isinstance(node, UpNode)
|
||||
|
||||
self.thumb_path = thumb_path
|
||||
|
||||
# Updated when drawing the image
|
||||
self.x = 0
|
||||
self.y = 0
|
||||
self.width = 0
|
||||
self.height = 0
|
||||
|
||||
    @property
    def thumb_path(self) -> str:
        """Filesystem path of the current thumbnail ('' when there is none)."""
        return self._thumb_path

    @thumb_path.setter
    def thumb_path(self, new_thumb_path: str):
        # A DEFAULT_ICONS key ('FOLDER', 'SPINNER') maps to its bundled icon
        # file; any other string is taken as a literal file path.
        self._thumb_path = self.DEFAULT_ICONS.get(new_thumb_path, new_thumb_path)
        if self._thumb_path:
            self.icon = bpy.data.images.load(filepath=self._thumb_path)
        else:
            self.icon = None
|
||||
|
||||
    @property
    def node_uuid(self) -> str:
        """ID of the node this menu item represents."""
        return self.node['_id']
|
||||
|
||||
def update(self, node, file_desc, thumb_path: str, label_text):
|
||||
# We can get updated information about our Node, but a MenuItem should
|
||||
# always represent one node, and it shouldn't be shared between nodes.
|
||||
if self.node_uuid != node['_id']:
|
||||
raise ValueError("Don't change the node ID this MenuItem reflects, "
|
||||
"just create a new one.")
|
||||
self.node = node
|
||||
self.file_desc = file_desc # pillarsdk.File object, or None if a 'folder' node.
|
||||
self.thumb_path = thumb_path
|
||||
self.label_text = label_text
|
||||
|
||||
    @property
    def is_folder(self) -> bool:
        # True for 'group_texture' nodes and for the synthetic UpNode.
        return self._is_folder
|
||||
|
||||
    def update_placement(self, x, y, width, height):
        """Remember where this item is placed, for later drawing and hit-testing.

        (The original docstring claimed this draws the item; it only stores
        the rectangle used by draw() and hits().)
        """

        self.x = x
        self.y = y
        self.width = width
        self.height = height
|
||||
|
||||
    def draw(self, highlighted: bool):
        """Draw this menu item (background, thumbnail quad and label) with bgl/blf."""
        # Semi-transparent background rectangle; lighter when highlighted.
        bgl.glEnable(bgl.GL_BLEND)
        if highlighted:
            bgl.glColor4f(0.555, 0.555, 0.555, 0.8)
        else:
            bgl.glColor4f(0.447, 0.447, 0.447, 0.8)

        bgl.glRectf(self.x, self.y, self.x + self.width, self.y + self.height)

        # Upload the thumbnail image to the GPU for this draw call.
        texture = self.icon
        err = texture.gl_load(filter=bgl.GL_NEAREST, mag=bgl.GL_NEAREST)
        assert not err, 'OpenGL error: %i' % err

        bgl.glColor4f(0.0, 0.0, 1.0, 0.5)
        # bgl.glLineWidth(1.5)

        # ------ TEXTURE ---------#
        bgl.glBindTexture(bgl.GL_TEXTURE_2D, texture.bindcode[0])
        bgl.glEnable(bgl.GL_TEXTURE_2D)
        bgl.glBlendFunc(bgl.GL_SRC_ALPHA, bgl.GL_ONE_MINUS_SRC_ALPHA)

        # Textured quad of icon_width x icon_height pixels at the item's origin.
        bgl.glColor4f(1, 1, 1, 1)
        bgl.glBegin(bgl.GL_QUADS)
        bgl.glTexCoord2d(0, 0)
        bgl.glVertex2d(self.x + self.icon_margin_x, self.y)
        bgl.glTexCoord2d(0, 1)
        bgl.glVertex2d(self.x + self.icon_margin_x, self.y + icon_height)
        bgl.glTexCoord2d(1, 1)
        bgl.glVertex2d(self.x + self.icon_margin_x + icon_width, self.y + icon_height)
        bgl.glTexCoord2d(1, 0)
        bgl.glVertex2d(self.x + self.icon_margin_x + icon_width, self.y)
        bgl.glEnd()
        bgl.glDisable(bgl.GL_TEXTURE_2D)
        bgl.glDisable(bgl.GL_BLEND)

        # Release the GPU copy again; the image stays loaded in bpy.data.
        texture.gl_free()

        # draw some text
        font_id = 0
        blf.position(font_id,
                     self.x + self.icon_margin_x + icon_width + self.text_margin_x,
                     self.y + icon_height * 0.5 - 0.25 * self.text_height, 0)
        blf.size(font_id, self.text_height, self.text_width)
        blf.draw(font_id, self.label_text)
|
||||
|
||||
def hits(self, mouse_x: int, mouse_y: int) -> bool:
|
||||
return self.x < mouse_x < self.x + self.width and self.y < mouse_y < self.y + self.height
|
||||
|
||||
|
||||
class BlenderCloudBrowser(bpy.types.Operator):
    """Modal operator that browses Blender Cloud textures in the 3D View."""

    bl_idname = 'pillar.browser'
    bl_label = 'Blender Cloud Texture Browser'

    _draw_handle = None  # handle from draw_handler_add(); removed in _finish()

    # One of: INITIALIZING, CHECKING_CREDENTIALS, BROWSING,
    # DOWNLOADING_TEXTURE, EXCEPTION, QUIT (drives modal() and draw_menu()).
    _state = 'INITIALIZING'

    project_uuid = '5672beecc0261b2005ed1a33'  # Blender Cloud project UUID
    node = None  # The Node object we're currently showing, or None if we're at the project top.
    node_uuid = ''  # Blender Cloud node UUID we're currently showing, i.e. None-safe self.node['_id']

    # This contains a stack of Node objects that lead up to the currently browsed node.
    # This allows us to display the "up" item.
    path_stack = []

    async_task = None  # asyncio task for fetching thumbnails
    signalling_future = None  # asyncio future for signalling that we want to cancel everything.
    timer = None  # window-manager timer driving periodic redraws
    log = logging.getLogger('%s.BlenderCloudBrowser' % __name__)

    # Guards current_display_content against concurrent updates from callbacks.
    _menu_item_lock = threading.Lock()
    current_path = ''
    current_display_content = []  # list of MenuItem
    loaded_images = set()  # filepaths of images we loaded into bpy.data.images
    thumbnails_cache = ''
    maximized_area = False  # whether invoke() maximised the area (undone in _finish())

    # Last known mouse position, updated in modal().
    mouse_x = 0
    mouse_y = 0
|
||||
|
||||
    def invoke(self, context, event):
        """Set up the modal operator: draw handler, timer, and credentials check."""
        # Refuse to start if the file hasn't been saved.
        if not context.blend_data.is_saved:
            self.report({'ERROR'}, 'Please save your Blend file before using '
                                   'the Blender Cloud addon.')
            return {'CANCELLED'}

        wm = context.window_manager
        self.project_uuid = wm.blender_cloud_project
        self.node_uuid = wm.blender_cloud_node
        self.path_stack = []

        self.thumbnails_cache = cache.cache_directory('thumbnails')
        self.mouse_x = event.mouse_x
        self.mouse_y = event.mouse_y

        # See if we have to maximize the current area
        if not context.screen.show_fullscreen:
            # Remember we did this so _finish() can toggle it back.
            self.maximized_area = True
            bpy.ops.screen.screen_full_area(use_hide_panels=True)

        # Add the region OpenGL drawing callback
        # draw in view space with 'POST_VIEW' and 'PRE_VIEW'
        self._draw_handle = context.space_data.draw_handler_add(
            self.draw_menu, (context,), 'WINDOW', 'POST_PIXEL')

        self.current_display_content = []
        self.loaded_images = set()
        # Starts the async task chain; ends up in BROWSING on success.
        self.check_credentials()

        context.window_manager.modal_handler_add(self)
        # ~30 fps timer so modal() can redraw while downloads progress.
        self.timer = context.window_manager.event_timer_add(1 / 30, context.window)

        return {'RUNNING_MODAL'}
|
||||
|
||||
    def modal(self, context, event):
        """Handle events while the browser is active.

        Promotes async-task failures to the EXCEPTION state, handles timer
        redraws, mouse tracking, item clicks, and cancellation.
        """
        task = self.async_task
        # If the running task failed, switch to the EXCEPTION state once.
        if self._state != 'EXCEPTION' and task.done() and not task.cancelled():
            ex = task.exception()
            if ex is not None:
                self._state = 'EXCEPTION'
                self.log.error('Exception while running task: %s', ex)
                return {'RUNNING_MODAL'}

        if self._state == 'QUIT':
            self._finish(context)
            return {'FINISHED'}

        # Manual escape hatch: TAB re-kicks the async loop if it stalled.
        if event.type == 'TAB' and event.value == 'RELEASE':
            self.log.info('Ensuring async loop is running')
            async_loop.ensure_async_loop()

        if event.type == 'TIMER':
            context.area.tag_redraw()
            return {'RUNNING_MODAL'}

        # Matches MOUSEMOVE, LEFTMOUSE, etc.; keep the cursor position current.
        if 'MOUSE' in event.type:
            context.area.tag_redraw()
            self.mouse_x = event.mouse_x
            self.mouse_y = event.mouse_y

        if self._state == 'BROWSING' and event.type == 'LEFTMOUSE' and event.value == 'RELEASE':
            selected = self.get_clicked()

            if selected is None:
                # No item clicked, ignore it.
                return {'RUNNING_MODAL'}

            if selected.is_folder:
                self.descend_node(selected.node)
            else:
                if selected.file_desc is None:
                    # This can happen when the thumbnail information isn't loaded yet.
                    # Just ignore the click for now.
                    # TODO: think of a way to handle this properly.
                    return {'RUNNING_MODAL'}
                self.handle_item_selection(context, selected)

        elif event.type in {'RIGHTMOUSE', 'ESC'}:
            self._finish(context)
            return {'CANCELLED'}

        return {'RUNNING_MODAL'}
|
||||
|
||||
    def check_credentials(self):
        """Switch to CHECKING_CREDENTIALS and start the async credentials check."""
        self._state = 'CHECKING_CREDENTIALS'
        self.log.debug('Checking credentials')
        self._new_async_task(self._check_credentials())
|
||||
|
||||
    async def _check_credentials(self):
        """Checks credentials with Pillar, and if ok goes to the BROWSING state."""

        try:
            await pillar.check_pillar_credentials()
        except pillar.CredentialsNotSyncedError:
            self.log.info('Credentials not synced, re-syncing automatically.')
        else:
            self.log.info('Credentials okay, browsing assets.')
            await self.async_download_previews()
            return

        # First check failed; try one refresh before giving up.
        try:
            await pillar.refresh_pillar_credentials()
        except pillar.UserNotLoggedInError:
            # NOTE(review): self.error is not defined in this file; presumably
            # provided by a mixin/base class — verify, else this raises
            # AttributeError before the UserNotLoggedInError below.
            self.error('User not logged in on Blender ID.')
        else:
            self.log.info('Credentials refreshed and ok, browsing assets.')
            await self.async_download_previews()
            return

        raise pillar.UserNotLoggedInError()
        # self._new_async_task(self._check_credentials())
|
||||
|
||||
def descend_node(self, node):
|
||||
"""Descends the node hierarchy by visiting this node.
|
||||
|
||||
Also keeps track of the current node, so that we know where the "up" button should go.
|
||||
"""
|
||||
|
||||
# Going up or down?
|
||||
if self.path_stack and isinstance(node, UpNode):
|
||||
self.log.debug('Going up, pop the stack; pre-pop stack is %r', self.path_stack)
|
||||
node = self.path_stack.pop()
|
||||
|
||||
else:
|
||||
# Going down, keep track of where we were (project top-level is None)
|
||||
self.path_stack.append(self.node)
|
||||
self.log.debug('Going up, push the stack; post-push stack is %r', self.path_stack)
|
||||
|
||||
# Set 'current' to the given node
|
||||
self.node_uuid = node['_id'] if node else None
|
||||
self.node = node
|
||||
self.browse_assets()
|
||||
|
||||
    def _stop_async_task(self):
        """Cancel the running async task (if any) and block until it finished."""
        self.log.debug('Stopping async task')
        if self.async_task is None:
            self.log.debug('No async task, trivially stopped')
            return

        # Signal that we want to stop.
        self.async_task.cancel()
        if not self.signalling_future.done():
            self.log.info("Signalling that we want to cancel anything that's running.")
            self.signalling_future.cancel()

        # Wait until the asynchronous task is done.
        if not self.async_task.done():
            self.log.info("blocking until async task is done.")
            loop = asyncio.get_event_loop()
            try:
                loop.run_until_complete(self.async_task)
            except asyncio.CancelledError:
                self.log.info('Asynchronous task was cancelled')
                return

        # noinspection PyBroadException
        try:
            # This re-raises any exception of the task, so it gets logged
            # instead of silently dropped.
            self.async_task.result()  # This re-raises any exception of the task.
        except asyncio.CancelledError:
            self.log.info('Asynchronous task was cancelled')
        except Exception:
            self.log.exception("Exception from asynchronous task")
|
||||
|
||||
    def _finish(self, context):
        """Tear down everything invoke() set up: task, images, handlers, timer."""
        self.log.debug('Finishing the modal operator')
        self._stop_async_task()
        self.clear_images()

        context.space_data.draw_handler_remove(self._draw_handle, 'WINDOW')
        context.window_manager.event_timer_remove(self.timer)

        # Toggle full-screen back off if invoke() turned it on.
        if self.maximized_area:
            bpy.ops.screen.screen_full_area(use_hide_panels=True)

        context.area.tag_redraw()
        self.log.debug('Modal operator finished')
|
||||
|
||||
def clear_images(self):
|
||||
"""Removes all images we loaded from Blender's memory."""
|
||||
|
||||
for image in bpy.data.images:
|
||||
if image.filepath_raw not in self.loaded_images:
|
||||
continue
|
||||
|
||||
image.user_clear()
|
||||
bpy.data.images.remove(image)
|
||||
|
||||
self.loaded_images.clear()
|
||||
self.current_display_content.clear()
|
||||
|
||||
    def add_menu_item(self, *args) -> MenuItem:
        """Create a MenuItem (args as for MenuItem()) and register it for display."""
        menu_item = MenuItem(*args)

        # Just make this thread-safe to be on the safe side.
        with self._menu_item_lock:
            self.current_display_content.append(menu_item)
            # Track the loaded icon so clear_images() can free it later.
            self.loaded_images.add(menu_item.icon.filepath_raw)

        return menu_item
|
||||
|
||||
    def update_menu_item(self, node, *args) -> None:
        """Update the displayed MenuItem for the given node.

        :raises ValueError: when no MenuItem shows this node.
        """
        # Annotation corrected: nothing is returned (was '-> MenuItem').
        node_uuid = node['_id']

        # Just make this thread-safe to be on the safe side.
        with self._menu_item_lock:
            for menu_item in self.current_display_content:
                if menu_item.node_uuid == node_uuid:
                    menu_item.update(node, *args)
                    self.loaded_images.add(menu_item.icon.filepath_raw)
                    break
            else:
                # for-else: reached only when the loop found no match.
                raise ValueError('Unable to find MenuItem(node_uuid=%r)' % node_uuid)
|
||||
|
||||
async def async_download_previews(self):
|
||||
self._state = 'BROWSING'
|
||||
|
||||
thumbnails_directory = self.thumbnails_cache
|
||||
self.log.info('Asynchronously downloading previews to %r', thumbnails_directory)
|
||||
self.clear_images()
|
||||
|
||||
def thumbnail_loading(node, texture_node):
|
||||
self.add_menu_item(node, None, 'SPINNER', texture_node['name'])
|
||||
|
||||
def thumbnail_loaded(node, file_desc, thumb_path):
|
||||
self.update_menu_item(node, file_desc, thumb_path, file_desc['filename'])
|
||||
|
||||
# Download either by group_texture node UUID or by project UUID (which
|
||||
# shows all top-level nodes)
|
||||
if self.node_uuid:
|
||||
self.log.debug('Getting subnodes for parent node %r', self.node_uuid)
|
||||
children = await pillar.get_nodes(parent_node_uuid=self.node_uuid,
|
||||
node_type='group_textures')
|
||||
|
||||
# Make sure we can go up again.
|
||||
if self.path_stack:
|
||||
self.add_menu_item(UpNode(), None, 'FOLDER', '.. up ..')
|
||||
elif self.project_uuid:
|
||||
self.log.debug('Getting subnodes for project node %r', self.project_uuid)
|
||||
children = await pillar.get_nodes(self.project_uuid, '')
|
||||
|
||||
else:
|
||||
# TODO: add "nothing here" icon and trigger re-draw
|
||||
self.log.warning("Not node UUID and no project UUID, I can't do anything!")
|
||||
return
|
||||
|
||||
# Download all child nodes
|
||||
self.log.debug('Iterating over child nodes of %r', self.node_uuid)
|
||||
for child in children:
|
||||
# print(' - %(_id)s = %(name)s' % child)
|
||||
self.add_menu_item(child, None, 'FOLDER', child['name'])
|
||||
|
||||
# There are only sub-nodes at the project level, no texture nodes,
|
||||
# so we won't have to bother looking for textures.
|
||||
if not self.node_uuid:
|
||||
return
|
||||
|
||||
directory = os.path.join(thumbnails_directory, self.project_uuid, self.node_uuid)
|
||||
os.makedirs(directory, exist_ok=True)
|
||||
|
||||
self.log.debug('Fetching texture thumbnails for node %r', self.node_uuid)
|
||||
await pillar.fetch_texture_thumbs(self.node_uuid, 's', directory,
|
||||
thumbnail_loading=thumbnail_loading,
|
||||
thumbnail_loaded=thumbnail_loaded,
|
||||
future=self.signalling_future)
|
||||
|
||||
    def browse_assets(self):
        """Start the async preview download for the current project/node."""
        self.log.debug('Browsing assets at project %r node %r', self.project_uuid, self.node_uuid)
        self._new_async_task(self.async_download_previews())
|
||||
|
||||
    def _new_async_task(self, async_task: asyncio.coroutine, future: asyncio.Future=None):
        """Stops the currently running async task, and starts another one.

        :param async_task: coroutine to schedule on the async loop.
        :param future: optional future used for cancellation signalling;
            a fresh one is created when not given.
        """

        self.log.debug('Setting up a new task %r, so any existing task must be stopped', async_task)
        self._stop_async_task()

        # Download the previews asynchronously.
        self.signalling_future = future or asyncio.Future()
        self.async_task = asyncio.ensure_future(async_task)
        self.log.debug('Created new task %r', self.async_task)

        # Start the async manager so everything happens.
        async_loop.ensure_async_loop()
|
||||
|
||||
    def draw_menu(self, context):
        """Draws the GUI with OpenGL."""

        # Dispatch on the current state; unknown states draw nothing but
        # still show the state label below.
        drawers = {
            'CHECKING_CREDENTIALS': self._draw_checking_credentials,
            'BROWSING': self._draw_browser,
            'DOWNLOADING_TEXTURE': self._draw_downloading,
            'EXCEPTION': self._draw_exception,
        }

        if self._state in drawers:
            drawer = drawers[self._state]
            drawer(context)

        # For debugging: draw the state
        font_id = 0
        bgl.glColor4f(1.0, 1.0, 1.0, 1.0)
        blf.size(font_id, 20, 72)
        blf.position(font_id, 5, 5, 0)
        blf.draw(font_id, self._state)
        bgl.glDisable(bgl.GL_BLEND)
|
||||
|
||||
@staticmethod
|
||||
def _window_region(context):
|
||||
window_regions = [region
|
||||
for region in context.area.regions
|
||||
if region.type == 'WINDOW']
|
||||
return window_regions[0]
|
||||
|
||||
    def _draw_browser(self, context):
        """OpenGL drawing code for the BROWSING state."""

        margin_x = 5
        margin_y = 5
        padding_x = 5

        window_region = self._window_region(context)
        content_width = window_region.width - margin_x * 2
        content_height = window_region.height - margin_y * 2

        # Items are laid out in a grid starting at the top-left.
        content_x = margin_x
        content_y = context.area.height - margin_y - target_item_height

        col_count = content_width // target_item_width

        item_width = (content_width - (col_count * padding_x)) / col_count
        item_height = target_item_height

        block_width = item_width + padding_x
        block_height = item_height + margin_y

        # Darken the whole region behind the browser.
        bgl.glEnable(bgl.GL_BLEND)
        bgl.glColor4f(0.0, 0.0, 0.0, 0.6)
        bgl.glRectf(0, 0, window_region.width, window_region.height)

        if self.current_display_content:
            for item_idx, item in enumerate(self.current_display_content):
                # Row-major grid placement.
                x = content_x + (item_idx % col_count) * block_width
                y = content_y - (item_idx // col_count) * block_height

                item.update_placement(x, y, item_width, item_height)
                item.draw(highlighted=item.hits(self.mouse_x, self.mouse_y))
        else:
            # Nothing loaded yet; show a centred status message instead.
            font_id = 0
            text = "Communicating with Blender Cloud"
            bgl.glColor4f(1.0, 1.0, 1.0, 1.0)
            blf.size(font_id, 20, 72)
            text_width, text_height = blf.dimensions(font_id, text)
            blf.position(font_id,
                         content_x + content_width * 0.5 - text_width * 0.5,
                         content_y - content_height * 0.3 + text_height * 0.5, 0)
            blf.draw(font_id, text)

        bgl.glDisable(bgl.GL_BLEND)
        # bgl.glColor4f(0.0, 0.0, 0.0, 1.0)
|
||||
|
||||
    def _draw_downloading(self, context):
        """OpenGL drawing code for the DOWNLOADING_TEXTURE state."""

        self._draw_text_on_colour(context,
                                  'Downloading texture from Blender Cloud',
                                  (0.0, 0.0, 0.2, 0.6))
|
||||
|
||||
    def _draw_checking_credentials(self, context):
        """OpenGL drawing code for the CHECKING_CREDENTIALS state."""

        self._draw_text_on_colour(context,
                                  'Checking login credentials',
                                  (0.0, 0.0, 0.2, 0.6))
|
||||
|
||||
    def _draw_text_on_colour(self, context, text, bgcolour):
        """Fill the window region with bgcolour (RGBA) and draw centred text."""
        content_height, content_width = self._window_size(context)
        bgl.glEnable(bgl.GL_BLEND)
        bgl.glColor4f(*bgcolour)
        bgl.glRectf(0, 0, content_width, content_height)

        font_id = 0
        bgl.glColor4f(1.0, 1.0, 1.0, 1.0)
        blf.size(font_id, 20, 72)
        text_width, text_height = blf.dimensions(font_id, text)

        # Horizontally centred, at 70% of the region height.
        blf.position(font_id,
                     content_width * 0.5 - text_width * 0.5,
                     content_height * 0.7 + text_height * 0.5, 0)
        blf.draw(font_id, text)
        bgl.glDisable(bgl.GL_BLEND)
|
||||
|
||||
def _window_size(self, context):
|
||||
window_region = self._window_region(context)
|
||||
content_width = window_region.width
|
||||
content_height = window_region.height
|
||||
return content_height, content_width
|
||||
|
||||
    def _draw_exception(self, context):
        """OpenGL drawing code for the EXCEPTION state."""

        import textwrap

        content_height, content_width = self._window_size(context)

        # Reddish overlay to signal the error state.
        bgl.glEnable(bgl.GL_BLEND)
        bgl.glColor4f(0.2, 0.0, 0.0, 0.6)
        bgl.glRectf(0, 0, content_width, content_height)

        font_id = 0
        ex = self.async_task.exception()
        if isinstance(ex, pillar.UserNotLoggedInError):
            ex_msg = 'You are not logged in on Blender ID. Please log in at User Preferences, ' \
                     'System, Blender ID.'
        else:
            ex_msg = str(ex)
            if not ex_msg:
                # Some exceptions stringify to ''; fall back to the type name.
                ex_msg = str(type(ex))
        text = "An error occurred:\n%s" % ex_msg
        lines = textwrap.wrap(text)

        bgl.glColor4f(1.0, 1.0, 1.0, 1.0)
        blf.size(font_id, 20, 72)
        # Line height measured from a string with ascenders and descenders.
        _, text_height = blf.dimensions(font_id, 'yhBp')

        def position(line_nr):
            blf.position(font_id,
                         content_width * 0.1,
                         content_height * 0.8 - line_nr * text_height, 0)

        for line_idx, line in enumerate(lines):
            position(line_idx)
            blf.draw(font_id, line)
        bgl.glDisable(bgl.GL_BLEND)
|
||||
|
||||
def get_clicked(self) -> MenuItem:
|
||||
|
||||
for item in self.current_display_content:
|
||||
if item.hits(self.mouse_x, self.mouse_y):
|
||||
return item
|
||||
|
||||
return None
|
||||
|
||||
    def handle_item_selection(self, context, item: MenuItem):
        """Called when the user clicks on a menu item that doesn't represent a folder."""

        self.clear_images()
        self._state = 'DOWNLOADING_TEXTURE'

        # Mirror the browsed node path on disk under the scene's texture dir.
        node_path_components = [node['name'] for node in self.path_stack if node is not None]
        local_path_components = [self.project_uuid] + node_path_components + [self.node['name']]

        top_texture_directory = bpy.path.abspath(context.scene.local_texture_dir)
        local_path = os.path.join(top_texture_directory, *local_path_components)
        meta_path = os.path.join(top_texture_directory, '.blender_cloud')

        self.log.info('Downloading texture %r to %s', item.node_uuid, local_path)
        self.log.debug('Metadata will be stored at %s', meta_path)

        file_paths = []

        def texture_downloading(file_path, file_desc, *args):
            self.log.info('Texture downloading to %s', file_path)

        def texture_downloaded(file_path, file_desc, *args):
            # Load each finished file into Blender and tag it with its
            # Cloud identifiers for later syncing.
            self.log.info('Texture downloaded to %r.', file_path)
            image_dblock = bpy.data.images.load(filepath=file_path)
            image_dblock['bcloud_file_uuid'] = file_desc['_id']
            image_dblock['bcloud_texture_node_uuid'] = item.node_uuid
            file_paths.append(file_path)

        def texture_download_completed(_):
            self.log.info('Texture download complete, inspect:\n%s', '\n'.join(file_paths))
            # QUIT makes modal() finish the operator on its next event.
            self._state = 'QUIT'

        signalling_future = asyncio.Future()
        self._new_async_task(pillar.download_texture(item.node, local_path,
                                                     metadata_directory=meta_path,
                                                     texture_loading=texture_downloading,
                                                     texture_loaded=texture_downloaded,
                                                     future=signalling_future))
        self.async_task.add_done_callback(texture_download_completed)
|
||||
|
||||
|
||||
# store keymaps here to access after registration
# (list of (keymap, keymap_item) tuples; filled by register(), emptied by unregister())
addon_keymaps = []
|
||||
|
||||
|
||||
def menu_draw(self, context):
    """Menu draw callback adding the browser operator to a Blender menu."""
    layout = self.layout
    layout.separator()
    layout.operator(BlenderCloudBrowser.bl_idname, icon='MOD_SCREW')
|
||||
|
||||
|
||||
def register():
    """Register the operator and install the Ctrl+Shift+Alt+A hotkey."""
    bpy.utils.register_class(BlenderCloudBrowser)
    # bpy.types.INFO_MT_mesh_add.append(menu_draw)

    # handle the keymap
    wm = bpy.context.window_manager
    kc = wm.keyconfigs.addon
    if not kc:
        # Happens in background mode, where addon keyconfigs don't exist.
        print('No addon key configuration space found, so no custom hotkeys added.')
        return

    km = kc.keymaps.new(name='Screen')
    kmi = km.keymap_items.new('pillar.browser', 'A', 'PRESS', ctrl=True, shift=True, alt=True)
    addon_keymaps.append((km, kmi))
|
||||
|
||||
|
||||
def unregister():
    """Unregister the operator and remove the add-on's keymap entries."""
    bpy.utils.unregister_class(BlenderCloudBrowser)

    # handle the keymap
    for keymap, keymap_item in addon_keymaps:
        keymap.keymap_items.remove(keymap_item)
    addon_keymaps.clear()
|
||||
|
||||
|
||||
# Allow registering the add-on by running this module directly.
if __name__ == "__main__":
    register()
|
50
blender_cloud/home_project.py
Normal file
50
blender_cloud/home_project.py
Normal file
@@ -0,0 +1,50 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
import logging
|
||||
|
||||
import pillarsdk
|
||||
from pillarsdk import exceptions as sdk_exceptions
|
||||
from .pillar import pillar_call
|
||||
|
||||
log = logging.getLogger(__name__)
# Pillar endpoint that serves the user's home project.
HOME_PROJECT_ENDPOINT = '/bcloud/home-project'
|
||||
|
||||
|
||||
async def get_home_project(params=None) -> pillarsdk.Project:
    """Returns the home project.

    :param params: optional request parameters (e.g. a 'projection' dict).
    :raises sdk_exceptions.ForbiddenAccess: when access is denied.
    :raises sdk_exceptions.ResourceNotFound: when no home project exists.
    """

    log.debug('Getting home project')
    try:
        return await pillar_call(pillarsdk.Project.find_from_endpoint,
                                 HOME_PROJECT_ENDPOINT, params=params)
    except sdk_exceptions.ForbiddenAccess:
        log.warning('Access to the home project was denied. '
                    'Double-check that you are logged in with valid BlenderID credentials.')
        raise
    except sdk_exceptions.ResourceNotFound:
        log.warning('No home project available.')
        raise
|
||||
|
||||
|
||||
async def get_home_project_id() -> str:
    """Returns just the ID of the home project."""
    # Only project the '_id' field; we need nothing else.
    home_proj = await get_home_project({'projection': {'_id': 1}})
    return home_proj['_id']
|
BIN
blender_cloud/icons/icon-cloud.png
Normal file
BIN
blender_cloud/icons/icon-cloud.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 1.6 KiB |
334
blender_cloud/image_sharing.py
Normal file
334
blender_cloud/image_sharing.py
Normal file
@@ -0,0 +1,334 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
import logging
|
||||
import os.path
|
||||
import tempfile
|
||||
import datetime
|
||||
|
||||
import bpy
|
||||
import pillarsdk
|
||||
from pillarsdk import exceptions as sdk_exceptions
|
||||
from .pillar import pillar_call
|
||||
from . import async_loop, pillar, home_project, blender
|
||||
|
||||
# Cloud roles (any of) required to use the image-sharing feature.
REQUIRES_ROLES_FOR_IMAGE_SHARING = {'subscriber', 'demo'}
# Name of the top-level home-project group node that holds shared images.
IMAGE_SHARING_GROUP_NODE_NAME = 'Image sharing'
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def find_image_sharing_group_id(home_project_id, user_id):
    """Return the node ID of the top-level image sharing group.

    The group node is created in the home project when it does not exist yet.

    :raises pillar.PillarError: when the node can neither be found nor created.
    """
    # Find the top-level image sharing group node.
    try:
        share_group, created = await pillar.find_or_create_node(
            where={'project': home_project_id,
                   'node_type': 'group',
                   'parent': None,
                   'name': IMAGE_SHARING_GROUP_NODE_NAME},
            additional_create_props={
                'user': user_id,
                'properties': {},
            },
            projection={'_id': 1},
            may_create=True)
    except pillar.PillarError:
        log.exception('Pillar error caught')
        raise pillar.PillarError('Unable to find image sharing folder on the Cloud')

    return share_group['_id']
|
||||
|
||||
|
||||
class PILLAR_OT_image_share(pillar.PillarOperatorMixin,
                            async_loop.AsyncModalOperatorMixin,
                            bpy.types.Operator):
    bl_idname = 'pillar.image_share'
    bl_label = 'Share an image/screenshot via Blender Cloud'
    bl_description = 'Uploads an image for sharing via Blender Cloud'

    log = logging.getLogger('bpy.ops.%s' % bl_idname)

    # Filled in by async_execute() once the Cloud has been contacted.
    home_project_id = None
    home_project_url = 'home'
    share_group_id = None  # top-level share group node ID
    user_id = None

    # What to upload: a file on disk, an image datablock, or a screenshot.
    target = bpy.props.EnumProperty(
        items=[
            ('FILE', 'File', 'Share an image file'),
            ('DATABLOCK', 'Datablock', 'Share an image datablock'),
            ('SCREENSHOT', 'Screenshot', 'Share a screenshot'),
        ],
        name='target',
        default='SCREENSHOT')

    name = bpy.props.StringProperty(name='name',
                                    description='File or datablock name to sync')

    # Screenshot options, mirroring bpy.ops.screen.screenshot parameters.
    screenshot_show_multiview = bpy.props.BoolProperty(
        name='screenshot_show_multiview',
        description='Enable Multi-View',
        default=False)

    screenshot_use_multiview = bpy.props.BoolProperty(
        name='screenshot_use_multiview',
        description='Use Multi-View',
        default=False)

    screenshot_full = bpy.props.BoolProperty(
        name='screenshot_full',
        description='Full Screen, Capture the whole window (otherwise only capture the active area)',
        default=False)
|
||||
|
||||
def invoke(self, context, event):
|
||||
# Do a quick test on datablock dirtyness. If it's not packed and dirty,
|
||||
# the user should save it first.
|
||||
if self.target == 'DATABLOCK':
|
||||
if not self.name:
|
||||
self.report({'ERROR'}, 'No name given of the datablock to share.')
|
||||
return {'CANCELLED'}
|
||||
|
||||
datablock = bpy.data.images[self.name]
|
||||
if datablock.type == 'IMAGE' and datablock.is_dirty and not datablock.packed_file:
|
||||
self.report({'ERROR'}, 'Datablock is dirty, save it first.')
|
||||
return {'CANCELLED'}
|
||||
|
||||
return async_loop.AsyncModalOperatorMixin.invoke(self, context, event)
|
||||
|
||||
async def async_execute(self, context):
|
||||
"""Entry point of the asynchronous operator."""
|
||||
|
||||
self.report({'INFO'}, 'Communicating with Blender Cloud')
|
||||
|
||||
try:
|
||||
# Refresh credentials
|
||||
try:
|
||||
db_user = await self.check_credentials(context, REQUIRES_ROLES_FOR_IMAGE_SHARING)
|
||||
self.user_id = db_user['_id']
|
||||
self.log.debug('Found user ID: %s', self.user_id)
|
||||
except pillar.NotSubscribedToCloudError:
|
||||
self.log.exception('User not subscribed to cloud.')
|
||||
self.report({'ERROR'}, 'Please subscribe to the Blender Cloud.')
|
||||
self._state = 'QUIT'
|
||||
return
|
||||
except pillar.UserNotLoggedInError:
|
||||
self.log.exception('Error checking/refreshing credentials.')
|
||||
self.report({'ERROR'}, 'Please log in on Blender ID first.')
|
||||
self._state = 'QUIT'
|
||||
return
|
||||
|
||||
# Find the home project.
|
||||
try:
|
||||
home_proj = await home_project.get_home_project({
|
||||
'projection': {'_id': 1, 'url': 1}
|
||||
})
|
||||
except sdk_exceptions.ForbiddenAccess:
|
||||
self.log.exception('Forbidden access to home project.')
|
||||
self.report({'ERROR'}, 'Did not get access to home project.')
|
||||
self._state = 'QUIT'
|
||||
return
|
||||
except sdk_exceptions.ResourceNotFound:
|
||||
self.report({'ERROR'}, 'Home project not found.')
|
||||
self._state = 'QUIT'
|
||||
return
|
||||
|
||||
self.home_project_id = home_proj['_id']
|
||||
self.home_project_url = home_proj['url']
|
||||
|
||||
try:
|
||||
gid = await find_image_sharing_group_id(self.home_project_id,
|
||||
self.user_id)
|
||||
self.share_group_id = gid
|
||||
self.log.debug('Found group node ID: %s', self.share_group_id)
|
||||
except sdk_exceptions.ForbiddenAccess:
|
||||
self.log.exception('Unable to find Group ID')
|
||||
self.report({'ERROR'}, 'Unable to find sync folder.')
|
||||
self._state = 'QUIT'
|
||||
return
|
||||
|
||||
await self.share_image(context)
|
||||
except Exception as ex:
|
||||
self.log.exception('Unexpected exception caught.')
|
||||
self.report({'ERROR'}, 'Unexpected error %s: %s' % (type(ex), ex))
|
||||
|
||||
self._state = 'QUIT'
|
||||
|
||||
async def share_image(self, context):
|
||||
"""Sends files to the Pillar server."""
|
||||
|
||||
if self.target == 'FILE':
|
||||
self.report({'INFO'}, "Uploading %s '%s'" % (self.target.lower(), self.name))
|
||||
node = await self.upload_file(self.name)
|
||||
elif self.target == 'SCREENSHOT':
|
||||
node = await self.upload_screenshot(context)
|
||||
else:
|
||||
self.report({'INFO'}, "Uploading %s '%s'" % (self.target.lower(), self.name))
|
||||
node = await self.upload_datablock(context)
|
||||
|
||||
self.report({'INFO'}, 'Upload complete, creating link to share.')
|
||||
share_info = await pillar_call(node.share)
|
||||
url = share_info.get('short_link')
|
||||
context.window_manager.clipboard = url
|
||||
self.report({'INFO'}, 'The link has been copied to your clipboard: %s' % url)
|
||||
|
||||
await self.maybe_open_browser(url)
|
||||
|
||||
async def upload_file(self, filename: str, fileobj=None) -> pillarsdk.Node:
|
||||
"""Uploads a file to the cloud, attached to the image sharing node.
|
||||
|
||||
Returns the node.
|
||||
"""
|
||||
|
||||
self.log.info('Uploading file %s', filename)
|
||||
node = await pillar_call(pillarsdk.Node.create_asset_from_file,
|
||||
self.home_project_id,
|
||||
self.share_group_id,
|
||||
'image',
|
||||
filename,
|
||||
extra_where={'user': self.user_id},
|
||||
always_create_new_node=True,
|
||||
fileobj=fileobj,
|
||||
caching=False)
|
||||
node_id = node['_id']
|
||||
self.log.info('Created node %s', node_id)
|
||||
self.report({'INFO'}, 'File succesfully uploaded to the cloud!')
|
||||
|
||||
return node
|
||||
|
||||
async def maybe_open_browser(self, url):
|
||||
prefs = blender.preferences()
|
||||
if not prefs.open_browser_after_share:
|
||||
return
|
||||
|
||||
import webbrowser
|
||||
|
||||
self.log.info('Opening browser at %s', url)
|
||||
webbrowser.open_new_tab(url)
|
||||
|
||||
async def upload_datablock(self, context) -> pillarsdk.Node:
|
||||
"""Saves a datablock to file if necessary, then upload.
|
||||
|
||||
Returns the node.
|
||||
"""
|
||||
|
||||
self.log.info("Uploading datablock '%s'" % self.name)
|
||||
datablock = bpy.data.images[self.name]
|
||||
|
||||
if datablock.type == 'RENDER_RESULT':
|
||||
# Construct a sensible name for this render.
|
||||
filename = '%s-%s-render%s' % (
|
||||
os.path.splitext(os.path.basename(context.blend_data.filepath))[0],
|
||||
context.scene.name,
|
||||
context.scene.render.file_extension)
|
||||
return await self.upload_via_tempdir(datablock, filename)
|
||||
|
||||
if datablock.packed_file is not None:
|
||||
return await self.upload_packed_file(datablock)
|
||||
|
||||
if datablock.is_dirty:
|
||||
# We can handle dirty datablocks like this if we want.
|
||||
# However, I (Sybren) do NOT think it's a good idea to:
|
||||
# - Share unsaved data to the cloud; users can assume it's saved
|
||||
# to disk and close blender, losing their file.
|
||||
# - Save unsaved data first; this can overwrite a file a user
|
||||
# didn't want to overwrite.
|
||||
filename = bpy.path.basename(datablock.filepath)
|
||||
return await self.upload_via_tempdir(datablock, filename)
|
||||
|
||||
filepath = bpy.path.abspath(datablock.filepath)
|
||||
return await self.upload_file(filepath)
|
||||
|
||||
async def upload_via_tempdir(self, datablock, filename_on_cloud) -> pillarsdk.Node:
|
||||
"""Saves the datablock to file, and uploads it to the cloud.
|
||||
|
||||
Saving is done to a temporary directory, which is removed afterwards.
|
||||
|
||||
Returns the node.
|
||||
"""
|
||||
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
filepath = os.path.join(tmpdir, filename_on_cloud)
|
||||
self.log.debug('Saving %s to %s', datablock, filepath)
|
||||
datablock.save_render(filepath)
|
||||
return await self.upload_file(filepath)
|
||||
|
||||
async def upload_packed_file(self, datablock) -> pillarsdk.Node:
|
||||
"""Uploads a packed file directly from memory.
|
||||
|
||||
Returns the node.
|
||||
"""
|
||||
|
||||
import io
|
||||
|
||||
filename = '%s.%s' % (datablock.name, datablock.file_format.lower())
|
||||
fileobj = io.BytesIO(datablock.packed_file.data)
|
||||
fileobj.seek(0) # ensure PillarSDK reads the file from the beginning.
|
||||
self.log.info('Uploading packed file directly from memory to %r.', filename)
|
||||
return await self.upload_file(filename, fileobj=fileobj)
|
||||
|
||||
async def upload_screenshot(self, context) -> pillarsdk.Node:
|
||||
"""Takes a screenshot, saves it to a temp file, and uploads it."""
|
||||
|
||||
self.name = datetime.datetime.now().strftime('Screenshot-%Y-%m-%d-%H%M%S.png')
|
||||
self.report({'INFO'}, "Uploading %s '%s'" % (self.target.lower(), self.name))
|
||||
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
filepath = os.path.join(tmpdir, self.name)
|
||||
self.log.debug('Saving screenshot to %s', filepath)
|
||||
bpy.ops.screen.screenshot(filepath=filepath,
|
||||
show_multiview=self.screenshot_show_multiview,
|
||||
use_multiview=self.screenshot_use_multiview,
|
||||
full=self.screenshot_full)
|
||||
return await self.upload_file(filepath)
|
||||
|
||||
|
||||
def image_editor_menu(self, context):
|
||||
image = context.space_data.image
|
||||
|
||||
box = self.layout.row()
|
||||
if image and image.has_data:
|
||||
text = 'Share on Blender Cloud'
|
||||
if image.type == 'IMAGE' and image.is_dirty and not image.packed_file:
|
||||
box.enabled = False
|
||||
text = 'Save image before sharing on Blender Cloud'
|
||||
|
||||
props = box.operator(PILLAR_OT_image_share.bl_idname, text=text,
|
||||
icon_value=blender.icon('CLOUD'))
|
||||
props.target = 'DATABLOCK'
|
||||
props.name = image.name
|
||||
|
||||
|
||||
def window_menu(self, context):
|
||||
props = self.layout.operator(PILLAR_OT_image_share.bl_idname,
|
||||
text='Share screenshot via Blender Cloud',
|
||||
icon_value=blender.icon('CLOUD'))
|
||||
props.target = 'SCREENSHOT'
|
||||
props.screenshot_full = True
|
||||
|
||||
|
||||
def register():
|
||||
bpy.utils.register_class(PILLAR_OT_image_share)
|
||||
|
||||
bpy.types.IMAGE_MT_image.append(image_editor_menu)
|
||||
bpy.types.INFO_MT_window.append(window_menu)
|
||||
|
||||
|
||||
def unregister():
|
||||
bpy.utils.unregister_class(PILLAR_OT_image_share)
|
||||
|
||||
bpy.types.IMAGE_MT_image.remove(image_editor_menu)
|
||||
bpy.types.INFO_MT_window.remove(window_menu)
|
582
blender_cloud/pillar.py
Normal file → Executable file
582
blender_cloud/pillar.py
Normal file → Executable file
@@ -1,9 +1,29 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
import asyncio
|
||||
import datetime
|
||||
import json
|
||||
import os
|
||||
import functools
|
||||
import logging
|
||||
from contextlib import closing, contextmanager
|
||||
import urllib.parse
|
||||
import pathlib
|
||||
|
||||
import requests
|
||||
@@ -16,8 +36,11 @@ from pillarsdk.utils import sanitize_filename
|
||||
from . import cache
|
||||
|
||||
SUBCLIENT_ID = 'PILLAR'
|
||||
TEXTURE_NODE_TYPES = {'texture', 'hdri'}
|
||||
|
||||
_pillar_api = None # will become a pillarsdk.Api object.
|
||||
RFC1123_DATE_FORMAT = '%a, %d %b %Y %H:%M:%S GMT'
|
||||
|
||||
_pillar_api = {} # will become a mapping from bool (cached/non-cached) to pillarsdk.Api objects.
|
||||
log = logging.getLogger(__name__)
|
||||
uncached_session = requests.session()
|
||||
_testing_blender_id_profile = None # Just for testing, overrides what is returned by blender_id_profile.
|
||||
@@ -31,14 +54,15 @@ class UserNotLoggedInError(RuntimeError):
|
||||
"""
|
||||
|
||||
def __str__(self):
|
||||
return 'UserNotLoggedInError'
|
||||
return self.__class__.__name__
|
||||
|
||||
|
||||
class CredentialsNotSyncedError(UserNotLoggedInError):
|
||||
"""Raised when the user may be logged in on Blender ID, but has no Blender Cloud token."""
|
||||
|
||||
def __str__(self):
|
||||
return 'CredentialsNotSyncedError'
|
||||
|
||||
class NotSubscribedToCloudError(UserNotLoggedInError):
|
||||
"""Raised when the user may be logged in on Blender ID, but has no Blender Cloud token."""
|
||||
|
||||
|
||||
class PillarError(RuntimeError):
|
||||
@@ -62,6 +86,8 @@ class CloudPath(pathlib.PurePosixPath):
|
||||
@property
|
||||
def project_uuid(self) -> str:
|
||||
assert self.parts[0] == '/'
|
||||
if len(self.parts) <= 1:
|
||||
return None
|
||||
return self.parts[1]
|
||||
|
||||
@property
|
||||
@@ -71,11 +97,10 @@ class CloudPath(pathlib.PurePosixPath):
|
||||
|
||||
@property
|
||||
def node_uuid(self) -> str:
|
||||
node_uuids = self.node_uuids
|
||||
|
||||
if not node_uuids:
|
||||
if len(self.parts) <= 2:
|
||||
return None
|
||||
return node_uuids[-1]
|
||||
|
||||
return self.parts[-1]
|
||||
|
||||
|
||||
@contextmanager
|
||||
@@ -90,6 +115,12 @@ def with_existing_dir(filename: str, open_mode: str, encoding=None):
|
||||
yield file_object
|
||||
|
||||
|
||||
def _shorten(somestr: str, maxlen=40) -> str:
|
||||
"""Shortens strings for logging"""
|
||||
|
||||
return (somestr[:maxlen - 3] + '...') if len(somestr) > maxlen else somestr
|
||||
|
||||
|
||||
def save_as_json(pillar_resource, json_filename):
|
||||
with with_existing_dir(json_filename, 'w') as outfile:
|
||||
log.debug('Saving metadata to %r' % json_filename)
|
||||
@@ -107,60 +138,122 @@ def blender_id_profile() -> 'blender_id.BlenderIdProfile':
|
||||
return blender_id.get_active_profile()
|
||||
|
||||
|
||||
def pillar_api(pillar_endpoint: str = None) -> pillarsdk.Api:
|
||||
def blender_id_subclient() -> dict:
|
||||
"""Returns the subclient dict, containing the 'subclient_user_id' and 'token' keys."""
|
||||
|
||||
profile = blender_id_profile()
|
||||
if not profile:
|
||||
raise UserNotLoggedInError()
|
||||
|
||||
subclient = profile.subclients.get(SUBCLIENT_ID)
|
||||
if not subclient:
|
||||
raise CredentialsNotSyncedError()
|
||||
|
||||
return subclient
|
||||
|
||||
|
||||
def pillar_user_uuid() -> str:
|
||||
"""Returns the UUID of the Pillar user."""
|
||||
|
||||
import blender_id
|
||||
return blender_id.get_subclient_user_id(SUBCLIENT_ID)
|
||||
|
||||
|
||||
def pillar_api(pillar_endpoint: str = None, caching=True) -> pillarsdk.Api:
|
||||
"""Returns the Pillar SDK API object for the current user.
|
||||
|
||||
The user must be logged in.
|
||||
|
||||
:param pillar_endpoint: URL of the Pillar server, for testing purposes. If not specified,
|
||||
it will use the addon preferences.
|
||||
:param caching: whether to return a caching or non-caching API
|
||||
"""
|
||||
|
||||
global _pillar_api
|
||||
|
||||
# Only return the Pillar API object if the user is still logged in.
|
||||
profile = blender_id_profile()
|
||||
if not profile:
|
||||
raise UserNotLoggedInError()
|
||||
subclient = blender_id_subclient()
|
||||
|
||||
subclient = profile.subclients.get(SUBCLIENT_ID)
|
||||
if not subclient:
|
||||
raise CredentialsNotSyncedError()
|
||||
|
||||
if _pillar_api is None:
|
||||
if not _pillar_api:
|
||||
# Allow overriding the endpoint before importing Blender-specific stuff.
|
||||
if pillar_endpoint is None:
|
||||
from . import blender
|
||||
pillar_endpoint = blender.preferences().pillar_server
|
||||
|
||||
pillarsdk.Api.requests_session = cache.requests_session()
|
||||
_caching_api = pillarsdk.Api(endpoint=pillar_endpoint,
|
||||
username=subclient['subclient_user_id'],
|
||||
password=SUBCLIENT_ID,
|
||||
token=subclient['token'])
|
||||
_caching_api.requests_session = cache.requests_session()
|
||||
|
||||
_pillar_api = pillarsdk.Api(endpoint=pillar_endpoint,
|
||||
username=subclient['subclient_user_id'],
|
||||
password=SUBCLIENT_ID,
|
||||
token=subclient['token'])
|
||||
_noncaching_api = pillarsdk.Api(endpoint=pillar_endpoint,
|
||||
username=subclient['subclient_user_id'],
|
||||
password=SUBCLIENT_ID,
|
||||
token=subclient['token'])
|
||||
_noncaching_api.requests_session = uncached_session
|
||||
|
||||
return _pillar_api
|
||||
# Send the addon version as HTTP header.
|
||||
from blender_cloud import bl_info
|
||||
addon_version = '.'.join(str(v) for v in bl_info['version'])
|
||||
_caching_api.global_headers['Blender-Cloud-Addon'] = addon_version
|
||||
_noncaching_api.global_headers['Blender-Cloud-Addon'] = addon_version
|
||||
|
||||
_pillar_api = {
|
||||
True: _caching_api,
|
||||
False: _noncaching_api,
|
||||
}
|
||||
|
||||
return _pillar_api[caching]
|
||||
|
||||
|
||||
# No more than this many Pillar calls should be made simultaneously
|
||||
pillar_semaphore = asyncio.Semaphore(3)
|
||||
# This is an asyncio.Semaphore object, which is late-instantiated to be sure
|
||||
# the asyncio loop has been created properly. On Windows we create a new one,
|
||||
# which can cause this semaphore to still be linked against the old default
|
||||
# loop.
|
||||
pillar_semaphore = None
|
||||
|
||||
|
||||
async def pillar_call(pillar_func, *args, **kwargs):
|
||||
partial = functools.partial(pillar_func, *args, api=pillar_api(), **kwargs)
|
||||
async def pillar_call(pillar_func, *args, caching=True, **kwargs):
|
||||
"""Calls a Pillar function.
|
||||
|
||||
A semaphore is used to ensure that there won't be too many
|
||||
calls to Pillar simultaneously.
|
||||
"""
|
||||
|
||||
partial = functools.partial(pillar_func, *args, api=pillar_api(caching=caching), **kwargs)
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
async with pillar_semaphore:
|
||||
# Use explicit calls to acquire() and release() so that we have more control over
|
||||
# how long we wait and how we handle timeouts.
|
||||
try:
|
||||
await asyncio.wait_for(pillar_semaphore.acquire(), timeout=10, loop=loop)
|
||||
except asyncio.TimeoutError:
|
||||
log.info('Waiting for semaphore to call %s', pillar_func.__name__)
|
||||
try:
|
||||
await asyncio.wait_for(pillar_semaphore.acquire(), timeout=50, loop=loop)
|
||||
except asyncio.TimeoutError:
|
||||
raise RuntimeError('Timeout waiting for Pillar Semaphore!')
|
||||
|
||||
try:
|
||||
return await loop.run_in_executor(None, partial)
|
||||
finally:
|
||||
pillar_semaphore.release()
|
||||
|
||||
|
||||
async def check_pillar_credentials():
|
||||
def sync_call(pillar_func, *args, caching=True, **kwargs):
|
||||
"""Synchronous call to Pillar, ensures the correct Api object is used."""
|
||||
|
||||
return pillar_func(*args, api=pillar_api(caching=caching), **kwargs)
|
||||
|
||||
|
||||
async def check_pillar_credentials(required_roles: set):
|
||||
"""Tries to obtain the user at Pillar using the user's credentials.
|
||||
|
||||
:param required_roles: set of roles to require -- having one of those is enough.
|
||||
:raises UserNotLoggedInError: when the user is not logged in on Blender ID.
|
||||
:raises CredentialsNotSyncedError: when the user is logged in on Blender ID but
|
||||
doesn't have a valid subclient token for Pillar.
|
||||
:returns: the Pillar User ID of the current user.
|
||||
"""
|
||||
|
||||
profile = blender_id_profile()
|
||||
@@ -171,13 +264,28 @@ async def check_pillar_credentials():
|
||||
if not subclient:
|
||||
raise CredentialsNotSyncedError()
|
||||
|
||||
try:
|
||||
await get_project_uuid('textures') # Any query will do.
|
||||
except pillarsdk.UnauthorizedAccess:
|
||||
pillar_user_id = subclient['subclient_user_id']
|
||||
if not pillar_user_id:
|
||||
raise CredentialsNotSyncedError()
|
||||
|
||||
try:
|
||||
db_user = await pillar_call(pillarsdk.User.me)
|
||||
except (pillarsdk.UnauthorizedAccess, pillarsdk.ResourceNotFound, pillarsdk.ForbiddenAccess):
|
||||
raise CredentialsNotSyncedError()
|
||||
|
||||
async def refresh_pillar_credentials():
|
||||
roles = db_user.roles or set()
|
||||
log.debug('User has roles %r', roles)
|
||||
if required_roles and not required_roles.intersection(set(roles)):
|
||||
# Delete the subclient info. This forces a re-check later, which can
|
||||
# then pick up on the user's new status.
|
||||
del profile.subclients[SUBCLIENT_ID]
|
||||
profile.save_json()
|
||||
raise NotSubscribedToCloudError()
|
||||
|
||||
return db_user
|
||||
|
||||
|
||||
async def refresh_pillar_credentials(required_roles: set):
|
||||
"""Refreshes the authentication token on Pillar.
|
||||
|
||||
:raises blender_id.BlenderIdCommError: when Blender ID refuses to send a token to Pillar.
|
||||
@@ -189,15 +297,19 @@ async def refresh_pillar_credentials():
|
||||
import blender_id
|
||||
|
||||
from . import blender
|
||||
pillar_endpoint = blender.preferences().pillar_server.rstrip('/')
|
||||
pillar_endpoint = blender.preferences().pillar_server
|
||||
|
||||
# Create a subclient token and send it to Pillar.
|
||||
# May raise a blender_id.BlenderIdCommError
|
||||
blender_id.create_subclient_token(SUBCLIENT_ID, pillar_endpoint)
|
||||
try:
|
||||
blender_id.create_subclient_token(SUBCLIENT_ID, pillar_endpoint)
|
||||
except blender_id.communication.BlenderIdCommError as ex:
|
||||
log.warning("Unable to create authentication token: %s", ex)
|
||||
raise CredentialsNotSyncedError()
|
||||
|
||||
# Test the new URL
|
||||
_pillar_api = None
|
||||
await get_project_uuid('textures') # Any query will do.
|
||||
return await check_pillar_credentials(required_roles)
|
||||
|
||||
|
||||
async def get_project_uuid(project_url: str) -> str:
|
||||
@@ -217,7 +329,7 @@ async def get_project_uuid(project_url: str) -> str:
|
||||
|
||||
|
||||
async def get_nodes(project_uuid: str = None, parent_node_uuid: str = None,
|
||||
node_type: str = None) -> list:
|
||||
node_type=None, max_results=None) -> list:
|
||||
"""Gets nodes for either a project or given a parent node.
|
||||
|
||||
@param project_uuid: the UUID of the project, or None if only querying by parent_node_uuid.
|
||||
@@ -242,16 +354,43 @@ async def get_nodes(project_uuid: str = None, parent_node_uuid: str = None,
|
||||
where['project'] = project_uuid
|
||||
|
||||
if node_type:
|
||||
where['node_type'] = node_type
|
||||
if isinstance(node_type, str):
|
||||
where['node_type'] = node_type
|
||||
else:
|
||||
# Convert set & tuple to list
|
||||
where['node_type'] = {'$in': list(node_type)}
|
||||
|
||||
children = await pillar_call(pillarsdk.Node.all, {
|
||||
'projection': {'name': 1, 'parent': 1, 'node_type': 1,
|
||||
'properties.order': 1, 'properties.status': 1,
|
||||
'properties.files': 1,
|
||||
'properties.content_type': 1, 'picture': 1},
|
||||
'where': where,
|
||||
'sort': 'properties.order',
|
||||
'embed': ['parent']})
|
||||
params = {'projection': {'name': 1, 'parent': 1, 'node_type': 1, 'properties.order': 1,
|
||||
'properties.status': 1, 'properties.files': 1,
|
||||
'properties.content_type': 1, 'picture': 1},
|
||||
'where': where,
|
||||
'embed': ['parent']}
|
||||
|
||||
# Pagination
|
||||
if max_results:
|
||||
params['max_results'] = int(max_results)
|
||||
|
||||
children = await pillar_call(pillarsdk.Node.all, params)
|
||||
|
||||
return children['_items']
|
||||
|
||||
|
||||
async def get_texture_projects(max_results=None) -> list:
|
||||
"""Returns project dicts that contain textures."""
|
||||
|
||||
params = {}
|
||||
|
||||
# Pagination
|
||||
if max_results:
|
||||
params['max_results'] = int(max_results)
|
||||
|
||||
try:
|
||||
children = await pillar_call(pillarsdk.Project.all_from_endpoint,
|
||||
'/bcloud/texture-libraries',
|
||||
params=params)
|
||||
except pillarsdk.ResourceNotFound as ex:
|
||||
log.warning('Unable to find texture projects: %s', ex)
|
||||
raise PillarError('Unable to find texture projects: %s' % ex)
|
||||
|
||||
return children['_items']
|
||||
|
||||
@@ -324,9 +463,9 @@ async def download_to_file(url, filename, *,
|
||||
log.debug('Downloading was cancelled before doing the GET')
|
||||
raise asyncio.CancelledError('Downloading was cancelled')
|
||||
|
||||
log.debug('Performing GET %s', url)
|
||||
log.debug('Performing GET %s', _shorten(url))
|
||||
response = await loop.run_in_executor(None, perform_get_request)
|
||||
log.debug('Status %i from GET %s', response.status_code, url)
|
||||
log.debug('Status %i from GET %s', response.status_code, _shorten(url))
|
||||
response.raise_for_status()
|
||||
|
||||
if response.status_code == 304:
|
||||
@@ -340,9 +479,9 @@ async def download_to_file(url, filename, *,
|
||||
log.debug('Downloading was cancelled before downloading the GET response')
|
||||
raise asyncio.CancelledError('Downloading was cancelled')
|
||||
|
||||
log.debug('Downloading response of GET %s', url)
|
||||
log.debug('Downloading response of GET %s', _shorten(url))
|
||||
await loop.run_in_executor(None, download_loop)
|
||||
log.debug('Done downloading response of GET %s', url)
|
||||
log.debug('Done downloading response of GET %s', _shorten(url))
|
||||
|
||||
# We're done downloading, now we have something cached we can use.
|
||||
log.debug('Saving header cache to %s', header_store)
|
||||
@@ -367,7 +506,7 @@ async def fetch_thumbnail_info(file: pillarsdk.File, directory: str, desired_siz
|
||||
finished.
|
||||
"""
|
||||
|
||||
thumb_link = await pillar_call(file.thumbnail_file, desired_size)
|
||||
thumb_link = await pillar_call(file.thumbnail, desired_size)
|
||||
|
||||
if thumb_link is None:
|
||||
raise ValueError("File {} has no thumbnail of size {}"
|
||||
@@ -403,7 +542,7 @@ async def fetch_texture_thumbs(parent_node_uuid: str, desired_size: str,
|
||||
# Download all texture nodes in parallel.
|
||||
log.debug('Getting child nodes of node %r', parent_node_uuid)
|
||||
texture_nodes = await get_nodes(parent_node_uuid=parent_node_uuid,
|
||||
node_type='texture')
|
||||
node_type=TEXTURE_NODE_TYPES)
|
||||
|
||||
if is_cancelled(future):
|
||||
log.warning('fetch_texture_thumbs: Texture downloading cancelled')
|
||||
@@ -417,7 +556,8 @@ async def fetch_texture_thumbs(parent_node_uuid: str, desired_size: str,
|
||||
for texture_node in texture_nodes)
|
||||
|
||||
# raises any exception from failed handle_texture_node() calls.
|
||||
await asyncio.gather(*coros)
|
||||
loop = asyncio.get_event_loop()
|
||||
await asyncio.gather(*coros, loop=loop)
|
||||
|
||||
log.info('fetch_texture_thumbs: Done downloading texture thumbnails')
|
||||
|
||||
@@ -429,7 +569,7 @@ async def download_texture_thumbnail(texture_node, desired_size: str,
|
||||
thumbnail_loaded: callable,
|
||||
future: asyncio.Future = None):
|
||||
# Skip non-texture nodes, as we can't thumbnail them anyway.
|
||||
if texture_node['node_type'] != 'texture':
|
||||
if texture_node['node_type'] not in TEXTURE_NODE_TYPES:
|
||||
return
|
||||
|
||||
if is_cancelled(future):
|
||||
@@ -439,11 +579,26 @@ async def download_texture_thumbnail(texture_node, desired_size: str,
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
# Find the File that belongs to this texture node
|
||||
pic_uuid = texture_node['picture']
|
||||
# Find out which file to use for the thumbnail picture.
|
||||
pic_uuid = texture_node.picture
|
||||
if not pic_uuid:
|
||||
# Fall back to the first texture file, if it exists.
|
||||
log.debug('Node %r does not have a picture, falling back to first file.',
|
||||
texture_node['_id'])
|
||||
files = texture_node.properties and texture_node.properties.files
|
||||
if not files:
|
||||
log.info('Node %r does not have a picture nor files, skipping.', texture_node['_id'])
|
||||
return
|
||||
pic_uuid = files[0].file
|
||||
if not pic_uuid:
|
||||
log.info('Node %r does not have a picture nor files, skipping.', texture_node['_id'])
|
||||
return
|
||||
|
||||
# Load the File that belongs to this texture node's picture.
|
||||
loop.call_soon_threadsafe(thumbnail_loading, texture_node, texture_node)
|
||||
file_desc = await pillar_call(pillarsdk.File.find, pic_uuid, params={
|
||||
'projection': {'filename': 1, 'variations': 1, 'width': 1, 'height': 1},
|
||||
'projection': {'filename': 1, 'variations': 1, 'width': 1, 'height': 1,
|
||||
'length': 1},
|
||||
})
|
||||
|
||||
if file_desc is None:
|
||||
@@ -472,14 +627,80 @@ async def download_texture_thumbnail(texture_node, desired_size: str,
|
||||
loop.call_soon_threadsafe(thumbnail_loaded, texture_node, file_desc, thumb_path)
|
||||
|
||||
|
||||
async def fetch_node_files(node: pillarsdk.Node,
|
||||
*,
|
||||
file_doc_loading: callable,
|
||||
file_doc_loaded: callable,
|
||||
future: asyncio.Future = None):
|
||||
"""Fetches all files of a texture/hdri node.
|
||||
|
||||
@param node: Node document to fetch all file docs for.
|
||||
@param file_doc_loading: callback function that takes (file_id, ) parameters,
|
||||
which is called before a file document will be downloaded. This allows you to
|
||||
show a "downloading" indicator.
|
||||
@param file_doc_loaded: callback function that takes (file_id, pillarsdk.File object)
|
||||
parameters, which is called for every thumbnail after it's been downloaded.
|
||||
@param future: Future that's inspected; if it is not None and cancelled, texture downloading
|
||||
is aborted.
|
||||
"""
|
||||
|
||||
# Download all thumbnails in parallel.
|
||||
if is_cancelled(future):
|
||||
log.warning('fetch_texture_thumbs: Texture downloading cancelled')
|
||||
return
|
||||
|
||||
coros = (download_file_doc(file_ref.file,
|
||||
file_doc_loading=file_doc_loading,
|
||||
file_doc_loaded=file_doc_loaded,
|
||||
future=future)
|
||||
for file_ref in node.properties.files)
|
||||
|
||||
# raises any exception from failed handle_texture_node() calls.
|
||||
await asyncio.gather(*coros)
|
||||
|
||||
log.info('fetch_node_files: Done downloading %i files', len(node.properties.files))
|
||||
|
||||
|
||||
async def download_file_doc(file_id,
|
||||
*,
|
||||
file_doc_loading: callable,
|
||||
file_doc_loaded: callable,
|
||||
future: asyncio.Future = None):
|
||||
|
||||
if is_cancelled(future):
|
||||
log.debug('fetch_texture_thumbs cancelled before finding File for file_id %s', file_id)
|
||||
return
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
# Load the File that belongs to this texture node's picture.
|
||||
loop.call_soon_threadsafe(file_doc_loading, file_id)
|
||||
file_desc = await pillar_call(pillarsdk.File.find, file_id, params={
|
||||
'projection': {'filename': 1, 'variations': 1, 'width': 1, 'height': 1,
|
||||
'length': 1},
|
||||
})
|
||||
|
||||
if file_desc is None:
|
||||
log.warning('Unable to find File for file_id %s', file_id)
|
||||
|
||||
loop.call_soon_threadsafe(file_doc_loaded, file_id, file_desc)
|
||||
|
||||
|
||||
async def download_file_by_uuid(file_uuid,
|
||||
target_directory: str,
|
||||
metadata_directory: str,
|
||||
*,
|
||||
filename: str = None,
|
||||
map_type: str = None,
|
||||
file_loading: callable,
|
||||
file_loaded: callable,
|
||||
file_loading: callable = None,
|
||||
file_loaded: callable = None,
|
||||
file_loaded_sync: callable = None,
|
||||
future: asyncio.Future):
|
||||
"""Downloads a file from Pillar by its UUID.
|
||||
|
||||
:param filename: overrules the filename in file_doc['filename'] if given.
|
||||
The extension from file_doc['filename'] is still used, though.
|
||||
"""
|
||||
if is_cancelled(future):
|
||||
log.debug('download_file_by_uuid(%r) cancelled.', file_uuid)
|
||||
return
|
||||
@@ -488,18 +709,27 @@ async def download_file_by_uuid(file_uuid,
|
||||
|
||||
# Find the File document.
|
||||
file_desc = await pillar_call(pillarsdk.File.find, file_uuid, params={
|
||||
'projection': {'link': 1, 'filename': 1},
|
||||
'projection': {'link': 1, 'filename': 1, 'length': 1},
|
||||
})
|
||||
|
||||
# Save the file document to disk
|
||||
metadata_file = os.path.join(metadata_directory, 'files', '%s.json' % file_uuid)
|
||||
save_as_json(file_desc, metadata_file)
|
||||
|
||||
file_path = os.path.join(target_directory,
|
||||
sanitize_filename('%s-%s' % (map_type, file_desc['filename'])))
|
||||
# Let the caller override the filename root.
|
||||
root, ext = os.path.splitext(file_desc['filename'])
|
||||
if filename:
|
||||
root, _ = os.path.splitext(filename)
|
||||
if not map_type or root.endswith(map_type):
|
||||
target_filename = '%s%s' % (root, ext)
|
||||
else:
|
||||
target_filename = '%s-%s%s' % (root, map_type, ext)
|
||||
|
||||
file_path = os.path.join(target_directory, sanitize_filename(target_filename))
|
||||
file_url = file_desc['link']
|
||||
# log.debug('Texture %r:\n%s', file_uuid, pprint.pformat(file_desc.to_dict()))
|
||||
loop.call_soon_threadsafe(file_loading, file_path, file_desc)
|
||||
if file_loading is not None:
|
||||
loop.call_soon_threadsafe(file_loading, file_path, file_desc, map_type)
|
||||
|
||||
# Cached headers are stored in the project space
|
||||
header_store = os.path.join(metadata_directory, 'files',
|
||||
@@ -507,7 +737,10 @@ async def download_file_by_uuid(file_uuid,
|
||||
|
||||
await download_to_file(file_url, file_path, header_store=header_store, future=future)
|
||||
|
||||
loop.call_soon_threadsafe(file_loaded, file_path, file_desc)
|
||||
if file_loaded is not None:
|
||||
loop.call_soon_threadsafe(file_loaded, file_path, file_desc, map_type)
|
||||
if file_loaded_sync is not None:
|
||||
await file_loaded_sync(file_path, file_desc, map_type)
|
||||
|
||||
|
||||
async def download_texture(texture_node,
|
||||
@@ -517,23 +750,222 @@ async def download_texture(texture_node,
|
||||
texture_loading: callable,
|
||||
texture_loaded: callable,
|
||||
future: asyncio.Future):
|
||||
if texture_node['node_type'] != 'texture':
|
||||
raise TypeError("Node type should be 'texture', not %r" % texture_node['node_type'])
|
||||
node_type_name = texture_node['node_type']
|
||||
if node_type_name not in TEXTURE_NODE_TYPES:
|
||||
raise TypeError("Node type should be in %r, not %r" %
|
||||
(TEXTURE_NODE_TYPES, node_type_name))
|
||||
|
||||
filename = '%s.taken_from_file' % sanitize_filename(texture_node['name'])
|
||||
|
||||
# Download every file. Eve doesn't support embedding from a list-of-dicts.
|
||||
downloaders = (download_file_by_uuid(file_info['file'],
|
||||
target_directory,
|
||||
metadata_directory,
|
||||
map_type=file_info['map_type'],
|
||||
file_loading=texture_loading,
|
||||
file_loaded=texture_loaded,
|
||||
future=future)
|
||||
for file_info in texture_node['properties']['files'])
|
||||
downloaders = []
|
||||
for file_info in texture_node['properties']['files']:
|
||||
dlr = download_file_by_uuid(file_info['file'],
|
||||
target_directory,
|
||||
metadata_directory,
|
||||
filename=filename,
|
||||
map_type=file_info.map_type or file_info.resolution,
|
||||
file_loading=texture_loading,
|
||||
file_loaded=texture_loaded,
|
||||
future=future)
|
||||
downloaders.append(dlr)
|
||||
|
||||
return await asyncio.gather(*downloaders, return_exceptions=True)
|
||||
loop = asyncio.get_event_loop()
|
||||
return await asyncio.gather(*downloaders, return_exceptions=True, loop=loop)
|
||||
|
||||
|
||||
async def upload_file(project_id: str, file_path: pathlib.Path, *,
                      future: asyncio.Future) -> str:
    """Uploads a file to the Blender Cloud, returning a file document ID.

    :param project_id: ID of the project to upload into.
    :param file_path: path of the local file to upload.
    :param future: used only to detect cancellation before the POST starts.
    :raises asyncio.CancelledError: when `future` was already cancelled.
    :raises PillarError: when the server response contains no file ID.
    """

    from .blender import PILLAR_SERVER_URL

    loop = asyncio.get_event_loop()
    url = urllib.parse.urljoin(PILLAR_SERVER_URL, '/storage/stream/%s' % project_id)

    # Check for cancellation even before we start our POST request
    if is_cancelled(future):
        log.debug('Uploading was cancelled before doing the POST')
        raise asyncio.CancelledError('Uploading was cancelled')

    # The POST is a blocking `requests` call, so run it in a different
    # thread to keep the event loop responsive.
    def do_post():
        auth_token = blender_id_subclient()['token']

        with file_path.open(mode='rb') as infile:
            return uncached_session.post(url,
                                         files={'file': infile},
                                         auth=(auth_token, SUBCLIENT_ID))

    log.debug('Performing POST %s', _shorten(url))
    response = await loop.run_in_executor(None, do_post)
    log.debug('Status %i from POST %s', response.status_code, _shorten(url))
    response.raise_for_status()

    resp = response.json()
    log.debug('Upload response: %s', resp)

    try:
        file_id = resp['file_id']
    except KeyError:
        log.error('No file ID in upload response: %s', resp)
        raise PillarError('No file ID in upload response: %s' % resp)

    log.info('Uploaded %s to file ID %s', file_path, file_id)
    return file_id
|
||||
|
||||
|
||||
def is_cancelled(future: asyncio.Future) -> bool:
    """Returns whether the given future exists and was cancelled.

    A None future is never considered cancelled.
    """
    if future is None:
        return False
    return future.cancelled()
|
||||
|
||||
|
||||
class PillarOperatorMixin:
    async def check_credentials(self, context, required_roles) -> bool:
        """Checks credentials with Pillar, and if ok returns the user document from Pillar/MongoDB.

        First tries the stored credentials; when they are out of sync with
        Blender ID, a refresh is attempted before giving up.

        :raises UserNotLoggedInError: if the user is not logged in
        :raises NotSubscribedToCloudError: if the user does not have any of the required roles
        """

        # self.report({'INFO'}, 'Checking Blender Cloud credentials')

        # First attempt: use the credentials as they are.
        try:
            db_user = await check_pillar_credentials(required_roles)
        except NotSubscribedToCloudError:
            self._log_subscription_needed()
            raise
        except CredentialsNotSyncedError:
            self.log.info('Credentials not synced, re-syncing automatically.')
        else:
            self.log.info('Credentials okay.')
            return db_user

        # Second attempt: refresh the credentials and retry once.
        try:
            db_user = await refresh_pillar_credentials(required_roles)
        except NotSubscribedToCloudError:
            self._log_subscription_needed()
            raise
        except CredentialsNotSyncedError:
            self.log.info('Credentials not synced after refreshing, handling as not logged in.')
            raise UserNotLoggedInError('Not logged in.')
        except UserNotLoggedInError:
            self.log.error('User not logged in on Blender ID.')
            raise
        else:
            self.log.info('Credentials refreshed and ok.')
            return db_user

    def _log_subscription_needed(self):
        """Logs and reports that a Blender Cloud subscription is required."""
        self.log.warning(
            'Please subscribe to the blender cloud at https://cloud.blender.org/join')
        self.report({'INFO'},
                    'Please subscribe to the blender cloud at https://cloud.blender.org/join')
|
||||
|
||||
|
||||
class AuthenticatedPillarOperatorMixin(PillarOperatorMixin):
    """Checks credentials, to be used at the start of async_execute().

    Sets self.user_id to the current user's ID, and self.db_user to the user info dict,
    if authentication was succesful; sets both to None if not.
    """

    async def authenticate(self, context) -> bool:
        """Authenticates against Pillar; returns False (after quitting) on failure."""
        from . import pillar

        self.log.info('Checking credentials')
        self.user_id = None
        self.db_user = None

        try:
            self.db_user = await self.check_credentials(context, ())
        except pillar.UserNotLoggedInError as ex:
            self.log.info('Not logged in error raised: %s', ex)
            self.report({'ERROR'}, 'Please log in on Blender ID first.')
            self.quit()
            return False
        except requests.exceptions.ConnectionError:
            self.log.exception('Error checking pillar credentials.')
            self.report({'ERROR'}, 'Unable to connect to Blender Cloud, '
                                   'check your internet connection.')
            self.quit()
            return False

        self.user_id = self.db_user['_id']
        return True
|
||||
|
||||
|
||||
|
||||
async def find_or_create_node(where: dict,
                              additional_create_props: dict = None,
                              projection: dict = None,
                              may_create: bool = True) -> (pillarsdk.Node, bool):
    """Finds a node matching `where`; optionally creates it with the extra props.

    :param where: filter used to look the node up; also forms the base of the
        properties for a newly created node.
    :param additional_create_props: merged on top of `where` when creating.
    :param projection: optional Eve projection for the lookup.
    :param may_create: when False, a missing node results in (None, False).
    :returns: tuple (node, created), where 'created' is a bool indicating whether
        a new node was created, or an existing one is returned.
    :raises PillarError: when creation was requested but failed.
    """

    params = {'where': where}
    if projection:
        params['projection'] = projection

    found_node = await pillar_call(pillarsdk.Node.find_first, params, caching=False)
    if found_node is not None:
        return found_node, False

    if not may_create:
        return None, False

    # Augment the node properties to form a complete node.
    node_props = dict(where)
    if additional_create_props:
        node_props.update(additional_create_props)

    log.debug('Creating new node %s', node_props)
    created_node = pillarsdk.Node.new(node_props)
    created_ok = await pillar_call(created_node.create)
    if not created_ok:
        log.error('Blender Cloud addon: unable to create node on the Cloud.')
        raise PillarError('Unable to create node on the Cloud')

    return created_node, True
|
||||
|
||||
|
||||
async def attach_file_to_group(file_path: pathlib.Path,
                               home_project_id: str,
                               group_node_id: str,
                               user_id: str = None) -> pillarsdk.Node:
    """Creates an Asset node and attaches a file document to it.

    When a user ID is given, it is included in the 'where' filter so the
    asset is found/created for that specific user.
    """

    return await pillar_call(pillarsdk.Node.create_asset_from_file,
                             home_project_id,
                             group_node_id,
                             'file',
                             str(file_path),
                             extra_where=user_id and {'user': user_id},
                             caching=False)
|
||||
|
||||
|
||||
def node_to_id(node: pillarsdk.Node) -> dict:
    """Converts a Node to a dict we can store in an ID property.

    ID properties only support a handful of Python classes, so we have
    to convert datetime.datetime to a string and remove None values.
    """

    def to_rna(value):
        # Recurse into dicts; stringify datetimes; pass everything else through.
        if isinstance(value, dict):
            return {key: to_rna(subvalue) for key, subvalue in value.items()}
        if isinstance(value, datetime.datetime):
            return value.strftime(RFC1123_DATE_FORMAT)
        return value

    return pillarsdk.utils.remove_none_attributes(to_rna(node.to_dict()))
|
||||
|
523
blender_cloud/settings_sync.py
Normal file
523
blender_cloud/settings_sync.py
Normal file
@@ -0,0 +1,523 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
"""Synchronises settings & startup file with the Cloud.
|
||||
Caching is disabled on many PillarSDK calls, as synchronisation can happen
|
||||
rapidly between multiple machines. This means that information can be outdated
|
||||
in seconds, rather than the minutes the cache system assumes.
|
||||
"""
|
||||
import functools
|
||||
import logging
|
||||
import pathlib
|
||||
import tempfile
|
||||
import shutil
|
||||
|
||||
import bpy
|
||||
|
||||
import asyncio
|
||||
|
||||
import pillarsdk
|
||||
from pillarsdk import exceptions as sdk_exceptions
|
||||
from .pillar import pillar_call
|
||||
from . import async_loop, pillar, cache, blendfile, home_project
|
||||
|
||||
SETTINGS_FILES_TO_UPLOAD = ['userpref.blend', 'startup.blend']
|
||||
|
||||
# These are RNA keys inside the userpref.blend file, and their
|
||||
# Python properties names. These settings will not be synced.
|
||||
LOCAL_SETTINGS_RNA = [
|
||||
(b'dpi', 'system.dpi'),
|
||||
(b'virtual_pixel', 'system.virtual_pixel_mode'),
|
||||
(b'compute_device_id', 'system.compute_device'),
|
||||
(b'compute_device_type', 'system.compute_device_type'),
|
||||
(b'fontdir', 'filepaths.font_directory'),
|
||||
(b'textudir', 'filepaths.texture_directory'),
|
||||
(b'renderdir', 'filepaths.render_output_directory'),
|
||||
(b'pythondir', 'filepaths.script_directory'),
|
||||
(b'sounddir', 'filepaths.sound_directory'),
|
||||
(b'tempdir', 'filepaths.temporary_directory'),
|
||||
(b'render_cachedir', 'filepaths.render_cache_directory'),
|
||||
(b'i18ndir', 'filepaths.i18n_branches_directory'),
|
||||
(b'image_editor', 'filepaths.image_editor'),
|
||||
(b'anim_player', 'filepaths.animation_player'),
|
||||
]
|
||||
|
||||
REQUIRES_ROLES_FOR_SYNC = set() # no roles needed.
|
||||
SYNC_GROUP_NODE_NAME = 'Blender Sync'
|
||||
SYNC_GROUP_NODE_DESC = 'The [Blender Cloud Addon](https://cloud.blender.org/services' \
|
||||
'#blender-addon) will synchronize your Blender settings here.'
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def set_blender_sync_status(set_status: str):
    """Decorator factory for synchronous functions.

    While the wrapped function runs, the window manager's sync status is set
    to `set_status`; it is reset to 'IDLE' afterwards, even on error.
    """

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            sync_status = bpy.context.window_manager.blender_sync_status
            sync_status.status = set_status
            try:
                return func(*args, **kwargs)
            finally:
                sync_status.status = 'IDLE'

        return wrapper

    return decorator
|
||||
|
||||
|
||||
def async_set_blender_sync_status(set_status: str):
    """Decorator factory for coroutine functions.

    While the wrapped coroutine runs, the window manager's sync status is set
    to `set_status`; it is reset to 'IDLE' afterwards, even on error.
    """

    def decorator(func):
        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            sync_status = bpy.context.window_manager.blender_sync_status
            sync_status.status = set_status
            try:
                return await func(*args, **kwargs)
            finally:
                sync_status.status = 'IDLE'

        return wrapper

    return decorator
|
||||
|
||||
|
||||
async def find_sync_group_id(home_project_id: str,
                             user_id: str,
                             blender_version: str,
                             *,
                             may_create=True) -> (str, str):
    """Finds the group nodes in which to store sync assets.

    If the versioned group node doesn't exist and may_create=True, it creates
    it. The top-level group is never created here; Pillar should have created
    it together with the home project.

    :param home_project_id: project ID of the user's home project.
    :param user_id: MongoDB ID of the user owning the nodes.
    :param blender_version: e.g. '2.77', name of the versioned sub-group.
    :returns: tuple (sync_group_id, sub_sync_group_id); either element is None
        when the corresponding group doesn't exist and wasn't created.
        NOTE: the original annotation claimed `-> str`, but two IDs are
        returned; callers unpack `gid, subgid`.
    :raises pillar.PillarError: when lookup or creation fails.
    """

    # Find the top-level sync group node. This should have been
    # created by Pillar while creating the home project.
    try:
        sync_group, created = await pillar.find_or_create_node(
            where={'project': home_project_id,
                   'node_type': 'group',
                   'parent': None,
                   'name': SYNC_GROUP_NODE_NAME,
                   'user': user_id},
            projection={'_id': 1},
            may_create=False)
    except pillar.PillarError as ex:
        # Chain the original error so the root cause stays visible in logs.
        raise pillar.PillarError('Unable to find sync folder on the Cloud') from ex

    if not may_create and sync_group is None:
        log.info("Sync folder doesn't exist, and not creating it either.")
        return None, None

    # Find/create the sub-group for the requested Blender version
    try:
        sub_sync_group, created = await pillar.find_or_create_node(
            where={'project': home_project_id,
                   'node_type': 'group',
                   'parent': sync_group['_id'],
                   'name': blender_version,
                   'user': user_id},
            additional_create_props={
                'description': 'Sync folder for Blender %s' % blender_version,
                'properties': {'status': 'published'},
            },
            projection={'_id': 1},
            may_create=may_create)
    except pillar.PillarError as ex:
        raise pillar.PillarError('Unable to create sync folder on the Cloud') from ex

    if not may_create and sub_sync_group is None:
        log.info("Sync folder for Blender version %s doesn't exist, "
                 "and not creating it either.", blender_version)
        return sync_group['_id'], None

    return sync_group['_id'], sub_sync_group['_id']
||||
|
||||
|
||||
# Result cache for available_blender_versions(), keyed on
# (home_project_id, user_id). functools.lru_cache() cannot be used on a
# coroutine function: it would cache the coroutine object itself, which can
# only be awaited once, so any repeated call with the same arguments would
# fail with "cannot reuse already awaited coroutine".
_available_versions_cache = {}


async def _fetch_available_blender_versions(home_project_id: str, user_id: str) -> list:
    """Queries Pillar for the versioned sync sub-groups; returns version names."""
    bss = bpy.context.window_manager.blender_sync_status

    # Get the available Blender versions.
    sync_group = await pillar_call(
        pillarsdk.Node.find_first,
        params={
            'where': {'project': home_project_id,
                      'node_type': 'group',
                      'parent': None,
                      'name': SYNC_GROUP_NODE_NAME,
                      'user': user_id},
            'projection': {'_id': 1},
        },
        caching=False)

    if sync_group is None:
        bss.report({'ERROR'}, 'No synced Blender settings in your Blender Cloud')
        log.debug('-- unable to find sync group for home_project_id=%r and user_id=%r',
                  home_project_id, user_id)
        return []

    sync_nodes = await pillar_call(
        pillarsdk.Node.all,
        params={
            'where': {'project': home_project_id,
                      'node_type': 'group',
                      'parent': sync_group['_id'],
                      'user': user_id},
            'projection': {'_id': 1, 'name': 1},
            'sort': '-name',
        },
        caching=False)

    if not sync_nodes or not sync_nodes._items:
        bss.report({'ERROR'}, 'No synced Blender settings in your Blender Cloud.')
        return []

    versions = [node.name for node in sync_nodes._items]
    log.debug('Versions: %s', versions)

    return versions


async def available_blender_versions(home_project_id: str, user_id: str) -> list:
    """Returns the Blender versions that have synced settings, caching results.

    The cache can be invalidated with available_blender_versions.cache_clear(),
    mirroring the functools.lru_cache interface callers already use.
    """
    cache_key = (home_project_id, user_id)
    try:
        return _available_versions_cache[cache_key]
    except KeyError:
        pass

    versions = await _fetch_available_blender_versions(home_project_id, user_id)
    _available_versions_cache[cache_key] = versions
    return versions


# Keep the lru_cache-compatible cache_clear() used by action_refresh().
available_blender_versions.cache_clear = _available_versions_cache.clear
|
||||
|
||||
|
||||
# noinspection PyAttributeOutsideInit
class PILLAR_OT_sync(pillar.PillarOperatorMixin,
                     async_loop.AsyncModalOperatorMixin,
                     bpy.types.Operator):
    """Operator that synchronises Blender settings with the Blender Cloud.

    Supports four actions: PUSH (upload settings), PULL (download settings),
    REFRESH (list available versions) and SELECT (choose a version to pull).
    """

    bl_idname = 'pillar.sync'
    bl_label = 'Synchronise with Blender Cloud'
    bl_description = 'Synchronises Blender settings with Blender Cloud'

    log = logging.getLogger('bpy.ops.%s' % bl_idname)
    home_project_id = None  # ID of the user's home project on the Cloud.
    sync_group_id = None  # top-level sync group node ID
    sync_group_versioned_id = None  # sync group node ID for the given Blender version.

    action = bpy.props.EnumProperty(
        items=[
            ('PUSH', 'Push', 'Push settings to the Blender Cloud'),
            ('PULL', 'Pull', 'Pull settings from the Blender Cloud'),
            ('REFRESH', 'Refresh', 'Refresh available versions'),
            ('SELECT', 'Select', 'Select version to sync'),
        ],
        name='action')

    CURRENT_BLENDER_VERSION = '%i.%i' % bpy.app.version[:2]
    blender_version = bpy.props.StringProperty(name='blender_version',
                                               description='Blender version to sync for',
                                               default=CURRENT_BLENDER_VERSION)

    def bss_report(self, level, message):
        """Reports through the window manager's sync status, so the UI can show it."""
        bss = bpy.context.window_manager.blender_sync_status
        bss.report(level, message)

    def invoke(self, context, event):
        if self.action == 'SELECT':
            # Synchronous action
            return self.action_select(context)

        if self.action in {'PUSH', 'PULL'} and not self.blender_version:
            self.bss_report({'ERROR'}, 'No Blender version to sync for was given.')
            return {'CANCELLED'}

        return async_loop.AsyncModalOperatorMixin.invoke(self, context, event)

    def action_select(self, context):
        """Allows selection of the Blender version to use.

        This is a synchronous action, as it requires a dialog box.
        """

        self.log.info('Performing action SELECT')

        # Do a refresh before we can show the dropdown.
        fut = asyncio.ensure_future(self.async_execute(context, action_override='REFRESH'))
        loop = asyncio.get_event_loop()
        loop.run_until_complete(fut)

        self._state = 'SELECTING'
        return context.window_manager.invoke_props_dialog(self)

    def draw(self, context):
        """Draws the version-selection dropdown in the SELECT dialog."""
        bss = bpy.context.window_manager.blender_sync_status
        self.layout.prop(bss, 'version', text='Blender version')

    def execute(self, context):
        """Handles closing of the SELECT dialog by starting a PULL for the chosen version."""
        if self.action != 'SELECT':
            log.debug('Ignoring execute() for action %r', self.action)
            return {'FINISHED'}

        log.debug('Performing execute() for action %r', self.action)
        # Perform the sync when the user closes the dialog box.
        bss = bpy.context.window_manager.blender_sync_status
        bpy.ops.pillar.sync('INVOKE_DEFAULT',
                            action='PULL',
                            blender_version=bss.version)

        return {'FINISHED'}

    @async_set_blender_sync_status('SYNCING')
    async def async_execute(self, context, *, action_override=None):
        """Entry point of the asynchronous operator.

        :param action_override: when given, overrides self.action; used by
            action_select() to force a synchronous REFRESH.
        """

        action = action_override or self.action
        self.bss_report({'INFO'}, 'Communicating with Blender Cloud')
        self.log.info('Performing action %s', action)

        try:
            # Refresh credentials
            try:
                db_user = await self.check_credentials(context, REQUIRES_ROLES_FOR_SYNC)
                self.user_id = db_user['_id']
                log.debug('Found user ID: %s', self.user_id)
            except pillar.NotSubscribedToCloudError:
                self.log.exception('User not subscribed to cloud.')
                self.bss_report({'SUBSCRIBE'}, 'Please subscribe to the Blender Cloud.')
                self._state = 'QUIT'
                return
            except pillar.UserNotLoggedInError:
                self.log.exception('Error checking/refreshing credentials.')
                self.bss_report({'ERROR'}, 'Please log in on Blender ID first.')
                self._state = 'QUIT'
                return

            # Find the home project.
            try:
                self.home_project_id = await home_project.get_home_project_id()
            except sdk_exceptions.ForbiddenAccess:
                self.log.exception('Forbidden access to home project.')
                self.bss_report({'ERROR'}, 'Did not get access to home project.')
                self._state = 'QUIT'
                return
            except sdk_exceptions.ResourceNotFound:
                self.bss_report({'ERROR'}, 'Home project not found.')
                self._state = 'QUIT'
                return

            # Only create the folder structure if we're pushing.
            may_create = self.action == 'PUSH'
            try:
                gid, subgid = await find_sync_group_id(self.home_project_id,
                                                       self.user_id,
                                                       self.blender_version,
                                                       may_create=may_create)
                self.sync_group_id = gid
                self.sync_group_versioned_id = subgid
                self.log.debug('Found top-level group node ID: %s', self.sync_group_id)
                self.log.debug('Found group node ID for %s: %s',
                               self.blender_version, self.sync_group_versioned_id)
            except sdk_exceptions.ForbiddenAccess:
                self.log.exception('Unable to find Group ID')
                self.bss_report({'ERROR'}, 'Unable to find sync folder.')
                self._state = 'QUIT'
                return

            # Perform the requested action.
            action_method = {
                'PUSH': self.action_push,
                'PULL': self.action_pull,
                'REFRESH': self.action_refresh,
            }[action]
            await action_method(context)
        except Exception as ex:
            self.log.exception('Unexpected exception caught.')
            self.bss_report({'ERROR'}, 'Unexpected error: %s' % ex)

        self._state = 'QUIT'

    async def action_push(self, context):
        """Sends files to the Pillar server."""

        # Save first, so that we push what is actually current; the log
        # message used to be emitted before the save even happened.
        bpy.ops.wm.save_userpref()
        self.log.info('Saved user preferences to disk before pushing to cloud.')

        config_dir = pathlib.Path(bpy.utils.user_resource('CONFIG'))

        for fname in SETTINGS_FILES_TO_UPLOAD:
            path = config_dir / fname
            if not path.exists():
                self.log.debug('Skipping non-existing %s', path)
                continue

            if self.signalling_future.cancelled():
                self.bss_report({'WARNING'}, 'Upload aborted.')
                return

            self.bss_report({'INFO'}, 'Uploading %s' % fname)
            try:
                await pillar.attach_file_to_group(path,
                                                  self.home_project_id,
                                                  self.sync_group_versioned_id,
                                                  self.user_id)
            except sdk_exceptions.RequestEntityTooLarge as ex:
                self.log.error('File too big to upload: %s' % ex)
                self.log.error('To upload larger files, please subscribe to Blender Cloud.')
                self.bss_report({'SUBSCRIBE'}, 'File %s too big to upload. '
                                               'Subscribe for unlimited space.' % fname)
                self._state = 'QUIT'
                return

        await self.action_refresh(context)

        # After pushing, change the 'pull' version to the current version of Blender.
        # Or to the latest version, if by some mistake somewhere the current push
        # isn't available after all.
        bss = bpy.context.window_manager.blender_sync_status
        if self.CURRENT_BLENDER_VERSION in bss.available_blender_versions:
            bss.version = self.CURRENT_BLENDER_VERSION
        elif bss.available_blender_versions:
            # Guard against max() on an empty sequence, which would raise
            # ValueError if the refresh found no versions at all.
            bss.version = max(bss.available_blender_versions)

        self.bss_report({'INFO'}, 'Settings pushed to Blender Cloud.')

    async def action_pull(self, context):
        """Loads files from the Pillar server."""

        # If the sync group node doesn't exist, offer a list of groups that do.
        if self.sync_group_id is None:
            self.bss_report({'ERROR'},
                            'There are no synced Blender settings in your Blender Cloud.')
            return

        if self.sync_group_versioned_id is None:
            # Typo fix: was 'Therre are no synced Blender settings for version %s'.
            self.bss_report({'ERROR'}, 'There are no synced Blender settings for version %s' %
                                       self.blender_version)
            return

        self.bss_report({'INFO'}, 'Pulling settings from Blender Cloud')
        with tempfile.TemporaryDirectory(prefix='bcloud-sync') as tempdir:
            for fname in SETTINGS_FILES_TO_UPLOAD:
                await self.download_settings_file(fname, tempdir)

        self.bss_report({'WARNING'}, 'Settings pulled from Cloud, restart Blender to load them.')

    async def action_refresh(self, context):
        """Refreshes the list of available Blender versions from the Cloud."""
        self.bss_report({'INFO'}, 'Refreshing available Blender versions.')

        # Clear the LRU cache of available_blender_versions so that we can
        # obtain new versions (if someone synced from somewhere else, for example)
        available_blender_versions.cache_clear()

        versions = await available_blender_versions(self.home_project_id, self.user_id)
        bss = bpy.context.window_manager.blender_sync_status
        bss.available_blender_versions = versions

        if versions:
            # There are versions to sync, so we can remove the status message.
            # However, if there aren't any, the status message shows why, and
            # shouldn't be erased.
            self.bss_report({'INFO'}, '')

    async def download_settings_file(self, fname: str, temp_dir: str):
        """Downloads one settings file into temp_dir, then moves it into the config dir."""
        config_dir = pathlib.Path(bpy.utils.user_resource('CONFIG'))
        meta_path = cache.cache_directory('home-project', 'blender-sync')

        self.bss_report({'INFO'}, 'Downloading %s from Cloud' % fname)

        # Get the asset node
        node_props = {'project': self.home_project_id,
                      'node_type': 'asset',
                      'parent': self.sync_group_versioned_id,
                      'name': fname}
        node = await pillar_call(pillarsdk.Node.find_first, {
            'where': node_props,
            'projection': {'_id': 1, 'properties.file': 1}
        }, caching=False)
        if node is None:
            self.bss_report({'INFO'}, 'Unable to find %s on Blender Cloud' % fname)
            self.log.info('Unable to find node on Blender Cloud for %s', fname)
            return

        async def file_downloaded(file_path: str, file_desc: pillarsdk.File, map_type: str):
            # Allow the caller to adjust the file before we move it into place.
            if fname.lower() == 'userpref.blend':
                await self.update_userpref_blend(file_path)

            # Move the file next to the final location; as it may be on a
            # different filesystem than the temporary directory, this can
            # fail, and we don't want to destroy the existing file.
            local_temp = config_dir / (fname + '~')
            local_final = config_dir / fname

            # Make a backup copy of the file as it was before pulling.
            if local_final.exists():
                local_bak = config_dir / (fname + '-pre-bcloud-pull')
                self.move_file(local_final, local_bak)

            self.move_file(file_path, local_temp)
            self.move_file(local_temp, local_final)

        file_id = node.properties.file
        await pillar.download_file_by_uuid(file_id,
                                           temp_dir,
                                           str(meta_path),
                                           file_loaded_sync=file_downloaded,
                                           future=self.signalling_future)

    def move_file(self, src, dst):
        """Moves a file, logging the operation; accepts path-like arguments."""
        self.log.info('Moving %s to %s', src, dst)
        shutil.move(str(src), str(dst))

    async def update_userpref_blend(self, file_path: str):
        """Rewrites a pulled userpref.blend so machine-local settings keep their values.

        Reads the current (pre-pull) values of LOCAL_SETTINGS_RNA from the
        running Blender and writes them into the downloaded blend file.
        """
        self.log.info('Overriding machine-local settings in %s', file_path)

        # Remember some settings that should not be overwritten from the Cloud.
        up = bpy.context.user_preferences
        remembered = {}
        for rna_key, python_key in LOCAL_SETTINGS_RNA:
            assert '.' in python_key, 'Sorry, this code assumes there is a dot in the Python key'

            try:
                value = up.path_resolve(python_key)
            except ValueError:
                # Setting doesn't exist. This can happen, for example Cycles
                # settings on a build that doesn't have Cycles enabled.
                continue

            # Map enums from strings (in Python) to ints (in DNA).
            dot_index = python_key.rindex('.')
            parent_key, prop_key = python_key[:dot_index], python_key[dot_index + 1:]
            parent = up.path_resolve(parent_key)
            prop = parent.bl_rna.properties[prop_key]
            if prop.type == 'ENUM':
                log.debug('Rewriting %s from %r to %r',
                          python_key, value, prop.enum_items[value].value)
                value = prop.enum_items[value].value
            else:
                log.debug('Keeping value of %s: %r', python_key, value)

            remembered[rna_key] = value
        log.debug('Overriding values: %s', remembered)

        # Rewrite the userprefs.blend file to override the options.
        with blendfile.open_blend(file_path, 'rb+') as blend:
            prefs = next(block for block in blend.blocks
                         if block.code == b'USER')

            for key, value in remembered.items():
                self.log.debug('prefs[%r] = %r' % (key, prefs[key]))
                self.log.debug(' -> setting prefs[%r] = %r' % (key, value))
                prefs[key] = value
||||
|
||||
|
||||
def register():
    """Registers the sync operator with Blender."""
    bpy.utils.register_class(PILLAR_OT_sync)
|
||||
|
||||
|
||||
def unregister():
    """Unregisters the sync operator from Blender."""
    bpy.utils.unregister_class(PILLAR_OT_sync)
|
1046
blender_cloud/texture_browser.py
Normal file
1046
blender_cloud/texture_browser.py
Normal file
File diff suppressed because it is too large
Load Diff
102
blender_cloud/utils.py
Normal file
102
blender_cloud/utils.py
Normal file
@@ -0,0 +1,102 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
import pathlib
|
||||
|
||||
|
||||
def sizeof_fmt(num: int, suffix='B') -> str:
    """Returns a human-readable size.

    Source: http://stackoverflow.com/a/1094933/875379
    """

    value = num
    for prefix in ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi'):
        if abs(value) < 1024:
            return '%.1f %s%s' % (value, prefix, suffix)
        value /= 1024

    # Anything that survived all eight divisions is in yobi-range.
    return '%.1f Yi%s' % (value, suffix)
|
||||
|
||||
|
||||
def find_in_path(path: pathlib.Path, filename: str) -> pathlib.Path:
    """Performs a breadth-first search for the filename.

    Returns the path of the found file itself, or None when no regular
    file with that name exists anywhere under `path`. Directories with a
    matching name are descended into but never returned.
    """

    import collections

    # Be lenient on our input type.
    if isinstance(path, str):
        path = pathlib.Path(path)

    if not path.exists():
        return None
    assert path.is_dir()

    queue = collections.deque([path])
    while queue:
        directory = queue.popleft()

        for entry in directory.iterdir():
            if entry.is_dir():
                queue.append(entry)
            elif entry.name == filename:
                return entry

    return None
|
||||
|
||||
|
||||
def pyside_cache(propname):
    """Decorator, stores the result of the decorated callable in Python-managed memory.

    This is to work around the warning at
    https://www.blender.org/api/blender_python_api_master/bpy.props.html#bpy.props.EnumProperty
    """

    # Guard against using @pyside_cache without the property-name argument.
    if callable(propname):
        raise TypeError('Usage: pyside_cache("property_name")')

    def decorator(wrapped):
        import functools

        # The fixed (self, context) signature is mandatory: EnumProperty
        # explicitly checks the number of positional arguments, so we can't
        # use (*args, **kwargs) here.
        @functools.wraps(wrapped)
        def wrapper(self, context):
            result = None
            try:
                result = wrapped(self, context)
                return result
            finally:
                # Keep a reference on the property's RNA info dict, so the
                # returned items stay alive in Python-managed memory.
                rna_type, rna_info = getattr(self.bl_rna, propname)
                rna_info['_cached_result'] = result

        return wrapper

    return decorator
|
||||
|
||||
|
||||
def redraw(self, context):
    """Generic property-update callback: flags the current area for redraw."""
    area = context.area
    area.tag_redraw()
|
@@ -1,3 +1,21 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
"""External dependencies loader."""
|
||||
|
||||
import glob
|
||||
@@ -18,21 +36,28 @@ def load_wheel(module_name, fname_prefix):
|
||||
|
||||
try:
|
||||
module = __import__(module_name)
|
||||
except ImportError:
|
||||
pass
|
||||
except ImportError as ex:
|
||||
log.debug('Unable to import %s directly, will try wheel: %s',
|
||||
module_name, ex)
|
||||
else:
|
||||
log.debug('Was able to load %s from %s, no need to load wheel %s',
|
||||
module_name, module.__file__, fname_prefix)
|
||||
return
|
||||
|
||||
sys.path.append(wheel_filename(fname_prefix))
|
||||
module = __import__(module_name)
|
||||
log.debug('Loaded %s from %s', module_name, module.__file__)
|
||||
|
||||
|
||||
def wheel_filename(fname_prefix: str) -> str:
|
||||
path_pattern = os.path.join(my_dir, '%s*.whl' % fname_prefix)
|
||||
wheels = glob.glob(path_pattern)
|
||||
if not wheels:
|
||||
raise RuntimeError('Unable to find wheel at %r' % path_pattern)
|
||||
|
||||
sys.path.append(wheels[0])
|
||||
module = __import__(module_name)
|
||||
log.debug('Loaded %s from %s', module_name, module.__file__)
|
||||
# If there are multiple wheels that match, load the latest one.
|
||||
wheels.sort()
|
||||
return wheels[-1]
|
||||
|
||||
|
||||
def load_wheels():
|
||||
|
8
clear_wheels.sh
Executable file
8
clear_wheels.sh
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/bin/bash
|
||||
|
||||
git clean -n -d -X blender_cloud/wheels/
|
||||
|
||||
echo "Press [ENTER] to actually delete those files."
|
||||
read dummy
|
||||
|
||||
git clean -f -d -X blender_cloud/wheels/
|
8
requirements-dev.txt
Normal file
8
requirements-dev.txt
Normal file
@@ -0,0 +1,8 @@
|
||||
-r requirements.txt
|
||||
|
||||
# Primary requirements
|
||||
pytest==3.0.3
|
||||
|
||||
# Secondary requirements
|
||||
py==1.4.31
|
||||
|
@@ -1,8 +1,9 @@
|
||||
# Primary requirements:
|
||||
CacheControl==0.11.6
|
||||
-e git+https://github.com/sybrenstuvel/cachecontrol.git@sybren-filecache-delete-crash-fix#egg=CacheControl
|
||||
lockfile==0.12.2
|
||||
pillarsdk==1.0.0
|
||||
pillarsdk==1.6.1
|
||||
wheel==0.29.0
|
||||
blender-bam==1.1.7
|
||||
|
||||
# Secondary requirements:
|
||||
cffi==1.6.0
|
||||
|
69
setup.py
69
setup.py
@@ -1,10 +1,30 @@
|
||||
#!/usr/bin/env python
|
||||
#!/usr/bin/env python3
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
import glob
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
import subprocess
|
||||
import re
|
||||
import pathlib
|
||||
import zipfile
|
||||
|
||||
from distutils import log
|
||||
from distutils.core import Command
|
||||
@@ -14,6 +34,7 @@ from distutils.command.install_egg_info import install_egg_info
|
||||
from setuptools import setup, find_packages
|
||||
|
||||
requirement_re = re.compile('[><=]+')
|
||||
sys.dont_write_bytecode = True
|
||||
|
||||
|
||||
def set_default_path(var, default):
|
||||
@@ -80,12 +101,21 @@ class BuildWheels(Command):
|
||||
log.info('Downloading Pillar Python SDK wheel')
|
||||
self.download_wheel(requirements['pillarsdk'])
|
||||
|
||||
# Download BAM from pypi. This is required for compatibility with Blender 2.78.
|
||||
if not list(self.wheels_path.glob('blender_bam*.whl')):
|
||||
log.info('Downloading BAM wheel')
|
||||
self.download_wheel(requirements['blender-bam'])
|
||||
|
||||
# Build CacheControl.
|
||||
if not list(self.wheels_path.glob('CacheControl*.whl')):
|
||||
log.info('Building CacheControl in %s', self.cachecontrol_path)
|
||||
# self.git_clone(self.cachecontrol_path,
|
||||
# 'https://github.com/ionrock/cachecontrol.git',
|
||||
# 'v%s' % requirements['CacheControl'][1])
|
||||
# FIXME: we need my clone until pull request #125 has been merged & released
|
||||
self.git_clone(self.cachecontrol_path,
|
||||
'https://github.com/ionrock/cachecontrol.git',
|
||||
'v%s' % requirements['CacheControl'][1])
|
||||
'https://github.com/sybrenstuvel/cachecontrol.git',
|
||||
'sybren-filecache-delete-crash-fix')
|
||||
self.build_copy_wheel(self.cachecontrol_path)
|
||||
|
||||
# Ensure that the wheels are added to the data files.
|
||||
@@ -145,6 +175,34 @@ class BlenderAddonBdist(bdist):
|
||||
super().run()
|
||||
|
||||
|
||||
# noinspection PyAttributeOutsideInit
|
||||
class BlenderAddonFdist(BlenderAddonBdist):
|
||||
"""Ensures that 'python setup.py fdist' creates a plain folder structure."""
|
||||
|
||||
user_options = [
|
||||
('dest-path=', None, 'addon installation path'),
|
||||
]
|
||||
|
||||
def initialize_options(self):
|
||||
super().initialize_options()
|
||||
self.dest_path = None # path that will contain the addon
|
||||
|
||||
def run(self):
|
||||
super().run()
|
||||
|
||||
# dist_files is a list of tuples ('bdist', 'any', 'filepath')
|
||||
filepath = self.distribution.dist_files[0][2]
|
||||
|
||||
# if dest_path is not specified use the filename as the dest_path (minus the .zip)
|
||||
assert filepath.endswith('.zip')
|
||||
target_folder = self.dest_path or filepath[:-4]
|
||||
|
||||
print('Unzipping the package on {}.'.format(target_folder))
|
||||
|
||||
with zipfile.ZipFile(filepath, 'r') as zip_ref:
|
||||
zip_ref.extractall(target_folder)
|
||||
|
||||
|
||||
# noinspection PyAttributeOutsideInit
|
||||
class BlenderAddonInstall(install):
|
||||
"""Ensures the module is placed at the root of the zip file."""
|
||||
@@ -168,16 +226,17 @@ class AvoidEggInfo(install_egg_info):
|
||||
|
||||
setup(
|
||||
cmdclass={'bdist': BlenderAddonBdist,
|
||||
'fdist': BlenderAddonFdist,
|
||||
'install': BlenderAddonInstall,
|
||||
'install_egg_info': AvoidEggInfo,
|
||||
'wheels': BuildWheels},
|
||||
name='blender_cloud',
|
||||
description='The Blender Cloud addon allows browsing the Blender Cloud from Blender.',
|
||||
version='1.0.0',
|
||||
version='1.7.3',
|
||||
author='Sybren A. Stüvel',
|
||||
author_email='sybren@stuvel.eu',
|
||||
packages=find_packages('.'),
|
||||
data_files=[('blender_cloud', ['README.md']),
|
||||
data_files=[('blender_cloud', ['README.md', 'README-flamenco.md', 'CHANGELOG.md']),
|
||||
('blender_cloud/icons', glob.glob('blender_cloud/icons/*'))],
|
||||
scripts=[],
|
||||
url='https://developer.blender.org/diffusion/BCA/',
|
||||
|
90
tests/test_path_replacement.py
Normal file
90
tests/test_path_replacement.py
Normal file
@@ -0,0 +1,90 @@
|
||||
"""Unittests for blender_cloud.utils.
|
||||
|
||||
This unittest requires bpy to be importable, so build Blender as a module and install it
|
||||
into your virtualenv. See https://stuvel.eu/files/bconf2016/#/10 for notes how.
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import pathlib
|
||||
import unittest.mock
|
||||
|
||||
import pillarsdk.utils
|
||||
|
||||
from blender_cloud.flamenco import sdk
|
||||
|
||||
|
||||
class PathReplacementTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.test_manager = sdk.Manager({
|
||||
'_created': datetime.datetime(2017, 5, 31, 15, 12, 32, tzinfo=pillarsdk.utils.utc),
|
||||
'_etag': 'c39942ee4bcc4658adcc21e4bcdfb0ae',
|
||||
'_id': '592edd609837732a2a272c62',
|
||||
'_updated': datetime.datetime(2017, 6, 8, 14, 51, 3, tzinfo=pillarsdk.utils.utc),
|
||||
'description': 'Manager formerly known as "testman"',
|
||||
'job_types': {'sleep': {'vars': {}}},
|
||||
'name': '<script>alert("this is a manager")</script>',
|
||||
'owner': '592edd609837732a2a272c63',
|
||||
'path_replacement': {'job_storage': {'darwin': '/Volume/shared',
|
||||
'linux': '/shared',
|
||||
'windows': 's:/'},
|
||||
'render': {'darwin': '/Volume/render/',
|
||||
'linux': '/render/',
|
||||
'windows': 'r:/'},
|
||||
'longrender': {'darwin': '/Volume/render/long',
|
||||
'linux': '/render/long',
|
||||
'windows': 'r:/long'},
|
||||
},
|
||||
'projects': ['58cbdd5698377322d95eb55e'],
|
||||
'service_account': '592edd609837732a2a272c60',
|
||||
'stats': {'nr_of_workers': 3},
|
||||
'url': 'http://192.168.3.101:8083/',
|
||||
'user_groups': ['58cbdd5698377322d95eb55f'],
|
||||
'variables': {'blender': {'darwin': '/opt/myblenderbuild/blender',
|
||||
'linux': '/home/sybren/workspace/build_linux/bin/blender '
|
||||
'--enable-new-depsgraph --factory-startup',
|
||||
'windows': 'c:/temp/blender.exe'}}}
|
||||
)
|
||||
|
||||
def test_linux(self):
|
||||
# (expected result, input)
|
||||
test_paths = [
|
||||
('/doesnotexistreally', '/doesnotexistreally'),
|
||||
('{render}/agent327/scenes/A_01_03_B', '/render/agent327/scenes/A_01_03_B'),
|
||||
('{job_storage}/render/agent327/scenes', '/shared/render/agent327/scenes'),
|
||||
('{longrender}/agent327/scenes', '/render/long/agent327/scenes'),
|
||||
]
|
||||
|
||||
self._do_test(test_paths, 'linux', pathlib.PurePosixPath)
|
||||
|
||||
def test_windows(self):
|
||||
# (expected result, input)
|
||||
test_paths = [
|
||||
('c:/doesnotexistreally', 'c:/doesnotexistreally'),
|
||||
('c:/some/path', r'c:\some\path'),
|
||||
('{render}/agent327/scenes/A_01_03_B', r'R:\agent327\scenes\A_01_03_B'),
|
||||
('{render}/agent327/scenes/A_01_03_B', r'r:\agent327\scenes\A_01_03_B'),
|
||||
('{render}/agent327/scenes/A_01_03_B', r'r:/agent327/scenes/A_01_03_B'),
|
||||
('{job_storage}/render/agent327/scenes', 's:/render/agent327/scenes'),
|
||||
('{longrender}/agent327/scenes', 'r:/long/agent327/scenes'),
|
||||
]
|
||||
|
||||
self._do_test(test_paths, 'windows', pathlib.PureWindowsPath)
|
||||
|
||||
def test_darwin(self):
|
||||
# (expected result, input)
|
||||
test_paths = [
|
||||
('/Volume/doesnotexistreally', '/Volume/doesnotexistreally'),
|
||||
('{render}/agent327/scenes/A_01_03_B', r'/Volume/render/agent327/scenes/A_01_03_B'),
|
||||
('{job_storage}/render/agent327/scenes', '/Volume/shared/render/agent327/scenes'),
|
||||
('{longrender}/agent327/scenes', '/Volume/render/long/agent327/scenes'),
|
||||
]
|
||||
|
||||
self._do_test(test_paths, 'darwin', pathlib.PurePosixPath)
|
||||
|
||||
def _do_test(self, test_paths, platform, pathclass):
|
||||
self.test_manager.PurePlatformPath = pathclass
|
||||
with unittest.mock.patch('sys.platform', platform):
|
||||
for expected_result, input_path in test_paths:
|
||||
self.assertEqual(expected_result,
|
||||
self.test_manager.replace_path(pathclass(input_path)),
|
||||
'for input %s on platform %s' % (input_path, platform))
|
25
tests/test_utils.py
Normal file
25
tests/test_utils.py
Normal file
@@ -0,0 +1,25 @@
|
||||
"""Unittests for blender_cloud.utils."""
|
||||
|
||||
import pathlib
|
||||
import unittest
|
||||
|
||||
from blender_cloud import utils
|
||||
|
||||
|
||||
class FindInPathTest(unittest.TestCase):
|
||||
def test_nonexistant_path(self):
|
||||
path = pathlib.Path('/doesnotexistreally')
|
||||
self.assertFalse(path.exists())
|
||||
self.assertIsNone(utils.find_in_path(path, 'jemoeder.blend'))
|
||||
|
||||
def test_really_breadth_first(self):
|
||||
"""A depth-first test might find dir_a1/dir_a2/dir_a3/find_me.txt first."""
|
||||
|
||||
path = pathlib.Path(__file__).parent / 'test_really_breadth_first'
|
||||
found = utils.find_in_path(path, 'find_me.txt')
|
||||
self.assertEqual(path / 'dir_b1' / 'dir_b2' / 'find_me.txt', found)
|
||||
|
||||
def test_nonexistant_file(self):
|
||||
path = pathlib.Path(__file__).parent / 'test_really_breadth_first'
|
||||
found = utils.find_in_path(path, 'do_not_find_me.txt')
|
||||
self.assertEqual(None, found)
|
19
update_version.sh
Executable file
19
update_version.sh
Executable file
@@ -0,0 +1,19 @@
|
||||
#!/bin/bash
|
||||
|
||||
VERSION="${1/version-}"
|
||||
|
||||
if [ -z "$VERSION" ]; then
|
||||
echo "Usage: $0 new-version" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
BL_INFO_VER=$(echo "$VERSION" | sed 's/\./, /g')
|
||||
|
||||
sed "s/version='[^']*'/version='$VERSION'/" -i setup.py
|
||||
sed "s/'version': ([^)]*)/'version': ($BL_INFO_VER)/" -i blender_cloud/__init__.py
|
||||
|
||||
git diff
|
||||
echo
|
||||
echo "Don't forget to commit and tag:"
|
||||
echo git commit -m \'Bumped version to $VERSION\' setup.py blender_cloud/__init__.py
|
||||
echo git tag -a version-$VERSION -m \'Tagged version $VERSION\'
|
Reference in New Issue
Block a user