Compare commits
416 Commits
last-befor...last-py27
4570b4637b
e381ca774e
6765276519
eca4ade9d8
2e00e81b30
0a86ad357f
02f736dcc4
d8eae2c44b
c98cd82b3f
69b3e06b1c
7b9fef2fc8
528887b1a6
10df0af355
ae38bec218
3ef0bf6761
1e56ca5227
b8ad0cd18f
e049ab0a08
089b0f1535
bf0ebce81a
eb02fa5eec
bc6f526b72
0e07cb2b1d
2b528f0fff
9b90070191
68fcae64ae
e3fc5d1b9b
85988bb8c9
85dba5e9e9
350577033c
eb5fb4eb09
181cbc07d6
784c1ed0bb
604d6c1a07
129ec94608
01cc52bba9
8115bc2ad5
a100d73a8b
11197e669c
7a6e1d3386
6bb491aadc
bc456f9387
1beb3ca488
0190cf944a
5f590a2063
c284156723
7219c5ca72
86b5c1b242
ffdffdeb96
455bfdfc49
2ad3c8a7ed
08f3467406
2bae7c2fef
b6b517688e
f2942a20fe
d9b56f485b
f06b3c94eb
742a16fb9f
e72f02711d
48ebdf11b3
e43f99593a
476e7be826
8654503f5a
98295305fd
e43b0cbccf
462ef953bc
29629f6647
e3fc265408
a67774c6e8
dea6dd5242
a79ca80f28
7fb94a86e8
9783711818
bf5b457141
3fbee33369
2c71168677
51d7eed164
64ce091f11
4a5d553bc8
f75c43055f
f2d9df8b61
c73ad07e83
a93d9be632
89689db96e
01e79f8565
5866cc54aa
e8b03de444
1e1d9e57e7
5617f89c99
b30aba2463
c8ae748bd6
3e6a9909da
d35f2aa8c9
32ac0a64fb
3125ff75ca
62b518c81e
8865ae02e4
44c4182a86
f59086c025
081a7f96ca
b1a0e1e3b6
6910d3da49
b9c3d6b0fb
f99869f57e
85bfbdb5e3
ee20926233
f732f1e08b
f899fb48ce
4f071260f7
6ed772278c
b04ed3f5b6
738c3e82d7
9e952b0436
6ef2c5ca0d
c025aa3aac
a41bda6859
9210285089
f1661f7efb
8959fac415
9b469cee7d
bbb3f5c7c0
3139ba5368
df810c0c4e
29b4ebd09a
76a5d9c9e1
fe848525b1
24ede3f4ee
756e3d2d89
684afb8cd5
52a1602a7c
ce6020702e
76f2367e66
5f0092cfa1
4b84e6506b
a13937e500
b9e27a4cbf
3b694a91af
f651ece343
595a690473
1702b40812
9612e99806
c17993418c
60e43c368d
2f3e5a513b
54fccfc3ad
b6b62babd2
ad3f2c0119
dc70705b1e
ab375b2126
fcecc75c3d
15be184816
45328b629b
cce45b96e1
edad85ee34
116ed9f08a
7391f40cba
e54bfa4520
d272896787
724fe6ceeb
865259d40e
65b554986c
fb6e326a14
920a1de263
0da4e3bafc
89be4efe6f
ba591da2fc
4c6a51c501
76174046ad
7b79270481
a1dca29382
c1427cf6a2
a89ada7c2f
84a86a690e
0a0db88701
27bad1be8a
e98b158886
324d500edb
ef326a2193
5ade876784
738c20b36b
3c6642d879
e43405a349
f394907dd2
e117432f3d
295c821b9d
865f777152
36e7cc56ef
aa3340ddbe
4280e0175b
cc562a9fb1
4ec3268a23
80601f75ed
9ac2f38042
4bd334e403
ae859d3ea7
e69393e95e
2cc21583d9
0ac0f482ac
f30cdd5246
48157254c1
3fc08bcafd
ff94cc57a3
cf28e5a3f4
6ea7386bd3
90c6fdc377
2a5b3dc53e
dabc1a44b8
eb1561136b
d24677992e
e143b9cb72
6faea83372
d36dcad773
a385a373b9
8fa135d52e
6f460ee127
8cc2cfb189
c672bc07fe
656944f1ce
ab9d5c1793
fe4d70c0d1
964e807721
3cf71a365f
5bd2c101fe
aef7754537
d50d206e77
28223159e7
a38e053c1a
62ac12deff
64ece74404
bffbbad323
8fb64c38d6
f72890cc59
0929a80f2b
ff7101c3fe
590d075735
fa3406b7d0
5805f4eb2a
53cbe78ec1
f4b5e49c26
499af03473
51c2c1d568
144c5b8894
c9d7da3a42
b59fcb5cba
7be8e9b967
041722f71a
457a63ddcb
5677ae8532
8d99f8fc2e
09a21510a2
73641ecc8a
b1da6de46e
fceac01505
8b64f9140b
e1678537c0
d8686e5a14
e71e6a7b32
8352fafd21
db2680be81
c456696600
ad1816c617
8d3c4745aa
3afeeaccd0
7f4ad85781
ea2be0f13d
eea934a86a
f2f66d7a6c
aca54d76e0
646ab58395
d99ddca410
87f3093503
ae723b1655
0a606ae15c
6af3dfdb51
eca3f47eb8
8043caf187
aa953f76a1
10ecb2158e
96c9e12f7f
7c310e12ef
26aa155b9e
0146b568c0
ade62033ba
8aab88bdc2
f4b34f1d02
4eb8319697
5dd49fa5dd
6429c3df21
3561cb61c6
a52c263733
c9d4a06486
8a35fe3a16
620107fdc0
14a8be6329
77b17e31e0
2028891e7a
abe0c28a99
c71186f318
4e0db78ff1
d1610da5f9
73ec464292
0de8772c98
91b116aa74
6537332b26
001d310d76
e2921c8da8
d1d48553e5
dd58d4ad04
b429933737
2cc22f4f90
e2236864e7
74d86487a9
d7fe196af0
dcef372e4f
7931428312
407aefb9ad
c64fbf61ba
063023c69a
2c7d2e7dfd
7968c6ca37
91e3ec659f
e0f92b6185
0bf07b4ba4
dfe398458b
30215bf87c
0f23ee7a08
9514066893
cd8707207b
7f9f89853d
78824c9c2a
40896fc70b
7598ad0b57
4b11aab429
ad91e37d14
df8afb8b14
55b2911665
1680475d92
d116439b57
56c669874d
76b0f5fc46
68666f0650
4313284dab
9e6b998c50
b2e8711ac4
f03566a10f
2730a7a2b2
f21b708085
8a6cd96198
4ae36a0dc3
eac49ab810
49c08cba10
cf30bb5d62
ab5a4a6b6c
e04b2ef7ea
52ca2adc19
29a0bed39b
634ad86fa1
574178cffc
305d9b44ec
3bb55fd3db
486686f1f9
52cc61b143
e4763d809b
4cf7fde5bf
e58f29a9d0
fa050da8e2
3d9b9e40d4
4cf779e040
a0cc76259e
54bc0e87ce
cb5128907c
34921ece76
5ebec42e6d
4529d0597b
3f9d519753
3039aef7d3
cb84e6f0b7
88b5537df4
88dd574797
8d6df947c8
b9b993fe4a
2c62bd4016
06ed6af2a9
32c130ed93
634b233685
eb7b875122
c4a3601939
225f9ae054
163db3f2b8
dd6fc8bde4
ff692d287c
1fe86fa000
04c9c010f0
b6c623cca8
9b2a419d9b
d5cf3b8246
0d3ed3af2c
751a321aa6
207d821564
d7b71e38e8
07691db874
dcbefc33ae
751c692e6a
00a34e7e24
2e0ba4c6cd
9d1181330b
f3bf380bb7
27eee380d2
57620fd49a
becf7e6381
c440465cf1
25fb4ce842
9c59b06ab9
bd9ce3182d
4398d250a7
2c5dc34ea2
.arcconfig (new file, 6 lines)
@@ -0,0 +1,6 @@
{
    "project_id" : "Pillar Server",
    "conduit_uri" : "https://developer.blender.org/",
    "git.default-relative-commit" : "origin/master",
    "arc.land.update.default" : "rebase"
}
.gitignore (18 lines changed)
@@ -6,14 +6,24 @@
*.ropeproject*
*.swp

/pillar/config_local.py
config_local.py

.ropeproject/*

/pillar/application/static/storage/
/build
/.cache
/pillar/pillar.egg-info/
/pillar/google_app.json
/*.egg-info/
profile.stats
/dump/
/.eggs

/node_modules
/.sass-cache
*.css.map
*.js.map

pillar/web/static/assets/css/*.css
pillar/web/static/assets/js/*.min.js
pillar/web/static/storage/
pillar/web/static/uploads/
pillar/web/templates/
@@ -1,3 +1,3 @@
#!/bin/bash
#!/bin/bash -ex

mongodump -h localhost:27018 -d eve --out dump/$(date +'%Y-%m-%d-%H%M') --excludeCollection tokens
mongodump -h localhost:27018 -d cloud --out dump/$(date +'%Y-%m-%d-%H%M') --excludeCollection tokens
deploy.sh (59 lines changed)
@@ -1,57 +1,8 @@
#!/bin/bash -e
#!/bin/bash

# Deploys the current production branch to the production machine.

PROJECT_NAME="pillar"
DOCKER_NAME="pillar"
REMOTE_ROOT="/data/git/${PROJECT_NAME}"

SSH="ssh -o ClearAllForwardings=yes cloud.blender.org"
ROOT="$(dirname "$(readlink -f "$0")")"
cd ${ROOT}

# Check that we're on production branch.
if [ $(git rev-parse --abbrev-ref HEAD) != "production" ]; then
    echo "You are NOT on the production branch, refusing to deploy." >&2
    exit 1
fi

# Check that production branch has been pushed.
if [ -n "$(git log origin/production..production --oneline)" ]; then
    echo "WARNING: not all changes to the production branch have been pushed."
    echo "Press [ENTER] to continue deploying current origin/production, CTRL+C to abort."
    read dummy
fi

# SSH to cloud to pull all files in
echo "==================================================================="
echo "UPDATING FILES ON ${PROJECT_NAME}"
${SSH} git -C ${REMOTE_ROOT} fetch origin production
${SSH} git -C ${REMOTE_ROOT} log origin/production..production --oneline
${SSH} git -C ${REMOTE_ROOT} merge --ff-only origin/production

# Update the virtualenv
${SSH} -t docker exec ${DOCKER_NAME} /data/venv/bin/pip install -U -r ${REMOTE_ROOT}/requirements.txt --exists-action w

# Notify Bugsnag of this new deploy.
echo
echo "==================================================================="
GIT_REVISION=$(${SSH} git -C ${REMOTE_ROOT} describe --always)
echo "Notifying Bugsnag of this new deploy of revision ${GIT_REVISION}."
BUGSNAG_API_KEY=$(${SSH} python -c "\"import sys; sys.path.append('${REMOTE_ROOT}/${PROJECT_NAME}'); import config_local; print(config_local.BUGSNAG_API_KEY)\"")
curl --data "apiKey=${BUGSNAG_API_KEY}&revision=${GIT_REVISION}" https://notify.bugsnag.com/deploy
echo

# Wait for [ENTER] to restart the server
echo
echo "==================================================================="
echo "NOTE: If you want to edit config_local.py on the server, do so now."
echo "NOTE: Press [ENTER] to continue and restart the server process."
echo "==========================================================================="
echo "Dummy deploy script for people with a 'git pp' alias to push to production."
echo "Run deploy script on your server project."
echo "When done, press [ENTER] to stop this script."
read dummy
${SSH} docker exec ${DOCKER_NAME} kill -HUP 1
echo "Server process restarted"

echo
echo "==================================================================="
echo "Deploy of ${PROJECT_NAME} is done."
echo "==================================================================="
@@ -1,17 +0,0 @@
#!/usr/bin/env bash

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

echo $DIR

if [[ $1 == 'pro' || $1 == 'dev' ]]; then
    # Copy requirements.txt into pro folder
    cp ../requirements.txt $1/requirements.txt
    # Build image
    docker build -t armadillica/pillar_$1 $1
    # Remove requirements.txt
    rm $1/requirements.txt

else
    echo "POS. Your options are 'pro' or 'dev'"
fi
@@ -1,48 +0,0 @@
FROM ubuntu:14.04
MAINTAINER Francesco Siddi <francesco@blender.org>

RUN apt-get update && apt-get install -y \
    python \
    python-dev \
    python-pip \
    vim \
    nano \
    zlib1g-dev \
    libjpeg-dev \
    python-crypto \
    python-openssl \
    libssl-dev \
    libffi-dev \
    software-properties-common \
    git

RUN add-apt-repository ppa:mc3man/trusty-media \
    && apt-get update && apt-get install -y \
    ffmpeg

RUN mkdir -p /data/git/pillar \
    && mkdir -p /data/storage/shared \
    && mkdir -p /data/storage/pillar \
    && mkdir -p /data/config \
    && mkdir -p /data/storage/logs

RUN pip install virtualenv \
    && virtualenv /data/venv

ENV PIP_PACKAGES_VERSION = 2
ADD requirements.txt /requirements.txt

RUN . /data/venv/bin/activate && pip install -r /requirements.txt

VOLUME /data/git/pillar
VOLUME /data/config
VOLUME /data/storage/shared
VOLUME /data/storage/pillar

ENV MONGO_HOST mongo_pillar

EXPOSE 5000

ADD runserver.sh /runserver.sh

ENTRYPOINT ["bash", "/runserver.sh"]
@@ -1,3 +0,0 @@
#!/bin/bash

. /data/venv/bin/activate && python /data/git/pillar/pillar/manage.py runserver
@@ -1,47 +0,0 @@
<VirtualHost *:80>
    # The ServerName directive sets the request scheme, hostname and port that
    # the server uses to identify itself. This is used when creating
    # redirection URLs. In the context of virtual hosts, the ServerName
    # specifies what hostname must appear in the request's Host: header to
    # match this virtual host. For the default virtual host (this file) this
    # value is not decisive as it is used as a last resort host regardless.
    # However, you must set it for any further virtual host explicitly.
    #ServerName 127.0.0.1

    # EnableSendfile on
    XSendFile on
    XSendFilePath /data/storage/pillar

    ServerAdmin webmaster@localhost
    DocumentRoot /var/www/html

    # Available loglevels: trace8, ..., trace1, debug, info, notice, warn,
    # error, crit, alert, emerg.
    # It is also possible to configure the loglevel for particular
    # modules, e.g.
    #LogLevel info ssl:warn

    ErrorLog ${APACHE_LOG_DIR}/error.log
    CustomLog ${APACHE_LOG_DIR}/access.log combined

    # For most configuration files from conf-available/, which are
    # enabled or disabled at a global level, it is possible to
    # include a line for only one particular virtual host. For example the
    # following line enables the CGI configuration for this host only
    # after it has been globally disabled with "a2disconf".
    #Include conf-available/serve-cgi-bin.conf

    WSGIDaemonProcess pillar
    WSGIPassAuthorization On

    WSGIScriptAlias / /data/git/pillar/pillar/runserver.wsgi \
        process-group=pillar application-group=%{GLOBAL}

    <Directory /data/git/pillar/pillar>
        <Files runserver.wsgi>
            Require all granted
        </Files>
    </Directory>
</VirtualHost>

# vim: syntax=apache ts=4 sw=4 sts=4 sr noet
@@ -1,61 +0,0 @@
FROM ubuntu:14.04
MAINTAINER Francesco Siddi <francesco@blender.org>

RUN apt-get update && apt-get install -y \
    python \
    python-dev \
    python-pip \
    vim \
    nano \
    zlib1g-dev \
    libjpeg-dev \
    python-crypto \
    python-openssl \
    libssl-dev \
    libffi-dev \
    software-properties-common \
    apache2-mpm-event \
    libapache2-mod-wsgi \
    libapache2-mod-xsendfile \
    git

RUN add-apt-repository ppa:mc3man/trusty-media \
    && apt-get update && apt-get install -y \
    ffmpeg

RUN mkdir -p /data/git/pillar \
    && mkdir -p /data/storage/shared \
    && mkdir -p /data/storage/pillar \
    && mkdir -p /data/config \
    && mkdir -p /data/storage/logs

ENV APACHE_RUN_USER www-data
ENV APACHE_RUN_GROUP www-data
ENV APACHE_LOG_DIR /var/log/apache2
ENV APACHE_PID_FILE /var/run/apache2.pid
ENV APACHE_RUN_DIR /var/run/apache2
ENV APACHE_LOCK_DIR /var/lock/apache2

RUN mkdir -p $APACHE_RUN_DIR $APACHE_LOCK_DIR $APACHE_LOG_DIR

RUN pip install virtualenv \
    && virtualenv /data/venv

ENV PIP_PACKAGES_VERSION = 2
ADD requirements.txt /requirements.txt

RUN . /data/venv/bin/activate \
    && pip install -r /requirements.txt

VOLUME /data/git/pillar
VOLUME /data/config
VOLUME /data/storage/shared
VOLUME /data/storage/pillar

ENV MONGO_HOST mongo_pillar

EXPOSE 80

ADD 000-default.conf /etc/apache2/sites-available/000-default.conf

CMD ["/usr/sbin/apache2", "-D", "FOREGROUND"]
gulp (new executable file, 19 lines)
@@ -0,0 +1,19 @@
#!/bin/bash -ex

GULP=./node_modules/.bin/gulp

function install() {
    npm install
    touch $GULP  # installer doesn't always touch this after a build, so we do.
}

# Rebuild Gulp if missing or outdated.
[ -e $GULP ] || install
[ gulpfile.js -nt $GULP ] && install

if [ "$1" == "watch" ]; then
    # Treat "gulp watch" as "gulp && gulp watch"
    $GULP
fi

exec $GULP "$@"
gulpfile.js (new file, 111 lines)
@@ -0,0 +1,111 @@
var argv = require('minimist')(process.argv.slice(2));
var autoprefixer = require('gulp-autoprefixer');
var chmod = require('gulp-chmod');
var concat = require('gulp-concat');
var gulp = require('gulp');
var gulpif = require('gulp-if');
var jade = require('gulp-jade');
var livereload = require('gulp-livereload');
var plumber = require('gulp-plumber');
var rename = require('gulp-rename');
var sass = require('gulp-sass');
var sourcemaps = require('gulp-sourcemaps');
var uglify = require('gulp-uglify');
var cache = require('gulp-cached');

var enabled = {
    uglify: argv.production,
    maps: argv.production,
    failCheck: argv.production,
    prettyPug: !argv.production,
    liveReload: !argv.production
};


/* CSS */
gulp.task('styles', function() {
    gulp.src('src/styles/**/*.sass')
        .pipe(gulpif(enabled.failCheck, plumber()))
        .pipe(gulpif(enabled.maps, sourcemaps.init()))
        .pipe(sass({
            outputStyle: 'compressed'}
        ))
        .pipe(autoprefixer("last 3 versions"))
        .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
        .pipe(gulp.dest('pillar/web/static/assets/css'))
        .pipe(gulpif(enabled.liveReload, livereload()));
});


/* Templates - Jade */
gulp.task('templates', function() {
    gulp.src('src/templates/**/*.jade')
        .pipe(gulpif(enabled.failCheck, plumber()))
        .pipe(cache('templating'))
        .pipe(jade({
            pretty: enabled.prettyPug
        }))
        .pipe(gulp.dest('pillar/web/templates/'))
        .pipe(gulpif(enabled.liveReload, livereload()));
});


/* Individual Uglified Scripts */
gulp.task('scripts', function() {
    gulp.src('src/scripts/*.js')
        .pipe(gulpif(enabled.failCheck, plumber()))
        .pipe(cache('scripting'))
        .pipe(gulpif(enabled.maps, sourcemaps.init()))
        .pipe(gulpif(enabled.uglify, uglify()))
        .pipe(rename({suffix: '.min'}))
        .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
        .pipe(chmod(644))
        .pipe(gulp.dest('pillar/web/static/assets/js/'))
        .pipe(gulpif(enabled.liveReload, livereload()));
});


/* Collection of scripts in src/scripts/tutti/ to merge into tutti.min.js */
/* Since it's always loaded, it's only for functions that we want site-wide */
gulp.task('scripts_concat_tutti', function() {
    gulp.src('src/scripts/tutti/**/*.js')
        .pipe(gulpif(enabled.failCheck, plumber()))
        .pipe(gulpif(enabled.maps, sourcemaps.init()))
        .pipe(concat("tutti.min.js"))
        .pipe(gulpif(enabled.uglify, uglify()))
        .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
        .pipe(chmod(644))
        .pipe(gulp.dest('pillar/web/static/assets/js/'))
        .pipe(gulpif(enabled.liveReload, livereload()));
});

gulp.task('scripts_concat_markdown', function() {
    gulp.src('src/scripts/markdown/**/*.js')
        .pipe(gulpif(enabled.failCheck, plumber()))
        .pipe(gulpif(enabled.maps, sourcemaps.init()))
        .pipe(concat("markdown.min.js"))
        .pipe(gulpif(enabled.uglify, uglify()))
        .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
        .pipe(chmod(644))
        .pipe(gulp.dest('pillar/web/static/assets/js/'))
        .pipe(gulpif(enabled.liveReload, livereload()));
});


// While developing, run 'gulp watch'
gulp.task('watch', function() {
    // Only listen for live reloads if ran with --livereload
    if (argv.livereload) {
        livereload.listen();
    }

    gulp.watch('src/styles/**/*.sass', ['styles']);
    gulp.watch('src/templates/**/*.jade', ['templates']);
    gulp.watch('src/scripts/*.js', ['scripts']);
    gulp.watch('src/scripts/tutti/**/*.js', ['scripts_concat_tutti']);
    gulp.watch('src/scripts/markdown/**/*.js', ['scripts_concat_markdown']);
});


// Run 'gulp' to build everything at once
gulp.task('default', ['styles', 'templates', 'scripts', 'scripts_concat_tutti', 'scripts_concat_markdown']);
package.json (new file, 25 lines)
@@ -0,0 +1,25 @@
{
    "name": "pillar",
    "repository": {
        "type": "git",
        "url": "https://github.com/armadillica/pillar.git"
    },
    "author": "Blender Institute",
    "license": "GPL",
    "devDependencies": {
        "gulp": "~3.9.1",
        "gulp-autoprefixer": "~2.3.1",
        "gulp-cached": "~1.1.0",
        "gulp-chmod": "~1.3.0",
        "gulp-concat": "~2.6.0",
        "gulp-if": "^2.0.1",
        "gulp-jade": "~1.1.0",
        "gulp-livereload": "~3.8.1",
        "gulp-plumber": "~1.1.0",
        "gulp-rename": "~1.2.2",
        "gulp-sass": "~2.3.1",
        "gulp-sourcemaps": "~1.6.0",
        "gulp-uglify": "~1.5.3",
        "minimist": "^1.2.0"
    }
}
pillar/__init__.py (new file, 564 lines)
@@ -0,0 +1,564 @@
"""Pillar server."""

import collections
import copy
import json
import logging
import logging.config
import subprocess
import tempfile
import os
import os.path

import jinja2
from eve import Eve
import flask
from flask import render_template, request
from flask.templating import TemplateNotFound

from pillar.api import custom_field_validation
from pillar.api.utils import authentication
import pillar.web.jinja

from . import api
from . import web
from . import auth

empty_settings = {
    # Use a random URL prefix when booting Eve, to ensure that any
    # Flask route that's registered *before* we load our own config
    # won't interfere with Pillar itself.
    'URL_PREFIX': 'pieQui4vah9euwieFai6naivaV4thahchoochiiwazieBe5o',
    'DOMAIN': {},
}


class PillarServer(Eve):
    def __init__(self, app_root, **kwargs):
        kwargs.setdefault('validator', custom_field_validation.ValidateCustomFields)
        super(PillarServer, self).__init__(settings=empty_settings, **kwargs)

        # mapping from extension name to extension object.
        self.pillar_extensions = collections.OrderedDict()
        self.pillar_extensions_template_paths = []  # list of paths

        self.app_root = os.path.abspath(app_root)
        self._load_flask_config()
        self._config_logging()

        self.log = logging.getLogger('%s.%s' % (__name__, self.__class__.__name__))
        self.log.info('Creating new instance from %r', self.app_root)

        self._config_tempdirs()
        self._config_git()
        self._config_bugsnag()
        self._config_google_cloud_storage()

        self.algolia_index_users = None
        self.algolia_index_nodes = None
        self.algolia_client = None
        self._config_algolia()

        self.encoding_service_client = None
        self._config_encoding_backend()

        try:
            self.settings = os.environ['EVE_SETTINGS']
        except KeyError:
            self.settings = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                         'api', 'eve_settings.py')
        # self.settings = self.config['EVE_SETTINGS_PATH']
        self.load_config()

        # Configure authentication
        self.login_manager = auth.config_login_manager(self)
        self.oauth_blender_id = auth.config_oauth_login(self)

        self._config_caching()

        self.before_first_request(self.setup_db_indices)

    def _load_flask_config(self):
        # Load configuration from different sources, to make it easy to override
        # settings with secrets, as well as for development & testing.
        self.config.from_pyfile(os.path.join(os.path.dirname(__file__), 'config.py'), silent=False)
        self.config.from_pyfile(os.path.join(self.app_root, 'config.py'), silent=True)
        self.config.from_pyfile(os.path.join(self.app_root, 'config_local.py'), silent=True)
        from_envvar = os.environ.get('PILLAR_CONFIG')
        if from_envvar:
            # Don't use from_envvar, as we want different behaviour. If the envvar
            # is not set, it's fine (i.e. silent=True), but if it is set and the
            # configfile doesn't exist, it should error out (i.e. silent=False).
            self.config.from_pyfile(from_envvar, silent=False)

    def _config_logging(self):
        # Configure logging
        logging.config.dictConfig(self.config['LOGGING'])
        log = logging.getLogger(__name__)
        if self.config['DEBUG']:
            log.info('Pillar starting, debug=%s', self.config['DEBUG'])

    def _config_tempdirs(self):
        storage_dir = self.config['STORAGE_DIR']
        if not os.path.exists(storage_dir):
            self.log.info('Creating storage directory %r', storage_dir)
            os.makedirs(storage_dir)

        # Set the TMP environment variable to manage where uploads are stored.
        # These are all used by tempfile.mkstemp(), but we don't know in which
        # order. As such, we remove all used variables but the one we set.
        tempfile.tempdir = storage_dir
        os.environ['TMP'] = storage_dir
        os.environ.pop('TEMP', None)
        os.environ.pop('TMPDIR', None)

    def _config_git(self):
        # Get the Git hash
        try:
            git_cmd = ['git', '-C', self.app_root, 'describe', '--always']
            description = subprocess.check_output(git_cmd)
            self.config['GIT_REVISION'] = description.strip()
        except (subprocess.CalledProcessError, OSError) as ex:
            self.log.warning('Unable to run "git describe" to get git revision: %s', ex)
            self.config['GIT_REVISION'] = 'unknown'
        self.log.info('Git revision %r', self.config['GIT_REVISION'])

    def _config_bugsnag(self):
        # Configure Bugsnag
        if self.config.get('TESTING') or not self.config.get('BUGSNAG_API_KEY'):
            self.log.info('Bugsnag NOT configured.')
            return

        import bugsnag
        from bugsnag.flask import handle_exceptions
        from bugsnag.handlers import BugsnagHandler

        bugsnag.configure(
            api_key=self.config['BUGSNAG_API_KEY'],
            project_root="/data/git/pillar/pillar",
        )
        handle_exceptions(self)

        bs_handler = BugsnagHandler()
        bs_handler.setLevel(logging.ERROR)
        self.log.addHandler(bs_handler)

    def _config_google_cloud_storage(self):
        # Google Cloud project
        try:
            os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = \
                self.config['GCLOUD_APP_CREDENTIALS']
        except KeyError:
            raise SystemExit('GCLOUD_APP_CREDENTIALS configuration is missing')

        # Storage backend (GCS)
        try:
            os.environ['GCLOUD_PROJECT'] = self.config['GCLOUD_PROJECT']
        except KeyError:
            raise SystemExit('GCLOUD_PROJECT configuration value is missing')

    def _config_algolia(self):
        # Algolia search
        if self.config['SEARCH_BACKEND'] != 'algolia':
            return

        from algoliasearch import algoliasearch

        client = algoliasearch.Client(self.config['ALGOLIA_USER'],
                                      self.config['ALGOLIA_API_KEY'])
        self.algolia_client = client
        self.algolia_index_users = client.init_index(self.config['ALGOLIA_INDEX_USERS'])
        self.algolia_index_nodes = client.init_index(self.config['ALGOLIA_INDEX_NODES'])

    def _config_encoding_backend(self):
        # Encoding backend
        if self.config['ENCODING_BACKEND'] != 'zencoder':
            return

        from zencoder import Zencoder
        self.encoding_service_client = Zencoder(self.config['ZENCODER_API_KEY'])

    def _config_caching(self):
        from flask_cache import Cache
        self.cache = Cache(self)

    def load_extension(self, pillar_extension, url_prefix):
        from .extension import PillarExtension

        if not isinstance(pillar_extension, PillarExtension):
            if self.config.get('DEBUG'):
                for cls in type(pillar_extension).mro():
                    self.log.error('class %42r (%i) is %42r (%i): %s',
                                   cls, id(cls), PillarExtension, id(PillarExtension),
                                   cls is PillarExtension)
            raise AssertionError('Extension has wrong type %r' % type(pillar_extension))
        self.log.info('Loading extension %s', pillar_extension.name)

        # Remember this extension, and disallow duplicates.
        if pillar_extension.name in self.pillar_extensions:
            raise ValueError('Extension with name %s already loaded', pillar_extension.name)
        self.pillar_extensions[pillar_extension.name] = pillar_extension

        # Load extension Flask configuration
        for key, value in pillar_extension.flask_config():
            self.config.setdefault(key, value)

        # Load extension blueprint(s)
        for blueprint in pillar_extension.blueprints():
            if blueprint.url_prefix:
                blueprint_prefix = url_prefix + blueprint.url_prefix
            else:
                blueprint_prefix = url_prefix
            self.register_blueprint(blueprint, url_prefix=blueprint_prefix)

        # Load template paths
        tpath = pillar_extension.template_path
        if tpath:
            self.log.info('Extension %s: adding template path %s',
                          pillar_extension.name, tpath)
            if not os.path.exists(tpath):
                raise ValueError('Template path %s for extension %s does not exist.',
                                 tpath, pillar_extension.name)
            self.pillar_extensions_template_paths.append(tpath)

        # Load extension Eve settings
        eve_settings = pillar_extension.eve_settings()

        if 'DOMAIN' in eve_settings:
            pillar_ext_prefix = pillar_extension.name + '_'
            pillar_url_prefix = pillar_extension.name + '/'
            for key, collection in eve_settings['DOMAIN'].items():
                assert key.startswith(pillar_ext_prefix), \
                    'Eve collection names of %s MUST start with %r' % \
                    (pillar_extension.name, pillar_ext_prefix)
                url = key.replace(pillar_ext_prefix, pillar_url_prefix)

                collection.setdefault('datasource', {}).setdefault('source', key)
                collection.setdefault('url', url)

            self.config['DOMAIN'].update(eve_settings['DOMAIN'])

    def _config_jinja_env(self):
        # Start with the extensions...
        paths_list = [
            jinja2.FileSystemLoader(path)
            for path in reversed(self.pillar_extensions_template_paths)
        ]

        # ...then load Pillar paths.
        pillar_dir = os.path.dirname(os.path.realpath(__file__))
        parent_theme_path = os.path.join(pillar_dir, 'web', 'templates')
        current_path = os.path.join(self.app_root, 'templates')
        paths_list += [
            jinja2.FileSystemLoader(current_path),
            jinja2.FileSystemLoader(parent_theme_path),
            self.jinja_loader
        ]
        # Set up a custom loader, so that Jinja searches for a theme file first
        # in the current theme dir, and if it fails it searches in the default
        # location.
        custom_jinja_loader = jinja2.ChoiceLoader(paths_list)
        self.jinja_loader = custom_jinja_loader

        pillar.web.jinja.setup_jinja_env(self.jinja_env)

    def _config_static_dirs(self):
        # Setup static folder for the instanced app
        self.static_folder = os.path.join(self.app_root, 'static')

        # Setup static folder for Pillar
        pillar_dir = os.path.dirname(os.path.realpath(__file__))
        pillar_static_folder = os.path.join(pillar_dir, 'web', 'static')
        self.register_static_file_endpoint('/static/pillar', 'static_pillar', pillar_static_folder)

        # Setup static folders for extensions
        for name, ext in self.pillar_extensions.items():
            if not ext.static_path:
                continue
            self.register_static_file_endpoint('/static/%s' % name,
                                               'static_%s' % name,
                                               ext.static_path)

    def register_static_file_endpoint(self, url_prefix, endpoint_name, static_folder):
        from pillar.web.static import PillarStaticFile

        view_func = PillarStaticFile.as_view(endpoint_name, static_folder=static_folder)
        self.add_url_rule('%s/<path:filename>' % url_prefix, view_func=view_func)

    def process_extensions(self):
        # Re-initialise Eve after we allowed Pillar submodules to be loaded.
        # EVIL STARTS HERE. It just copies part of the Eve.__init__() method.
        self.set_defaults()
        self.validate_config()
        self.validate_domain_struct()

        self._init_url_rules()
        self._init_media_endpoint()
        self._init_schema_endpoint()

        if self.config['OPLOG'] is True:
            self._init_oplog()

        domain_copy = copy.deepcopy(self.config['DOMAIN'])
        for resource, settings in domain_copy.items():
            self.register_resource(resource, settings)

        self.register_error_handlers()
        # EVIL ENDS HERE. No guarantees, though.

        self.finish_startup()

    def register_error_handlers(self):
        super(PillarServer, self).register_error_handlers()

        # Register error handlers per code.
        for code in (403, 404, 412, 500):
            self.register_error_handler(code, self.pillar_error_handler)

        # Register error handlers per exception.
        from pillarsdk import exceptions as sdk_exceptions

        sdk_handlers = [
            (sdk_exceptions.UnauthorizedAccess, self.handle_sdk_unauth),
            (sdk_exceptions.ForbiddenAccess, self.handle_sdk_forbidden),
            (sdk_exceptions.ResourceNotFound, self.handle_sdk_resource_not_found),
            (sdk_exceptions.ResourceInvalid, self.handle_sdk_resource_invalid),
            (sdk_exceptions.MethodNotAllowed, self.handle_sdk_method_not_allowed),
            (sdk_exceptions.PreconditionFailed, self.handle_sdk_precondition_failed),
        ]

        for (eclass, handler) in sdk_handlers:
            self.register_error_handler(eclass, handler)

    def handle_sdk_unauth(self, error):
        """Global exception handling for pillarsdk UnauthorizedAccess
        Currently the api is fully locked down so we need to constantly
        check for user authorization.
        """

        return flask.redirect(flask.url_for('users.login'))

    def handle_sdk_forbidden(self, error):
        self.log.info('Forwarding ForbiddenAccess exception to client: %s', error, exc_info=True)
        error.code = 403
        return self.pillar_error_handler(error)

    def handle_sdk_resource_not_found(self, error):
        self.log.info('Forwarding ResourceNotFound exception to client: %s', error, exc_info=True)

        content = getattr(error, 'content', None)
        if content:
            try:
                error_content = json.loads(content)
            except ValueError:
                error_content = None

            if error_content and error_content.get('_deleted', False):
                # This document used to exist, but doesn't any more. Let the user know.
                doc_name = error_content.get('name')
                node_type = error_content.get('node_type')
                if node_type:
                    node_type = node_type.replace('_', ' ').title()
                    if doc_name:
                        description = u'%s "%s" was deleted.' % (node_type, doc_name)
                    else:
                        description = u'This %s was deleted.' % (node_type, )
                else:
                    if doc_name:
                        description = u'"%s" was deleted.' % doc_name
                    else:
                        description = None

                error.description = description

        error.code = 404
        return self.pillar_error_handler(error)

    def handle_sdk_precondition_failed(self, error):
        self.log.info('Forwarding PreconditionFailed exception to client: %s', error)

        error.code = 412
        return self.pillar_error_handler(error)

    def handle_sdk_resource_invalid(self, error):
        self.log.info('Forwarding ResourceInvalid exception to client: %s', error, exc_info=True)

        # Raising a Werkzeug 422 exception doesn't work, as Flask turns it into a 500.
        return 'The submitted data could not be validated.', 422

    def handle_sdk_method_not_allowed(self, error):
        """Forwards 405 Method Not Allowed to the client.

        This is actually not fair, as a 405 between Pillar and Pillar-Web
        doesn't imply that the request the client did on Pillar-Web is not
        allowed. However, it does allow us to debug this if it happens, by
        watching for 405s in the browser.
        """
        from flask import request

        self.log.info('Forwarding MethodNotAllowed exception to client: %s', error, exc_info=True)
        self.log.info('HTTP Referer is %r', request.referrer)

        # Raising a Werkzeug 405 exception doesn't work, as Flask turns it into a 500.
        return 'The requested HTTP method is not allowed on this URL.', 405

    def pillar_error_handler(self, error_ob):

        # 'error_ob' can be any exception. If it's not a Werkzeug exception,
        # handle it as a 500.
        if not hasattr(error_ob, 'code'):
            error_ob.code = 500
        if not hasattr(error_ob, 'description'):
            error_ob.description = str(error_ob)

        if request.full_path.startswith('/%s/' % self.config['URL_PREFIX']):
            from pillar.api.utils import jsonify
            # This is an API request, so respond in JSON.
            return jsonify({
                '_status': 'ERR',
                '_code': error_ob.code,
                '_message': error_ob.description,
            }, status=error_ob.code)

        # See whether we should return an embedded page or a regular one.
        if request.is_xhr:
            fname = 'errors/%i_embed.html' % error_ob.code
        else:
            fname = 'errors/%i.html' % error_ob.code

        # Also handle the case where we didn't create a template for this error.
        try:
            return render_template(fname, description=error_ob.description), error_ob.code
        except TemplateNotFound:
            self.log.warning('Error template %s for code %i not found',
                             fname, error_ob.code)
            return render_template('errors/500.html'), error_ob.code

    def finish_startup(self):
        self.log.info('Using MongoDB database %r', self.config['MONGO_DBNAME'])

        api.setup_app(self)
        web.setup_app(self)
        authentication.setup_app(self)

        for ext in self.pillar_extensions.itervalues():
            self.log.info('Setting up extension %s', ext.name)
            ext.setup_app(self)

        self._config_jinja_env()
        self._config_static_dirs()

        # Only enable this when debugging.
        # self._list_routes()

    def setup_db_indices(self):
        """Adds missing database indices.

        This does NOT drop and recreate existing indices,
        nor does it reconfigure existing indices.
        If you want that, drop them manually first.
        """

        self.log.debug('Adding any missing database indices.')

        import pymongo

        db = self.data.driver.db

        coll = db['tokens']
        coll.create_index([('user', pymongo.ASCENDING)])
        coll.create_index([('token', pymongo.ASCENDING)])

        coll = db['notifications']
        coll.create_index([('user', pymongo.ASCENDING)])

        coll = db['activities-subscriptions']
        coll.create_index([('context_object', pymongo.ASCENDING)])

        coll = db['nodes']
        # This index is used for queries on project, and for queries on
        # the combination (project, node type).
        coll.create_index([('project', pymongo.ASCENDING),
                           ('node_type', pymongo.ASCENDING)])
        coll.create_index([('parent', pymongo.ASCENDING)])
        coll.create_index([('short_code', pymongo.ASCENDING)],
                          sparse=True, unique=True)

    def register_api_blueprint(self, blueprint, url_prefix):
        # TODO: use Eve config variable instead of hard-coded '/api'
        self.register_blueprint(blueprint, url_prefix='/api' + url_prefix)

    def make_header(self, username, subclient_id=''):
        """Returns a Basic HTTP Authentication header value."""
        import base64

        return 'basic ' + base64.b64encode('%s:%s' % (username, subclient_id))

    def post_internal(self, resource, payl=None, skip_validation=False):
        """Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
        from eve.methods.post import post_internal

        with self.test_request_context(method='POST', path='%s/%s' % (self.api_prefix, resource)):
            return post_internal(resource, payl=payl, skip_validation=skip_validation)

    def put_internal(self, resource, payload=None, concurrency_check=False,
                     skip_validation=False, **lookup):
        """Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
        from eve.methods.put import put_internal

        path = '%s/%s/%s' % (self.api_prefix, resource, lookup['_id'])
        with self.test_request_context(method='PUT', path=path):
            return put_internal(resource, payload=payload, concurrency_check=concurrency_check,
                                skip_validation=skip_validation, **lookup)

    def patch_internal(self, resource, payload=None, concurrency_check=False,
                       skip_validation=False, **lookup):
        """Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
        from eve.methods.patch import patch_internal

        path = '%s/%s/%s' % (self.api_prefix, resource, lookup['_id'])
        with self.test_request_context(method='PATCH', path=path):
            return patch_internal(resource, payload=payload, concurrency_check=concurrency_check,
                                  skip_validation=skip_validation, **lookup)

    def _list_routes(self):
        from pprint import pprint
        from flask import url_for

        def has_no_empty_params(rule):
            defaults = rule.defaults if rule.defaults is not None else ()
            arguments = rule.arguments if rule.arguments is not None else ()
            return len(defaults) >= len(arguments)

        links = []
        with self.test_request_context():
            for rule in self.url_map.iter_rules():
                # Filter out rules we can't navigate to in a browser
                # and rules that require parameters
                if "GET" in rule.methods and has_no_empty_params(rule):
                    url = url_for(rule.endpoint, **(rule.defaults or {}))
                    links.append((url, rule.endpoint))

        links.sort(key=lambda t: len(t[0]) + 100 * ('/api/' in t[0]))

        pprint(links)

    def db(self):
        """Returns the MongoDB database.

        :rtype: flask_pymongo.PyMongo
        """

        return self.data.driver.db

    def extension_sidebar_links(self, project):
        """Returns the sidebar links for the given project.

        :returns: HTML as a string for the sidebar.
        """

        if not project:
            return ''

        return jinja2.Markup(''.join(ext.sidebar_links(project)
                                     for ext in self.pillar_extensions.values()))
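
PillarServer above is the application entry point introduced in this range. As a minimal, hypothetical boot sketch (the project path, port, and script name are illustrative assumptions, not taken from the diff):

    # boot.py -- hypothetical launcher for an application built on PillarServer.
    from pillar import PillarServer

    # app_root is the project directory that holds config.py / config_local.py.
    app = PillarServer('/data/git/myproject')
    app.process_extensions()  # completes the deferred Eve setup shown above

    if __name__ == '__main__':
        # PillarServer extends Eve, which extends Flask, so the dev server works.
        app.run(host='127.0.0.1', port=5000)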
pillar/api/__init__.py (new file, 15 lines)
@@ -0,0 +1,15 @@
def setup_app(app):
    from . import encoding, blender_id, projects, local_auth, file_storage
    from . import users, nodes, latest, blender_cloud, service, activities

    encoding.setup_app(app, url_prefix='/encoding')
    blender_id.setup_app(app, url_prefix='/blender_id')
    projects.setup_app(app, api_prefix='/p')
    local_auth.setup_app(app, url_prefix='/auth')
    file_storage.setup_app(app, url_prefix='/storage')
    latest.setup_app(app, url_prefix='/latest')
    blender_cloud.setup_app(app, url_prefix='/bcloud')
    users.setup_app(app, api_prefix='/users')
    service.setup_app(app, api_prefix='/service')
    nodes.setup_app(app, url_prefix='/nodes')
    activities.setup_app(app)
@@ -1,7 +1,9 @@
from flask import g
from flask import current_app
from eve.methods.post import post_internal
from application.modules.users import gravatar
import logging

from flask import g, request, current_app
from pillar.api.utils import gravatar

log = logging.getLogger(__name__)


def notification_parse(notification):
@@ -15,6 +17,11 @@ def notification_parse(notification):
    if activity is None or activity['object_type'] != 'node':
        return
    node = nodes_collection.find_one({'_id': activity['object']})
    if not node:
        # This can happen when a notification is generated and then the
        # node is deleted.
        return

    # Initial support only for node_type comments
    if node['node_type'] != 'comment':
        return
@@ -111,7 +118,7 @@ def activity_subscribe(user_id, context_object_type, context_object_id):

    # If no subscription exists, we create one
    if not subscription:
        post_internal('activities-subscriptions', lookup)
        current_app.post_internal('activities-subscriptions', lookup)


def activity_object_add(actor_user_id, verb, object_type, object_id,
@@ -133,22 +140,82 @@ def activity_object_add(actor_user_id, verb, object_type, object_id,
    subscriptions = notification_get_subscriptions(
        context_object_type, context_object_id, actor_user_id)

    if subscriptions.count() > 0:
        activity = dict(
            actor_user=actor_user_id,
            verb=verb,
            object_type=object_type,
            object=object_id,
            context_object_type=context_object_type,
            context_object=context_object_id
        )
    if subscriptions.count() == 0:
        return

        activity = post_internal('activities', activity)
        if activity[3] != 201:
            # If creation failed for any reason, do not create any notification
            return
        for subscription in subscriptions:
            notification = dict(
                user=subscription['user'],
                activity=activity[0]['_id'])
            post_internal('notifications', notification)
    info, status = register_activity(actor_user_id, verb, object_type, object_id,
                                     context_object_type, context_object_id)
    if status != 201:
        # If creation failed for any reason, do not create any notification
        return

    for subscription in subscriptions:
        notification = dict(
            user=subscription['user'],
            activity=info['_id'])
        current_app.post_internal('notifications', notification)


def register_activity(actor_user_id, verb, object_type, object_id,
                      context_object_type, context_object_id,
                      project_id=None,
                      node_type=None):
    """Registers an activity.

    This works using the following pattern:

    ACTOR -> VERB -> OBJECT -> CONTEXT

    :param actor_user_id: id of the user who is changing the object
    :param verb: the action on the object ('commented', 'replied')
    :param object_type: hardcoded name, see database schema
    :param object_id: object id, to be traced with object_type
    :param context_object_type: the type of the context object, like 'project' or 'node',
        see database schema
    :param context_object_id:
    :param project_id: optional project ID to make the activity easily queryable
        per project.
    :param node_type: optional, node type of the node receiving the activity.

    :returns: tuple (info, status_code), where a successful operation should have
        status_code=201. If it is not 201, a warning is logged.
    """

    activity = {
        'actor_user': actor_user_id,
        'verb': verb,
        'object_type': object_type,
        'object': object_id,
        'context_object_type': context_object_type,
        'context_object': context_object_id}
    if project_id:
        activity['project'] = project_id
    if node_type:
        activity['node_type'] = node_type

    info, _, _, status_code = current_app.post_internal('activities', activity)

    if status_code != 201:
        log.error('register_activity: code %i creating activity %s: %s',
                  status_code, activity, info)
    else:
        log.info('register_activity: user %s "%s" on %s %s, context %s %s',
                 actor_user_id, verb, object_type, object_id,
                 context_object_type, context_object_id)
    return info, status_code


def before_returning_item_notifications(response):
    if request.args.get('parse'):
        notification_parse(response)


def before_returning_resource_notifications(response):
    for item in response['_items']:
        if request.args.get('parse'):
            notification_parse(item)


def setup_app(app):
    app.on_fetched_item_notifications += before_returning_item_notifications
    app.on_fetched_resource_notifications += before_returning_resource_notifications
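
As a hedged illustration of the new register_activity() helper above (the ObjectId variables are hypothetical placeholders, not from the diff):

    # Record that a user commented on a node. A 201 status means the
    # activity document was created; failures are already logged inside.
    info, status = register_activity(
        actor_user_id=user_oid, verb='commented',
        object_type='node', object_id=comment_oid,
        context_object_type='node', context_object_id=parent_oid,
        project_id=project_oid, node_type='comment')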
@@ -1,17 +1,15 @@
import copy
import logging
import datetime

import datetime
from bson import ObjectId, tz_util
from eve.methods.post import post_internal
from eve.methods.put import put_internal
from eve.methods.get import get
from flask import Blueprint, g, current_app, request
from pillar.api import utils
from pillar.api.utils import authentication, authorization
from werkzeug import exceptions as wz_exceptions

from application.modules import projects
from application import utils
from application.utils import authentication, authorization
from pillar.api.projects import utils as proj_utils

blueprint = Blueprint('blender_cloud.home_project', __name__)
log = logging.getLogger(__name__)
@@ -73,7 +71,7 @@ def create_blender_sync_node(project_id, admin_group_id, user_id):
        }
    }

    r, _, _, status = post_internal('nodes', node)
    r, _, _, status = current_app.post_internal('nodes', node)
    if status != 201:
        log.warning('Unable to create Blender Sync node for home project %s: %s',
                    project_id, r)
@@ -109,9 +107,9 @@ def create_home_project(user_id, write_access):
        project = deleted_proj
    else:
        log.debug('User %s does not have a deleted project', user_id)
        project = projects.create_new_project(project_name='Home',
                                              user_id=ObjectId(user_id),
                                              overrides=overrides)
        project = proj_utils.create_new_project(project_name='Home',
                                                user_id=ObjectId(user_id),
                                                overrides=overrides)

    # Re-validate the authentication token, so that the put_internal call sees the
    # new group created for the project.
@@ -124,10 +122,10 @@ def create_home_project(user_id, write_access):

    # Set up the correct node types. No need to set permissions for them,
    # as the inherited project permissions are fine.
    from manage_extra.node_types.group import node_type_group
    from manage_extra.node_types.asset import node_type_asset
    # from manage_extra.node_types.text import node_type_text
    from manage_extra.node_types.comment import node_type_comment
    from pillar.api.node_types.group import node_type_group
    from pillar.api.node_types.asset import node_type_asset
    # from pillar.api.node_types.text import node_type_text
    from pillar.api.node_types.comment import node_type_comment

    # For non-subscribers: take away write access from the admin group,
    # and grant it to certain node types.
@@ -147,8 +145,8 @@ def create_home_project(user_id, write_access):
        node_type_comment,
    ]

    result, _, _, status = put_internal('projects', utils.remove_private_keys(project),
                                        _id=project['_id'])
    result, _, _, status = current_app.put_internal('projects', utils.remove_private_keys(project),
                                                    _id=project['_id'])
    if status != 200:
        log.error('Unable to update home project %s for user %s: %s',
                  project['_id'], user_id, result)
@@ -166,7 +164,7 @@ def create_home_project(user_id, write_access):
def assign_permissions(node_type, subscriber_methods, world_methods):
    """Assigns permissions to the node type object.

    :param node_type: a node type from manage_extra.node_types.
    :param node_type: a node type from pillar.api.node_types.
    :type node_type: dict
    :param subscriber_methods: allowed HTTP methods for users of role 'subscriber',
        'demo' and 'admin'.
@@ -177,7 +175,7 @@ def assign_permissions(node_type, subscriber_methods, world_methods):
    :rtype: dict
    """

    from application.modules import service
    from pillar.api import service

    nt_with_perms = copy.deepcopy(node_type)

@@ -391,7 +389,7 @@ def user_changed_role(sender, user):

    user_id = user['_id']
    if not has_home_project(user_id):
        log.debug('User %s does not have a home project', user_id)
        log.debug('User %s does not have a home project, not changing access permissions', user_id)
        return

    proj_coll = current_app.data.driver.db['projects']
@@ -414,12 +412,12 @@ def user_changed_role(sender, user):


def setup_app(app, url_prefix):
    app.register_blueprint(blueprint, url_prefix=url_prefix)
    app.register_api_blueprint(blueprint, url_prefix=url_prefix)

    app.on_insert_nodes += check_home_project_nodes_permissions
    app.on_inserted_nodes += mark_parents_as_updated
    app.on_updated_nodes += mark_parent_as_updated
    app.on_replaced_nodes += mark_parent_as_updated

    from application.modules import service
    from pillar.api import service
    service.signal_user_changed_role.connect(user_changed_role)
@@ -1,16 +1,15 @@
import functools
import logging

from flask import Blueprint, request, current_app, g
from eve.methods.get import get
from eve.utils import config as eve_config
from flask import Blueprint, request, current_app, g
from pillar.api import utils
from pillar.api.utils.authentication import current_user_id
from pillar.api.utils.authorization import require_login
from werkzeug.datastructures import MultiDict
from werkzeug.exceptions import InternalServerError

from application import utils
from application.utils.authentication import current_user_id
from application.utils.authorization import require_login

FIRST_ADDON_VERSION_WITH_HDRI = (1, 4, 0)
TL_PROJECTION = utils.dumps({'name': 1, 'url': 1, 'permissions': 1,})
TL_SORT = utils.dumps([('name', 1)])
@@ -144,4 +143,4 @@ def setup_app(app, url_prefix):
    app.on_replace_nodes += sort_by_image_width
    app.on_insert_nodes += sort_nodes_by_image_width

    app.register_blueprint(blueprint, url_prefix=url_prefix)
    app.register_api_blueprint(blueprint, url_prefix=url_prefix)
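Every setup_app() in this changeset now calls app.register_api_blueprint() instead of Flask's register_blueprint(). A hypothetical one-liner showing the likely intent (mounting every API blueprint under a common prefix); the body is an assumption, not Pillar's actual code:

def register_api_blueprint(self, blueprint, url_prefix):
    # Assumed behaviour: nest the blueprint under the API root, so that
    # '/encoding' ends up served as '/api/encoding'.
    self.register_blueprint(blueprint, url_prefix='/api' + url_prefix)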
@@ -5,18 +5,15 @@ with Blender ID.
"""

import logging

import datetime

from bson import tz_util
import requests
from bson import tz_util
from flask import Blueprint, request, current_app, jsonify
from pillar.api.utils import authentication, remove_private_keys
from requests.adapters import HTTPAdapter
from flask import Blueprint, request, current_app, abort, jsonify
from eve.methods.post import post_internal
from eve.methods.put import put_internal
from werkzeug import exceptions as wz_exceptions

from application.utils import authentication, remove_private_keys

blender_id = Blueprint('blender_id', __name__)
log = logging.getLogger(__name__)

@@ -99,15 +96,15 @@ def upsert_user(db_user, blender_id_user_id):
        # Update the existing user
        attempted_eve_method = 'PUT'
        db_id = db_user['_id']
        r, _, _, status = put_internal('users', remove_private_keys(db_user),
                                       _id=db_id)
        r, _, _, status = current_app.put_internal('users', remove_private_keys(db_user),
                                                   _id=db_id)
        if status == 422:
            log.error('Status %i trying to PUT user %s with values %s, should not happen! %s',
                      status, db_id, remove_private_keys(db_user), r)
    else:
        # Create a new user, retry for non-unique usernames.
        attempted_eve_method = 'POST'
        r, _, _, status = post_internal('users', db_user)
        r, _, _, status = current_app.post_internal('users', db_user)

    if status not in {200, 201}:
        log.error('Status %i trying to create user for BlenderID %s with values %s: %s',
@@ -238,3 +235,7 @@ def find_user_in_db(blender_id_user_id, user_info):
        db_user['full_name'] = db_user['username']

    return db_user


def setup_app(app, url_prefix):
    app.register_api_blueprint(blender_id, url_prefix=url_prefix)
pillar/api/custom_field_validation.py (new file, 127 lines)
@@ -0,0 +1,127 @@
import logging

from bson import ObjectId, tz_util
from datetime import datetime, tzinfo
from eve.io.mongo import Validator
from flask import current_app

log = logging.getLogger(__name__)


class ValidateCustomFields(Validator):
    # TODO: split this into a convert_property(property, schema) and call that from this function.
    def convert_properties(self, properties, node_schema):
        """Converts datetime strings and ObjectId strings to actual Python objects."""

        date_format = current_app.config['RFC1123_DATE_FORMAT']

        for prop in node_schema:
            if prop not in properties:
                continue
            schema_prop = node_schema[prop]
            prop_type = schema_prop['type']

            if prop_type == 'dict':
                try:
                    dict_valueschema = schema_prop['schema']
                    properties[prop] = self.convert_properties(properties[prop], dict_valueschema)
                except KeyError:
                    dict_valueschema = schema_prop['valueschema']
                    self.convert_dict_values(properties[prop], dict_valueschema)

            elif prop_type == 'list':
                if properties[prop] in ['', '[]']:
                    properties[prop] = []
                if 'schema' in schema_prop:
                    for k, val in enumerate(properties[prop]):
                        item_schema = {'item': schema_prop['schema']}
                        item_prop = {'item': properties[prop][k]}
                        properties[prop][k] = self.convert_properties(
                            item_prop, item_schema)['item']

            # Convert datetime string to RFC1123 datetime
            elif prop_type == 'datetime':
                prop_val = properties[prop]
                prop_naive = datetime.strptime(prop_val, date_format)
                prop_aware = prop_naive.replace(tzinfo=tz_util.utc)
                properties[prop] = prop_aware

            elif prop_type == 'objectid':
                prop_val = properties[prop]
                if prop_val:
                    properties[prop] = ObjectId(prop_val)
                else:
                    properties[prop] = None

        return properties

    def convert_dict_values(self, dict_property, dict_valueschema):
        """Calls convert_properties() for the values in the dict.

        Only validates the dict values, not the keys. Modifies the given dict in-place.
        """

        assert dict_valueschema[u'type'] == u'dict'
        assert isinstance(dict_property, dict)

        for key, val in dict_property.items():
            item_schema = {u'item': dict_valueschema}
            item_prop = {u'item': val}
            dict_property[key] = self.convert_properties(item_prop, item_schema)[u'item']

    def _validate_valid_properties(self, valid_properties, field, value):
        from pillar.api.utils import project_get_node_type

        projects_collection = current_app.data.driver.db['projects']
        lookup = {'_id': ObjectId(self.document['project'])}

        project = projects_collection.find_one(lookup, {
            'node_types.name': 1,
            'node_types.dyn_schema': 1,
        })
        if project is None:
            log.warning('Unknown project %s, declared by node %s',
                        lookup, self.document.get('_id'))
            self._error(field, 'Unknown project')
            return False

        node_type_name = self.document['node_type']
        node_type = project_get_node_type(project, node_type_name)
        if node_type is None:
            log.warning('Project %s has no node type %s, declared by node %s',
                        project, node_type_name, self.document.get('_id'))
            self._error(field, 'Unknown node type')
            return False

        try:
            value = self.convert_properties(value, node_type['dyn_schema'])
        except Exception as e:
            log.warning("Error converting form properties", exc_info=True)

        v = self.__class__(schema=node_type['dyn_schema'])
        val = v.validate(value)

        if val:
            return True

        log.warning('Error validating properties for node %s: %s', self.document, v.errors)
        self._error(field, "Error validating properties")

    def _validate_required_after_creation(self, required_after_creation, field, value):
        """Makes a value required after creation only.

        Combine "required_after_creation=True" with "required=False" to allow
        pre-insert hooks to set default values.
        """

        if not required_after_creation:
            # Setting required_after_creation=False is the same as not mentioning this
            # validator at all.
            return

        if self._id is None:
            # This is a creation call, in which case this validator shouldn't run.
            return

        if not value:
            self._error(field, "Value is required once the document was created")
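ValidateCustomFields.convert_properties() turns the strings a web form submits into real Python objects before validation. The same conversion in isolation; the RFC1123_DATE_FORMAT value and the ObjectId below are assumed/illustrative:

from datetime import datetime

from bson import ObjectId, tz_util

RFC1123_DATE_FORMAT = '%a, %d %b %Y %H:%M:%S GMT'  # assumed config value

properties = {'when': 'Mon, 11 Jul 2016 12:00:00 GMT',
              'other_node': '572761099837730efe8e120d'}
schema = {'when': {'type': 'datetime'},
          'other_node': {'type': 'objectid'}}

for name, rule in schema.items():
    if rule['type'] == 'datetime':
        # Parse the RFC1123 string, then make the naive datetime UTC-aware.
        naive = datetime.strptime(properties[name], RFC1123_DATE_FORMAT)
        properties[name] = naive.replace(tzinfo=tz_util.utc)
    elif rule['type'] == 'objectid':
        properties[name] = ObjectId(properties[name]) if properties[name] else None

assert properties['when'].tzinfo is not None
assert properties['other_node'].generation_time  # a real ObjectId now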
@@ -2,16 +2,14 @@ import logging

import datetime
import os

from bson import ObjectId, tz_util
from eve.methods.put import put_internal
from flask import Blueprint
from flask import abort
from flask import request
from flask import current_app
from application import utils
from application.utils import skip_when_testing
from application.utils.gcs import GoogleCloudStorageBucket
from flask import request
from pillar.api import utils
from pillar.api.utils.gcs import GoogleCloudStorageBucket
from pillar.api.utils import skip_when_testing

encoding = Blueprint('encoding', __name__)
log = logging.getLogger(__name__)
@@ -115,7 +113,7 @@ def zencoder_notifications():
            log.info(' %s: %s', key, output[key])

        file_doc['status'] = 'failed'
        put_internal('files', file_doc, _id=file_id)
        current_app.put_internal('files', file_doc, _id=file_id)
        return "You failed, but that's okay.", 200

    log.info('Zencoder job %s for file %s completed with status %s.', zencoder_job_id, file_id,
@@ -171,6 +169,10 @@ def zencoder_notifications():
    # Force an update of the links on the next load of the file.
    file_doc['link_expires'] = datetime.datetime.now(tz=tz_util.utc) - datetime.timedelta(days=1)

    put_internal('files', file_doc, _id=file_id)
    current_app.put_internal('files', file_doc, _id=file_id)

    return '', 204


def setup_app(app, url_prefix):
    app.register_api_blueprint(encoding, url_prefix=url_prefix)
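The hunk above backdates link_expires before saving, which forces link regeneration the next time the file is loaded. The trick in isolation, on an illustrative document:

import datetime

from bson import tz_util

file_doc = {'link_expires': None}  # illustrative file document

# Set the expiry a day in the past; the link-checking code then regenerates.
file_doc['link_expires'] = (datetime.datetime.now(tz=tz_util.utc)
                            - datetime.timedelta(days=1))
assert file_doc['link_expires'] < datetime.datetime.now(tz=tz_util.utc)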
@@ -1,5 +1,7 @@
import os

URL_PREFIX = 'api'

# Enable reads (GET), inserts (POST) and DELETE for resources/collections
# (if you omit this line, the API will default to ['GET'] and provide
# read-only access to the endpoint).
@@ -119,6 +121,7 @@ users_schema = {
    },
    'service': {
        'type': 'dict',
        'allow_unknown': True,
        'schema': {
            'badger': {
                'type': 'list',
@@ -375,14 +378,15 @@ files_schema = {
    },
    'length_aggregate_in_bytes': {  # Size of file + all variations
        'type': 'integer',
        'required': False,  # it's computed on the fly anyway, so clients don't need to provide it.
        'required': False,
        # it's computed on the fly anyway, so clients don't need to provide it.
    },
    'md5': {
        'type': 'string',
        'required': True,
    },

    # Original filename as given by the user, possibly cleaned-up to make it safe.
    # Original filename as given by the user, cleaned-up to make it safe.
    'filename': {
        'type': 'string',
        'required': True,
@@ -620,7 +624,16 @@ projects_schema = {
        'permissions': {
            'type': 'dict',
            'schema': permissions_embedded_schema
        }
    },

    # Properties defined by extensions. Extensions should use their name
    # (see the PillarExtension.name property) as the key, and are free to
    # use whatever they want as value (but we suggest a dict for future
    # extendability).
    'extension_props': {
        'type': 'dict',
        'required': False,
    },
}

activities_subscriptions_schema = {
@@ -664,6 +677,19 @@ activities_schema = {
        'type': 'objectid',
        'required': True
    },
    'project': {
        'type': 'objectid',
        'data_relation': {
            'resource': 'projects',
            'field': '_id',
        },
        'required': False,
    },
    # If the object type is 'node', the node type can be stored here.
    'node_type': {
        'type': 'string',
        'required': False,
    }
}

notifications_schema = {
@@ -692,7 +718,7 @@ users = {
    'cache_expires': 10,

    'resource_methods': ['GET'],
    'item_methods': ['GET', 'PUT'],
    'item_methods': ['GET', 'PUT', 'PATCH'],
    'public_item_methods': ['GET'],

    # By default don't include the 'auth' field. It can still be obtained
@@ -713,6 +739,7 @@ tokens = {

files = {
    'resource_methods': ['GET', 'POST'],
    'item_methods': ['GET', 'PATCH'],
    'public_methods': ['GET'],
    'public_item_methods': ['GET'],
    'schema': files_schema
@@ -763,9 +790,9 @@ DOMAIN = {
    'notifications': notifications
}

MONGO_HOST = os.environ.get('MONGO_HOST', 'localhost')
MONGO_PORT = os.environ.get('MONGO_PORT', 27017)
MONGO_DBNAME = os.environ.get('MONGO_DBNAME', 'eve')
MONGO_HOST = os.environ.get('PILLAR_MONGO_HOST', 'localhost')
MONGO_PORT = int(os.environ.get('PILLAR_MONGO_PORT', 27017))
MONGO_DBNAME = os.environ.get('PILLAR_MONGO_DBNAME', 'eve')
CACHE_EXPIRES = 60
HATEOAS = False
UPSERT_ON_PUT = False  # do not create a new document on PUT of a non-existent URL.
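The MONGO_* settings are now namespaced with a PILLAR_ prefix, and the port gains an int() cast. The cast matters because environment variables are always strings, while pymongo expects an integer port:

import os

os.environ['PILLAR_MONGO_PORT'] = '27017'  # what a shell export provides

MONGO_PORT = int(os.environ.get('PILLAR_MONGO_PORT', 27017))
assert MONGO_PORT == 27017 and isinstance(MONGO_PORT, int)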
@@ -1,37 +1,32 @@
import datetime
import io
import logging
import mimetypes
import os
import tempfile
import uuid
import io
from hashlib import md5

import os
import requests
import bson.tz_util
import datetime
import eve.utils
import pymongo
import werkzeug.exceptions as wz_exceptions
from bson import ObjectId
from bson.errors import InvalidId
from eve.methods.patch import patch_internal
from eve.methods.post import post_internal
from eve.methods.put import put_internal
from flask import Blueprint
from flask import current_app
from flask import g
from flask import jsonify
from flask import request
from flask import send_from_directory
from flask import url_for, helpers
from flask import current_app
from flask import g
from flask import make_response
import werkzeug.exceptions as wz_exceptions

from application import utils
from application.utils import remove_private_keys, authentication
from application.utils.authorization import require_login, user_has_role, user_matches_roles
from application.utils.cdn import hash_file_path
from application.utils.encoding import Encoder
from application.utils.gcs import GoogleCloudStorageBucket
from application.utils.imaging import generate_local_thumbnails
from pillar.api import utils
from pillar.api.utils.imaging import generate_local_thumbnails
from pillar.api.utils import remove_private_keys, authentication
from pillar.api.utils.authorization import require_login, user_has_role, \
    user_matches_roles
from pillar.api.utils.cdn import hash_file_path
from pillar.api.utils.encoding import Encoder
from pillar.api.utils.gcs import GoogleCloudStorageBucket

log = logging.getLogger(__name__)

@@ -93,7 +88,8 @@ def index(file_name=None):

    # Determine & create storage directory
    folder_name = file_name[:2]
    file_folder_path = helpers.safe_join(current_app.config['STORAGE_DIR'], folder_name)
    file_folder_path = helpers.safe_join(current_app.config['STORAGE_DIR'],
                                         folder_name)
    if not os.path.exists(file_folder_path):
        log.info('Creating folder path %r', file_folder_path)
        os.mkdir(file_folder_path)
@@ -121,8 +117,8 @@ def _process_image(gcs, file_id, local_file, src_file):
                                              local_file.name)

    # Send those previews to Google Cloud Storage.
    log.info('Uploading %i thumbnails for file %s to Google Cloud Storage (GCS)',
             len(src_file['variations']), file_id)
    log.info('Uploading %i thumbnails for file %s to Google Cloud Storage '
             '(GCS)', len(src_file['variations']), file_id)

    # TODO: parallelize this at some point.
    for variation in src_file['variations']:
@@ -141,8 +137,8 @@ def _process_image(gcs, file_id, local_file, src_file):
        try:
            os.unlink(variation['local_path'])
        except OSError:
            log.warning('Unable to unlink %s, ignoring this but it will need cleanup later.',
                        variation['local_path'])
            log.warning('Unable to unlink %s, ignoring this but it will need '
                        'cleanup later.', variation['local_path'])

        del variation['local_path']

@@ -177,17 +173,19 @@ def _process_video(gcs, file_id, local_file, src_file):
    src_file['variations'].append(file_variation)

    if current_app.config['TESTING']:
        log.warning('_process_video: NOT sending out encoding job due to TESTING=%r',
                    current_app.config['TESTING'])
        log.warning('_process_video: NOT sending out encoding job due to '
                    'TESTING=%r', current_app.config['TESTING'])
        j = type('EncoderJob', (), {'process_id': 'fake-process-id',
                                    'backend': 'fake'})
    else:
        j = Encoder.job_create(src_file)
        if j is None:
            log.warning('_process_video: unable to create encoder job for file %s.', file_id)
            log.warning('_process_video: unable to create encoder job for file '
                        '%s.', file_id)
            return

    log.info('Created asynchronous Zencoder job %s for file %s', j['process_id'], file_id)
    log.info('Created asynchronous Zencoder job %s for file %s',
             j['process_id'], file_id)

    # Add the processing status to the file object
    src_file['processing'] = {
@@ -201,7 +199,8 @@ def process_file(gcs, file_id, local_file):

    :param file_id: '_id' key of the file
    :type file_id: ObjectId or str
    :param local_file: locally stored file, or None if no local processing is needed.
    :param local_file: locally stored file, or None if no local processing is
        needed.
    :type local_file: file
    """

@@ -239,26 +238,30 @@ def process_file(gcs, file_id, local_file):
    try:
        processor = processors[mime_category]
    except KeyError:
        log.info("POSTed file %s was of type %r, which isn't thumbnailed/encoded.", file_id,
        log.info("POSTed file %s was of type %r, which isn't "
                 "thumbnailed/encoded.", file_id,
                 mime_category)
        src_file['status'] = 'complete'
    else:
        log.debug('process_file(%s): marking file status as "processing"', file_id)
        log.debug('process_file(%s): marking file status as "processing"',
                  file_id)
        src_file['status'] = 'processing'
        update_file_doc(file_id, status='processing')

        try:
            processor(gcs, file_id, local_file, src_file)
        except Exception:
            log.warning('process_file(%s): error when processing file, resetting status to '
            log.warning('process_file(%s): error when processing file, '
                        'resetting status to '
                        '"queued_for_processing"', file_id, exc_info=True)
            update_file_doc(file_id, status='queued_for_processing')
            return

    # Update the original file with additional info, e.g. image resolution
    r, _, _, status = put_internal('files', src_file, _id=file_id)
    r, _, _, status = current_app.put_internal('files', src_file, _id=file_id)
    if status not in (200, 201):
        log.warning('process_file(%s): status %i when saving processed file info to MongoDB: %s',
        log.warning('process_file(%s): status %i when saving processed file '
                    'info to MongoDB: %s',
                    file_id, status, r)


@@ -296,9 +299,16 @@ def generate_link(backend, file_path, project_id=None, is_public=False):
    """

    if backend == 'gcs':
        if current_app.config['TESTING']:
            log.info('Skipping GCS link generation, and returning a fake link '
                     'instead.')
            return '/path/to/testing/gcs/%s' % file_path

        storage = GoogleCloudStorageBucket(project_id)
        blob = storage.Get(file_path)
        if blob is None:
            log.warning('generate_link(%r, %r): unable to find blob for file path,'
                        ' returning empty link.', backend, file_path)
            return ''

        if is_public:
@@ -306,20 +316,23 @@ def generate_link(backend, file_path, project_id=None, is_public=False):
        return blob['signed_url']

    if backend == 'pillar':
        return url_for('file_storage.index', file_name=file_path, _external=True,
                       _scheme=current_app.config['SCHEME'])
        return url_for('file_storage.index', file_name=file_path,
                       _external=True, _scheme=current_app.config['SCHEME'])
    if backend == 'cdnsun':
        return hash_file_path(file_path, None)
    if backend == 'unittest':
        return md5(file_path).hexdigest()
        return 'https://unit.test/%s' % md5(file_path).hexdigest()

    log.warning('generate_link(): Unknown backend %r, returning empty string as new link.',
                backend)
    return ''


def before_returning_file(response):
    ensure_valid_link(response)

    # Enable this call later, when we have implemented the is_public field on files.
    # Enable this call later, when we have implemented the is_public field on
    # files.
    # strip_link_and_variations(response)

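generate_link() now returns a recognisable fake URL for the 'unittest' backend instead of a bare hex digest. A standalone check of that expression (the file path is illustrative; the .encode() call is mine, for Python 2/3 compatibility):

from hashlib import md5

file_path = 'ab/cdef0123456789'
link = 'https://unit.test/%s' % md5(file_path.encode('utf-8')).hexdigest()
assert link.startswith('https://unit.test/')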
@@ -352,7 +365,7 @@ def ensure_valid_link(response):
    """Ensures the file item has valid file links using generate_link(...)."""

    # Log to function-specific logger, so we can easily turn it off.
    log = logging.getLogger('%s.ensure_valid_link' % __name__)
    log_link = logging.getLogger('%s.ensure_valid_link' % __name__)
    # log.debug('Inspecting link for file %s', response['_id'])

    # Check link expiry.
@@ -361,18 +374,19 @@ def ensure_valid_link(response):
        link_expires = response['link_expires']
        if now < link_expires:
            # Not expired yet, so don't bother regenerating anything.
            log.debug('Link expires at %s, which is in the future, so not generating new link',
                      link_expires)
            log_link.debug('Link expires at %s, which is in the future, so not '
                           'generating new link', link_expires)
            return

        log.debug('Link expired at %s, which is in the past; generating new link', link_expires)
        log_link.debug('Link expired at %s, which is in the past; generating '
                       'new link', link_expires)
    else:
        log.debug('No expiry date for link; generating new link')
        log_link.debug('No expiry date for link; generating new link')

    _generate_all_links(response, now)
    generate_all_links(response, now)


def _generate_all_links(response, now):
def generate_all_links(response, now):
    """Generate a new link for the file and all its variations.

    :param response: the file document that should be updated.
@@ -380,14 +394,16 @@ def _generate_all_links(response, now):
    """

    project_id = str(
        response['project']) if 'project' in response else None  # TODO: add project id to all files
        response['project']) if 'project' in response else None
    # TODO: add project id to all files
    backend = response['backend']
    response['link'] = generate_link(backend, response['file_path'], project_id)

    variations = response.get('variations')
    if variations:
        for variation in variations:
            variation['link'] = generate_link(backend, variation['file_path'], project_id)
            variation['link'] = generate_link(backend, variation['file_path'],
                                              project_id)

    # Construct the new expiry datetime.
    validity_secs = current_app.config['FILE_LINK_VALIDITY'][backend]
@@ -395,16 +411,19 @@ def _generate_all_links(response, now):

    patch_info = remove_private_keys(response)
    file_id = ObjectId(response['_id'])
    (patch_resp, _, _, _) = patch_internal('files', patch_info, _id=file_id)
    (patch_resp, _, _, _) = current_app.patch_internal('files', patch_info,
                                                       _id=file_id)
    if patch_resp.get('_status') == 'ERR':
        log.warning('Unable to save new links for file %s: %r', response['_id'], patch_resp)
        log.warning('Unable to save new links for file %s: %r',
                    response['_id'], patch_resp)
        # TODO: raise a snag.
        response['_updated'] = now
    else:
        response['_updated'] = patch_resp['_updated']

    # Be silly and re-fetch the etag ourselves. TODO: handle this better.
    etag_doc = current_app.data.driver.db['files'].find_one({'_id': file_id}, {'_etag': 1})
    etag_doc = current_app.data.driver.db['files'].find_one({'_id': file_id},
                                                            {'_etag': 1})
    response['_etag'] = etag_doc['_etag']


@@ -413,7 +432,8 @@ def before_deleting_file(item):


def on_pre_get_files(_, lookup):
    # Override the HTTP header, we always want to fetch the document from MongoDB.
    # Override the HTTP header, we always want to fetch the document from
    # MongoDB.
    parsed_req = eve.utils.parse_request('files')
    parsed_req.if_modified_since = None

@@ -425,12 +445,13 @@ def on_pre_get_files(_, lookup):
    cursor = current_app.data.find('files', parsed_req, lookup_expired)
    for file_doc in cursor:
        # log.debug('Updating expired links for file %r.', file_doc['_id'])
        _generate_all_links(file_doc, now)
        generate_all_links(file_doc, now)


def refresh_links_for_project(project_uuid, chunk_size, expiry_seconds):
    if chunk_size:
        log.info('Refreshing the first %i links for project %s', chunk_size, project_uuid)
        log.info('Refreshing the first %i links for project %s',
                 chunk_size, project_uuid)
    else:
        log.info('Refreshing all links for project %s', project_uuid)

@@ -452,7 +473,7 @@ def refresh_links_for_project(project_uuid, chunk_size, expiry_seconds):

    for file_doc in to_refresh:
        log.debug('Refreshing links for file %s', file_doc['_id'])
        _generate_all_links(file_doc, now)
        generate_all_links(file_doc, now)

    log.info('Refreshed %i links', min(chunk_size, to_refresh.count()))

@@ -470,9 +491,11 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):

    to_refresh = files_collection.find(
        {'$or': [{'backend': backend_name, 'link_expires': None},
                 {'backend': backend_name, 'link_expires': {'$lt': expire_before}},
                 {'backend': backend_name, 'link_expires': {
                     '$lt': expire_before}},
                 {'backend': backend_name, 'link': None}]
        }).sort([('link_expires', pymongo.ASCENDING)]).limit(chunk_size).batch_size(5)
        }).sort([('link_expires', pymongo.ASCENDING)]).limit(
            chunk_size).batch_size(5)

    if to_refresh.count() == 0:
        log.info('No links to refresh.')
@@ -493,33 +516,35 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
            ]})

            if count == 0:
                log.debug('Skipping file %s, project %s does not exist.', file_id, project_id)
                log.debug('Skipping file %s, project %s does not exist.',
                          file_id, project_id)
                continue

            if 'file_path' not in file_doc:
                log.warning("Skipping file %s, missing 'file_path' property.", file_id)
                log.warning("Skipping file %s, missing 'file_path' property.",
                            file_id)
                continue

            log.debug('Refreshing links for file %s', file_id)

            try:
                _generate_all_links(file_doc, now)
                generate_all_links(file_doc, now)
            except gcloud.exceptions.Forbidden:
                log.warning('Skipping file %s, GCS forbids us access to project %s bucket.',
                            file_id, project_id)
                log.warning('Skipping file %s, GCS forbids us access to '
                            'project %s bucket.', file_id, project_id)
                continue
            refreshed += 1
        except KeyboardInterrupt:
            log.warning('Aborting due to KeyboardInterrupt after refreshing %i links',
                        refreshed)
            log.warning('Aborting due to KeyboardInterrupt after refreshing %i '
                        'links', refreshed)
            return

    log.info('Refreshed %i links', refreshed)


@require_login()
def create_file_doc(name, filename, content_type, length, project, backend='gcs',
                    **extra_fields):
def create_file_doc(name, filename, content_type, length, project,
                    backend='gcs', **extra_fields):
    """Creates a minimal File document for storage in MongoDB.

    Doesn't save it to MongoDB yet.
@@ -571,7 +596,8 @@ def override_content_type(uploaded_file):
    # content_type property can't be set directly
    uploaded_file.headers['content-type'] = mimetype

    # It has this, because we used uploaded_file.mimetype earlier this function.
    # It has this, because we used uploaded_file.mimetype earlier this
    # function.
    del uploaded_file._parsed_content_type


@@ -590,15 +616,18 @@ def assert_file_size_allowed(file_size):
        return

    filesize_limit_mb = filesize_limit / 2.0 ** 20
    log.info('User %s tried to upload a %.3f MiB file, but is only allowed %.3f MiB.',
             authentication.current_user_id(), file_size / 2.0 ** 20, filesize_limit_mb)
    log.info('User %s tried to upload a %.3f MiB file, but is only allowed '
             '%.3f MiB.',
             authentication.current_user_id(), file_size / 2.0 ** 20,
             filesize_limit_mb)
    raise wz_exceptions.RequestEntityTooLarge(
        'To upload files larger than %i MiB, subscribe to Blender Cloud' % filesize_limit_mb)
        'To upload files larger than %i MiB, subscribe to Blender Cloud' %
        filesize_limit_mb)


@file_storage.route('/stream/<string:project_id>', methods=['POST', 'OPTIONS'])
@require_login()
def stream_to_gcs(project_id):
def stream_to_storage(project_id):
    project_oid = utils.str2id(project_id)

    projects = current_app.data.driver.db['projects']
@@ -610,13 +639,21 @@ def stream_to_gcs(project_id):
    log.info('Streaming file to bucket for project=%s user_id=%s', project_id,
             authentication.current_user_id())
    log.info('request.headers[Origin] = %r', request.headers.get('Origin'))
    log.info('request.content_length = %r', request.content_length)

    # Try a check for the content length before we access request.files[]. This allows us
    # to abort the upload early. The entire body content length is always a bit larger than
    # the actual file size, so if we accept here, we're sure it'll be accepted in subsequent
    # checks as well.
    if request.content_length:
        assert_file_size_allowed(request.content_length)

    uploaded_file = request.files['file']

    # Not every upload has a Content-Length header. If it was passed, we might as
    # well check for its value before we require the user to upload the entire file.
    # (At least I hope that this part of the code is processed before the body is
    # read in its entirety)
    # Not every upload has a Content-Length header. If it was passed, we might
    # as well check for its value before we require the user to upload the
    # entire file. (At least I hope that this part of the code is processed
    # before the body is read in its entirety)
    if uploaded_file.content_length:
        assert_file_size_allowed(uploaded_file.content_length)

@@ -638,7 +675,8 @@ def stream_to_gcs(project_id):

    # Figure out the file size, as we need to pass this in explicitly to GCloud.
    # Otherwise it always uses os.fstat(file_obj.fileno()).st_size, which isn't
    # supported by a BytesIO object (even though it does have a fileno attribute).
    # supported by a BytesIO object (even though it does have a fileno
    # attribute).
    if isinstance(stream_for_gcs, io.BytesIO):
        file_size = len(stream_for_gcs.getvalue())
    else:
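The branch above sizes the upload stream in two different ways, for the reason the comment gives: os.fstat() works for real files but not for in-memory streams, even though BytesIO exposes a fileno attribute. Both ways in isolation:

import io
import os
import tempfile

mem_stream = io.BytesIO(b'hello')
assert len(mem_stream.getvalue()) == 5  # the BytesIO way

disk_stream = tempfile.TemporaryFile()
disk_stream.write(b'hello')
disk_stream.flush()
assert os.fstat(disk_stream.fileno()).st_size == 5  # the real-file way
disk_stream.close()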
@@ -648,50 +686,42 @@ def stream_to_gcs(project_id):
    assert_file_size_allowed(file_size)

    # Create file document in MongoDB.
    file_id, internal_fname, status = create_file_doc_for_upload(project_oid, uploaded_file)
    file_id, internal_fname, status = create_file_doc_for_upload(project_oid,
                                                                 uploaded_file)

    if current_app.config['TESTING']:
        log.warning('NOT streaming to GCS because TESTING=%r', current_app.config['TESTING'])
        log.warning('NOT streaming to GCS because TESTING=%r',
                    current_app.config['TESTING'])
        # Fake a Blob object.
        gcs = None
        blob = type('Blob', (), {'size': file_size})
    else:
        # Upload the file to GCS.
        from gcloud.streaming import transfer
        # Files larger than this many bytes will be streamed directly from disk, smaller
        # ones will be read into memory and then uploaded.
        transfer.RESUMABLE_UPLOAD_THRESHOLD = 102400
        try:
            gcs = GoogleCloudStorageBucket(project_id)
            blob = gcs.bucket.blob('_/' + internal_fname, chunk_size=256 * 1024 * 2)
            blob.upload_from_file(stream_for_gcs, size=file_size,
                                  content_type=uploaded_file.mimetype)
        except Exception:
            log.exception('Error uploading file to Google Cloud Storage (GCS),'
                          ' aborting handling of uploaded file (id=%s).', file_id)
            update_file_doc(file_id, status='failed')
            raise wz_exceptions.InternalServerError('Unable to stream file to Google Cloud Storage')
        blob, gcs = stream_to_gcs(file_id, file_size, internal_fname,
                                  project_id, stream_for_gcs,
                                  uploaded_file.mimetype)

    if stream_for_gcs.closed:
        log.error('Eek, GCS closed its stream, Andy is not going to like this.')

    # Reload the blob to get the file size according to Google.
    blob.reload()
    log.debug('Marking uploaded file id=%s, fname=%s, '
              'size=%i as "queued_for_processing"',
              file_id, internal_fname, blob.size)
    update_file_doc(file_id,
                    status='queued_for_processing',
                    file_path=internal_fname,
                    length=blob.size,
                    content_type=uploaded_file.mimetype)

    log.debug('Processing uploaded file id=%s, fname=%s, size=%i', file_id,
              internal_fname, blob.size)
    process_file(gcs, file_id, local_file)

    # Local processing is done, we can close the local file so it is removed.
    if local_file is not None:
        local_file.close()

    log.debug('Handled uploaded file id=%s, fname=%s, size=%i', file_id, internal_fname, blob.size)
    log.debug('Handled uploaded file id=%s, fname=%s, size=%i, status=%i',
              file_id, internal_fname, blob.size, status)

    # Status is 200 if the file already existed, and 201 if it was newly created.
    # Status is 200 if the file already existed, and 201 if it was newly
    # created.
    # TODO: add a link to a thumbnail in the response.
    resp = jsonify(status='ok', file_id=str(file_id))
    resp.status_code = status
@@ -699,6 +729,32 @@ def stream_to_gcs(project_id):
    return resp


def stream_to_gcs(file_id, file_size, internal_fname, project_id,
                  stream_for_gcs, content_type):
    # Upload the file to GCS.
    from gcloud.streaming import transfer
    log.debug('Streaming file to GCS bucket; id=%s, fname=%s, size=%i',
              file_id, internal_fname, file_size)
    # Files larger than this many bytes will be streamed directly from disk,
    # smaller ones will be read into memory and then uploaded.
    transfer.RESUMABLE_UPLOAD_THRESHOLD = 102400
    try:
        gcs = GoogleCloudStorageBucket(project_id)
        blob = gcs.bucket.blob('_/' + internal_fname, chunk_size=256 * 1024 * 2)
        blob.upload_from_file(stream_for_gcs, size=file_size,
                              content_type=content_type)
    except Exception:
        log.exception('Error uploading file to Google Cloud Storage (GCS),'
                      ' aborting handling of uploaded file (id=%s).', file_id)
        update_file_doc(file_id, status='failed')
        raise wz_exceptions.InternalServerError(
            'Unable to stream file to Google Cloud Storage')

    # Reload the blob to get the file size according to Google.
    blob.reload()
    return blob, gcs


def add_access_control_headers(resp):
    """Allows cross-site requests from the configured domain."""

@@ -756,16 +812,19 @@ def create_file_doc_for_upload(project_id, uploaded_file):
    if file_doc is None:
        # Create a file document on MongoDB for this file.
        file_doc = create_file_doc(name=internal_filename, **new_props)
        file_fields, _, _, status = post_internal('files', file_doc)
        file_fields, _, _, status = current_app.post_internal('files', file_doc)
    else:
        file_doc.update(new_props)
        file_fields, _, _, status = put_internal('files', remove_private_keys(file_doc))
        file_fields, _, _, status = current_app.put_internal('files', remove_private_keys(file_doc))

    if status not in (200, 201):
        log.error('Unable to create new file document in MongoDB, status=%i: %s',
                  status, file_fields)
        raise wz_exceptions.InternalServerError()

    log.debug('Created file document %s for uploaded file %s; internal name %s',
              file_fields['_id'], uploaded_file.filename, internal_filename)

    return file_fields['_id'], internal_filename, status


@@ -799,4 +858,4 @@ def setup_app(app, url_prefix):
    app.on_replace_files += compute_aggregate_length
    app.on_insert_files += compute_aggregate_length_items

    app.register_blueprint(file_storage, url_prefix=url_prefix)
    app.register_api_blueprint(file_storage, url_prefix=url_prefix)
pillar/api/file_storage/moving.py (new file, 191 lines)
@@ -0,0 +1,191 @@
"""Code for moving files between backends."""

import datetime
import logging
import os
import tempfile

from bson import ObjectId
import bson.tz_util
from flask import current_app
import requests
import requests.exceptions

from . import stream_to_gcs, generate_all_links, ensure_valid_link
import pillar.api.utils.gcs

__all__ = ['PrerequisiteNotMetError', 'change_file_storage_backend']

log = logging.getLogger(__name__)


class PrerequisiteNotMetError(RuntimeError):
    """Raised when a file cannot be moved due to unmet prerequisites."""


def change_file_storage_backend(file_id, dest_backend):
    """Given a file document, move it to the specified backend (if not already
    there) and update the document to reflect that.
    Files on the original backend are not deleted automatically.
    """

    dest_backend = unicode(dest_backend)
    file_id = ObjectId(file_id)

    # Fetch file document
    files_collection = current_app.data.driver.db['files']
    f = files_collection.find_one(file_id)
    if f is None:
        raise ValueError('File with _id: {} not found'.format(file_id))

    # Check that new backend differs from current one
    if dest_backend == f['backend']:
        raise PrerequisiteNotMetError('Destination backend ({}) matches the current backend, we '
                                      'are not moving the file'.format(dest_backend))

    # TODO Check that new backend is allowed (make conf var)

    # Check that the file has a project; without project, we don't know
    # which bucket to store the file into.
    try:
        project_id = f['project']
    except KeyError:
        raise PrerequisiteNotMetError('File document does not have a project')

    # Ensure that all links are up to date before we even attempt a download.
    ensure_valid_link(f)

    # Upload file and variations to the new backend
    variations = f.get('variations', ())

    try:
        copy_file_to_backend(file_id, project_id, f, f['backend'], dest_backend)
    except requests.exceptions.HTTPError as ex:
        # allow the main file to be removed from storage.
        if ex.response.status_code not in {404, 410}:
            raise
        if not variations:
            raise PrerequisiteNotMetError('Main file ({link}) does not exist on server, '
                                          'and no variations exist either'.format(**f))
        log.warning('Main file %s does not exist; skipping main and visiting variations', f['link'])

    for var in variations:
        copy_file_to_backend(file_id, project_id, var, f['backend'], dest_backend)

    # Generate new links for the file & all variations. This also saves
    # the new backend we set here.
    f['backend'] = dest_backend
    now = datetime.datetime.now(tz=bson.tz_util.utc)
    generate_all_links(f, now)


def copy_file_to_backend(file_id, project_id, file_or_var, src_backend, dest_backend):
    # Filenames on GCS do not contain paths, by our convention
    internal_fname = os.path.basename(file_or_var['file_path'])
    file_or_var['file_path'] = internal_fname

    # If the file is not local already, fetch it
    if src_backend == 'pillar':
        local_finfo = fetch_file_from_local(file_or_var)
    else:
        local_finfo = fetch_file_from_link(file_or_var['link'])

    # Upload to GCS
    if dest_backend != 'gcs':
        raise ValueError('Only dest_backend="gcs" is supported now.')

    if current_app.config['TESTING']:
        log.warning('Skipping actual upload to GCS due to TESTING')
    else:
        # TODO check for name collisions
        stream_to_gcs(file_id, local_finfo['file_size'],
                      internal_fname=internal_fname,
                      project_id=str(project_id),
                      stream_for_gcs=local_finfo['local_file'],
                      content_type=local_finfo['content_type'])

    # No longer needed, so it can be closed and disposed of.
    local_finfo['local_file'].close()


def fetch_file_from_link(link):
    """Utility to download a file from a remote location and return it with
    additional info (for upload to a different storage backend).
    """

    log.info('Downloading %s', link)
    r = requests.get(link, stream=True)
    r.raise_for_status()

    local_file = tempfile.NamedTemporaryFile(dir=current_app.config['STORAGE_DIR'])
    log.info('Downloading to %s', local_file.name)

    for chunk in r.iter_content(chunk_size=1024):
        if chunk:
            local_file.write(chunk)
    local_file.seek(0)

    file_dict = {
        'file_size': os.fstat(local_file.fileno()).st_size,
        'content_type': r.headers.get('content-type', 'application/octet-stream'),
        'local_file': local_file
    }
    return file_dict


def fetch_file_from_local(file_doc):
"""Mimicks fetch_file_from_link(), but just returns the local file.
|
||||
|
||||
:param file_doc: dict with 'link' key pointing to a path in STORAGE_DIR, and
|
||||
'content_type' key.
|
||||
:type file_doc: dict
|
||||
:rtype: dict self._log.info('Moving file %s to project %s', file_id, dest_proj['_id'])
|
||||
|
||||
"""
|
||||
|
||||
    local_file = open(os.path.join(current_app.config['STORAGE_DIR'], file_doc['file_path']), 'rb')
    local_finfo = {
        'file_size': os.fstat(local_file.fileno()).st_size,
        'content_type': file_doc['content_type'],
        'local_file': local_file
    }
    return local_finfo


def gcs_move_to_bucket(file_id, dest_project_id, skip_gcs=False):
    """Moves a file from its own bucket to the new project_id bucket."""

    files_coll = current_app.db()['files']

    f = files_coll.find_one(file_id)
    if f is None:
        raise ValueError('File with _id: {} not found'.format(file_id))

    # Check that new backend differs from current one
    if f['backend'] != 'gcs':
        raise ValueError('Only Google Cloud Storage is supported for now.')

    # Move file and variations to the new bucket.
    if skip_gcs:
        log.warning('NOT ACTUALLY MOVING file %s on GCS, just updating MongoDB', file_id)
    else:
        src_project = f['project']
        pillar.api.utils.gcs.copy_to_bucket(f['file_path'], src_project, dest_project_id)
        for var in f.get('variations', []):
            pillar.api.utils.gcs.copy_to_bucket(var['file_path'], src_project, dest_project_id)

    # Update the file document after moving was successful.
    log.info('Switching file %s to project %s', file_id, dest_project_id)
    update_result = files_coll.update_one({'_id': file_id},
                                          {'$set': {'project': dest_project_id}})
    if update_result.matched_count != 1:
        raise RuntimeError(
            'Unable to update file %s in MongoDB: matched_count=%i; modified_count=%i' % (
                file_id, update_result.matched_count, update_result.modified_count))

    log.info('Switching file %s: matched_count=%i; modified_count=%i',
             file_id, update_result.matched_count, update_result.modified_count)

    # Regenerate the links for this file
    f['project'] = dest_project_id
    generate_all_links(f, now=datetime.datetime.now(tz=bson.tz_util.utc))
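A minimal usage sketch of the new module, assuming a running application context with MongoDB configured; the file id below is illustrative:

from pillar.api.file_storage.moving import (
    PrerequisiteNotMetError, change_file_storage_backend)

try:
    change_file_storage_backend('572761099837730efe8e120d', 'gcs')
except PrerequisiteNotMetError as ex:
    # e.g. the file is already on GCS, or has no project set.
    print('Not moving file: %s' % ex)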
@@ -3,12 +3,13 @@ import itertools
import pymongo
from flask import Blueprint, current_app

from application.utils import jsonify
from pillar.api.utils import jsonify

blueprint = Blueprint('latest', __name__)


def keep_fetching(collection, db_filter, projection, sort, py_filter, batch_size=12):
def keep_fetching(collection, db_filter, projection, sort, py_filter,
                  batch_size=12):
    """Yields results for which py_filter returns True"""

    projection['_deleted'] = 1
@@ -47,7 +48,7 @@ def has_public_project(node_doc):
    return is_project_public(project_id)


# TODO: cache result, at least for a limited amt. of time, or for this HTTP request.
# TODO: cache result, for a limited amt. of time, or for this HTTP request.
def is_project_public(project_id):
    """Returns True iff the project is public."""

@@ -60,7 +61,8 @@ def is_project_public(project_id):

@blueprint.route('/assets')
def latest_assets():
    latest = latest_nodes({'node_type': 'asset', 'properties.status': 'published'},
    latest = latest_nodes({'node_type': 'asset',
                           'properties.status': 'published'},
                          {'name': 1, 'project': 1, 'user': 1, 'node_type': 1,
                           'parent': 1, 'picture': 1, 'properties.status': 1,
                           'properties.content_type': 1,
@@ -78,9 +80,9 @@ def embed_user(latest):

    for comment in latest:
        user_id = comment['user']
        comment['user'] = users.find_one(user_id, {'auth': 0, 'groups': 0, 'roles': 0,
                                                   'settings': 0, 'email': 0,
                                                   '_created': 0, '_updated': 0, '_etag': 0})
        comment['user'] = users.find_one(user_id, {
            'auth': 0, 'groups': 0, 'roles': 0, 'settings': 0, 'email': 0,
            '_created': 0, '_updated': 0, '_etag': 0})


def embed_project(latest):
@@ -88,16 +90,19 @@ def embed_project(latest):

    for comment in latest:
        project_id = comment['project']
        comment['project'] = projects.find_one(project_id, {'_id': 1, 'name': 1, 'url': 1})
        comment['project'] = projects.find_one(project_id, {'_id': 1, 'name': 1,
                                                            'url': 1})


@blueprint.route('/comments')
def latest_comments():
    latest = latest_nodes({'node_type': 'comment', 'properties.status': 'published'},
    latest = latest_nodes({'node_type': 'comment',
                           'properties.status': 'published'},
                          {'project': 1, 'parent': 1, 'user': 1,
                           'properties.content': 1, 'node_type': 1, 'properties.status': 1,
                           'properties.content': 1, 'node_type': 1,
                           'properties.status': 1,
                           'properties.is_reply': 1},
                          has_public_project, 6)
                          has_public_project, 10)

    # Embed the comments' parents.
    nodes = current_app.data.driver.db['nodes']
@@ -120,4 +125,4 @@ def latest_comments():


def setup_app(app, url_prefix):
    app.register_blueprint(blueprint, url_prefix=url_prefix)
    app.register_api_blueprint(blueprint, url_prefix=url_prefix)
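keep_fetching() filters documents on the Python side of the MongoDB cursor, and latest_comments() now asks it for 10 results instead of 6. The generator pattern in isolation, simplified to drop the real function's batching and _deleted handling:

import itertools


def keep_fetching(source, py_filter):
    # Yield only items the Python-side filter accepts.
    for item in source:
        if py_filter(item):
            yield item


first_five_even = list(itertools.islice(
    keep_fetching(iter(range(100)), lambda n: n % 2 == 0), 5))
assert first_five_even == [0, 2, 4, 6, 8]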
@@ -1,17 +1,15 @@
import base64
import datetime
import hashlib
import logging
import rsa.randnum

import bcrypt
import datetime
import rsa.randnum
from bson import tz_util
from eve.methods.post import post_internal

from flask import abort, Blueprint, current_app, jsonify, request

from application.utils.authentication import store_token
from application.utils.authentication import create_new_user_document
from application.utils.authentication import make_unique_username
from pillar.api.utils.authentication import create_new_user_document
from pillar.api.utils.authentication import make_unique_username
from pillar.api.utils.authentication import store_token

blueprint = Blueprint('authentication', __name__)
log = logging.getLogger(__name__)
@@ -31,7 +29,7 @@ def create_local_user(email, password):
    # Make username unique
    db_user['username'] = make_unique_username(email)
    # Create the user
    r, _, _, status = post_internal('users', db_user)
    r, _, _, status = current_app.post_internal('users', db_user)
    if status != 201:
        log.error('internal response: %r %r', status, r)
        return abort(500)
@@ -96,4 +94,4 @@ def hash_password(password, salt):


def setup_app(app, url_prefix):
    app.register_blueprint(blueprint, url_prefix=url_prefix)
    app.register_api_blueprint(blueprint, url_prefix=url_prefix)
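The module's hash_password(password, salt) builds on bcrypt. A sketch of the underlying primitive only; this is not Pillar's exact implementation, which also takes an explicit salt argument:

import bcrypt

salt = bcrypt.gensalt()
hashed = bcrypt.hashpw(b'hunter2', salt)

# Re-hashing a candidate password with the stored hash as salt must
# reproduce the hash exactly; that is how verification works.
assert bcrypt.hashpw(b'hunter2', hashed) == hashed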
pillar/api/node_types/__init__.py (new file, 60 lines)
@@ -0,0 +1,60 @@
_file_embedded_schema = {
    'type': 'objectid',
    'data_relation': {
        'resource': 'files',
        'field': '_id',
        'embeddable': True
    }
}

ATTACHMENT_SLUG_REGEX = '[a-zA-Z0-9_ ]+'

_attachments_embedded_schema = {
    'type': 'dict',
    # TODO: will be renamed to 'keyschema' in Cerberus 1.0
    'propertyschema': {
        'type': 'string',
        'regex': '^%s$' % ATTACHMENT_SLUG_REGEX,
    },
    'valueschema': {
        'type': 'dict',
        'schema': {
            'oid': {
                'type': 'objectid',
                'required': True,
            },
            'link': {
                'type': 'string',
                'allowed': ['self', 'none', 'custom'],
                'default': 'self',
            },
            'link_custom': {
                'type': 'string',
            },
            'collection': {
                'type': 'string',
                'allowed': ['files'],
                'default': 'files',
            },
        },
    },
}

# Import after defining the common embedded schemas, to prevent dependency cycles.
from pillar.api.node_types.asset import node_type_asset
from pillar.api.node_types.blog import node_type_blog
from pillar.api.node_types.comment import node_type_comment
from pillar.api.node_types.group import node_type_group
from pillar.api.node_types.group_hdri import node_type_group_hdri
from pillar.api.node_types.group_texture import node_type_group_texture
from pillar.api.node_types.hdri import node_type_hdri
from pillar.api.node_types.page import node_type_page
from pillar.api.node_types.post import node_type_post
from pillar.api.node_types.storage import node_type_storage
from pillar.api.node_types.text import node_type_text
from pillar.api.node_types.texture import node_type_texture

PILLAR_NODE_TYPES = (node_type_asset, node_type_blog, node_type_comment, node_type_group,
                     node_type_group_hdri, node_type_group_texture, node_type_hdri, node_type_page,
                     node_type_post, node_type_storage, node_type_text, node_type_texture)
PILLAR_NAMED_NODE_TYPES = {nt['name']: nt for nt in PILLAR_NODE_TYPES}
@@ -1,4 +1,4 @@
from manage_extra.node_types import _file_embedded_schema
from pillar.api.node_types import _file_embedded_schema, _attachments_embedded_schema

node_type_asset = {
    'name': 'asset',
@@ -27,26 +27,7 @@ node_type_asset = {
        # We point to the original file (and use it to extract any relevant
        # variation useful for our scope).
        'file': _file_embedded_schema,
        'attachments': {
            'type': 'list',
            'schema': {
                'type': 'dict',
                'schema': {
                    'field': {'type': 'string'},
                    'files': {
                        'type': 'list',
                        'schema': {
                            'type': 'dict',
                            'schema': {
                                'file': _file_embedded_schema,
                                'slug': {'type': 'string', 'minlength': 1},
                                'size': {'type': 'string'}
                            }
                        }
                    }
                }
            }
        },
        'attachments': _attachments_embedded_schema,
        # Tags for search
        'tags': {
            'type': 'list',
@@ -58,17 +39,29 @@ node_type_asset = {
        # this schema: "Root > Nested Category > One More Nested Category"
        'categories': {
            'type': 'string'
        }
        },
        'license_type': {
            'default': 'cc-by',
            'type': 'string',
            'allowed': [
                'cc-by',
                'cc-0',
                'cc-by-sa',
                'cc-by-nd',
                'cc-by-nc',
                'copyright'
            ]
        },
        'license_notes': {
            'type': 'string'
        },
    },
    'form_schema': {
        'status': {},
        'content_type': {'visible': False},
        'file': {},
        'attachments': {'visible': False},
        'order': {'visible': False},
        'tags': {'visible': False},
        'categories': {'visible': False}
        'categories': {'visible': False},
        'license_type': {'visible': False},
        'license_notes': {'visible': False},
    },
    'permissions': {
    }
}
@@ -6,7 +6,7 @@ node_type_blog = {
        'template': {
            'type': 'string',
        },
        'categories' : {
        'categories': {
            'type': 'list',
            'schema': {
                'type': 'string'
@@ -18,12 +18,4 @@ node_type_blog = {
        'template': {},
    },
    'parent': ['project',],
    'permissions': {
        # 'groups': [{
        #     'group': app.config['ADMIN_USER_GROUP'],
        #     'methods': ['GET', 'PUT', 'POST']
        # }],
        # 'users': [],
        # 'world': ['GET']
    }
}
@@ -6,6 +6,11 @@ node_type_comment = {
        'content': {
            'type': 'string',
            'minlength': 5,
            'required': True,
        },
        # The converted-to-HTML content.
        'content_html': {
            'type': 'string',
        },
        'status': {
            'type': 'string',
@@ -50,16 +55,6 @@ node_type_comment = {
        'confidence': {'type': 'float'},
        'is_reply': {'type': 'boolean'}
    },
    'form_schema': {
        'content': {},
        'status': {},
        'rating_positive': {},
        'rating_negative': {},
        'ratings': {},
        'confidence': {},
        'is_reply': {}
    },
    'form_schema': {},
    'parent': ['asset', 'comment'],
    'permissions': {
    }
}
@@ -1,6 +1,6 @@
node_type_group = {
    'name': 'group',
    'description': 'Generic group node type edited',
    'description': 'Folder node type',
    'parent': ['group', 'project'],
    'dyn_schema': {
        # Used for sorting within the context of a group
@@ -24,10 +24,7 @@ node_type_group = {
    },
    'form_schema': {
        'url': {'visible': False},
        'status': {},
        'notes': {'visible': False},
        'order': {'visible': False}
    },
    'permissions': {
    }
}
@@ -15,8 +15,5 @@ node_type_group_hdri = {
            ],
        }
    },
    'form_schema': {
        'status': {},
        'order': {}
    }
    'form_schema': {},
}
@@ -15,8 +15,5 @@ node_type_group_texture = {
            ],
        }
    },
    'form_schema': {
        'status': {},
        'order': {}
    }
    'form_schema': {},
}
@@ -1,4 +1,4 @@
from manage_extra.node_types import _file_embedded_schema
from pillar.api.node_types import _file_embedded_schema

node_type_hdri = {
    # When adding this node type, make sure to enable CORS from * on the GCS
@@ -62,5 +62,5 @@ node_type_hdri = {
        'content_type': {'visible': False},
        'tags': {'visible': False},
        'categories': {'visible': False},
    }
    },
}
pillar/api/node_types/page.py (new file, 31 lines)
@@ -0,0 +1,31 @@
from pillar.api.node_types import _attachments_embedded_schema

node_type_page = {
    'name': 'page',
    'description': 'A single page',
    'dyn_schema': {
        # The page content (Markdown format)
        'content': {
            'type': 'string',
            'minlength': 5,
            'maxlength': 90000,
            'required': True
        },
        'status': {
            'type': 'string',
            'allowed': [
                'published',
                'pending'
            ],
            'default': 'pending'
        },
        'url': {
            'type': 'string'
        },
        'attachments': _attachments_embedded_schema,
    },
    'form_schema': {
        'attachments': {'visible': False},
    },
    'parent': ['project', ],
}
35 pillar/api/node_types/post.py Normal file
@@ -0,0 +1,35 @@
from pillar.api.node_types import _attachments_embedded_schema

node_type_post = {
    'name': 'post',
    'description': 'A blog post, for any project',
    'dyn_schema': {
        # The blogpost content (Markdown format)
        'content': {
            'type': 'string',
            'minlength': 5,
            'maxlength': 90000,
            'required': True
        },
        'status': {
            'type': 'string',
            'allowed': [
                'published',
                'pending'
            ],
            'default': 'pending'
        },
        # Global categories, will be enforced to be 1 word
        'category': {
            'type': 'string',
        },
        'url': {
            'type': 'string'
        },
        'attachments': _attachments_embedded_schema,
    },
    'form_schema': {
        'attachments': {'visible': False},
    },
    'parent': ['blog', ],
}
@@ -21,17 +21,6 @@ node_type_storage = {
            'type': 'string',
        },
    },
    'form_schema': {
        'subdir': {},
        'project': {},
        'backend': {}
    },
    'form_schema': {},
    'parent': ['group', 'project'],
    'permissions': {
        # 'groups': [{
        #     'group': app.config['ADMIN_USER_GROUP'],
        #     'methods': ['GET', 'PUT', 'POST']
        # }],
        # 'users': [],
    }
}
@@ -24,5 +24,5 @@ node_type_text = {
    },
    'form_schema': {
        'shared_slug': {'visible': False},
    }
    },
}
@@ -1,4 +1,4 @@
from manage_extra.node_types import _file_embedded_schema
from pillar.api.node_types import _file_embedded_schema

node_type_texture = {
    'name': 'texture',
@@ -58,15 +58,8 @@ node_type_texture = {
        }
    },
    'form_schema': {
        'status': {},
        'content_type': {'visible': False},
        'files': {},
        'is_tileable': {},
        'is_landscape': {},
        'resolution': {},
        'aspect_ratio': {},
        'order': {},
        'tags': {'visible': False},
        'categories': {'visible': False},
    }
    },
}
@@ -1,26 +1,62 @@
import base64
import functools
import logging
import urlparse

import pymongo.errors
import rsa.randnum
import werkzeug.exceptions as wz_exceptions
from bson import ObjectId
from flask import current_app, g, Blueprint, request
import werkzeug.exceptions as wz_exceptions

from application.modules import file_storage
from application.utils import str2id, jsonify
from application.utils.authorization import check_permissions, require_login
from application.utils.gcs import update_file_name
from application.utils.activities import activity_subscribe, activity_object_add
from application.utils.algolia import algolia_index_node_delete
from application.utils.algolia import algolia_index_node_save
import pillar.markdown
from pillar.api.node_types import PILLAR_NAMED_NODE_TYPES
from pillar.api.activities import activity_subscribe, activity_object_add
from pillar.api.utils.algolia import algolia_index_node_delete
from pillar.api.utils.algolia import algolia_index_node_save
from pillar.api.utils import str2id, jsonify
from pillar.api.utils.authorization import check_permissions, require_login
from pillar.api.utils.gcs import update_file_name

log = logging.getLogger(__name__)
blueprint = Blueprint('nodes', __name__)
blueprint = Blueprint('nodes_api', __name__)
ROLES_FOR_SHARING = {u'subscriber', u'demo'}

def only_for_node_type_decorator(*required_node_type_names):
    """Returns a decorator that checks its first argument's node type.

    If the node type is not of the required node type, returns None,
    otherwise calls the wrapped function.

    >>> deco = only_for_node_type_decorator('comment')
    >>> @deco
    ... def handle_comment(node): pass

    >>> deco = only_for_node_type_decorator('comment', 'post')
    >>> @deco
    ... def handle_comment_or_post(node): pass

    """

    # Convert to a set for efficient 'x in required_node_type_names' queries.
    required_node_type_names = set(required_node_type_names)

    def only_for_node_type(wrapped):
        @functools.wraps(wrapped)
        def wrapper(node, *args, **kwargs):
            if node.get('node_type') not in required_node_type_names:
                return

            return wrapped(node, *args, **kwargs)

        return wrapper

    only_for_node_type.__doc__ = "Decorator, immediately returns when " \
                                 "the first argument is not of type %s." % required_node_type_names
    return only_for_node_type
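
A quick usage sketch (the handler name and node dicts below are hypothetical, not part of this commit):

    only_for_assets = only_for_node_type_decorator('asset')

    @only_for_assets
    def handle_asset(node):
        return node['_id']

    handle_asset({'node_type': 'comment'})           # returns None: wrong type, body skipped
    handle_asset({'node_type': 'asset', '_id': 42})  # returns 42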


@blueprint.route('/<node_id>/share', methods=['GET', 'POST'])
@require_login(require_roles=ROLES_FOR_SHARING)
def share_node(node_id):
@@ -35,6 +71,8 @@ def share_node(node_id):
        'node_type': 1,
        'short_code': 1
    })
    if not node:
        raise wz_exceptions.NotFound('Node %s does not exist.' % node_id)

    check_permissions('nodes', node, request.method)

@@ -134,62 +172,6 @@ def short_link_info(short_code):
    }


def item_parse_attachments(response):
    """Before returning a response, check if the 'attachments' property is
    defined. If yes, load the file (for the moment only images) in the required
    variation, get the link and build a Markdown representation. Search in the
    'field' specified in the attachment and replace the 'slug' tag with the
    generated link.
    """

    attachments = response.get('properties', {}).get('attachments', None)
    if not attachments:
        return

    files_collection = current_app.data.driver.db['files']
    for attachment in attachments:
        # Make a list from the property path
        field_name_path = attachment['field'].split('.')
        # This currently allows access only to properties inside of
        # the properties property
        if len(field_name_path) > 1:
            field_content = response[field_name_path[0]][field_name_path[1]]
        # This is for the "normal" first level property
        else:
            field_content = response[field_name_path[0]]
        for af in attachment['files']:
            slug = af['slug']
            slug_tag = "[{0}]".format(slug)
            f = files_collection.find_one({'_id': ObjectId(af['file'])})
            if f is None:
                af['file'] = None
                continue
            size = f['size'] if 'size' in f else 'l'

            # Get the correct variation from the file
            file_storage.ensure_valid_link(f)
            thumbnail = next((item for item in f['variations'] if
                              item['size'] == size), None)

            # Build Markdown img string
            l = '![{0}]({1} "{2}")'.format(slug, thumbnail['link'], f['name'])
            # Parse the content of the file and replace the attachment
            # tag with the actual image link
            field_content = field_content.replace(slug_tag, l)

        # Apply the parsed value back to the property. See above for
        # clarifications on how this is done.
        if len(field_name_path) > 1:
            response[field_name_path[0]][field_name_path[1]] = field_content
        else:
            response[field_name_path[0]] = field_content


def resource_parse_attachments(response):
    for item in response['_items']:
        item_parse_attachments(item)
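
How the slug-to-Markdown replacement plays out, with hypothetical data (URL and file name are made up):

    field_content = 'Intro text [header-img] outro text'
    slug_tag = '[header-img]'
    l = '![{0}]({1} "{2}")'.format('header-img',
                                   'https://storage.example.com/x-l.jpg',
                                   'x.jpg')
    field_content = field_content.replace(slug_tag, l)
    # 'Intro text ![header-img](https://storage.example.com/x-l.jpg "x.jpg") outro text'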


def before_replacing_node(item, original):
    check_permissions('nodes', original, 'PUT')
    update_file_name(item)
@@ -275,9 +257,13 @@ def after_inserting_nodes(items):
        else:
            activity_subscribe(item['user'], 'node', item['_id'])
            verb = 'commented'
        else:
        elif item['node_type'] in PILLAR_NAMED_NODE_TYPES:
            verb = 'posted'
            activity_subscribe(item['user'], 'node', item['_id'])
        else:
            # Don't automatically create activities for non-Pillar node types,
            # as we don't know what would be a suitable verb (among other things).
            continue

        activity_object_add(
            item['user'],
@@ -392,18 +378,39 @@ def after_deleting_node(item):
              item.get('_id'), ex)


def setup_app(app, url_prefix):
only_for_comments = only_for_node_type_decorator('comment')


@only_for_comments
def convert_markdown(node, original=None):
    """Converts comments from Markdown to HTML.

    Always does this on save, even when the original Markdown hasn't changed,
    because our Markdown -> HTML conversion rules might have.
    """

    try:
        content = node['properties']['content']
    except KeyError:
        node['properties']['content_html'] = ''
    else:
        node['properties']['content_html'] = pillar.markdown.markdown(content)


def nodes_convert_markdown(nodes):
    for node in nodes:
        convert_markdown(node)
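
The effect on a comment node, assuming pillar.markdown wraps a standard Markdown converter (exact HTML output may differ):

    node = {'node_type': 'comment', 'properties': {'content': u'*hello*'}}
    convert_markdown(node)
    # node['properties']['content_html'] is now something like u'<p><em>hello</em></p>'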


def setup_app(app, url_prefix):
    from . import patch
    patch.setup_app(app, url_prefix=url_prefix)

    app.on_fetched_item_nodes += before_returning_node
    app.on_fetched_resource_nodes += before_returning_nodes

    app.on_fetched_item_nodes += item_parse_attachments
    app.on_fetched_resource_nodes += resource_parse_attachments

    app.on_replace_nodes += before_replacing_node
    app.on_replace_nodes += convert_markdown
    app.on_replace_nodes += deduct_content_type
    app.on_replace_nodes += node_set_default_picture
    app.on_replaced_nodes += after_replacing_node
@@ -411,8 +418,11 @@ def setup_app(app, url_prefix):
    app.on_insert_nodes += before_inserting_nodes
    app.on_insert_nodes += nodes_deduct_content_type
    app.on_insert_nodes += nodes_set_default_picture
    app.on_insert_nodes += nodes_convert_markdown
    app.on_inserted_nodes += after_inserting_nodes

    app.on_update_nodes += convert_markdown

    app.on_deleted_item_nodes += after_deleting_node

    app.register_blueprint(blueprint, url_prefix=url_prefix)
    app.register_api_blueprint(blueprint, url_prefix=url_prefix)
@@ -1,15 +1,19 @@
"""PATCH support for comment nodes."""

import logging

from eve.methods.patch import patch_internal
from flask import current_app
import werkzeug.exceptions as wz_exceptions

from application.utils import authorization, authentication, jsonify
from pillar.api.utils import authorization, authentication, jsonify

from . import register_patch_handler

log = logging.getLogger(__name__)
ROLES_FOR_COMMENT_VOTING = {u'subscriber', u'demo'}
VALID_COMMENT_OPERATIONS = {u'upvote', u'downvote', u'revoke'}
COMMENT_VOTING_OPS = {u'upvote', u'downvote', u'revoke'}
VALID_COMMENT_OPERATIONS = COMMENT_VOTING_OPS.union({u'edit'})


@register_patch_handler(u'comment')
@@ -17,7 +21,23 @@ def patch_comment(node_id, patch):
    assert_is_valid_patch(node_id, patch)
    user_id = authentication.current_user_id()

    # Find the node
    if patch[u'op'] in COMMENT_VOTING_OPS:
        result, node = vote_comment(user_id, node_id, patch)
    else:
        assert patch[u'op'] == u'edit', 'Invalid patch operation %s' % patch[u'op']
        result, node = edit_comment(user_id, node_id, patch)

    return jsonify({'_status': 'OK',
                    'result': result,
                    'properties': node['properties']
                    })


def vote_comment(user_id, node_id, patch):
    """Performs a voting operation."""

    # Find the node. Includes a query on the properties.ratings array so
    # that we only get the current user's rating.
    nodes_coll = current_app.data.driver.db['nodes']
    node_query = {'_id': node_id,
                  '$or': [{'properties.ratings.$.user': {'$exists': False}},
@@ -25,7 +45,7 @@ def patch_comment(node_id, patch):
    node = nodes_coll.find_one(node_query,
                               projection={'properties': 1})
    if node is None:
        log.warning('How can the node not be found?')
        log.warning('User %s wanted to patch non-existing node %s' % (user_id, node_id))
        raise wz_exceptions.NotFound('Node %s not found' % node_id)

    props = node['properties']
@@ -82,6 +102,7 @@ def patch_comment(node_id, patch):
    action = actions[patch['op']]
    mongo_update = action()

    nodes_coll = current_app.data.driver.db['nodes']
    if mongo_update:
        log.info('Running %s', mongo_update)
        if rating:
@@ -97,10 +118,50 @@ def patch_comment(node_id, patch):
                               projection={'properties.rating_positive': 1,
                                           'properties.rating_negative': 1})

    return jsonify({'_status': 'OK',
                    'result': result,
                    'properties': node['properties']
                    })
    return result, node


def edit_comment(user_id, node_id, patch):
    """Edits a single comment.

    Doesn't do permission checking; users are allowed to edit their own
    comment, and this is not something you want to revoke anyway. Admins
    can edit all comments.
    """

    # Find the node. We need to fetch some more info than we use here, so that
    # we can pass this stuff to Eve's patch_internal; that way the validation &
    # authorisation system has enough info to work.
    nodes_coll = current_app.data.driver.db['nodes']
    projection = {'user': 1,
                  'project': 1,
                  'node_type': 1}
    node = nodes_coll.find_one(node_id, projection=projection)
    if node is None:
        log.warning('User %s wanted to patch non-existing node %s' % (user_id, node_id))
        raise wz_exceptions.NotFound('Node %s not found' % node_id)

    if node['user'] != user_id and not authorization.user_has_role(u'admin'):
        raise wz_exceptions.Forbidden('You can only edit your own comments.')

    # Use Eve to PATCH this node, as that also updates the etag.
    r, _, _, status = patch_internal('nodes',
                                     {'properties.content': patch['content'],
                                      'project': node['project'],
                                      'user': node['user'],
                                      'node_type': node['node_type']},
                                     concurrency_check=False,
                                     _id=node_id)
    if status != 200:
        log.error('Error %i editing comment %s for user %s: %s',
                  status, node_id, user_id, r)
        raise wz_exceptions.InternalServerError('Internal error %i from Eve' % status)
    else:
        log.info('User %s edited comment %s', user_id, node_id)

    # Fetch the new content, so the client can show these without querying again.
    node = nodes_coll.find_one(node_id, projection={'properties.content_html': 1})
    return status, node


def assert_is_valid_patch(node_id, patch):
@@ -112,8 +173,12 @@ def assert_is_valid_patch(node_id, patch):
    raise wz_exceptions.BadRequest("PATCH should have a key 'op' indicating the operation.")

    if op not in VALID_COMMENT_OPERATIONS:
        raise wz_exceptions.BadRequest('Operation should be one of %s',
                                       ', '.join(VALID_COMMENT_OPERATIONS))
        raise wz_exceptions.BadRequest(u'Operation should be one of %s',
                                       u', '.join(VALID_COMMENT_OPERATIONS))

    if op not in COMMENT_VOTING_OPS:
        # We can't check here, we need the node owner for that.
        return

    # See whether the user is allowed to patch
    if authorization.user_matches_roles(ROLES_FOR_COMMENT_VOTING):
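
For reference, the two PATCH payload shapes this handler distinguishes (field values are illustrative):

    vote_patch = {u'op': u'upvote'}                             # also u'downvote', u'revoke'
    edit_patch = {u'op': u'edit', u'content': u'new Markdown'}  # routed to edit_comment()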
110 pillar/api/nodes/moving.py Normal file
@@ -0,0 +1,110 @@
"""Code for moving around nodes."""

import attr
import flask_pymongo.wrappers
from bson import ObjectId

from pillar import attrs_extra
import pillar.api.file_storage.moving


@attr.s
class NodeMover(object):
    db = attr.ib(validator=attr.validators.instance_of(flask_pymongo.wrappers.Database))
    skip_gcs = attr.ib(default=False, validator=attr.validators.instance_of(bool))
    _log = attrs_extra.log('%s.NodeMover' % __name__)

    def change_project(self, node, dest_proj):
        """Moves a node and children to a new project."""

        assert isinstance(node, dict)
        assert isinstance(dest_proj, dict)

        for move_node in self._children(node):
            self._change_project(move_node, dest_proj)

    def _change_project(self, node, dest_proj):
        """Changes the project of a single node, non-recursively."""

        node_id = node['_id']
        proj_id = dest_proj['_id']
        self._log.info('Moving node %s to project %s', node_id, proj_id)

        # Find all files in the node.
        moved_files = set()
        self._move_files(moved_files, dest_proj, self._files(node.get('picture', None)))
        self._move_files(moved_files, dest_proj, self._files(node['properties'], 'file'))
        self._move_files(moved_files, dest_proj, self._files(node['properties'], 'files', 'file'))
        self._move_files(moved_files, dest_proj,
                         self._files(node['properties'], 'attachments', 'files', 'file'))

        # Switch the node's project after its files have been moved.
        self._log.info('Switching node %s to project %s', node_id, proj_id)
        nodes_coll = self.db['nodes']
        update_result = nodes_coll.update_one({'_id': node_id},
                                              {'$set': {'project': proj_id}})
        if update_result.matched_count != 1:
            raise RuntimeError(
                'Unable to update node %s in MongoDB: matched_count=%i; modified_count=%i' % (
                    node_id, update_result.matched_count, update_result.modified_count))

    def _move_files(self, moved_files, dest_proj, file_generator):
        """Tries to find all files from the given properties."""

        for file_id in file_generator:
            if file_id in moved_files:
                continue
            moved_files.add(file_id)
            self.move_file(dest_proj, file_id)

    def move_file(self, dest_proj, file_id):
        """Moves a single file to another project"""

        self._log.info('Moving file %s to project %s', file_id, dest_proj['_id'])
        pillar.api.file_storage.moving.gcs_move_to_bucket(file_id, dest_proj['_id'],
                                                          skip_gcs=self.skip_gcs)

    def _files(self, file_ref, *properties):
        """Yields file ObjectIDs."""

        # Degenerate cases.
        if not file_ref:
            return

        # Single ObjectID
        if isinstance(file_ref, ObjectId):
            assert not properties
            yield file_ref
            return

        # List of ObjectIDs
        if isinstance(file_ref, list):
            for item in file_ref:
                for subitem in self._files(item, *properties):
                    yield subitem
            return

        # Dict, use properties[0] as key
        if isinstance(file_ref, dict):
            try:
                subref = file_ref[properties[0]]
            except KeyError:
                # Silently skip non-existing keys.
                return

            for subitem in self._files(subref, *properties[1:]):
                yield subitem
            return

        raise TypeError('File ref is of type %s, not implemented' % type(file_ref))

    def _children(self, node):
        """Generator, recursively yields the node and its children."""

        yield node

        nodes_coll = self.db['nodes']
        for child in nodes_coll.find({'parent': node['_id']}):
            # "yield from self.children(child)" was introduced in Python 3.3
            for grandchild in self._children(child):
                yield grandchild
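
A hypothetical invocation, assuming a Flask app context and existing node and project documents (the two IDs are placeholders):

    db = current_app.data.driver.db
    node = db['nodes'].find_one({'_id': ObjectId(node_id)})
    dest = db['projects'].find_one({'_id': ObjectId(dest_project_id)})
    NodeMover(db=db).change_project(node, dest)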
@@ -5,11 +5,11 @@ Depends on node_type-specific patch handlers in submodules.

import logging

from flask import Blueprint, request
import werkzeug.exceptions as wz_exceptions

from application.utils import str2id
from application.utils import authorization, mongo, authentication
from flask import Blueprint, request
from pillar.api.utils import mongo
from pillar.api.utils import authorization, authentication
from pillar.api.utils import str2id

from . import custom

@@ -48,4 +48,4 @@ def patch_node(node_id):


def setup_app(app, url_prefix):
    app.register_blueprint(blueprint, url_prefix=url_prefix)
    app.register_api_blueprint(blueprint, url_prefix=url_prefix)
22 pillar/api/projects/__init__.py Normal file
@@ -0,0 +1,22 @@
from . import hooks
from .routes import blueprint_api


def setup_app(app, api_prefix):
    app.on_replace_projects += hooks.override_is_private_field
    app.on_replace_projects += hooks.before_edit_check_permissions
    app.on_replace_projects += hooks.protect_sensitive_fields
    app.on_update_projects += hooks.override_is_private_field
    app.on_update_projects += hooks.before_edit_check_permissions
    app.on_update_projects += hooks.protect_sensitive_fields
    app.on_delete_item_projects += hooks.before_delete_project
    app.on_insert_projects += hooks.before_inserting_override_is_private_field
    app.on_insert_projects += hooks.before_inserting_projects
    app.on_inserted_projects += hooks.after_inserting_projects

    app.on_fetched_item_projects += hooks.before_returning_project_permissions
    app.on_fetched_resource_projects += hooks.before_returning_project_resource_permissions
    app.on_fetched_item_projects += hooks.project_node_type_has_method
    app.on_fetched_resource_projects += hooks.projects_node_type_has_method

    app.register_api_blueprint(blueprint_api, url_prefix=api_prefix)
236 pillar/api/projects/hooks.py Normal file
@@ -0,0 +1,236 @@
import copy
import logging

from flask import request, abort, current_app
from gcloud import exceptions as gcs_exceptions
from pillar.api.node_types.asset import node_type_asset
from pillar.api.node_types.comment import node_type_comment
from pillar.api.node_types.group import node_type_group
from pillar.api.node_types.group_texture import node_type_group_texture
from pillar.api.node_types.texture import node_type_texture
from pillar.api.utils.gcs import GoogleCloudStorageBucket
from pillar.api.utils import authorization, authentication
from pillar.api.utils import remove_private_keys
from pillar.api.utils.authorization import user_has_role, check_permissions
from .utils import abort_with_error

log = logging.getLogger(__name__)

# Default project permissions for the admin group.
DEFAULT_ADMIN_GROUP_PERMISSIONS = ['GET', 'PUT', 'POST', 'DELETE']


def before_inserting_projects(items):
    """Strip unwanted properties that will be assigned after creation. Also,
    verify permission to create a project (check quota, check role).

    :param items: List of project docs that have been inserted (normally one)
    """

    # Allow admin users to do whatever they want.
    if user_has_role(u'admin'):
        return

    for item in items:
        item.pop('url', None)


def override_is_private_field(project, original):
    """Override the 'is_private' property from the world permissions.

    :param project: the project, which will be updated
    """

    # No permissions, no access.
    if 'permissions' not in project:
        project['is_private'] = True
        return

    world_perms = project['permissions'].get('world', [])
    is_private = 'GET' not in world_perms
    project['is_private'] = is_private


def before_inserting_override_is_private_field(projects):
    for project in projects:
        override_is_private_field(project, None)


def before_edit_check_permissions(document, original):
    check_permissions('projects', original, request.method)


def before_delete_project(document):
    """Checks permissions before we allow deletion"""

    check_permissions('projects', document, request.method)


def protect_sensitive_fields(document, original):
    """When not logged in as admin, prevents update to certain fields."""

    # Allow admin users to do whatever they want.
    if user_has_role(u'admin'):
        return

    def revert(name):
        if name not in original:
            try:
                del document[name]
            except KeyError:
                pass
            return
        document[name] = original[name]

    revert('status')
    revert('category')
    revert('user')

    if 'url' in original:
        revert('url')
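
A behaviour sketch for revert(), with hypothetical documents (not part of this commit):

    original = {'status': 'published'}
    document = {'status': 'deleted', 'category': 'film'}
    # revert('status') restores document['status'] = 'published';
    # revert('category') deletes document['category'], since the original had none.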


def after_inserting_projects(projects):
    """After inserting a project in the collection we do some processing such as:
    - apply the right permissions
    - define basic node types
    - optionally generate a url
    - initialize storage space

    :param projects: List of project docs that have been inserted (normally one)
    """

    users_collection = current_app.data.driver.db['users']
    for project in projects:
        owner_id = project.get('user', None)
        owner = users_collection.find_one(owner_id)
        after_inserting_project(project, owner)


def after_inserting_project(project, db_user):
    project_id = project['_id']
    user_id = db_user['_id']

    # Create a project-specific admin group (with name matching the project id)
    result, _, _, status = current_app.post_internal('groups', {'name': str(project_id)})
    if status != 201:
        log.error('Unable to create admin group for new project %s: %s',
                  project_id, result)
        return abort_with_error(status)

    admin_group_id = result['_id']
    log.debug('Created admin group %s for project %s', admin_group_id, project_id)

    # Assign the current user to the group
    db_user.setdefault('groups', []).append(admin_group_id)

    result, _, _, status = current_app.patch_internal('users', {'groups': db_user['groups']},
                                                      _id=user_id)
    if status != 200:
        log.error('Unable to add user %s as member of admin group %s for new project %s: %s',
                  user_id, admin_group_id, project_id, result)
        return abort_with_error(status)
    log.debug('Made user %s member of group %s', user_id, admin_group_id)

    # Assign the group to the project with admin rights
    is_admin = authorization.is_admin(db_user)
    world_permissions = ['GET'] if is_admin else []
    permissions = {
        'world': world_permissions,
        'users': [],
        'groups': [
            {'group': admin_group_id,
             'methods': DEFAULT_ADMIN_GROUP_PERMISSIONS[:]},
        ]
    }

    def with_permissions(node_type):
        copied = copy.deepcopy(node_type)
        copied['permissions'] = permissions
        return copied

    # Assign permissions to the project itself, as well as to the node_types
    project['permissions'] = permissions
    project['node_types'] = [
        with_permissions(node_type_group),
        with_permissions(node_type_asset),
        with_permissions(node_type_comment),
        with_permissions(node_type_texture),
        with_permissions(node_type_group_texture),
    ]

    # Allow admin users to use whatever url they want.
    if not is_admin or not project.get('url'):
        if project.get('category', '') == 'home':
            project['url'] = 'home'
        else:
            project['url'] = "p-{!s}".format(project_id)

    # Initialize storage space (defaults to GCS)
    if current_app.config.get('TESTING'):
        log.warning('Not creating Google Cloud Storage bucket while running unit tests!')
    else:
        try:
            gcs_storage = GoogleCloudStorageBucket(str(project_id))
            if gcs_storage.bucket.exists():
                log.info('Created GCS instance for project %s', project_id)
            else:
                log.warning('Unable to create GCS instance for project %s', project_id)
        except gcs_exceptions.Forbidden as ex:
            log.warning('GCS forbids me to create GCS instance for project %s: %s', project_id, ex)

    # Commit the changes directly to the MongoDB; a PUT is not allowed yet,
    # as the project doesn't have a valid permission structure.
    projects_collection = current_app.data.driver.db['projects']
    result = projects_collection.update_one({'_id': project_id},
                                            {'$set': remove_private_keys(project)})
    if result.matched_count != 1:
        log.error('Unable to update project %s: %s', project_id, result.raw_result)
        abort_with_error(500)


def before_returning_project_permissions(response):
    # Run validation process, since GET on nodes entry point is public
    check_permissions('projects', response, 'GET', append_allowed_methods=True)


def before_returning_project_resource_permissions(response):
    # Return only those projects the user has access to.
    allow = []
    for project in response['_items']:
        if authorization.has_permissions('projects', project,
                                         'GET', append_allowed_methods=True):
            allow.append(project)
        else:
            log.debug('User %s requested project %s, but has no access to it; filtered out.',
                      authentication.current_user_id(), project['_id'])

    response['_items'] = allow


def project_node_type_has_method(response):
    """Check for a specific request arg, and generate the allowed_methods
    list for the requested node_type.
    """

    node_type_name = request.args.get('node_type', '')

    # Proceed only if a node_type has been requested
    if not node_type_name:
        return

    # Look up the node type in the project document
    if not any(node_type.get('name') == node_type_name
               for node_type in response['node_types']):
        return abort(404)

    # Check permissions and append the allowed_methods to the node_type
    check_permissions('projects', response, 'GET', append_allowed_methods=True,
                      check_node_type=node_type_name)


def projects_node_type_has_method(response):
    for project in response['_items']:
        project_node_type_has_method(project)
139 pillar/api/projects/routes.py Normal file
@@ -0,0 +1,139 @@
import json
import logging

from bson import ObjectId
from flask import Blueprint, g, request, current_app, make_response, url_for
from pillar.api.utils import authorization, jsonify, str2id
from pillar.api.utils import mongo
from pillar.api.utils.authorization import require_login, check_permissions
from werkzeug import exceptions as wz_exceptions

from . import utils

log = logging.getLogger(__name__)

blueprint_api = Blueprint('projects_api', __name__)


@blueprint_api.route('/create', methods=['POST'])
@authorization.require_login(require_roles={u'admin', u'subscriber', u'demo'})
def create_project(overrides=None):
    """Creates a new project."""

    if request.mimetype == 'application/json':
        project_name = request.json['name']
    else:
        project_name = request.form['project_name']
    user_id = g.current_user['user_id']

    project = utils.create_new_project(project_name, user_id, overrides)

    # Return the project in the response.
    loc = url_for('projects|item_lookup', _id=project['_id'])
    return jsonify(project, status=201, headers={'Location': loc})


@blueprint_api.route('/users', methods=['GET', 'POST'])
@authorization.require_login()
def project_manage_users():
    """Manage users of a project. In this initial implementation, we handle
    addition of a user to, and removal of a user from, the admin group of a project.
    No changes are done on the project itself.
    """

    projects_collection = current_app.data.driver.db['projects']
    users_collection = current_app.data.driver.db['users']

    # TODO: check if user is admin of the project before anything
    if request.method == 'GET':
        project_id = request.args['project_id']
        project = projects_collection.find_one({'_id': ObjectId(project_id)})
        admin_group_id = project['permissions']['groups'][0]['group']

        users = users_collection.find(
            {'groups': {'$in': [admin_group_id]}},
            {'username': 1, 'email': 1, 'full_name': 1})
        return jsonify({'_status': 'OK', '_items': list(users)})

    # The request is not a form, since it comes from the API sdk
    data = json.loads(request.data)
    project_id = ObjectId(data['project_id'])
    target_user_id = ObjectId(data['user_id'])
    action = data['action']
    current_user_id = g.current_user['user_id']

    project = projects_collection.find_one({'_id': project_id})

    # Check if the current_user is owner of the project, or removing themselves.
    if not authorization.user_has_role(u'admin'):
        remove_self = target_user_id == current_user_id and action == 'remove'
        if project['user'] != current_user_id and not remove_self:
            utils.abort_with_error(403)

    admin_group = utils.get_admin_group(project)

    # Get the user and add the admin group to it
    if action == 'add':
        operation = '$addToSet'
        log.info('project_manage_users: Adding user %s to admin group of project %s',
                 target_user_id, project_id)
    elif action == 'remove':
        log.info('project_manage_users: Removing user %s from admin group of project %s',
                 target_user_id, project_id)
        operation = '$pull'
    else:
        log.warning('project_manage_users: Unsupported action %r called by user %s',
                    action, current_user_id)
        raise wz_exceptions.UnprocessableEntity()

    users_collection.update({'_id': target_user_id},
                            {operation: {'groups': admin_group['_id']}})

    user = users_collection.find_one({'_id': target_user_id},
                                     {'username': 1, 'email': 1,
                                      'full_name': 1})

    if not user:
        return jsonify({'_status': 'ERROR'}), 404

    user['_status'] = 'OK'
    return jsonify(user)


@blueprint_api.route('/<string:project_id>/quotas')
@require_login()
def project_quotas(project_id):
    """Returns information about the project's limits."""

    # Check that the user has GET permissions on the project itself.
    project = mongo.find_one_or_404('projects', project_id)
    check_permissions('projects', project, 'GET')

    file_size_used = utils.project_total_file_size(project_id)

    info = {
        'file_size_quota': None,  # TODO: implement this later.
        'file_size_used': file_size_used,
    }

    return jsonify(info)


@blueprint_api.route('/<project_id>/<node_type>', methods=['OPTIONS', 'GET'])
def get_allowed_methods(project_id=None, node_type=None):
    """Returns allowed methods to create a node of a certain type.

    Either project_id or parent_node_id must be given. If the latter is given,
    the former is derived from it.
    """

    project = mongo.find_one_or_404('projects', str2id(project_id))
    proj_methods = authorization.compute_allowed_methods('projects', project, node_type)

    resp = make_response()
    resp.headers['Allowed'] = ', '.join(sorted(proj_methods))
    resp.status_code = 204

    return resp
99 pillar/api/projects/utils.py Normal file
@@ -0,0 +1,99 @@
import logging

from bson import ObjectId
from flask import current_app
from werkzeug import exceptions as wz_exceptions
from werkzeug.exceptions import abort

log = logging.getLogger(__name__)


def project_total_file_size(project_id):
    """Returns the total number of bytes used by files of this project."""

    files = current_app.data.driver.db['files']
    file_size_used = files.aggregate([
        {'$match': {'project': ObjectId(project_id)}},
        {'$project': {'length_aggregate_in_bytes': 1}},
        {'$group': {'_id': None,
                    'all_files': {'$sum': '$length_aggregate_in_bytes'}}}
    ])

    # The aggregate function returns a cursor, not a document.
    try:
        return next(file_size_used)['all_files']
    except StopIteration:
        # No files used at all.
        return 0
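
The pipeline groups everything into a single document of the form {'_id': None, 'all_files': <total bytes>}, so a hypothetical caller could do:

    total = project_total_file_size(project_id)  # project_id is a placeholder
    log.info('%.1f MiB used', total / 2.0 ** 20)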


def get_admin_group(project):
    """Returns the admin group for the project."""

    groups_collection = current_app.data.driver.db['groups']

    # TODO: search through all groups to find the one with the project ID as its name.
    admin_group_id = ObjectId(project['permissions']['groups'][0]['group'])
    group = groups_collection.find_one({'_id': admin_group_id})

    if group is None:
        raise ValueError('Unable to handle project without admin group.')

    if group['name'] != str(project['_id']):
        return abort_with_error(403)

    return group


def abort_with_error(status):
    """Aborts with the given status, or 500 if the status doesn't indicate an error.

    If the status is < 400, status 500 is used instead.
    """

    abort(status if status // 100 >= 4 else 500)
    raise wz_exceptions.InternalServerError('abort() should have aborted!')
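
Quick behaviour sketch:

    abort_with_error(403)  # aborts the request with 403 Forbidden
    abort_with_error(201)  # 201 // 100 < 4, so this aborts with 500 instead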


def create_new_project(project_name, user_id, overrides):
    """Creates a new project owned by the given user."""

    log.info('Creating new project "%s" for user %s', project_name, user_id)

    # Create the project itself, the rest will be done by the after-insert hook.
    project = {'description': '',
               'name': project_name,
               'node_types': [],
               'status': 'published',
               'user': user_id,
               'is_private': True,
               'permissions': {},
               'url': '',
               'summary': '',
               'category': 'assets',  # TODO: allow the user to choose this.
               }
    if overrides is not None:
        project.update(overrides)

    result, _, _, status = current_app.post_internal('projects', project)
    if status != 201:
        log.error('Unable to create project "%s": %s', project_name, result)
        return abort_with_error(status)
    project.update(result)

    # Now re-fetch the project, as both the initial document and the returned
    # result do not contain the same etag as the database. This also updates
    # other fields set by hooks.
    document = current_app.data.driver.db['projects'].find_one(project['_id'])
    project.update(document)

    log.info('Created project %s for user %s', project['_id'], user_id)

    return project


def get_node_type(project, node_type_name):
    """Returns the named node type, or None if it doesn't exist."""

    return next((nt for nt in project['node_types']
                 if nt['name'] == node_type_name), None)
@@ -3,12 +3,12 @@

import logging

import blinker
from flask import Blueprint, current_app, g, request
from flask import Blueprint, current_app, request
from pillar.api import local_auth
from pillar.api.utils import mongo
from pillar.api.utils import authorization, authentication, str2id, jsonify
from werkzeug import exceptions as wz_exceptions

from application.utils import authorization, authentication, str2id, mongo, jsonify
from application.modules import local_auth

blueprint = Blueprint('service', __name__)
log = logging.getLogger(__name__)
signal_user_changed_role = blinker.NamedSignal('badger:user_changed_role')
@@ -162,7 +162,7 @@ def manage_user_group_membership(db_user, role, action):
    return user_groups


def create_service_account(email, roles, service):
def create_service_account(email, roles, service, update_existing=None):
    """Creates a service account with the given roles + the role 'service'.

    :param email: email address associated with the account
@@ -170,22 +170,53 @@ def create_service_account(email, roles, service):
    :param roles: iterable of role names
    :param service: dict of the 'service' key in the user.
    :type service: dict
    :param update_existing: callback function that receives an existing user to update
        for this service, in case the email address is already in use by someone.
        If not given or None, updating existing users is disallowed, and a ValueError
        exception is thrown instead.

    :return: tuple (user doc, token doc)
    """
    from eve.methods.post import post_internal

    # Create a user with the correct roles.
    roles = list(set(roles).union({u'service'}))
    user = {'username': email,
            'groups': [],
            'roles': roles,
            'settings': {'email_communications': 0},
            'auth': [],
            'full_name': email,
            'email': email,
            'service': service}
    result, _, _, status = post_internal('users', user)
    if status != 201:
    from pillar.api.utils import remove_private_keys

    # Find existing
    users_coll = current_app.db()['users']
    user = users_coll.find_one({'email': email})
    if user:
        # Check whether updating is allowed at all.
        if update_existing is None:
            raise ValueError('User %s already exists' % email)

        # Compute the new roles, and assign.
        roles = list(set(roles).union({u'service'}).union(user['roles']))
        user['roles'] = list(roles)

        # Let the caller perform any required updates.
        log.info('Updating existing user %s to become service account for %s',
                 email, roles)
        update_existing(user['service'])

        # Try to store the updated user.
        result, _, _, status = current_app.put_internal('users',
                                                        remove_private_keys(user),
                                                        _id=user['_id'])
        expected_status = 200
    else:
        # Create a user with the correct roles.
        roles = list(set(roles).union({u'service'}))
        user = {'username': email,
                'groups': [],
                'roles': roles,
                'settings': {'email_communications': 0},
                'auth': [],
                'full_name': email,
                'email': email,
                'service': service}
        result, _, _, status = current_app.post_internal('users', user)
        expected_status = 201

    if status != expected_status:
        raise SystemExit('Error creating user {}: {}'.format(email, result))
    user.update(result)
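
A hypothetical caller that upgrades an existing account into a service account (the names and role strings below are illustrative only):

    def patch_service(service):
        service.setdefault('badger', []).append(u'cloud_demo')

    user, token = create_service_account(u'badger@example.com',
                                         [u'badger'],
                                         {u'badger': []},
                                         update_existing=patch_service)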

@@ -195,5 +226,5 @@ def create_service_account(email, roles, service):
    return user, token


def setup_app(app, url_prefix):
    app.register_blueprint(blueprint, url_prefix=url_prefix)
def setup_app(app, api_prefix):
    app.register_api_blueprint(blueprint, url_prefix=api_prefix)
15 pillar/api/users/__init__.py Normal file
@@ -0,0 +1,15 @@
from . import hooks
from .routes import blueprint_api


def setup_app(app, api_prefix):
    app.on_pre_GET_users += hooks.check_user_access
    app.on_post_GET_users += hooks.post_GET_user
    app.on_pre_PUT_users += hooks.check_put_access
    app.on_pre_PUT_users += hooks.before_replacing_user
    app.on_replaced_users += hooks.push_updated_user_to_algolia
    app.on_replaced_users += hooks.send_blinker_signal_roles_changed
    app.on_fetched_item_users += hooks.after_fetching_user
    app.on_fetched_resource_users += hooks.after_fetching_user_resource

    app.register_api_blueprint(blueprint_api, url_prefix=api_prefix)
@@ -1,45 +1,11 @@
import copy
import hashlib
import json
import logging
import urllib

from flask import g, current_app, Blueprint

from werkzeug.exceptions import Forbidden
from eve.utils import parse_request
from eve.methods.get import get

from application.utils.authorization import user_has_role, require_login
from application.utils import jsonify

log = logging.getLogger(__name__)
blueprint = Blueprint('users', __name__)


@blueprint.route('/me')
@require_login()
def my_info():
    eve_resp, _, _, status, _ = get('users', {'_id': g.current_user['user_id']})
    resp = jsonify(eve_resp['_items'][0], status=status)
    return resp


def gravatar(email, size=64):
    parameters = {'s': str(size), 'd': 'mm'}
    return "https://www.gravatar.com/avatar/" + \
           hashlib.md5(str(email)).hexdigest() + \
           "?" + urllib.urlencode(parameters)


def post_GET_user(request, payload):
    json_data = json.loads(payload.data)
    # Check if we are querying the users endpoint (instead of the single user)
    if json_data.get('_id') is None:
        return
    # json_data['computed_permissions'] = \
    #     compute_permissions(json_data['_id'], app.data.driver)
    payload.data = json.dumps(json_data)
from flask import current_app, g
from pillar.api.users.routes import log
from pillar.api.utils.authorization import user_has_role
from werkzeug.exceptions import Forbidden


def before_replacing_user(request, lookup):
@@ -64,7 +30,7 @@ def push_updated_user_to_algolia(user, original):
    """Push an update to the Algolia index when a user item is updated"""

    from algoliasearch.client import AlgoliaException
    from application.utils.algolia import algolia_index_user_save
    from pillar.api.utils.algolia import algolia_index_user_save

    try:
        algolia_index_user_save(user)
@@ -79,7 +45,7 @@ def send_blinker_signal_roles_changed(user, original):
    if user.get('roles') == original.get('roles'):
        return

    from application.modules.service import signal_user_changed_role
    from pillar.api.service import signal_user_changed_role

    log.info('User %s changed roles to %s, sending Blinker signal',
             user.get('_id'), user.get('roles'))
@@ -136,7 +102,7 @@ def after_fetching_user(user):
        return

    # Remove all fields except public ones.
    public_fields = {'full_name', 'email'}
    public_fields = {'full_name', 'username', 'email'}
    for field in list(user.keys()):
        if field not in public_fields:
            del user[field]
@@ -147,14 +113,11 @@ def after_fetching_user_resource(response):
        after_fetching_user(user)


def setup_app(app, url_prefix):
    app.on_pre_GET_users += check_user_access
    app.on_post_GET_users += post_GET_user
    app.on_pre_PUT_users += check_put_access
    app.on_pre_PUT_users += before_replacing_user
    app.on_replaced_users += push_updated_user_to_algolia
    app.on_replaced_users += send_blinker_signal_roles_changed
    app.on_fetched_item_users += after_fetching_user
    app.on_fetched_resource_users += after_fetching_user_resource

    app.register_blueprint(blueprint, url_prefix=url_prefix)
def post_GET_user(request, payload):
    json_data = json.loads(payload.data)
    # Check if we are querying the users endpoint (instead of the single user)
    if json_data.get('_id') is None:
        return
    # json_data['computed_permissions'] = \
    #     compute_permissions(json_data['_id'], app.data.driver)
    payload.data = json.dumps(json_data)
19 pillar/api/users/routes.py Normal file
@@ -0,0 +1,19 @@
import logging

from eve.methods.get import get
from flask import g, Blueprint
from pillar.api.utils import jsonify
from pillar.api.utils.authorization import require_login

log = logging.getLogger(__name__)
blueprint_api = Blueprint('users_api', __name__)


@blueprint_api.route('/me')
@require_login()
def my_info():
    eve_resp, _, _, status, _ = get('users', {'_id': g.current_user['user_id']})
    resp = jsonify(eve_resp['_items'][0], status=status)
    return resp

@@ -1,5 +1,8 @@
import copy
import hashlib
import json
import urllib

import datetime
import functools
import logging
@@ -10,10 +13,30 @@ from flask import current_app
from werkzeug import exceptions as wz_exceptions
import pymongo.results

__all__ = ('remove_private_keys', 'PillarJSONEncoder')
log = logging.getLogger(__name__)


def node_setattr(node, key, value):
    """Sets a node property by dotted key.

    Modifies the node in-place. Deletes None values.

    :type node: dict
    :type key: str
    :param value: the value to set, or None to delete the key.
    """

    set_on = node
    while key and '.' in key:
        head, key = key.split('.', 1)
        set_on = set_on[head]

    if value is None:
        set_on.pop(key, None)
    else:
        set_on[key] = value
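
For example (hypothetical node dict):

    node = {'properties': {'ratings': {'positive': 1}}}
    node_setattr(node, 'properties.ratings.positive', 2)     # nested update
    node_setattr(node, 'properties.ratings.negative', None)  # deletes the key if present
    # node == {'properties': {'ratings': {'positive': 2}}}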


def remove_private_keys(document):
    """Removes any key that starts with an underscore, returns result as new
    dictionary.
@@ -62,6 +85,18 @@ def jsonify(mongo_doc, status=200, headers=None):
                                      headers=headers)


def bsonify(mongo_doc, status=200, headers=None):
    """BSonifies a Mongo document into a Flask response object."""

    import bson

    data = bson.BSON.encode(mongo_doc)
    return current_app.response_class(data,
                                      mimetype='application/bson',
                                      status=status,
                                      headers=headers)


def skip_when_testing(func):
    """Decorator, skips the decorated function when app.config['TESTING']"""

@@ -104,3 +139,57 @@ def str2id(document_id):
    except bson.objectid.InvalidId:
        log.debug('str2id(%r): Invalid Object ID', document_id)
        raise wz_exceptions.BadRequest('Invalid object ID %r' % document_id)


def gravatar(email, size=64):
    parameters = {'s': str(size), 'd': 'mm'}
    return "https://www.gravatar.com/avatar/" + \
           hashlib.md5(str(email)).hexdigest() + \
           "?" + urllib.urlencode(parameters)


class MetaFalsey(type):
    def __nonzero__(cls):
        return False
    __bool__ = __nonzero__  # for Python 3


class DoesNotExist(object):
    """Returned as value by doc_diff if a value does not exist."""
    __metaclass__ = MetaFalsey


def doc_diff(doc1, doc2, falsey_is_equal=True):
    """Generator, yields differences between documents.

    Yields changes as (key, value in doc1, value in doc2) tuples, where
    the value can also be the DoesNotExist class. Does not report changed
    private keys (i.e. starting with underscores).

    Sub-documents (i.e. dicts) are recursed, and dot notation is used
    for the keys if changes are found.

    If falsey_is_equal=True, all Falsey values compare as equal, i.e. this
    function won't report differences between DoesNotExist, False, '', and 0.
    """

    for key in set(doc1.keys()).union(set(doc2.keys())):
        if isinstance(key, basestring) and key[0] == u'_':
            continue

        val1 = doc1.get(key, DoesNotExist)
        val2 = doc2.get(key, DoesNotExist)

        # Only recurse if both values are dicts
        if isinstance(val1, dict) and isinstance(val2, dict):
            for subkey, subval1, subval2 in doc_diff(val1, val2):
                yield '%s.%s' % (key, subkey), subval1, subval2
            continue

        if val1 == val2:
            continue
        if falsey_is_equal and bool(val1) == bool(val2) == False:
            continue

        yield key, val1, val2
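
Example, with two hypothetical node documents:

    doc1 = {'name': u'Suzanne', 'props': {'status': u'published'}, '_etag': u'aaa'}
    doc2 = {'name': u'Suzanne', 'props': {'status': u'pending'}, '_etag': u'bbb'}
    list(doc_diff(doc1, doc2))
    # [('props.status', u'published', u'pending')]  -- the private '_etag' is skipped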
101 pillar/api/utils/algolia.py Normal file
@@ -0,0 +1,101 @@
import logging

from bson import ObjectId
from flask import current_app

from pillar.api.file_storage import generate_link
from . import skip_when_testing

log = logging.getLogger(__name__)

INDEX_ALLOWED_USER_ROLES = {'admin', 'subscriber', 'demo'}
INDEX_ALLOWED_NODE_TYPES = {'asset', 'texture', 'group', 'hdri'}


@skip_when_testing
def algolia_index_user_save(user):
    if current_app.algolia_index_users is None:
        return
    # Strip unneeded roles
    if 'roles' in user:
        roles = set(user['roles']).intersection(INDEX_ALLOWED_USER_ROLES)
    else:
        roles = set()
    if current_app.algolia_index_users:
        # Create or update Algolia index for the user
        current_app.algolia_index_users.save_object({
            'objectID': user['_id'],
            'full_name': user['full_name'],
            'username': user['username'],
            'roles': list(roles),
            'groups': user['groups'],
            'email': user['email']
        })


@skip_when_testing
def algolia_index_node_save(node):
    if not current_app.algolia_index_nodes:
        return
    if node['node_type'] not in INDEX_ALLOWED_NODE_TYPES:
        return
    # If a node does not have status 'published', do not index
    if node['properties'].get('status') != 'published':
        return

    projects_collection = current_app.data.driver.db['projects']
    project = projects_collection.find_one({'_id': ObjectId(node['project'])})

    users_collection = current_app.data.driver.db['users']
    user = users_collection.find_one({'_id': ObjectId(node['user'])})

    node_ob = {
        'objectID': node['_id'],
        'name': node['name'],
        'project': {
            '_id': project['_id'],
            'name': project['name']
        },
        'created': node['_created'],
        'updated': node['_updated'],
        'node_type': node['node_type'],
        'user': {
            '_id': user['_id'],
            'full_name': user['full_name']
        },
    }
    if 'description' in node and node['description']:
        node_ob['description'] = node['description']
    if 'picture' in node and node['picture']:
        files_collection = current_app.data.driver.db['files']
        lookup = {'_id': ObjectId(node['picture'])}
        picture = files_collection.find_one(lookup)
        if picture['backend'] == 'gcs':
            variation_t = next((item for item in picture['variations']
                                if item['size'] == 't'), None)
            if variation_t:
                node_ob['picture'] = generate_link(picture['backend'],
                    variation_t['file_path'], project_id=str(picture['project']),
                    is_public=True)
    # If the node has world permissions, compute the Free permission
    if 'permissions' in node and 'world' in node['permissions']:
        if 'GET' in node['permissions']['world']:
            node_ob['is_free'] = True

    # Append the media key if the node is of node_type 'asset'
    if node['node_type'] == 'asset':
        node_ob['media'] = node['properties']['content_type']

    # Add extra properties
    for prop in ('tags', 'license_notes'):
        if prop in node['properties']:
            node_ob[prop] = node['properties'][prop]

    current_app.algolia_index_nodes.save_object(node_ob)


@skip_when_testing
def algolia_index_node_delete(node):
    if current_app.algolia_index_nodes is None:
        return
    current_app.algolia_index_nodes.delete_object(node['_id'])
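
These indexing functions are plain callables, so they can be attached to Eve's document hooks; a hypothetical wiring sketch (the setup function is an assumption, not part of this file):

    def setup_algolia_hooks(app):
        # Index nodes on insert, drop them from the index on delete.
        def index_inserted_nodes(nodes):
            for node in nodes:
                algolia_index_node_save(node)

        app.on_inserted_nodes += index_inserted_nodes
        app.on_deleted_item_nodes += algolia_index_node_delete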
@@ -1,7 +1,7 @@
"""Generic authentication.

Contains functionality to validate tokens, create users and tokens, and make
unique usernames from emails. Calls out to the application.modules.blender_id
unique usernames from emails. Calls out to the pillar_server.modules.blender_id
module for Blender ID communication.
"""

@@ -12,10 +12,25 @@ from bson import tz_util
from flask import g
from flask import request
from flask import current_app
from eve.methods.post import post_internal

log = logging.getLogger(__name__)

CLI_USER = {
    'user_id': 'CLI',
    'groups': [],
    'roles': {'admin'},
}


def force_cli_user():
    """Sets g.current_user to the CLI_USER object.

    This is used as a marker to avoid authorization checks and just allow everything.
    """

    log.warning('Logging in as CLI_USER, circumventing authentication.')
    g.current_user = CLI_USER


def validate_token():
    """Validate the token provided in the request and populate the current_user
@@ -28,21 +43,39 @@ def validate_token():
    @returns True iff the user is logged in with a valid Blender ID token.
    """

    # Default to no user at all.
    g.current_user = None
    if request.authorization:
        token = request.authorization.username
        oauth_subclient = request.authorization.password
    else:
        # Check the session, the user might be logged in through Flask-Login.
        from pillar import auth

        _delete_expired_tokens()
        token = auth.get_blender_id_oauth_token()
        if token and isinstance(token, (tuple, list)):
            token = token[0]
        oauth_subclient = None

    if not request.authorization:
    if not token:
        # If no authorization headers are provided, we are getting a request
        # from a non logged in user. Proceed accordingly.
        log.debug('No authentication headers, so not logged in.')
        g.current_user = None
        return False

    # Check the users to see if there is one with this Blender ID token.
    token = request.authorization.username
    oauth_subclient = request.authorization.password
    return validate_this_token(token, oauth_subclient) is not None


def validate_this_token(token, oauth_subclient=None):
    """Validates a given token, and sets g.current_user.

    :returns: the user in MongoDB, or None if not a valid token.
    :rtype: dict
    """

    g.current_user = None
    _delete_expired_tokens()

    # Check the users to see if there is one with this Blender ID token.
    db_token = find_token(token, oauth_subclient)
    if not db_token:
        log.debug('Token %s not found in our local database.', token)
@@ -51,7 +84,7 @@ def validate_token()
        # request to the Blender ID server to verify the validity of the token
        # passed via the HTTP header. We will get basic user info if the user
        # is authorized, and we will store the token in our local database.
        from application.modules import blender_id
        from pillar.api import blender_id

        db_user, status = blender_id.validate_create_user('', token, oauth_subclient)
    else:
@@ -61,13 +94,13 @@ def validate_token()
    if db_user is None:
        log.debug('Validation failed, user not logged in')
        return False
        return None

    g.current_user = {'user_id': db_user['_id'],
                      'groups': db_user['groups'],
                      'roles': set(db_user.get('roles', []))}

    return True
    return db_user


def find_token(token, is_subclient_token=False, **extra_filters):
@@ -91,6 +124,8 @@ def store_token(user_id, token, token_expiry, oauth_subclient_id=False):
    :returns: the token document from MongoDB
    """

    assert isinstance(token, (str, unicode)), 'token must be string type, not %r' % type(token)

    token_data = {
        'user': user_id,
        'token': token,
@@ -99,7 +134,7 @@ def store_token(user_id, token, token_expiry, oauth_subclient_id=False):
    if oauth_subclient_id:
        token_data['is_subclient_token'] = True

    r, _, _, status = post_internal('tokens', token_data)
    r, _, _, status = current_app.post_internal('tokens', token_data)

    if status not in {200, 201}:
        log.error('Unable to store authentication token: %s', r)
@@ -119,7 +154,7 @@ def create_new_user(email, username, user_id):
    """

    user_data = create_new_user_document(email, user_id, username)
    r = post_internal('users', user_data)
    r = current_app.post_internal('users', user_data)
    user_id = r[0]['_id']
    return user_id

@@ -196,3 +231,10 @@ def current_user_id():

    current_user = g.get('current_user') or {}
    return current_user.get('user_id')


def setup_app(app):
    @app.before_request
    def validate_token_at_each_request():
        validate_token()
        return None
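
A sketch of how the new helpers combine in a maintenance script, assuming an application context is active (the script itself is illustrative, not part of the commit range):

    from pillar.api.utils import authentication

    def run_maintenance():
        # Marks the request as coming from the CLI, bypassing token checks.
        authentication.force_cli_user()
        # ... perform admin-level operations here ...

    # Web code can instead call validate_this_token(token), which both
    # validates the token and populates g.current_user, returning the
    # MongoDB user document or None.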
@@ -7,7 +7,7 @@ from flask import abort
from flask import current_app
from werkzeug.exceptions import Forbidden

CHECK_PERMISSIONS_IMPLEMENTED_FOR = {'projects', 'nodes'}
CHECK_PERMISSIONS_IMPLEMENTED_FOR = {'projects', 'nodes', 'flamenco_jobs'}

log = logging.getLogger(__name__)

@@ -62,15 +62,18 @@ def compute_allowed_methods(collection_name, resource, check_node_type=None):

    # Accumulate allowed methods from the user, group and world level.
    allowed_methods = set()
    current_user = g.current_user
    current_user = getattr(g, 'current_user', None)

    if current_user:
        user_is_admin = is_admin(current_user)

        # If the user is authenticated, proceed to compare the group permissions
        for permission in computed_permissions.get('groups', ()):
            if permission['group'] in current_user['groups']:
            if user_is_admin or permission['group'] in current_user['groups']:
                allowed_methods.update(permission['methods'])

        for permission in computed_permissions.get('users', ()):
            if current_user['user_id'] == permission['user']:
            if user_is_admin or current_user['user_id'] == permission['user']:
                allowed_methods.update(permission['methods'])

    # Check if the node is public or private. This must be set for non logged
@@ -132,6 +135,14 @@ def compute_aggr_permissions(collection_name, resource, check_node_type=None):
        if check_node_type is None:
            return project['permissions']
        node_type_name = check_node_type
    elif 'node_type' not in resource:
        # Neither a project, nor a node, therefore is another collection
        projects_collection = current_app.data.driver.db['projects']
        project = projects_collection.find_one(
            ObjectId(resource['project']),
            {'permissions': 1})
        return project['permissions']

    else:
        # Not a project, so it's a node.
        assert 'project' in resource
@@ -155,7 +166,7 @@ def compute_aggr_permissions(collection_name, resource, check_node_type=None):
        project_permissions = project['permissions']

    # Find the node type from the project.
    node_type = next((node_type for node_type in project['node_types']
    node_type = next((node_type for node_type in project.get('node_types', ())
                      if node_type['name'] == node_type_name), None)
    if node_type is None:  # This node type is not known, so doesn't give permissions.
        node_type_permissions = {}
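
For reference, compute_allowed_methods() walks permission documents of roughly this shape (inferred from the code above and the projects module below, not a schema defined in this diff):

    permissions = {
        'world': ['GET'],  # methods available to anonymous users
        'users': [{'user': user_id, 'methods': ['GET', 'PUT']}],
        'groups': [{'group': group_id, 'methods': ['GET', 'PUT', 'POST', 'DELETE']}],
    }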
@@ -3,8 +3,6 @@ import os
from flask import current_app

from application import encoding_service_client

log = logging.getLogger(__name__)

@@ -18,7 +16,7 @@ class Encoder:
        """Create an encoding job. Return the backend used as well as an id.
        """
        if current_app.config['ENCODING_BACKEND'] != 'zencoder' or \
                encoding_service_client is None:
                current_app.encoding_service_client is None:
            log.error('I can only work with Zencoder, check the config file.')
            return None

@@ -35,9 +33,9 @@ class Encoder:
        outputs = [{'format': v['format'],
                    'url': os.path.join(storage_base, v['file_path'])}
                   for v in src_file['variations']]
        r = encoding_service_client.job.create(file_input,
                                               outputs=outputs,
                                               options=options)
        r = current_app.encoding_service_client.job.create(file_input,
                                                           outputs=outputs,
                                                           options=options)
        if r.code != 201:
            log.error('Error %i creating Zencoder job: %s', r.code, r.body)
            return None
@@ -47,8 +45,10 @@ class Encoder:

    @staticmethod
    def job_progress(job_id):
        if isinstance(encoding_service_client, Zencoder):
            r = encoding_service_client.job.progress(int(job_id))
        from zencoder import Zencoder

        if isinstance(current_app.encoding_service_client, Zencoder):
            r = current_app.encoding_service_client.job.progress(int(job_id))
            return r.body
        else:
            return None
@@ -169,6 +169,15 @@ class GoogleCloudStorageBucket(object):
        blob.content_disposition = u'attachment; filename="{0}"'.format(name)
        blob.patch()

    def copy_blob(self, blob, to_bucket):
        """Copies the given blob from this bucket to the other bucket.

        Returns the new blob.
        """

        assert isinstance(to_bucket, GoogleCloudStorageBucket)
        return self.bucket.copy_blob(blob, to_bucket.bucket)


def update_file_name(node):
    """Assign to the GCS blob the same name of the asset node. This way when
@@ -197,6 +206,11 @@ def update_file_name(node):

        storage = GoogleCloudStorageBucket(str(node['project']))
        blob = storage.Get(file_doc['file_path'], to_dict=False)
        if blob is None:
            log.warning('Unable to find blob for file %s in project %s',
                        file_doc['file_path'], file_doc['project'])
            return

        # Pick file extension from original filename
        _, ext = os.path.splitext(file_doc['filename'])
        name = _format_name(node['name'], ext, map_type=map_type)
@@ -222,3 +236,16 @@ def update_file_name(node):
    if 'files' in node['properties']:
        for file_props in node['properties']['files']:
            _update_name(file_props['file'], file_props)


def copy_to_bucket(file_path, src_project_id, dest_project_id):
    """Copies a file from one bucket to the other."""

    log.info('Copying %s from project bucket %s to %s',
             file_path, src_project_id, dest_project_id)

    src_storage = GoogleCloudStorageBucket(str(src_project_id))
    dest_storage = GoogleCloudStorageBucket(str(dest_project_id))

    blob = src_storage.Get(file_path, to_dict=False)
    src_storage.copy_blob(blob, dest_storage)
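
A usage sketch for the new copy_to_bucket() helper, with made-up path and project IDs:

    copy_to_bucket(u'textures/wood/wood_diffuse.png',
                   src_project_id='5672beecc0261b2005ed1a33',
                   dest_project_id='5672beecc0261b2005ed1a34')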
pillar/api/utils/node_type_utils.py (new file, 84 lines)
@@ -0,0 +1,84 @@
import copy
import logging
import types

log = logging.getLogger(__name__)


def assign_permissions(project, node_types, permission_callback):
    """Generator, yields the node types with certain permissions set.

    The permission_callback is called for each node type, and each user
    and group permission in the project, and should return the appropriate
    extra permissions for that node type.

    Yields copies of the given node types with new permissions.

    permission_callback(node_type, ugw, ident, proj_methods) is called, where
    - 'node_type' is the node type dict
    - 'ugw' is either 'user', 'group', or 'world',
    - 'ident' is the group or user ID, or None when ugw is 'world',
    - 'proj_methods' is the list of already-allowed project methods.
    """

    proj_perms = project['permissions']

    for nt in node_types:
        permissions = {}

        for key in ('users', 'groups'):
            perms = proj_perms[key]
            singular = key.rstrip('s')

            for perm in perms:
                assert isinstance(perm, dict), 'perm should be dict, but is %r' % perm
                ident = perm[singular]  # group or user ID.

                methods_to_allow = permission_callback(nt, singular, ident, perm['methods'])
                if not methods_to_allow:
                    continue

                permissions.setdefault(key, []).append(
                    {singular: ident,
                     'methods': methods_to_allow}
                )

        # World permissions are simpler.
        world_methods_to_allow = permission_callback(nt, 'world', None,
                                                     permissions.get('world', []))
        if world_methods_to_allow:
            permissions.setdefault('world', []).extend(world_methods_to_allow)

        node_type = copy.deepcopy(nt)
        if permissions:
            node_type['permissions'] = permissions
        yield node_type


def add_to_project(project, node_types, replace_existing):
    """Adds the given node types to the project.

    Overwrites any existing by the same name when replace_existing=True.
    """

    assert isinstance(project, dict)
    assert isinstance(node_types, (list, set, frozenset, tuple, types.GeneratorType)), \
        'node_types is of wrong type %s' % type(node_types)

    project_id = project['_id']

    for node_type in node_types:
        found = [nt for nt in project['node_types']
                 if nt['name'] == node_type['name']]
        if found:
            assert len(found) == 1, 'node type name should be unique (found %ix)' % len(found)

            # TODO: validate that the node type contains all the properties Attract needs.
            if replace_existing:
                log.info('Replacing existing node type %s on project %s',
                         node_type['name'], project_id)
                project['node_types'].remove(found[0])
            else:
                continue

        project['node_types'].append(node_type)
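
A sketch of a permission_callback for assign_permissions(), following the contract documented above (the grant logic itself is made up for illustration):

    def grant_comment_posting(node_type, ugw, ident, proj_methods):
        # Allow POST on comment nodes for anyone who may already POST
        # to the project; grant nothing on other node types.
        if node_type['name'] != 'comment':
            return []
        return ['POST'] if 'POST' in proj_methods else []

    project['node_types'] = list(
        assign_permissions(project, project['node_types'], grant_comment_posting))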
@@ -1,8 +1,8 @@
import os
import subprocess

import os
from flask import current_app
from application.utils.gcs import GoogleCloudStorageBucket
from pillar.api.utils.gcs import GoogleCloudStorageBucket


def get_sizedata(filepath):
@@ -1,268 +0,0 @@
import logging.config
import os
import subprocess
import tempfile
from bson import ObjectId
from datetime import datetime
from flask import g
from flask import request
from flask import abort
from eve import Eve

from eve.auth import TokenAuth
from eve.io.mongo import Validator

from application.utils import project_get_node_type

RFC1123_DATE_FORMAT = '%a, %d %b %Y %H:%M:%S GMT'


class ValidateCustomFields(Validator):
    def convert_properties(self, properties, node_schema):
        for prop in node_schema:
            if not prop in properties:
                continue
            schema_prop = node_schema[prop]
            prop_type = schema_prop['type']
            if prop_type == 'dict':
                properties[prop] = self.convert_properties(
                    properties[prop], schema_prop['schema'])
            if prop_type == 'list':
                if properties[prop] in ['', '[]']:
                    properties[prop] = []
                for k, val in enumerate(properties[prop]):
                    if not 'schema' in schema_prop:
                        continue
                    item_schema = {'item': schema_prop['schema']}
                    item_prop = {'item': properties[prop][k]}
                    properties[prop][k] = self.convert_properties(
                        item_prop, item_schema)['item']
            # Convert datetime string to RFC1123 datetime
            elif prop_type == 'datetime':
                prop_val = properties[prop]
                properties[prop] = datetime.strptime(prop_val, RFC1123_DATE_FORMAT)
            elif prop_type == 'objectid':
                prop_val = properties[prop]
                if prop_val:
                    properties[prop] = ObjectId(prop_val)
                else:
                    properties[prop] = None

        return properties

    def _validate_valid_properties(self, valid_properties, field, value):
        projects_collection = app.data.driver.db['projects']
        lookup = {'_id': ObjectId(self.document['project'])}

        project = projects_collection.find_one(lookup, {
            'node_types.name': 1,
            'node_types.dyn_schema': 1,
        })
        if project is None:
            log.warning('Unknown project %s, declared by node %s',
                        lookup, self.document.get('_id'))
            self._error(field, 'Unknown project')
            return False

        node_type_name = self.document['node_type']
        node_type = project_get_node_type(project, node_type_name)
        if node_type is None:
            log.warning('Project %s has no node type %s, declared by node %s',
                        project, node_type_name, self.document.get('_id'))
            self._error(field, 'Unknown node type')
            return False

        try:
            value = self.convert_properties(value, node_type['dyn_schema'])
        except Exception as e:
            log.warning("Error converting form properties", exc_info=True)

        v = Validator(node_type['dyn_schema'])
        val = v.validate(value)

        if val:
            return True

        log.warning('Error validating properties for node %s: %s', self.document, v.errors)
        self._error(field, "Error validating properties")


# We specify a settings.py file because when running on wsgi we can't detect it
# automatically. The default path (which works in Docker) can be overridden with
# an env variable.
settings_path = os.environ.get(
    'EVE_SETTINGS', '/data/git/pillar/pillar/settings.py')
app = Eve(settings=settings_path, validator=ValidateCustomFields)

# Load configuration from three different sources, to make it easy to override
# settings with secrets, as well as for development & testing.
app_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
app.config.from_pyfile(os.path.join(app_root, 'config.py'), silent=False)
app.config.from_pyfile(os.path.join(app_root, 'config_local.py'), silent=True)
from_envvar = os.environ.get('PILLAR_CONFIG')
if from_envvar:
    # Don't use from_envvar, as we want different behaviour. If the envvar
    # is not set, it's fine (i.e. silent=True), but if it is set and the
    # configfile doesn't exist, it should error out (i.e. silent=False).
    app.config.from_pyfile(from_envvar, silent=False)

# Set the TMP environment variable to manage where uploads are stored.
# These are all used by tempfile.mkstemp(), but we don't know in which
# order. As such, we remove all used variables but the one we set.
tempfile.tempdir = app.config['STORAGE_DIR']
os.environ['TMP'] = app.config['STORAGE_DIR']
os.environ.pop('TEMP', None)
os.environ.pop('TMPDIR', None)


# Configure logging
logging.config.dictConfig(app.config['LOGGING'])
log = logging.getLogger(__name__)
if app.config['DEBUG']:
    log.info('Pillar starting, debug=%s', app.config['DEBUG'])

# Get the Git hash
try:
    git_cmd = ['git', '-C', app_root, 'describe', '--always']
    description = subprocess.check_output(git_cmd)
    app.config['GIT_REVISION'] = description.strip()
except (subprocess.CalledProcessError, OSError) as ex:
    log.warning('Unable to run "git describe" to get git revision: %s', ex)
    app.config['GIT_REVISION'] = 'unknown'
log.info('Git revision %r', app.config['GIT_REVISION'])

# Configure Bugsnag
if not app.config.get('TESTING') and app.config.get('BUGSNAG_API_KEY'):
    import bugsnag
    import bugsnag.flask
    import bugsnag.handlers

    bugsnag.configure(
        api_key=app.config['BUGSNAG_API_KEY'],
        project_root="/data/git/pillar/pillar",
    )
    bugsnag.flask.handle_exceptions(app)

    bs_handler = bugsnag.handlers.BugsnagHandler()
    bs_handler.setLevel(logging.ERROR)
    log.addHandler(bs_handler)
else:
    log.info('Bugsnag NOT configured.')

# Google Cloud project
try:
    os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = \
        app.config['GCLOUD_APP_CREDENTIALS']
except KeyError:
    raise SystemExit('GCLOUD_APP_CREDENTIALS configuration is missing')

# Storage backend (GCS)
try:
    os.environ['GCLOUD_PROJECT'] = app.config['GCLOUD_PROJECT']
except KeyError:
    raise SystemExit('GCLOUD_PROJECT configuration value is missing')

# Algolia search
if app.config['SEARCH_BACKEND'] == 'algolia':
    from algoliasearch import algoliasearch

    client = algoliasearch.Client(
        app.config['ALGOLIA_USER'],
        app.config['ALGOLIA_API_KEY'])
    algolia_index_users = client.init_index(app.config['ALGOLIA_INDEX_USERS'])
    algolia_index_nodes = client.init_index(app.config['ALGOLIA_INDEX_NODES'])
else:
    algolia_index_users = None
    algolia_index_nodes = None

# Encoding backend
if app.config['ENCODING_BACKEND'] == 'zencoder':
    from zencoder import Zencoder
    encoding_service_client = Zencoder(app.config['ZENCODER_API_KEY'])
else:
    encoding_service_client = None

from utils.authentication import validate_token
from utils.authorization import check_permissions
from utils.activities import notification_parse
from modules.projects import before_inserting_projects
from modules.projects import after_inserting_projects


@app.before_request
def validate_token_at_every_request():
    validate_token()


def before_returning_item_notifications(response):
    if request.args.get('parse'):
        notification_parse(response)


def before_returning_resource_notifications(response):
    for item in response['_items']:
        if request.args.get('parse'):
            notification_parse(item)


app.on_fetched_item_notifications += before_returning_item_notifications
app.on_fetched_resource_notifications += before_returning_resource_notifications


@app.before_first_request
def setup_db_indices():
    """Adds missing database indices.

    This does NOT drop and recreate existing indices,
    nor does it reconfigure existing indices.
    If you want that, drop them manually first.
    """

    log.debug('Adding missing database indices.')

    import pymongo

    db = app.data.driver.db

    coll = db['tokens']
    coll.create_index([('user', pymongo.ASCENDING)])
    coll.create_index([('token', pymongo.ASCENDING)])

    coll = db['notifications']
    coll.create_index([('user', pymongo.ASCENDING)])

    coll = db['activities-subscriptions']
    coll.create_index([('context_object', pymongo.ASCENDING)])

    coll = db['nodes']
    # This index is used for queries on project, and for queries on
    # the combination (project, node type).
    coll.create_index([('project', pymongo.ASCENDING),
                       ('node_type', pymongo.ASCENDING)])
    coll.create_index([('parent', pymongo.ASCENDING)])
    coll.create_index([('short_code', pymongo.ASCENDING)],
                      sparse=True, unique=True)


# The encoding module (receive notification and report progress)
from modules.encoding import encoding
from modules.blender_id import blender_id
from modules import projects
from modules import local_auth
from modules import file_storage
from modules import users
from modules import nodes
from modules import latest
from modules import blender_cloud
from modules import service

app.register_blueprint(encoding, url_prefix='/encoding')
app.register_blueprint(blender_id, url_prefix='/blender_id')
projects.setup_app(app, url_prefix='/p')
local_auth.setup_app(app, url_prefix='/auth')
file_storage.setup_app(app, url_prefix='/storage')
latest.setup_app(app, url_prefix='/latest')
blender_cloud.setup_app(app, url_prefix='/bcloud')
users.setup_app(app, url_prefix='/users')
service.setup_app(app, url_prefix='/service')
nodes.setup_app(app, url_prefix='/nodes')
@@ -1,472 +0,0 @@
import copy
import logging
import json

from bson import ObjectId
from eve.methods.post import post_internal
from eve.methods.patch import patch_internal
from flask import g, Blueprint, request, abort, current_app, make_response
from gcloud import exceptions as gcs_exceptions
from werkzeug import exceptions as wz_exceptions

from application.utils import remove_private_keys, jsonify, mongo, str2id
from application.utils import authorization, authentication
from application.utils.gcs import GoogleCloudStorageBucket
from application.utils.authorization import user_has_role, check_permissions, require_login
from manage_extra.node_types.asset import node_type_asset
from manage_extra.node_types.comment import node_type_comment
from manage_extra.node_types.group import node_type_group
from manage_extra.node_types.texture import node_type_texture
from manage_extra.node_types.group_texture import node_type_group_texture

log = logging.getLogger(__name__)
blueprint = Blueprint('projects', __name__)

# Default project permissions for the admin group.
DEFAULT_ADMIN_GROUP_PERMISSIONS = ['GET', 'PUT', 'POST', 'DELETE']


def before_inserting_projects(items):
    """Strip unwanted properties, that will be assigned after creation. Also,
    verify permission to create a project (check quota, check role).

    :param items: List of project docs that have been inserted (normally one)
    """

    # Allow admin users to do whatever they want.
    if user_has_role(u'admin'):
        return

    for item in items:
        item.pop('url', None)


def override_is_private_field(project, original):
    """Override the 'is_private' property from the world permissions.

    :param project: the project, which will be updated
    """

    # No permissions, no access.
    if 'permissions' not in project:
        project['is_private'] = True
        return

    world_perms = project['permissions'].get('world', [])
    is_private = 'GET' not in world_perms
    project['is_private'] = is_private


def before_inserting_override_is_private_field(projects):
    for project in projects:
        override_is_private_field(project, None)


def before_edit_check_permissions(document, original):
    # Allow admin users to do whatever they want.
    # TODO: possibly move this into the check_permissions function.
    if user_has_role(u'admin'):
        return

    check_permissions('projects', original, request.method)


def before_delete_project(document):
    """Checks permissions before we allow deletion"""

    # Allow admin users to do whatever they want.
    # TODO: possibly move this into the check_permissions function.
    if user_has_role(u'admin'):
        return

    check_permissions('projects', document, request.method)


def protect_sensitive_fields(document, original):
    """When not logged in as admin, prevents update to certain fields."""

    # Allow admin users to do whatever they want.
    if user_has_role(u'admin'):
        return

    def revert(name):
        if name not in original:
            try:
                del document[name]
            except KeyError:
                pass
            return
        document[name] = original[name]

    revert('status')
    revert('category')
    revert('user')

    if 'url' in original:
        revert('url')


def after_inserting_projects(projects):
    """After inserting a project in the collection we do some processing such as:
    - apply the right permissions
    - define basic node types
    - optionally generate a url
    - initialize storage space

    :param projects: List of project docs that have been inserted (normally one)
    """

    users_collection = current_app.data.driver.db['users']
    for project in projects:
        owner_id = project.get('user', None)
        owner = users_collection.find_one(owner_id)
        after_inserting_project(project, owner)


def after_inserting_project(project, db_user):
    project_id = project['_id']
    user_id = db_user['_id']

    # Create a project-specific admin group (with name matching the project id)
    result, _, _, status = post_internal('groups', {'name': str(project_id)})
    if status != 201:
        log.error('Unable to create admin group for new project %s: %s',
                  project_id, result)
        return abort_with_error(status)

    admin_group_id = result['_id']
    log.debug('Created admin group %s for project %s', admin_group_id, project_id)

    # Assign the current user to the group
    db_user.setdefault('groups', []).append(admin_group_id)

    result, _, _, status = patch_internal('users', {'groups': db_user['groups']}, _id=user_id)
    if status != 200:
        log.error('Unable to add user %s as member of admin group %s for new project %s: %s',
                  user_id, admin_group_id, project_id, result)
        return abort_with_error(status)
    log.debug('Made user %s member of group %s', user_id, admin_group_id)

    # Assign the group to the project with admin rights
    is_admin = authorization.is_admin(db_user)
    world_permissions = ['GET'] if is_admin else []
    permissions = {
        'world': world_permissions,
        'users': [],
        'groups': [
            {'group': admin_group_id,
             'methods': DEFAULT_ADMIN_GROUP_PERMISSIONS[:]},
        ]
    }

    def with_permissions(node_type):
        copied = copy.deepcopy(node_type)
        copied['permissions'] = permissions
        return copied

    # Assign permissions to the project itself, as well as to the node_types
    project['permissions'] = permissions
    project['node_types'] = [
        with_permissions(node_type_group),
        with_permissions(node_type_asset),
        with_permissions(node_type_comment),
        with_permissions(node_type_texture),
        with_permissions(node_type_group_texture),
    ]

    # Allow admin users to use whatever url they want.
    if not is_admin or not project.get('url'):
        if project.get('category', '') == 'home':
            project['url'] = 'home'
        else:
            project['url'] = "p-{!s}".format(project_id)

    # Initialize storage page (defaults to GCS)
    if current_app.config.get('TESTING'):
        log.warning('Not creating Google Cloud Storage bucket while running unit tests!')
    else:
        try:
            gcs_storage = GoogleCloudStorageBucket(str(project_id))
            if gcs_storage.bucket.exists():
                log.info('Created GCS instance for project %s', project_id)
            else:
                log.warning('Unable to create GCS instance for project %s', project_id)
        except gcs_exceptions.Forbidden as ex:
            log.warning('GCS forbids me to create CGS instance for project %s: %s', project_id, ex)

    # Commit the changes directly to the MongoDB; a PUT is not allowed yet,
    # as the project doesn't have a valid permission structure.
    projects_collection = current_app.data.driver.db['projects']
    result = projects_collection.update_one({'_id': project_id},
                                            {'$set': remove_private_keys(project)})
    if result.matched_count != 1:
        log.warning('Unable to update project %s: %s', project_id, result.raw_result)
        abort_with_error(500)


def create_new_project(project_name, user_id, overrides):
    """Creates a new project owned by the given user."""

    log.info('Creating new project "%s" for user %s', project_name, user_id)

    # Create the project itself, the rest will be done by the after-insert hook.
    project = {'description': '',
               'name': project_name,
               'node_types': [],
               'status': 'published',
               'user': user_id,
               'is_private': True,
               'permissions': {},
               'url': '',
               'summary': '',
               'category': 'assets',  # TODO: allow the user to choose this.
               }
    if overrides is not None:
        project.update(overrides)

    result, _, _, status = post_internal('projects', project)
    if status != 201:
        log.error('Unable to create project "%s": %s', project_name, result)
        return abort_with_error(status)
    project.update(result)

    # Now re-fetch the project, as both the initial document and the returned
    # result do not contain the same etag as the database. This also updates
    # other fields set by hooks.
    document = current_app.data.driver.db['projects'].find_one(project['_id'])
    project.update(document)

    log.info('Created project %s for user %s', project['_id'], user_id)

    return project


@blueprint.route('/create', methods=['POST'])
@authorization.require_login(require_roles={u'admin', u'subscriber', u'demo'})
def create_project(overrides=None):
    """Creates a new project."""

    if request.mimetype == 'application/json':
        project_name = request.json['name']
    else:
        project_name = request.form['project_name']
    user_id = g.current_user['user_id']

    project = create_new_project(project_name, user_id, overrides)

    # Return the project in the response.
    return jsonify(project, status=201, headers={'Location': '/projects/%s' % project['_id']})


@blueprint.route('/users', methods=['GET', 'POST'])
@authorization.require_login()
def project_manage_users():
    """Manage users of a project. In this initial implementation, we handle
    addition and removal of a user to the admin group of a project.
    No changes are done on the project itself.
    """

    projects_collection = current_app.data.driver.db['projects']
    users_collection = current_app.data.driver.db['users']

    # TODO: check if user is admin of the project before anything
    if request.method == 'GET':
        project_id = request.args['project_id']
        project = projects_collection.find_one({'_id': ObjectId(project_id)})
        admin_group_id = project['permissions']['groups'][0]['group']

        users = users_collection.find(
            {'groups': {'$in': [admin_group_id]}},
            {'username': 1, 'email': 1, 'full_name': 1})
        return jsonify({'_status': 'OK', '_items': list(users)})

    # The request is not a form, since it comes from the API sdk
    data = json.loads(request.data)
    project_id = ObjectId(data['project_id'])
    target_user_id = ObjectId(data['user_id'])
    action = data['action']
    current_user_id = g.current_user['user_id']

    project = projects_collection.find_one({'_id': project_id})

    # Check if the current_user is owner of the project, or removing themselves.
    remove_self = target_user_id == current_user_id and action == 'remove'
    if project['user'] != current_user_id and not remove_self:
        return abort_with_error(403)

    admin_group = get_admin_group(project)

    # Get the user and add the admin group to it
    if action == 'add':
        operation = '$addToSet'
        log.info('project_manage_users: Adding user %s to admin group of project %s',
                 target_user_id, project_id)
    elif action == 'remove':
        log.info('project_manage_users: Removing user %s from admin group of project %s',
                 target_user_id, project_id)
        operation = '$pull'
    else:
        log.warning('project_manage_users: Unsupported action %r called by user %s',
                    action, current_user_id)
        raise wz_exceptions.UnprocessableEntity()

    users_collection.update({'_id': target_user_id},
                            {operation: {'groups': admin_group['_id']}})

    user = users_collection.find_one({'_id': target_user_id},
                                     {'username': 1, 'email': 1,
                                      'full_name': 1})

    if not user:
        return jsonify({'_status': 'ERROR'}), 404

    user['_status'] = 'OK'
    return jsonify(user)


def get_admin_group(project):
    """Returns the admin group for the project."""

    groups_collection = current_app.data.driver.db['groups']

    # TODO: search through all groups to find the one with the project ID as its name.
    admin_group_id = ObjectId(project['permissions']['groups'][0]['group'])
    group = groups_collection.find_one({'_id': admin_group_id})

    if group is None:
        raise ValueError('Unable to handle project without admin group.')

    if group['name'] != str(project['_id']):
        return abort_with_error(403)

    return group


def abort_with_error(status):
    """Aborts with the given status, or 500 if the status doesn't indicate an error.

    If the status is < 400, status 500 is used instead.
    """

    abort(status if status // 100 >= 4 else 500)


@blueprint.route('/<string:project_id>/quotas')
@require_login()
def project_quotas(project_id):
    """Returns information about the project's limits."""

    # Check that the user has GET permissions on the project itself.
    project = mongo.find_one_or_404('projects', project_id)
    check_permissions('projects', project, 'GET')

    file_size_used = project_total_file_size(project_id)

    info = {
        'file_size_quota': None,  # TODO: implement this later.
        'file_size_used': file_size_used,
    }

    return jsonify(info)


def project_total_file_size(project_id):
    """Returns the total number of bytes used by files of this project."""

    files = current_app.data.driver.db['files']
    file_size_used = files.aggregate([
        {'$match': {'project': ObjectId(project_id)}},
        {'$project': {'length_aggregate_in_bytes': 1}},
        {'$group': {'_id': None,
                    'all_files': {'$sum': '$length_aggregate_in_bytes'}}}
    ])

    # The aggregate function returns a cursor, not a document.
    try:
        return next(file_size_used)['all_files']
    except StopIteration:
        # No files used at all.
        return 0


def before_returning_project_permissions(response):
    # Run validation process, since GET on nodes entry point is public
    check_permissions('projects', response, 'GET', append_allowed_methods=True)


def before_returning_project_resource_permissions(response):
    # Return only those projects the user has access to.
    allow = []
    for project in response['_items']:
        if authorization.has_permissions('projects', project,
                                         'GET', append_allowed_methods=True):
            allow.append(project)
        else:
            log.debug('User %s requested project %s, but has no access to it; filtered out.',
                      authentication.current_user_id(), project['_id'])

    response['_items'] = allow


def project_node_type_has_method(response):
    """Check for a specific request arg, and generate the allowed_methods
    list for the required node_type.
    """

    node_type_name = request.args.get('node_type', '')

    # Proceed only when a node_type has been requested
    if not node_type_name:
        return

    # Look up the node type in the project document
    if not any(node_type.get('name') == node_type_name
               for node_type in response['node_types']):
        return abort(404)

    # Check permissions and append the allowed_methods to the node_type
    check_permissions('projects', response, 'GET', append_allowed_methods=True,
                      check_node_type=node_type_name)


def projects_node_type_has_method(response):
    for project in response['_items']:
        project_node_type_has_method(project)


@blueprint.route('/<project_id>/<node_type>', methods=['OPTIONS', 'GET'])
def get_allowed_methods(project_id=None, node_type=None):
    """Returns allowed methods to create a node of a certain type.

    Either project_id or parent_node_id must be given. If the latter is given,
    the former is deduced from it.
    """

    project = mongo.find_one_or_404('projects', str2id(project_id))
    proj_methods = authorization.compute_allowed_methods('projects', project, node_type)

    resp = make_response()
    resp.headers['Allowed'] = ', '.join(sorted(proj_methods))
    resp.status_code = 204

    return resp


def setup_app(app, url_prefix):
    app.on_replace_projects += override_is_private_field
    app.on_replace_projects += before_edit_check_permissions
    app.on_replace_projects += protect_sensitive_fields
    app.on_update_projects += override_is_private_field
    app.on_update_projects += before_edit_check_permissions
    app.on_update_projects += protect_sensitive_fields
    app.on_delete_item_projects += before_delete_project
    app.on_insert_projects += before_inserting_override_is_private_field
    app.on_insert_projects += before_inserting_projects
    app.on_inserted_projects += after_inserting_projects

    app.on_fetched_item_projects += before_returning_project_permissions
    app.on_fetched_resource_projects += before_returning_project_resource_permissions
    app.on_fetched_item_projects += project_node_type_has_method
    app.on_fetched_resource_projects += projects_node_type_has_method

    app.register_blueprint(blueprint, url_prefix=url_prefix)
@@ -1,3 +0,0 @@
# Ignore everything but self
*
!.gitignore
@@ -1,98 +0,0 @@
import logging

from bson import ObjectId
from flask import current_app

from application import algolia_index_users
from application import algolia_index_nodes
from application.modules.file_storage import generate_link
from . import skip_when_testing

log = logging.getLogger(__name__)

INDEX_ALLOWED_USER_ROLES = {'admin', 'subscriber', 'demo'}
INDEX_ALLOWED_NODE_TYPES = {'asset', 'texture', 'group', 'hdri'}


@skip_when_testing
def algolia_index_user_save(user):
    if algolia_index_users is None:
        return
    # Strip unneeded roles
    if 'roles' in user:
        roles = set(user['roles']).intersection(INDEX_ALLOWED_USER_ROLES)
    else:
        roles = set()
    if algolia_index_users:
        # Create or update Algolia index for the user
        algolia_index_users.save_object({
            'objectID': user['_id'],
            'full_name': user['full_name'],
            'username': user['username'],
            'roles': list(roles),
            'groups': user['groups'],
            'email': user['email']
        })


@skip_when_testing
def algolia_index_node_save(node):
    if node['node_type'] in INDEX_ALLOWED_NODE_TYPES and algolia_index_nodes:
        # If a node does not have status 'published', do not index
        if 'status' in node['properties'] \
                and node['properties']['status'] != 'published':
            return

        projects_collection = current_app.data.driver.db['projects']
        project = projects_collection.find_one({'_id': ObjectId(node['project'])})

        users_collection = current_app.data.driver.db['users']
        user = users_collection.find_one({'_id': ObjectId(node['user'])})

        node_ob = {
            'objectID': node['_id'],
            'name': node['name'],
            'project': {
                '_id': project['_id'],
                'name': project['name']
            },
            'created': node['_created'],
            'updated': node['_updated'],
            'node_type': node['node_type'],
            'user': {
                '_id': user['_id'],
                'full_name': user['full_name']
            },
        }
        if 'description' in node and node['description']:
            node_ob['description'] = node['description']
        if 'picture' in node and node['picture']:
            files_collection = current_app.data.driver.db['files']
            lookup = {'_id': ObjectId(node['picture'])}
            picture = files_collection.find_one(lookup)
            if picture['backend'] == 'gcs':
                variation_t = next((item for item in picture['variations']
                                    if item['size'] == 't'), None)
                if variation_t:
                    node_ob['picture'] = generate_link(picture['backend'],
                        variation_t['file_path'], project_id=str(picture['project']),
                        is_public=True)
        # If the node has world permissions, compute the Free permission
        if 'permissions' in node and 'world' in node['permissions']:
            if 'GET' in node['permissions']['world']:
                node_ob['is_free'] = True
        # Append the media key if the node is of node_type 'asset'
        if node['node_type'] == 'asset':
            node_ob['media'] = node['properties']['content_type']
        # Add tags
        if 'tags' in node['properties']:
            node_ob['tags'] = node['properties']['tags']

        algolia_index_nodes.save_object(node_ob)


@skip_when_testing
def algolia_index_node_delete(node):
    if algolia_index_nodes is None:
        return
    algolia_index_nodes.delete_object(node['_id'])
pillar/attrs_extra.py (new file, 17 lines)
@@ -0,0 +1,17 @@
"""Extra functionality for attrs."""

import logging

import attr


def log(name):
    """Returns a logger attr.ib

    :param name: name to pass to logging.getLogger()
    :rtype: attr.ib
    """
    return attr.ib(default=logging.getLogger(name),
                   repr=False,
                   hash=False,
                   cmp=False)
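
A small usage sketch of the log() helper, assuming an attrs-decorated class (the class itself is illustrative):

    @attr.s
    class NodeMover(object):
        db = attr.ib()
        _log = log('%s.NodeMover' % __name__)

        def move(self, node_id):
            self._log.info('moving node %s', node_id)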
pillar/auth/__init__.py (new file, 116 lines)
@@ -0,0 +1,116 @@
"""Authentication code common to the web and api modules."""

import logging

from flask import current_app, session
import flask_login
import flask_oauthlib.client

from ..api import utils, blender_id
from ..api.utils import authentication

log = logging.getLogger(__name__)


class UserClass(flask_login.UserMixin):
    def __init__(self, token):
        # We store the Token instead of ID
        self.id = token
        self.username = None
        self.full_name = None
        self.objectid = None
        self.gravatar = None
        self.email = None
        self.roles = []

    def has_role(self, *roles):
        """Returns True iff the user has one or more of the given roles."""

        if not self.roles:
            return False

        return bool(set(self.roles).intersection(set(roles)))


class AnonymousUser(flask_login.AnonymousUserMixin):
    @property
    def objectid(self):
        """Anonymous user has no settable objectid."""
        return None

    def has_role(self, *roles):
        return False


def _load_user(token):
    """Loads a user by their token.

    :returns: returns a UserClass instance if logged in, or an AnonymousUser() if not.
    :rtype: UserClass
    """

    db_user = authentication.validate_this_token(token)
    if not db_user:
        return AnonymousUser()

    login_user = UserClass(token)
    login_user.email = db_user['email']
    login_user.objectid = unicode(db_user['_id'])
    login_user.username = db_user['username']
    login_user.gravatar = utils.gravatar(db_user['email'])
    login_user.roles = db_user.get('roles', [])
    login_user.groups = [unicode(g) for g in db_user['groups'] or ()]
    login_user.full_name = db_user.get('full_name', '')

    return login_user


def config_login_manager(app):
    """Configures the Flask-Login manager, used for the web endpoints."""

    login_manager = flask_login.LoginManager()
    login_manager.init_app(app)
    login_manager.login_view = "users.login"
    login_manager.anonymous_user = AnonymousUser
    # noinspection PyTypeChecker
    login_manager.user_loader(_load_user)

    return login_manager


def login_user(oauth_token):
    """Log in the user identified by the given token."""

    user = UserClass(oauth_token)
    flask_login.login_user(user)


def get_blender_id_oauth_token():
    """Returns a tuple (token, ''), for use with flask_oauthlib."""
    return session.get('blender_id_oauth_token')


def config_oauth_login(app):
    config = app.config
    if not config.get('SOCIAL_BLENDER_ID'):
        log.info('OAuth Blender-ID login not setup.')
        return None

    oauth = flask_oauthlib.client.OAuth(app)
    social_blender_id = config.get('SOCIAL_BLENDER_ID')

    oauth_blender_id = oauth.remote_app(
        'blender_id',
        consumer_key=social_blender_id['app_id'],
        consumer_secret=social_blender_id['app_secret'],
        request_token_params={'scope': 'email'},
        base_url=config['BLENDER_ID_OAUTH_URL'],
        request_token_url=None,
        access_token_url=config['BLENDER_ID_BASE_ACCESS_TOKEN_URL'],
        authorize_url=config['BLENDER_ID_AUTHORIZE_URL']
    )

    oauth_blender_id.tokengetter(get_blender_id_oauth_token)
    log.info('OAuth Blender-ID login setup as %s', social_blender_id['app_id'])

    return oauth_blender_id
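
A wiring sketch showing how these setup functions would be called from application startup (the surrounding function name is an assumption, not part of this file):

    def setup_web_auth(app):
        login_manager = config_login_manager(app)
        oauth_blender_id = config_oauth_login(app)
        return login_manager, oauth_blender_id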
pillar/auth/subscriptions.py (new file, 51 lines)
@@ -0,0 +1,51 @@
"""Cloud subscription info.

Connects to the external subscription server to obtain user info.
"""

import logging

from flask import current_app
import requests
from requests.adapters import HTTPAdapter

log = logging.getLogger(__name__)


def fetch_user(email):
    """Returns the user info dict from the external subscriptions management server.

    :returns: the store user info, or None if the user can't be found or there
        was an error communicating. A dict like this is returned:
        {
            "shop_id": 700,
            "cloud_access": 1,
            "paid_balance": 314.75,
            "balance_currency": "EUR",
            "start_date": "2014-08-25 17:05:46",
            "expiration_date": "2016-08-24 13:38:45",
            "subscription_status": "wc-active",
            "expiration_date_approximate": true
        }
    :rtype: dict
    """

    external_subscriptions_server = current_app.config['EXTERNAL_SUBSCRIPTIONS_MANAGEMENT_SERVER']

    log.debug('Connecting to store at %s?blenderid=%s', external_subscriptions_server, email)

    # Retry a few times when contacting the store.
    s = requests.Session()
    s.mount(external_subscriptions_server, HTTPAdapter(max_retries=5))
    r = s.get(external_subscriptions_server, params={'blenderid': email},
              verify=current_app.config['TLS_CERT_FILE'])

    if r.status_code != 200:
        log.warning("Error communicating with %s, code=%i, unable to check "
                    "subscription status of user %s",
                    external_subscriptions_server, r.status_code, email)
        return None

    store_user = r.json()
    return store_user
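
A usage sketch, assuming an application context so current_app is available (the email address is made up):

    store_user = fetch_user(u'subscriber@example.com')
    if store_user and store_user.get('cloud_access'):
        pass  # e.g. grant subscriber-level access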
pillar/cli.py (new file, 795 lines)
@@ -0,0 +1,795 @@
"""Commandline interface.

Run commands with 'flask <command>'
"""

from __future__ import print_function, division

import copy
import logging

from bson.objectid import ObjectId, InvalidId
from eve.methods.put import put_internal
from eve.methods.post import post_internal

from flask import current_app
from flask_script import Manager

log = logging.getLogger(__name__)
manager = Manager(current_app)

manager_maintenance = Manager(
    current_app, usage="Maintenance scripts, to update user groups")
manager_setup = Manager(
    current_app, usage="Setup utilities, like setup_db() or create_blog()")
manager_operations = Manager(
    current_app, usage="Backend operations, like moving nodes across projects")


@manager_setup.command
def setup_db(admin_email):
    """Set up the database.

    - Create admin, subscriber and demo Group collection
    - Create admin user (must use valid blender-id credentials)
    - Create one project
    """

    # Create default groups
    groups_list = []
    for group in ['admin', 'subscriber', 'demo']:
        g = {'name': group}
        g = current_app.post_internal('groups', g)
        groups_list.append(g[0]['_id'])
        print("Creating group {0}".format(group))

    # Create admin user
    user = {'username': admin_email,
            'groups': groups_list,
            'roles': ['admin', 'subscriber', 'demo'],
            'settings': {'email_communications': 1},
            'auth': [],
            'full_name': admin_email,
            'email': admin_email}
    result, _, _, status = current_app.post_internal('users', user)
    if status != 201:
        raise SystemExit('Error creating user {}: {}'.format(admin_email, result))
    user.update(result)
    print("Created user {0}".format(user['_id']))

    # Create a default project by faking a POST request.
    with current_app.test_request_context(data={'project_name': u'Default Project'}):
        from flask import g
        from pillar.api.projects import routes as proj_routes

        g.current_user = {'user_id': user['_id'],
                          'groups': user['groups'],
                          'roles': set(user['roles'])}

        proj_routes.create_project(overrides={'url': 'default-project',
                                              'is_private': False})


@manager_maintenance.command
def find_duplicate_users():
    """Finds users that have the same BlenderID user_id."""

    from collections import defaultdict

    users_coll = current_app.data.driver.db['users']
    nodes_coll = current_app.data.driver.db['nodes']
    projects_coll = current_app.data.driver.db['projects']

    found_users = defaultdict(list)

    for user in users_coll.find():
        blender_ids = [auth['user_id'] for auth in user['auth']
                       if auth['provider'] == 'blender-id']
        if not blender_ids:
            continue
        blender_id = blender_ids[0]
        found_users[blender_id].append(user)

    for blender_id, users in found_users.iteritems():
        if len(users) == 1:
            continue

        usernames = ', '.join(user['username'] for user in users)
        print('Blender ID: %5s has %i users: %s' % (
            blender_id, len(users), usernames))

        for user in users:
            print('  %s owns %i nodes and %i projects' % (
                user['username'],
                nodes_coll.count({'user': user['_id']}),
                projects_coll.count({'user': user['_id']}),
            ))


@manager_maintenance.command
def sync_role_groups(do_revoke_groups):
    """For each user, synchronizes roles and group membership.

    This ensures that everybody with the 'subscriber' role is also a member of the
    'subscriber' group, and that people without the 'subscriber' role are not members
    of that group. Same for the admin and demo groups.

    When do_revoke_groups=False (the default), people are only added to groups.
    When do_revoke_groups=True, people are also removed from groups.
    """

    from pillar.api import service

    if do_revoke_groups not in {'true', 'false'}:
        print('Use either "true" or "false" as first argument.')
        print('When passing "false", people are only added to groups.')
        print('When passing "true", people are also removed from groups.')
        raise SystemExit()
    do_revoke_groups = do_revoke_groups == 'true'

    service.fetch_role_to_group_id_map()

    users_coll = current_app.data.driver.db['users']
    groups_coll = current_app.data.driver.db['groups']

    group_names = {}

    def gname(gid):
        try:
            return group_names[gid]
        except KeyError:
            name = groups_coll.find_one(gid, projection={'name': 1})['name']
            name = str(name)
            group_names[gid] = name
            return name

    ok_users = bad_users = 0
    for user in users_coll.find():
        grant_groups = set()
        revoke_groups = set()
        current_groups = set(user.get('groups', []))
        user_roles = user.get('roles', set())

        for role in service.ROLES_WITH_GROUPS:
            action = 'grant' if role in user_roles else 'revoke'
            groups = service.manage_user_group_membership(user, role, action)

            if groups is None:
                # No changes required
                continue

            if groups == current_groups:
                continue

            grant_groups.update(groups.difference(current_groups))
            revoke_groups.update(current_groups.difference(groups))

        if grant_groups or revoke_groups:
            bad_users += 1

            expected_groups = current_groups.union(grant_groups).difference(revoke_groups)

            print('Discrepancy for user %s/%s:' % (user['_id'], user['full_name'].encode('utf8')))
            print('  - actual groups  :', sorted(gname(gid) for gid in user.get('groups')))
            print('  - expected groups:', sorted(gname(gid) for gid in expected_groups))
            print('  - will grant     :', sorted(gname(gid) for gid in grant_groups))

            if do_revoke_groups:
                label = 'WILL REVOKE '
            else:
                label = 'could revoke'
            print('  - %s   :' % label, sorted(gname(gid) for gid in revoke_groups))

            if grant_groups and revoke_groups:
                print('      ------ CAREFUL this one has BOTH grant AND revoke -----')

            # Determine which changes we'll apply
            final_groups = current_groups.union(grant_groups)
            if do_revoke_groups:
                final_groups.difference_update(revoke_groups)
            print('  - final groups   :', sorted(gname(gid) for gid in final_groups))

            # Perform the actual update
            users_coll.update_one({'_id': user['_id']},
                                  {'$set': {'groups': list(final_groups)}})
        else:
            ok_users += 1

    print('%i bad and %i ok users seen.' % (bad_users, ok_users))


@manager_maintenance.command
def sync_project_groups(user_email, fix):
    """Gives the user access to their self-created projects."""

    if fix.lower() not in {'true', 'false'}:
        print('Use either "true" or "false" as second argument.')
        print('When passing "false", only a report is produced.')
        print('When passing "true", group membership is fixed.')
        raise SystemExit()
    fix = fix.lower() == 'true'

    users_coll = current_app.data.driver.db['users']
    proj_coll = current_app.data.driver.db['projects']
    groups_coll = current_app.data.driver.db['groups']

    # Find by email or by user ID
    if '@' in user_email:
        where = {'email': user_email}
    else:
        try:
            where = {'_id': ObjectId(user_email)}
        except InvalidId:
            log.warning('Invalid ObjectID: %s', user_email)
            return

    user = users_coll.find_one(where, projection={'_id': 1, 'groups': 1})
    if user is None:
        log.error('User %s not found', where)
        raise SystemExit()

    user_groups = set(user['groups'])
    user_id = user['_id']
    log.info('Updating projects for user %s', user_id)

    ok_groups = missing_groups = 0
    for proj in proj_coll.find({'user': user_id}):
        project_id = proj['_id']
        log.info('Investigating project %s (%s)', project_id, proj['name'])

        # Find the admin group
        admin_group = groups_coll.find_one({'name': str(project_id)}, projection={'_id': 1})
        if admin_group is None:
            log.warning('No admin group for project %s', project_id)
            continue
        group_id = admin_group['_id']

        # Check membership
        if group_id not in user_groups:
            log.info('Missing group membership')
            missing_groups += 1
            user_groups.add(group_id)
        else:
            ok_groups += 1

    log.info('User %s was missing %i group memberships; %i projects were ok.',
             user_id, missing_groups, ok_groups)

    if missing_groups > 0 and fix:
        log.info('Updating database.')
        result = users_coll.update_one({'_id': user_id},
                                       {'$set': {'groups': list(user_groups)}})
        log.info('Updated %i user.', result.modified_count)


@manager_maintenance.command
def check_home_project_groups():
    """Checks all users' group membership of their home project admin group."""

    users_coll = current_app.data.driver.db['users']
    proj_coll = current_app.data.driver.db['projects']

    good = bad = 0
    for proj in proj_coll.find({'category': 'home'}):
        try:
            admin_group_perms = proj['permissions']['groups'][0]
        except IndexError:
            log.error('Project %s has no admin group', proj['_id'])
            return 255
        except KeyError:
            log.error('Project %s has no group permissions at all', proj['_id'])
            return 255

        user = users_coll.find_one({'_id': proj['user']},
                                   projection={'groups': 1})
        if user is None:
            log.error('Project %s has non-existing owner %s', proj['_id'], proj['user'])
            return 255

        user_groups = set(user['groups'])
        admin_group_id = admin_group_perms['group']
        if admin_group_id in user_groups:
            # All is fine!
            good += 1
            continue

        log.warning('User %s has no admin rights to home project %s -- needs group %s',
                    proj['user'], proj['_id'], admin_group_id)
        bad += 1

    log.info('%i projects OK, %i projects in error', good, bad)
    return bad


@manager_setup.command
def badger(action, user_email, role):
    from pillar.api import service

    with current_app.app_context():
        service.fetch_role_to_group_id_map()
        response, status = service.do_badger(action, user_email, role)

    if status == 204:
        log.info('Done.')
    else:
        log.info('Response: %s', response)
        log.info('Status  : %i', status)


def create_service_account(email, service_roles, service_definition, update_existing=None):
    from pillar.api import service
    from pillar.api.utils import dumps

    account, token = service.create_service_account(
        email,
        service_roles,
        service_definition,
        update_existing=update_existing
    )

    print('Service account information:')
    print(dumps(account, indent=4, sort_keys=True))
    print()
    print('Access token: %s' % token['token'])
    print('  expires on: %s' % token['expire_time'])
    return account, token


@manager_setup.command
def create_badger_account(email, badges):
    """
    Creates a new service account that can give badges (i.e. roles).

    :param email: email address associated with the account
    :param badges: single space-separated argument containing the roles
        this account can assign and revoke.
    """

    create_service_account(email, [u'badger'], {'badger': badges.strip().split()})


@manager_setup.command
def create_urler_account(email):
    """Creates a new service account that can fetch all project URLs."""

    create_service_account(email, [u'urler'], {})


@manager_setup.command
def create_local_user_account(email, password):
    from pillar.api.local_auth import create_local_user
    create_local_user(email, password)


@manager_maintenance.command
@manager_maintenance.option('-c', '--chunk', dest='chunk_size', default=50,
                            help='Number of links to update, use 0 to update all.')
@manager_maintenance.option('-q', '--quiet', dest='quiet', action='store_true', default=False)
@manager_maintenance.option('-w', '--window', dest='window', default=12,
                            help='Refresh links that expire in this many hours.')
def refresh_backend_links(backend_name, chunk_size=50, quiet=False, window=12):
    """Refreshes all file links that are using a certain storage backend.

    Use `--chunk 0` to refresh all links.
    """

    chunk_size = int(chunk_size)  # CLI parameters are passed as strings
    window = int(window)

    loglevel = logging.WARNING if quiet else logging.DEBUG
    logging.getLogger('pillar.api.file_storage').setLevel(loglevel)

    from pillar.api import file_storage

    file_storage.refresh_links_for_backend(backend_name, chunk_size, window * 3600)


@manager_maintenance.command
def expire_all_project_links(project_uuid):
    """Expires all file links for a certain project without refreshing.

    This is just for testing.
    """

    import datetime
    import bson.tz_util

    files_collection = current_app.data.driver.db['files']

    now = datetime.datetime.now(tz=bson.tz_util.utc)
    expires = now - datetime.timedelta(days=1)

    result = files_collection.update_many(
        {'project': ObjectId(project_uuid)},
        {'$set': {'link_expires': expires}}
    )

    print('Expired %i links' % result.matched_count)


@manager_operations.command
def file_change_backend(file_id, dest_backend='gcs'):
    """Given a file document, move it to the specified backend (if not already
    there) and update the document to reflect that.
    Files on the original backend are not deleted automatically.
    """

    from pillar.api.file_storage.moving import change_file_storage_backend
    change_file_storage_backend(file_id, dest_backend)


@manager_operations.command
def mass_copy_between_backends(src_backend='cdnsun', dest_backend='gcs'):
    """Copies all files from one backend to the other, updating them in Mongo.

    Files on the original backend are not deleted.
    """

    import requests.exceptions

    from pillar.api.file_storage import moving

    logging.getLogger('pillar').setLevel(logging.INFO)
    log.info('Mass-moving all files from backend %r to %r',
             src_backend, dest_backend)

    files_coll = current_app.data.driver.db['files']

    fdocs = files_coll.find({'backend': src_backend},
                            projection={'_id': True})
    copied_ok = 0
    copy_errs = 0
    try:
        for fdoc in fdocs:
            try:
                moving.change_file_storage_backend(fdoc['_id'], dest_backend)
            except moving.PrerequisiteNotMetError as ex:
                log.error('Error copying %s: %s', fdoc['_id'], ex)
                copy_errs += 1
            except requests.exceptions.HTTPError as ex:
                log.error('Error copying %s (%s): %s',
                          fdoc['_id'], ex.response.url, ex)
                copy_errs += 1
            except Exception:
                log.exception('Unexpected exception handling file %s', fdoc['_id'])
                copy_errs += 1
            else:
                copied_ok += 1
    except KeyboardInterrupt:
        log.error('Stopping due to keyboard interrupt')

    log.info('%i files copied ok', copied_ok)
    log.info('%i files we did not copy', copy_errs)


@manager_operations.command
@manager_operations.option('-p', '--project', dest='dest_proj_url',
                           help='Destination project URL')
@manager_operations.option('-f', '--force', dest='force', action='store_true', default=False,
                           help='Move even when already at the given project.')
@manager_operations.option('-s', '--skip-gcs', dest='skip_gcs', action='store_true', default=False,
                           help='Skip file handling on GCS, just update the database.')
def move_group_node_project(node_uuid, dest_proj_url, force=False, skip_gcs=False):
    """Copies all files from one project to the other, then moves the nodes.

    The node and all its children are moved recursively.
    """

    from pillar.api.nodes import moving
    from pillar.api.utils import str2id

    logging.getLogger('pillar').setLevel(logging.INFO)

    db = current_app.db()
    nodes_coll = db['nodes']
    projs_coll = db['projects']

    # Parse CLI args and get the node, source and destination projects.
    node_uuid = str2id(node_uuid)
    node = nodes_coll.find_one({'_id': node_uuid})
    if node is None:
        log.error("Node %s can't be found!", node_uuid)
        return 1

    if node.get('parent', None):
        log.error('Node cannot have a parent, it must be top-level.')
        return 4

    src_proj = projs_coll.find_one({'_id': node['project']})
    dest_proj = projs_coll.find_one({'url': dest_proj_url})

    if src_proj is None:
        log.warning("Node's source project %s doesn't exist!", node['project'])
    if dest_proj is None:
        log.error("Destination project url='%s' doesn't exist.", dest_proj_url)
        return 2
    if src_proj['_id'] == dest_proj['_id']:
        if force:
            log.warning("Node is already at project url='%s'!", dest_proj_url)
        else:
            log.error("Node is already at project url='%s'!", dest_proj_url)
            return 3

    log.info("Mass-moving %s (%s) and children from project '%s' (%s) to '%s' (%s)",
             node_uuid, node['name'], src_proj['url'], src_proj['_id'], dest_proj['url'],
             dest_proj['_id'])

    mover = moving.NodeMover(db=db, skip_gcs=skip_gcs)
    mover.change_project(node, dest_proj)

    log.info('Done moving.')


@manager_maintenance.command
@manager_maintenance.option('-p', '--project', dest='proj_url', nargs='?',
                            help='Project URL')
@manager_maintenance.option('-a', '--all', dest='all_projects', action='store_true', default=False,
                            help='Replace on all projects.')
def replace_pillar_node_type_schemas(proj_url=None, all_projects=False):
    """Replaces the project's node type schemas with the standard Pillar ones.

    Non-standard node types are left alone.
    """

    if bool(proj_url) == all_projects:
        log.error('Use either --project or --all.')
        return 1

    from pillar.api.utils.authentication import force_cli_user
    force_cli_user()

    from pillar.api.node_types import PILLAR_NAMED_NODE_TYPES
    from pillar.api.utils import remove_private_keys

    projects_collection = current_app.db()['projects']

    def handle_project(project):
        log.info('Handling project %s', project['url'])
        is_public_proj = not project.get('is_private', True)

        for proj_nt in project['node_types']:
            nt_name = proj_nt['name']
            try:
                pillar_nt = PILLAR_NAMED_NODE_TYPES[nt_name]
            except KeyError:
                log.info('  - skipping non-standard node type "%s"', nt_name)
                continue

            log.info('  - replacing schema on node type "%s"', nt_name)

            # This leaves node type keys intact that aren't in Pillar's node_type_xxx definitions,
            # such as permissions.
            proj_nt.update(copy.deepcopy(pillar_nt))

            # On our own public projects we want to be able to set license stuff.
            if is_public_proj:
                proj_nt['form_schema'].pop('license_type', None)
                proj_nt['form_schema'].pop('license_notes', None)

        # Use Eve to PUT, so we have schema checking.
        db_proj = remove_private_keys(project)
        r, _, _, status = put_internal('projects', db_proj, _id=project['_id'])
        if status != 200:
            log.error('Error %i storing altered project %s %s', status, project['_id'], r)
            raise SystemExit('Error storing project, see log.')
        log.info('Project saved successfully.')

    if all_projects:
        for project in projects_collection.find():
            handle_project(project)
        return

    project = projects_collection.find_one({'url': proj_url})
    if not project:
        log.error('Project url=%s not found', proj_url)
        return 3

    handle_project(project)


@manager_maintenance.command
def remarkdown_comments():
    """Retranslates all Markdown to HTML for all comment nodes.
    """

    from pillar.api.nodes import convert_markdown

    nodes_collection = current_app.db()['nodes']
    comments = nodes_collection.find({'node_type': 'comment'},
                                     projection={'properties.content': 1,
                                                 'node_type': 1})

    updated = identical = skipped = errors = 0
    for node in comments:
        convert_markdown(node)
        node_id = node['_id']

        try:
            content_html = node['properties']['content_html']
        except KeyError:
            log.warning('Node %s has no content_html', node_id)
            skipped += 1
            continue

        result = nodes_collection.update_one(
            {'_id': node_id},
            {'$set': {'properties.content_html': content_html}}
        )
        if result.matched_count != 1:
            log.error('Unable to update node %s', node_id)
            errors += 1
            continue

        if result.modified_count:
            updated += 1
        else:
            identical += 1

    log.info('updated  : %i', updated)
    log.info('identical: %i', identical)
    log.info('skipped  : %i', skipped)
    log.info('errors   : %i', errors)


@manager_maintenance.command
@manager_maintenance.option('-p', '--project', dest='proj_url', nargs='?',
                            help='Project URL')
@manager_maintenance.option('-a', '--all', dest='all_projects', action='store_true', default=False,
                            help='Replace on all projects.')
def upgrade_attachment_schema(proj_url=None, all_projects=False):
    """Replaces the project's attachments with the new schema.

    Updates both the schema definition and the nodes with attachments (asset, page, post).
    """

    if bool(proj_url) == all_projects:
        log.error('Use either --project or --all.')
        return 1

    from pillar.api.utils.authentication import force_cli_user
    force_cli_user()

    from pillar.api.node_types.asset import node_type_asset
    from pillar.api.node_types.page import node_type_page
    from pillar.api.node_types.post import node_type_post
    from pillar.api.node_types import _attachments_embedded_schema
    from pillar.api.utils import remove_private_keys

    # Node types that support attachments
    node_types = (node_type_asset, node_type_page, node_type_post)
    nts_by_name = {nt['name']: nt for nt in node_types}

    db = current_app.db()
    projects_coll = db['projects']
    nodes_coll = db['nodes']

    def handle_project(project):
        log.info('Handling project %s', project['url'])

        replace_schemas(project)
        replace_attachments(project)

    def replace_schemas(project):
        for proj_nt in project['node_types']:
            nt_name = proj_nt['name']
            if nt_name not in nts_by_name:
                continue

            log.info('  - replacing attachment schema on node type "%s"', nt_name)
            pillar_nt = nts_by_name[nt_name]
            proj_nt['dyn_schema']['attachments'] = copy.deepcopy(_attachments_embedded_schema)

            # Get the form schema the same as the official Pillar one, but only for attachments.
            try:
                pillar_form_schema = pillar_nt['form_schema']['attachments']
            except KeyError:
                proj_nt['form_schema'].pop('attachments', None)
            else:
                proj_nt['form_schema']['attachments'] = pillar_form_schema

        # Use Eve to PUT, so we have schema checking.
        db_proj = remove_private_keys(project)
        r, _, _, status = put_internal('projects', db_proj, _id=project['_id'])
        if status != 200:
            log.error('Error %i storing altered project %s %s', status, project['_id'], r)
            raise SystemExit('Error storing project, see log.')
        log.info('Project saved successfully.')

    def replace_attachments(project):
        log.info('Upgrading nodes for project %s', project['url'])
        nodes = nodes_coll.find({
            '_deleted': False,
            'project': project['_id'],
            'node_type': {'$in': list(nts_by_name)},
            'properties.attachments': {'$exists': True},
        })
        for node in nodes:
            attachments = node[u'properties'][u'attachments']
            if isinstance(attachments, dict):
                # This node has already been upgraded.
                continue

            log.info('    - Updating schema on node %s (%s)', node['_id'], node.get('name'))
            new_atts = {}
            for field_info in attachments:
                for attachment in field_info.get('files', []):
                    new_atts[attachment[u'slug']] = {u'oid': attachment[u'file']}

            node[u'properties'][u'attachments'] = new_atts

            # Use Eve to PUT, so we have schema checking.
            db_node = remove_private_keys(node)
            r, _, _, status = put_internal('nodes', db_node, _id=node['_id'])
            if status != 200:
                log.error('Error %i storing altered node %s %s', status, node['_id'], r)
                raise SystemExit('Error storing node; see log.')

    if all_projects:
        for proj in projects_coll.find():
            handle_project(proj)
        return

    proj = projects_coll.find_one({'url': proj_url})
    if not proj:
        log.error('Project url=%s not found', proj_url)
        return 3

    handle_project(proj)


@manager_setup.command
def create_blog(proj_url):
    """Adds a blog to the project."""

    from pillar.api.utils.authentication import force_cli_user
    from pillar.api.utils import node_type_utils
    from pillar.api.node_types.blog import node_type_blog
    from pillar.api.node_types.post import node_type_post
    from pillar.api.utils import remove_private_keys

    force_cli_user()

    db = current_app.db()

    # Add the blog & post node types to the project.
    projects_coll = db['projects']
    proj = projects_coll.find_one({'url': proj_url})
    if not proj:
        log.error('Project url=%s not found', proj_url)
        return 3

    node_type_utils.add_to_project(proj,
                                   (node_type_blog, node_type_post),
                                   replace_existing=False)

    proj_id = proj['_id']
    r, _, _, status = put_internal('projects', remove_private_keys(proj), _id=proj_id)
    if status != 200:
        log.error('Error %i storing altered project %s %s', status, proj_id, r)
        return 4
    log.info('Project saved successfully.')

    # Create a blog node.
    nodes_coll = db['nodes']
    blog = nodes_coll.find_one({'node_type': 'blog', 'project': proj_id})
    if not blog:
        blog = {
            u'node_type': node_type_blog['name'],
            u'name': u'Blog',
            u'description': u'',
            u'properties': {},
            u'project': proj_id,
        }
        r, _, _, status = post_internal('nodes', blog)
        if status != 201:
            log.error('Error %i storing blog node: %s', status, r)
            return 4
        log.info('Blog node saved successfully: %s', r)
    else:
        log.info('Blog node already exists: %s', blog)

    return 0


manager.add_command("maintenance", manager_maintenance)
manager.add_command("setup", manager_setup)
manager.add_command("operations", manager_operations)
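The three sub-managers above group the commands under `maintenance`, `setup` and `operations` prefixes on the command line. As a rough sketch of how they are driven (the `manage.py` entry point shown here is an assumption, not part of this diff; `PillarServer('.')` follows the usage shown in the pillar/extension.py docstring further down):

```python
# manage.py -- hypothetical entry point. The Manager objects in pillar/cli.py
# bind to current_app, so an application context must be active when they run.
from pillar import PillarServer
from pillar.cli import manager

app = PillarServer('.')  # assumed constructor, as in the extension.py docstring

if __name__ == '__main__':
    with app.app_context():
        # e.g. `python manage.py setup setup_db admin@example.com`
        # or   `python manage.py maintenance sync_role_groups false`
        manager.run()
```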
@@ -1,4 +1,5 @@
import os.path
from os import getenv
from collections import defaultdict
import requests.certs

@@ -6,18 +7,24 @@ import requests.certs
TLS_CERT_FILE = requests.certs.where()
print('Loading TLS certificates from %s' % TLS_CERT_FILE)

import requests.certs

RFC1123_DATE_FORMAT = '%a, %d %b %Y %H:%M:%S GMT'
PILLAR_SERVER_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

SCHEME = 'https'
STORAGE_DIR = '/data/storage/pillar'
SHARED_DIR = '/data/storage/shared'
STORAGE_DIR = getenv('PILLAR_STORAGE_DIR', '/data/storage/pillar')
PORT = 5000
HOST = '0.0.0.0'
DEBUG = False

SECRET_KEY = '123'

# Authentication settings
BLENDER_ID_ENDPOINT = 'http://blender_id:8000/'

PILLAR_SERVER_ENDPOINT = 'http://pillar:5001/api/'

CDN_USE_URL_SIGNING = True
CDN_SERVICE_DOMAIN_PROTOCOL = 'https'
CDN_SERVICE_DOMAIN = '-CONFIG-THIS-'

@@ -44,7 +51,7 @@ BIN_FFMPEG = '/usr/bin/ffmpeg'
BIN_SSH = '/usr/bin/ssh'
BIN_RSYNC = '/usr/bin/rsync'

GCLOUD_APP_CREDENTIALS = os.path.join(os.path.dirname(__file__), 'google_app.json')
GCLOUD_APP_CREDENTIALS = 'google_app.json'
GCLOUD_PROJECT = '-SECRET-'

ADMIN_USER_GROUP = '5596e975ea893b269af85c0e'

@@ -93,7 +100,7 @@ LOGGING = {
        }
    },
    'loggers': {
        'application': {'level': 'INFO'},
        'pillar': {'level': 'INFO'},
        'werkzeug': {'level': 'INFO'},
    },
    'root': {

@@ -111,3 +118,32 @@ SHORT_CODE_LENGTH = 6  # characters
FILESIZE_LIMIT_BYTES_NONSUBS = 32 * 2 ** 20
# Unless they have one of those roles.
ROLES_FOR_UNLIMITED_UPLOADS = {u'subscriber', u'demo', u'admin'}


#############################################
# Old pillar-web config:

# Mapping from /{path} to URL to redirect to.
REDIRECTS = {}

GIT = 'git'

# Setting this to True can be useful for development.
# Note that it doesn't add the /p/home/{node-id} endpoint, so you will have to
# change the URL of the home project if you want to have direct access to nodes.
RENDER_HOME_AS_REGULAR_PROJECT = False


# Authentication token for the Urler service. If None, defaults
# to the authentication token of the current user.
URLER_SERVICE_AUTH_TOKEN = None


# Blender Cloud add-on version. This updates the value in all places in the
# front-end.
BLENDER_CLOUD_ADDON_VERSION = '1.4'

EXTERNAL_SUBSCRIPTIONS_MANAGEMENT_SERVER = 'https://store.blender.org/api/'

# Certificate file for communication with other systems.
TLS_CERT_FILE = requests.certs.where()
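Since these settings are plain module-level names (and `STORAGE_DIR` now falls back to the `PILLAR_STORAGE_DIR` environment variable), a deployment can override them from a separate file. The test setup further down loads such a file through the `PILLAR_CONFIG` environment variable; a minimal, hypothetical override file along those lines:

```python
# config_local.py -- hypothetical deployment override; the filename and the
# habit of loading it via PILLAR_CONFIG mirror the test setup below and are
# not mandated by this diff.
SCHEME = 'http'
DEBUG = True
SECRET_KEY = 'generate-something-random-here'
STORAGE_DIR = '/tmp/pillar-storage'
BLENDER_ID_ENDPOINT = 'http://localhost:8000/'
```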
96  pillar/extension.py  Normal file
@@ -0,0 +1,96 @@
"""Pillar extensions support.

Each Pillar extension should create a subclass of PillarExtension, which
can then be registered to the application at app creation time:

    from pillar_server import PillarServer
    from attract_server import AttractExtension

    app = PillarServer('.')
    app.load_extension(AttractExtension(), url_prefix='/attract')
    app.process_extensions()  # Always process extensions after the last one is loaded.

    if __name__ == '__main__':
        app.run('::0', 5000)

"""

import abc


class PillarExtension(object):
    __metaclass__ = abc.ABCMeta

    @abc.abstractproperty
    def name(self):
        """The name of this extension.

        The name determines the path at which Eve exposes the extension's
        resources (/{extension name}/{resource name}), as well as the
        MongoDB collection in which those resources are stored
        ({extension name}.{resource name}).

        :rtype: unicode
        """

    @abc.abstractmethod
    def flask_config(self):
        """Returns extension-specific defaults for the Flask configuration.

        Use this to set sensible default values for configuration settings
        introduced by the extension.

        :rtype: dict
        """

    @abc.abstractmethod
    def blueprints(self):
        """Returns the list of top-level blueprints for the extension.

        These blueprints will be mounted at the url prefix given to
        app.load_extension().

        :rtype: list of flask.Blueprint objects.
        """

    @abc.abstractmethod
    def eve_settings(self):
        """Returns extensions to the Eve settings.

        Currently only the DOMAIN key is used to insert new resources into
        Eve's configuration.

        :rtype: dict
        """

    @property
    def template_path(self):
        """Returns the path where templates for this extension are stored.

        Note that this path is not connected to any blueprint, so it is up to
        the extension to provide extension-unique subdirectories.
        """
        return None

    @property
    def static_path(self):
        """Returns the path where static files are stored.

        Registers an endpoint named 'static_<extension name>', to use like:
        `url_for('static_attract', filename='js/somefile.js')`

        May return None, in which case the extension will not be able to serve
        static files.
        """
        return None

    def setup_app(self, app):
        """Called during app startup, after all extensions have loaded."""

    def sidebar_links(self, project):
        """Returns the sidebar link(s) for the given projects.

        :returns: HTML as a string for the sidebar.
        """

        return ''
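To make the abstract interface concrete, here is a minimal, hypothetical extension; the names `HelloExtension` and `HELLO_GREETING` are made up for illustration:

```python
import flask

from pillar.extension import PillarExtension


class HelloExtension(PillarExtension):
    @property
    def name(self):
        # Resources would live under /hello/... and in hello.* collections.
        return u'hello'

    def flask_config(self):
        return {'HELLO_GREETING': u'Hi there'}

    def blueprints(self):
        bp = flask.Blueprint('hello', __name__)

        @bp.route('/')
        def index():
            return flask.current_app.config['HELLO_GREETING']

        return [bp]

    def eve_settings(self):
        return {'DOMAIN': {}}  # no extra Eve resources in this sketch
```

It would then be registered with `app.load_extension(HelloExtension(), url_prefix='/hello')`, as the module docstring describes.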
1141  pillar/manage.py
File diff suppressed because it is too large
@@ -1,182 +0,0 @@
def import_data(path):
    import json
    import pprint
    from bson import json_util
    if not os.path.isfile(path):
        return "File does not exist"
    with open(path, 'r') as infile:
        d = json.load(infile)

    def commit_object(collection, f, parent=None):
        variation_id = f.get('variation_id')
        if variation_id:
            del f['variation_id']

        asset_id = f.get('asset_id')
        if asset_id:
            del f['asset_id']

        node_id = f.get('node_id')
        if node_id:
            del f['node_id']

        if parent:
            f['parent'] = parent
        else:
            if f.get('parent'):
                del f['parent']

        #r = [{'_status': 'OK', '_id': 'DRY-ID'}]
        r = post_item(collection, f)
        if r[0]['_status'] == 'ERR':
            print r[0]['_issues']
            print "Tried to commit the following object"
            pprint.pprint(f)

        # Assign the Mongo ObjectID
        f['_id'] = str(r[0]['_id'])
        # Restore variation_id
        if variation_id:
            f['variation_id'] = variation_id
        if asset_id:
            f['asset_id'] = asset_id
        if node_id:
            f['node_id'] = node_id
        try:
            print "{0} {1}".format(f['_id'], f['name'])
        except UnicodeEncodeError:
            print "{0}".format(f['_id'])
        return f

    # Build list of parent files
    parent_files = [f for f in d['files'] if 'parent_asset_id' in f]
    children_files = [f for f in d['files'] if 'parent_asset_id' not in f]

    for p in parent_files:
        # Store temp property
        parent_asset_id = p['parent_asset_id']
        # Remove from dict to prevent invalid submission
        del p['parent_asset_id']
        # Commit to database
        p = commit_object('files', p)
        # Restore temp property
        p['parent_asset_id'] = parent_asset_id
        # Find children of the current file
        children = [c for c in children_files if c['parent'] == p['variation_id']]
        for c in children:
            # Commit to database with parent id
            c = commit_object('files', c, p['_id'])

    # Merge the dicts and replace the original one
    d['files'] = parent_files + children_files

    # Files for picture previews of folders (groups)
    for f in d['files_group']:
        item_id = f['item_id']
        del f['item_id']
        f = commit_object('files', f)
        f['item_id'] = item_id

    # Files for picture previews of assets
    for f in d['files_asset']:
        item_id = f['item_id']
        del f['item_id']
        f = commit_object('files', f)
        f['item_id'] = item_id

    nodes_asset = [n for n in d['nodes'] if 'asset_id' in n]
    nodes_group = [n for n in d['nodes'] if 'node_id' in n]

    def get_parent(node_id):
        #print "Searching for {0}".format(node_id)
        try:
            parent = [p for p in nodes_group if p['node_id'] == node_id][0]
        except IndexError:
            return None
        return parent

    def traverse_nodes(parent_id):
        parents_list = []
        while True:
            parent = get_parent(parent_id)
            #print parent
            if not parent:
                break
            else:
                parents_list.append(parent['node_id'])
                if parent.get('parent'):
                    parent_id = parent['parent']
                else:
                    break
        parents_list.reverse()
        return parents_list

    for n in nodes_asset:
        node_type_asset = db.node_types.find_one({"name": "asset"})
        if n.get('picture'):
            filename = os.path.splitext(n['picture'])[0]
            pictures = [p for p in d['files_asset'] if p['name'] == filename]
            if pictures:
                n['picture'] = pictures[0]['_id']
                print "Adding picture link {0}".format(n['picture'])
        n['node_type'] = node_type_asset['_id']
        # An asset node must have a parent
        # parent = [p for p in nodes_group if p['node_id'] == n['parent']][0]
        parents_list = traverse_nodes(n['parent'])

        tree_index = 0
        for node_id in parents_list:
            node = [p for p in nodes_group if p['node_id'] == node_id][0]

            if node.get('_id') is None:
                node_type_group = db.node_types.find_one({"name": "group"})
                node['node_type'] = node_type_group['_id']
                # Assign picture to the node group
                if node.get('picture'):
                    filename = os.path.splitext(node['picture'])[0]
                    picture = [p for p in d['files_group'] if p['name'] == filename][0]
                    node['picture'] = picture['_id']
                    print "Adding picture link to node {0}".format(node['picture'])
                if tree_index == 0:
                    # We are at the root of the tree (so we link to the project)
                    node_type_project = db.node_types.find_one({"name": "project"})
                    node['node_type'] = node_type_project['_id']
                    parent = None
                    if node['properties'].get('picture_square'):
                        filename = os.path.splitext(node['properties']['picture_square'])[0]
                        picture = [p for p in d['files_group'] if p['name'] == filename][0]
                        node['properties']['picture_square'] = picture['_id']
                        print "Adding picture_square link to node"
                    if node['properties'].get('picture_header'):
                        filename = os.path.splitext(node['properties']['picture_header'])[0]
                        picture = [p for p in d['files_group'] if p['name'] == filename][0]
                        node['properties']['picture_header'] = picture['_id']
                        print "Adding picture_header link to node"
                else:
                    # Get the parent node id
                    parents_list_node_id = parents_list[tree_index - 1]
                    parent_node = [p for p in nodes_group if p['node_id'] == parents_list_node_id][0]
                    parent = parent_node['_id']
                print "About to commit Node"
                commit_object('nodes', node, parent)
            tree_index += 1
        # Commit the asset
        print "About to commit Asset {0}".format(n['asset_id'])
        parent_node = [p for p in nodes_group if p['node_id'] == parents_list[-1]][0]
        try:
            asset_file = [a for a in d['files'] if a['md5'] == n['properties']['file']][0]
            n['properties']['file'] = str(asset_file['_id'])
            commit_object('nodes', n, parent_node['_id'])
        except IndexError:
            pass

    return

    # New path with _
    path = '_' + path
    with open(path, 'w') as outfile:
        json.dump(d, outfile, default=json_util.default)
    return
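The deleted importer above serialises Mongo documents with `bson.json_util`, which handles types such as ObjectId that plain `json` cannot. A minimal round-trip, for illustration only:

```python
import json

from bson import ObjectId, json_util

doc = {'_id': ObjectId(), 'name': u'asset'}
as_json = json.dumps(doc, default=json_util.default)          # ObjectId -> {"$oid": ...}
restored = json.loads(as_json, object_hook=json_util.object_hook)  # and back again
assert restored['_id'] == doc['_id']
```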
@@ -1,8 +0,0 @@
_file_embedded_schema = {
    'type': 'objectid',
    'data_relation': {
        'resource': 'files',
        'field': '_id',
        'embeddable': True
    }
}

@@ -1,5 +0,0 @@
node_type_act = {
    'name': 'act',
    'description': 'Act node type',
    'parent': []
}

@@ -1,54 +0,0 @@
from manage_extra.node_types import _file_embedded_schema

node_type_page = {
    'name': 'page',
    'description': 'A single page',
    'dyn_schema': {
        # The page content (Markdown format)
        'content': {
            'type': 'string',
            'minlength': 5,
            'maxlength': 90000,
            'required': True
        },
        'status': {
            'type': 'string',
            'allowed': [
                'published',
                'pending'
            ],
            'default': 'pending'
        },
        'url': {
            'type': 'string'
        },
        'attachments': {
            'type': 'list',
            'schema': {
                'type': 'dict',
                'schema': {
                    'field': {'type': 'string'},
                    'files': {
                        'type': 'list',
                        'schema': {
                            'type': 'dict',
                            'schema': {
                                'file': _file_embedded_schema,
                                'slug': {'type': 'string', 'minlength': 1},
                                'size': {'type': 'string'}
                            }
                        }
                    }
                }
            }
        }
    },
    'form_schema': {
        'content': {},
        'status': {},
        'url': {},
        'attachments': {'visible': False},
    },
    'parent': ['project', ],
    'permissions': {}
}

@@ -1,59 +0,0 @@
from manage_extra.node_types import _file_embedded_schema

node_type_post = {
    'name': 'post',
    'description': 'A blog post, for any project',
    'dyn_schema': {
        # The blogpost content (Markdown format)
        'content': {
            'type': 'string',
            'minlength': 5,
            'maxlength': 90000,
            'required': True
        },
        'status': {
            'type': 'string',
            'allowed': [
                'published',
                'pending'
            ],
            'default': 'pending'
        },
        # Global categories, will be enforced to be 1 word
        'category': {
            'type': 'string',
        },
        'url': {
            'type': 'string'
        },
        'attachments': {
            'type': 'list',
            'schema': {
                'type': 'dict',
                'schema': {
                    'field': {'type': 'string'},
                    'files': {
                        'type': 'list',
                        'schema': {
                            'type': 'dict',
                            'schema': {
                                'file': _file_embedded_schema,
                                'slug': {'type': 'string', 'minlength': 1},
                                'size': {'type': 'string'}
                            }
                        }
                    }
                }
            }
        }
    },
    'form_schema': {
        'content': {},
        'status': {},
        'category': {},
        'url': {},
        'attachments': {'visible': False},
    },
    'parent': ['blog', ],
    'permissions': {}
}

@@ -1,124 +0,0 @@
from manage_extra.node_types import _file_embedded_schema

node_type_project = {
    'name': 'project',
    'parent': {},
    'description': 'The official project type',
    'dyn_schema': {
        'category': {
            'type': 'string',
            'allowed': [
                'training',
                'film',
                'assets',
                'software',
                'game'
            ],
            'required': True,
        },
        'is_private': {
            'type': 'boolean'
        },
        'url': {
            'type': 'string'
        },
        'organization': {
            'type': 'objectid',
            'nullable': True,
            'data_relation': {
                'resource': 'organizations',
                'field': '_id',
                'embeddable': True
            },
        },
        'owners': {
            'type': 'dict',
            'schema': {
                'users': {
                    'type': 'list',
                    'schema': {
                        'type': 'objectid',
                    }
                },
                'groups': {
                    'type': 'list',
                    'schema': {
                        'type': 'objectid',
                        'data_relation': {
                            'resource': 'groups',
                            'field': '_id',
                            'embeddable': True
                        }
                    }
                }
            }
        },
        'status': {
            'type': 'string',
            'allowed': [
                'published',
                'pending',
            ],
        },
        # Logo
        'picture_square': _file_embedded_schema,
        # Header
        'picture_header': _file_embedded_schema,
        # Short summary for the project
        'summary': {
            'type': 'string',
            'maxlength': 128
        },
        # Latest nodes being edited
        'nodes_latest': {
            'type': 'list',
            'schema': {
                'type': 'objectid',
            }
        },
        # Featured nodes, manually added
        'nodes_featured': {
            'type': 'list',
            'schema': {
                'type': 'objectid',
            }
        },
        # Latest blog posts, manually added
        'nodes_blog': {
            'type': 'list',
            'schema': {
                'type': 'objectid',
            }
        }
    },
    'form_schema': {
        'is_private': {},
        # TODO add group parsing
        'category': {},
        'url': {},
        'organization': {},
        'picture_square': {},
        'picture_header': {},
        'summary': {},
        'owners': {
            'schema': {
                'users': {},
                'groups': {
                    'items': [('Group', 'name')],
                },
            }
        },
        'status': {},
        'nodes_featured': {},
        'nodes_latest': {},
        'nodes_blog': {}
    },
    'permissions': {
        # 'groups': [{
        #     'group': app.config['ADMIN_USER_GROUP'],
        #     'methods': ['GET', 'PUT', 'POST']
        # }],
        # 'users': [],
        # 'world': ['GET']
    }
}

@@ -1,5 +0,0 @@
node_type_scene = {
    'name': 'scene',
    'description': 'Scene node type',
    'parent': ['act'],
}

@@ -1,45 +0,0 @@
node_type_shot = {
    'name': 'shot',
    'description': 'Shot Node Type, for shots',
    'dyn_schema': {
        'url': {
            'type': 'string',
        },
        'cut_in': {
            'type': 'integer'
        },
        'cut_out': {
            'type': 'integer'
        },
        'status': {
            'type': 'string',
            'allowed': [
                'on_hold',
                'todo',
                'in_progress',
                'review',
                'final'
            ],
        },
        'notes': {
            'type': 'string',
            'maxlength': 256,
        },
        'shot_group': {
            'type': 'string',
            #'data_relation': {
            #    'resource': 'nodes',
            #    'field': '_id',
            #},
        },
    },
    'form_schema': {
        'url': {},
        'cut_in': {},
        'cut_out': {},
        'status': {},
        'notes': {},
        'shot_group': {}
    },
    'parent': ['scene']
}

@@ -1,107 +0,0 @@
node_type_task = {
    'name': 'task',
    'description': 'Task Node Type, for tasks',
    'dyn_schema': {
        'status': {
            'type': 'string',
            'allowed': [
                'todo',
                'in_progress',
                'on_hold',
                'approved',
                'cbb',
                'final',
                'review'
            ],
            'required': True,
        },
        'filepath': {
            'type': 'string',
        },
        'revision': {
            'type': 'integer',
        },
        'owners': {
            'type': 'dict',
            'schema': {
                'users': {
                    'type': 'list',
                    'schema': {
                        'type': 'objectid',
                    }
                },
                'groups': {
                    'type': 'list',
                    'schema': {
                        'type': 'objectid',
                    }
                }
            }
        },
        'time': {
            'type': 'dict',
            'schema': {
                'start': {
                    'type': 'datetime'
                },
                'duration': {
                    'type': 'integer'
                },
                'chunks': {
                    'type': 'list',
                    'schema': {
                        'type': 'dict',
                        'schema': {
                            'start': {
                                'type': 'datetime',
                            },
                            'duration': {
                                'type': 'integer',
                            }
                        }
                    }
                },
            }
        },
        'is_conflicting': {
            'type': 'boolean'
        },
        'is_processing': {
            'type': 'boolean'
        },
        'is_open': {
            'type': 'boolean'
        }
    },
    'form_schema': {
        'status': {},
        'filepath': {},
        'revision': {},
        'owners': {
            'schema': {
                'users': {
                    'items': [('User', 'first_name')],
                },
                'groups': {}
            }
        },
        'time': {
            'schema': {
                'start': {},
                'duration': {},
                'chunks': {
                    'visible': False,
                    'schema': {
                        'start': {},
                        'duration': {}
                    }
                }
            }
        },
        'is_conflicting': {},
        'is_open': {},
        'is_processing': {},
    },
    'parent': ['shot']
}
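These deleted node-type dicts are Cerberus schemas as consumed by Eve: each `dyn_schema` validates a node's `properties` sub-document, while `form_schema` only drives form rendering. A small standalone illustration of the validation half (not wired through Eve):

```python
from cerberus import Validator

# Mirrors the 'status' field used by several node types above.
v = Validator({'status': {'type': 'string',
                          'allowed': ['published', 'pending']}})
print(v.validate({'status': 'published'}))  # True
print(v.validate({'status': 'draft'}))      # False: value not in 'allowed'
```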
49  pillar/markdown.py  Normal file
@@ -0,0 +1,49 @@
"""Bleached Markdown functionality.

This is for user-generated stuff, like comments.
"""

from __future__ import absolute_import

import bleach
import CommonMark

ALLOWED_TAGS = [
    'a',
    'abbr',
    'acronym',
    'b', 'strong',
    'i', 'em',
    'del', 'kbd',
    'dl', 'dt', 'dd',
    'blockquote',
    'code',
    'li', 'ol', 'ul',
    'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
    'p', 'br', 'hr',
    'sup', 'sub', 'strike',
    'img',
    'iframe',
]

ALLOWED_ATTRIBUTES = {
    'a': ['href', 'title', 'target'],
    'abbr': ['title'],
    'acronym': ['title'],
    'img': ['src', 'alt', 'width', 'height', 'title'],
    'iframe': ['src', 'width', 'height', 'frameborder', 'allowfullscreen'],
    '*': ['style'],
}

ALLOWED_STYLES = [
    'color', 'font-weight', 'background-color',
]


def markdown(s):
    tainted_html = CommonMark.commonmark(s)
    safe_html = bleach.clean(tainted_html,
                             tags=ALLOWED_TAGS,
                             attributes=ALLOWED_ATTRIBUTES,
                             styles=ALLOWED_STYLES)
    return safe_html
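For illustration, a possible use of `markdown()`; the exact output depends on the CommonMark and Bleach versions, so the rendered HTML shown here is indicative only:

```python
from pillar.markdown import markdown

html = markdown(u'**hi** <script>alert(1)</script>')
# CommonMark renders the emphasis; Bleach then escapes the disallowed
# <script> tag rather than letting it through, roughly:
#   <p><strong>hi</strong> &lt;script&gt;alert(1)&lt;/script&gt;</p>
print(html)
```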
@@ -1,11 +0,0 @@
import sys

activate_this = '/data/venv/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
from flup.server.fcgi import WSGIServer

sys.path.append('/data/git/pillar/pillar/')
from application import app as application

if __name__ == '__main__':
    WSGIServer(application).run()
101  pillar/sdk.py  Normal file
@@ -0,0 +1,101 @@
"""PillarSDK subclass for direct Flask-internal calls."""

import logging
import urlparse
from flask import current_app

import pillarsdk
from pillarsdk import exceptions

log = logging.getLogger(__name__)


class FlaskInternalApi(pillarsdk.Api):
    """SDK API subclass that calls Flask directly.

    Can only be used from the same Python process the Pillar server itself is
    running on.
    """

    def http_call(self, url, method, **kwargs):
        """Fakes a http call through Flask/Werkzeug."""
        client = current_app.test_client()
        self.requests_to_flask_kwargs(kwargs)

        # Leave out the query string and fragment from the URL.
        split_url = urlparse.urlsplit(url)
        path = urlparse.urlunsplit(split_url[:-2] + (None, None))
        try:
            response = client.open(path=path, query_string=split_url.query, method=method,
                                   **kwargs)
        except Exception as ex:
            log.warning('Error performing HTTP %s request to %s: %s', method,
                        url, str(ex))
            raise

        if method == 'OPTIONS':
            return response

        self.flask_to_requests_response(response)

        try:
            content = self.handle_response(response, response.data)
        except:
            log.warning("%s: Response[%s]: %s", url, response.status_code,
                        response.data)
            raise

        return content

    def requests_to_flask_kwargs(self, kwargs):
        """Converts Requests arguments to Flask test client arguments."""

        kwargs.pop('verify', None)
        # No network connection, so nothing to verify.

        # Files to upload need to be sent in the 'data' kwarg instead of the
        # 'files' kwarg, and have a different order.
        if 'files' in kwargs:
            # By default, 'data' is there but None, so setdefault('data', {})
            # won't work.
            data = kwargs.get('data') or {}

            for file_name, file_value in kwargs['files'].items():
                fname, fobj, mimetype = file_value
                data[file_name] = (fobj, fname)

            del kwargs['files']
            kwargs['data'] = data

    def flask_to_requests_response(self, response):
"""Adds some properties to a Flask response object to mimick a Requests
        object.
        """

        # Our API always sends back UTF8, so we don't have to check headers for
        # that.
        if response.mimetype.startswith('text'):
            response.text = response.data.decode('utf8')
        else:
            response.text = None

    def OPTIONS(self, action, headers=None):
        """Make OPTIONS request.

        Contrary to other requests, this method returns the raw requests.Response object.

        :rtype: requests.Response
        """
        import os

        url = os.path.join(self.endpoint, action.strip('/'))
        response = self.request(url, 'OPTIONS', headers=headers)
        if 200 <= response.status_code <= 299:
            return response

        exception = exceptions.exception_for_status(response.status_code)
        if exception:
            raise exception(response, response.text)

        raise exceptions.ConnectionError(response, response.text,
                                         "Unknown response code: %s" % response.status_code)
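A sketch of in-process use; the `Api` constructor arguments follow the general Pillar SDK pattern and, like the token value, are assumptions rather than something this diff defines:

```python
import pillarsdk
from pillar.sdk import FlaskInternalApi

# Inside a running Pillar process (e.g. a CLI command or request handler):
api = FlaskInternalApi(endpoint='/api/', username=None, password=None,
                       token='some-auth-token')  # token value is made up
user = pillarsdk.User.find('cafef00dc379cf10c4aaceaf', api=api)
# The call above is served by Flask's test client; no network traffic happens.
```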
@@ -1,12 +1,17 @@
# -*- encoding: utf-8 -*-

import json
from __future__ import print_function
from __future__ import absolute_import

import base64
import copy
import sys
import json
import logging

import datetime
import os
import base64
import sys

try:
    from urllib.parse import urlencode
@@ -16,16 +21,17 @@ except ImportError:
from bson import ObjectId, tz_util

# Override Eve settings before importing eve.tests.
import common_test_settings
from pillar.tests import eve_test_settings
common_test_settings.override_eve()
eve_test_settings.override_eve()

from eve.tests import TestMinimal
import pymongo.collection
from flask.testing import FlaskClient
import responses

from common_test_data import EXAMPLE_PROJECT, EXAMPLE_FILE
import pillar
from . import common_test_data as ctd

# from six:
PY3 = sys.version_info[0] == 3
@@ -42,39 +48,47 @@ TEST_EMAIL_USER = 'koro'
TEST_EMAIL_ADDRESS = '%s@testing.blender.org' % TEST_EMAIL_USER
TEST_FULL_NAME = u'врач Сергей'
TEST_SUBCLIENT_TOKEN = 'my-subclient-token-for-pillar'
BLENDER_ID_TEST_USERID = 1896
BLENDER_ID_USER_RESPONSE = {'status': 'success',
                            'user': {'email': TEST_EMAIL_ADDRESS,
                                     'full_name': TEST_FULL_NAME,
                                     'id': BLENDER_ID_TEST_USERID},
                                     'id': ctd.BLENDER_ID_TEST_USERID},
                            'token_expires': 'Mon, 1 Jan 2018 01:02:03 GMT'}

logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)-15s %(levelname)8s %(name)s %(message)s')


class PillarTestServer(pillar.PillarServer):
    def _load_flask_config(self):
        super(PillarTestServer, self)._load_flask_config()

        pillar_config_file = os.path.join(MY_PATH, 'config_testing.py')
        self.config.from_pyfile(pillar_config_file)

    def _config_logging(self):
        logging.basicConfig(
            level=logging.DEBUG,
            format='%(asctime)-15s %(levelname)8s %(name)s %(message)s')
        logging.getLogger('').setLevel(logging.DEBUG)
        logging.getLogger('pillar').setLevel(logging.DEBUG)
        logging.getLogger('werkzeug').setLevel(logging.DEBUG)
        logging.getLogger('eve').setLevel(logging.DEBUG)


class AbstractPillarTest(TestMinimal):
    pillar_server_class = PillarTestServer

    def setUp(self, **kwargs):
        eve_settings_file = os.path.join(MY_PATH, 'common_test_settings.py')
        pillar_config_file = os.path.join(MY_PATH, 'config_testing.py')
        eve_settings_file = os.path.join(MY_PATH, 'eve_test_settings.py')
        kwargs['settings_file'] = eve_settings_file
        os.environ['EVE_SETTINGS'] = eve_settings_file
        os.environ['PILLAR_CONFIG'] = pillar_config_file
        super(AbstractPillarTest, self).setUp(**kwargs)

        from application import app

        logging.getLogger('').setLevel(logging.DEBUG)
        logging.getLogger('application').setLevel(logging.DEBUG)
        logging.getLogger('werkzeug').setLevel(logging.DEBUG)
        logging.getLogger('eve').setLevel(logging.DEBUG)

        from eve.utils import config
        config.DEBUG = True

        self.app = app
        self.client = app.test_client()
        self.app = self.pillar_server_class(os.path.dirname(os.path.dirname(__file__)))
        self.app.process_extensions()
        assert self.app.config['MONGO_DBNAME'] == 'pillar_test'

        self.client = self.app.test_client()
        assert isinstance(self.client, FlaskClient)

    def tearDown(self):
@@ -82,19 +96,29 @@ class AbstractPillarTest(TestMinimal):

        # Not only delete self.app (like the superclass does),
        # but also un-import the application.
        del sys.modules['application']
        remove = [modname for modname in sys.modules
                  if modname.startswith('application.')]
        self.unload_modules('pillar')

    def unload_modules(self, module_name):
"""Uploads the named module, and all submodules."""

        del sys.modules[module_name]

        remove = {modname for modname in sys.modules
                  if modname.startswith('%s.' % module_name)}
        for modname in remove:
            del sys.modules[modname]

    def ensure_file_exists(self, file_overrides=None):
        self.ensure_project_exists()
        if file_overrides and file_overrides.get('project'):
            self.ensure_project_exists({'_id': file_overrides['project']})
        else:
            self.ensure_project_exists()

        with self.app.test_request_context():
            files_collection = self.app.data.driver.db['files']
            assert isinstance(files_collection, pymongo.collection.Collection)

            file = copy.deepcopy(EXAMPLE_FILE)
            file = copy.deepcopy(ctd.EXAMPLE_FILE)
            if file_overrides is not None:
                file.update(file_overrides)
            if '_id' in file and file['_id'] is None:
@@ -109,13 +133,24 @@ class AbstractPillarTest(TestMinimal):
        return file_id, from_db

    def ensure_project_exists(self, project_overrides=None):
        self.ensure_group_exists(ctd.EXAMPLE_ADMIN_GROUP_ID, 'project admin')
        self.ensure_group_exists(ctd.EXAMPLE_PROJECT_READONLY_GROUP_ID, 'r/o group')
        self.ensure_group_exists(ctd.EXAMPLE_PROJECT_READONLY_GROUP2_ID, 'r/o group 2')
        self.ensure_user_exists(ctd.EXAMPLE_PROJECT_OWNER_ID,
                                'proj-owner',
                                [ctd.EXAMPLE_ADMIN_GROUP_ID])

        with self.app.test_request_context():
            projects_collection = self.app.data.driver.db['projects']
            assert isinstance(projects_collection, pymongo.collection.Collection)

            project = copy.deepcopy(EXAMPLE_PROJECT)
            project = copy.deepcopy(ctd.EXAMPLE_PROJECT)
            if project_overrides is not None:
                project.update(project_overrides)
                for key, value in project_overrides.items():
                    if value is None:
                        project.pop(key, None)
                    else:
                        project[key] = value

            found = projects_collection.find_one(project['_id'])
            if found is None:
@@ -124,9 +159,40 @@ class AbstractPillarTest(TestMinimal):

            return found['_id'], found

    def ensure_user_exists(self, user_id, name, group_ids=()):
        user = copy.deepcopy(ctd.EXAMPLE_USER)
        user['groups'] = list(group_ids)
        user['full_name'] = name
        user['_id'] = ObjectId(user_id)

        with self.app.test_request_context():
            users_coll = self.app.data.driver.db['users']
            assert isinstance(users_coll, pymongo.collection.Collection)

            found = users_coll.find_one(user_id)
            if found:
                return

            result = users_coll.insert_one(user)
            assert result.inserted_id

    def ensure_group_exists(self, group_id, name):
        group_id = ObjectId(group_id)

        with self.app.test_request_context():
            groups_coll = self.app.data.driver.db['groups']
            assert isinstance(groups_coll, pymongo.collection.Collection)

            found = groups_coll.find_one(group_id)
            if found:
                return

            result = groups_coll.insert_one({'_id': group_id, 'name': name})
            assert result.inserted_id

    def create_user(self, user_id='cafef00dc379cf10c4aaceaf', roles=('subscriber',),
                    groups=None):
        from application.utils.authentication import make_unique_username
        from pillar.api.utils.authentication import make_unique_username

        with self.app.test_request_context():
            users = self.app.data.driver.db['users']
@@ -141,7 +207,7 @@ class AbstractPillarTest(TestMinimal):
            'roles': list(roles),
            'settings': {'email_communications': 1},
            'auth': [{'token': '',
                      'user_id': unicode(BLENDER_ID_TEST_USERID),
                      'user_id': unicode(ctd.BLENDER_ID_TEST_USERID),
                      'provider': 'blender-id'}],
            'full_name': u'คนรักของผัดไทย',
            'email': TEST_EMAIL_ADDRESS
@@ -154,12 +220,45 @@ class AbstractPillarTest(TestMinimal):
        future = now + datetime.timedelta(days=1)

        with self.app.test_request_context():
            from application.utils import authentication as auth
            from pillar.api.utils import authentication as auth

            token_data = auth.store_token(user_id, token, future, None)

        return token_data

    def create_project_with_admin(self, user_id='cafef00dc379cf10c4aaceaf', roles=('subscriber',)):
|
||||
"""Creates a project and a user that's member of the project's admin group.
|
||||
|
||||
:returns: (project_id, user_id)
|
||||
:rtype: tuple
|
||||
"""
|
||||
project_id, proj = self.ensure_project_exists()
|
||||
user_id = self.create_project_admin(proj, user_id, roles)
|
||||
|
||||
return project_id, user_id
|
||||
|
||||
def create_project_admin(self, proj, user_id='cafef00dc379cf10c4aaceaf', roles=('subscriber',)):
|
||||
"""Creates a user that's member of the project's admin group.
|
||||
|
||||
:param proj: project document, or at least a dict with permissions in it.
|
||||
:type proj: dict
|
||||
:returns: user_id
|
||||
:rtype: ObjectId
|
||||
"""
|
||||
|
||||
admin_group_id = proj['permissions']['groups'][0]['group']
|
||||
user_id = self.create_user(user_id=user_id, roles=roles, groups=[admin_group_id])
|
||||
|
||||
return user_id
|
||||
|
||||
def create_node(self, node_doc):
|
||||
"""Creates a node, returning its ObjectId. """
|
||||
|
||||
with self.app.test_request_context():
|
||||
nodes_coll = self.app.data.driver.db['nodes']
|
||||
result = nodes_coll.insert_one(node_doc)
|
||||
return result.inserted_id
|
||||
|
||||
def badger(self, user_email, roles, action, srv_token=None):
|
||||
"""Creates a service account, and uses it to grant or revoke a role to the user.
|
||||
|
||||
@@ -174,7 +273,7 @@ class AbstractPillarTest(TestMinimal):
|
||||
|
||||
# Create a service account if needed.
|
||||
if srv_token is None:
|
||||
from application.modules.service import create_service_account
|
||||
from pillar.api.service import create_service_account
|
||||
with self.app.test_request_context():
|
||||
_, srv_token_doc = create_service_account('service@example.com',
|
||||
{'badger'},
|
||||
@@ -182,14 +281,12 @@ class AbstractPillarTest(TestMinimal):
|
||||
srv_token = srv_token_doc['token']
|
||||
|
||||
for role in roles:
|
||||
resp = self.client.post('/service/badger',
|
||||
headers={'Authorization': self.make_header(srv_token),
|
||||
'Content-Type': 'application/json'},
|
||||
data=json.dumps({'action': action,
|
||||
'role': role,
|
||||
'user_email': user_email}))
|
||||
self.assertEqual(204, resp.status_code, resp.data)
|
||||
|
||||
self.post('/api/service/badger',
|
||||
auth_token=srv_token,
|
||||
json={'action': action,
|
||||
'role': role,
|
||||
'user_email': user_email},
|
||||
expected_status=204)
|
||||
return srv_token
|
||||
|
||||
def mock_blenderid_validate_unhappy(self):
|
||||
@@ -218,7 +315,7 @@ class AbstractPillarTest(TestMinimal):
|
||||
|
||||
:returns: mapping from group name to group ID
|
||||
"""
|
||||
from application.modules import service
|
||||
from pillar.api import service
|
||||
|
||||
with self.app.test_request_context():
|
||||
group_ids = {}
|
||||
@@ -232,6 +329,11 @@ class AbstractPillarTest(TestMinimal):
|
||||
|
||||
return group_ids
|
||||
|
||||
def fetch_project_from_db(self, project_id=ctd.EXAMPLE_PROJECT_ID):
|
||||
with self.app.app_context():
|
||||
proj_coll = self.app.db()['projects']
|
||||
return proj_coll.find_one(project_id)
|
||||
|
||||
@staticmethod
|
||||
def join_url_params(params):
|
||||
"""Constructs a query string from a dictionary and appends it to a url.
|
||||
@@ -266,7 +368,7 @@ class AbstractPillarTest(TestMinimal):
|
||||
data=None, headers=None, files=None, content_type=None):
|
||||
"""Performs a HTTP request to the server."""
|
||||
|
||||
from application.utils import dumps
|
||||
from pillar.api.utils import dumps
|
||||
import json as mod_json
|
||||
|
||||
headers = headers or {}
|
||||
@@ -313,3 +415,16 @@ class AbstractPillarTest(TestMinimal):
|
||||
|
||||
def patch(self, *args, **kwargs):
|
||||
return self.client_request('PATCH', *args, **kwargs)
|
||||
|
||||
|
||||
def mongo_to_sdk(data):
|
||||
"""Transforms a MongoDB dict to a dict suitable to give to the PillarSDK.
|
||||
|
||||
Not efficient, as it converts to JSON and back again. Only use in unittests.
|
||||
"""
|
||||
|
||||
import pillar.api.utils
|
||||
import json
|
||||
|
||||
as_json = pillar.api.utils.dumps(data)
|
||||
return json.loads(as_json)
|
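To make the intent of these helpers concrete, here is a minimal sketch of a test case that combines ensure_project_exists(), fetch_project_from_db() and mongo_to_sdk(); the test class and assertion are illustrative only, not part of this diff.

import pillar.tests.common_test_data as ctd
from pillar.tests import AbstractPillarTest, mongo_to_sdk


class ExampleProjectTest(AbstractPillarTest):
    def test_fetch_example_project(self):
        # Inserts the example groups, the project owner, and the project itself.
        project_id, project = self.ensure_project_exists()

        # Fetch the document straight from MongoDB, then convert it to a
        # plain JSON-compatible dict as the Pillar SDK expects.
        from_db = self.fetch_project_from_db(project_id)
        sdk_doc = mongo_to_sdk(from_db)
        self.assertEqual(ctd.EXAMPLE_PROJECT['url'], sdk_doc['url'])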
pillar/tests/common_test_data.py (Normal file, 118 lines added)
@@ -0,0 +1,118 @@
import datetime

from bson import tz_util, ObjectId

from pillar.api.node_types import PILLAR_NAMED_NODE_TYPES

EXAMPLE_ADMIN_GROUP_ID = ObjectId('5596e975ea893b269af85c0e')
EXAMPLE_PROJECT_READONLY_GROUP_ID = ObjectId('5596e975ea893b269af85c0f')
EXAMPLE_PROJECT_READONLY_GROUP2_ID = ObjectId('564733b56dcaf85da2faee8a')

EXAMPLE_PROJECT_ID = ObjectId('5672beecc0261b2005ed1a33')
EXAMPLE_PROJECT_OWNER_ID = ObjectId('552b066b41acdf5dec4436f2')

EXAMPLE_FILE = {u'_id': ObjectId('5672e2c1c379cf0007b31995'),
                u'_updated': datetime.datetime(2016, 3, 25, 10, 28, 24, tzinfo=tz_util.utc),
                u'height': 2048,
                u'name': 'c2a5c897769ce1ef0eb10f8fa1c472bcb8e2d5a4.png', u'format': 'png',
                u'variations': [
                    {u'format': 'jpg', u'height': 160, u'width': 160, u'length': 8558,
                     u'link': 'http://localhost:8002/file-variant-h', u'content_type': 'image/jpeg',
                     u'md5': '--', u'file_path': 'c2a5c897769ce1ef0eb10f8fa1c472bcb8e2d5a4-b.jpg',
                     u'size': 'b'},
                    {u'format': 'jpg', u'height': 2048, u'width': 2048, u'length': 819569,
                     u'link': 'http://localhost:8002/file-variant-h', u'content_type': 'image/jpeg',
                     u'md5': '--', u'file_path': 'c2a5c897769ce1ef0eb10f8fa1c472bcb8e2d5a4-h.jpg',
                     u'size': 'h'},
                    {u'format': 'jpg', u'height': 64, u'width': 64, u'length': 8195,
                     u'link': 'http://localhost:8002/file-variant-t', u'content_type': 'image/jpeg',
                     u'md5': '--', u'file_path': 'c2a5c897769ce1ef0eb10f8fa1c472bcb8e2d5a4-t.jpg',
                     u'size': 't'},
                ],
                u'filename': 'brick_dutch_soft_bump.png',
                u'project': EXAMPLE_PROJECT_ID,
                u'width': 2048, u'length': 6227670,
                u'user': ObjectId('56264fc4fa3a250344bd10c5'),
                u'content_type': 'image/png',
                u'_etag': '044ce3aede2e123e261c0d8bd77212f264d4f7b0',
                u'_created': datetime.datetime(2015, 12, 17, 16, 28, 49, tzinfo=tz_util.utc),
                u'md5': '',
                u'file_path': 'c2a5c897769ce1ef0eb10f8fa1c472bcb8e2d5a4.png',
                u'backend': 'pillar',
                u'link': 'http://localhost:8002/file',
                u'link_expires': datetime.datetime(2016, 3, 22, 9, 28, 22, tzinfo=tz_util.utc)}

EXAMPLE_PROJECT = {
    u'_created': datetime.datetime(2015, 12, 17, 13, 22, 56, tzinfo=tz_util.utc),
    u'_etag': u'cc4643e98d3606f87bbfaaa200bfbae941b642f3',
    u'_id': EXAMPLE_PROJECT_ID,
    u'_updated': datetime.datetime(2016, 1, 7, 18, 59, 4, tzinfo=tz_util.utc),
    u'category': u'assets',
    u'description': u'Welcome to this curated collection of Blender Institute textures and image '
                    u'resources. This collection is an on-going project, as with each project we '
                    u'create a number of textures based on our own resources (photographs, scans, '
                    u'etc.) or made completely from scratch. At the moment you can find all the '
                    u'textures from the past Open Projects that were deemed re-usable. \r\n\r\n'
                    u'People who have contributed to these textures:\r\n\r\nAndrea Weikert, Andy '
                    u'Goralczyk, Basse Salmela, Ben Dansie, Campbell Barton, Enrico Valenza, Ian '
                    u'Hubert, Kjartan Tysdal, Manu J\xe4rvinen, Massimiliana Pulieso, Matt Ebb, '
                    u'Pablo Vazquez, Rob Tuytel, Roland Hess, Sarah Feldlaufer, S\xf6nke M\xe4ter',
    u'is_private': False,
    u'name': u'Unittest project',
    u'node_types': [
        PILLAR_NAMED_NODE_TYPES['group_texture'],
        PILLAR_NAMED_NODE_TYPES['group'],
        PILLAR_NAMED_NODE_TYPES['asset'],
        PILLAR_NAMED_NODE_TYPES['storage'],
        PILLAR_NAMED_NODE_TYPES['comment'],
        PILLAR_NAMED_NODE_TYPES['blog'],
        PILLAR_NAMED_NODE_TYPES['post'],
        PILLAR_NAMED_NODE_TYPES['texture'],
    ],
    u'nodes_blog': [],
    u'nodes_featured': [],
    u'nodes_latest': [],
    u'permissions': {u'groups': [{u'group': EXAMPLE_ADMIN_GROUP_ID,
                                  u'methods': [u'GET', u'POST', u'PUT', u'DELETE']}],
                     u'users': [],
                     u'world': [u'GET']},
    u'picture_header': ObjectId('5673f260c379cf0007b31bc4'),
    u'picture_square': ObjectId('5673f256c379cf0007b31bc3'),
    u'status': u'published',
    u'summary': u'Texture collection from all Blender Institute open projects.',
    u'url': u'textures',
    u'user': EXAMPLE_PROJECT_OWNER_ID}

EXAMPLE_NODE = {
    u'_id': ObjectId('572761099837730efe8e120d'),
    u'picture': ObjectId('572761f39837730efe8e1210'),
    u'description': u'',
    u'node_type': u'asset',
    u'user': ObjectId('57164ca1983773118cbaf779'),
    u'properties': {
        u'status': u'published',
        u'content_type': u'image',
        u'file': ObjectId('572761129837730efe8e120e')
    },
    u'_updated': datetime.datetime(2016, 5, 2, 14, 19, 58, 0, tzinfo=tz_util.utc),
    u'name': u'Image test',
    u'project': EXAMPLE_PROJECT_ID,
    u'_created': datetime.datetime(2016, 5, 2, 14, 19, 37, 0, tzinfo=tz_util.utc),
    u'_etag': u'6b8589b42c880e3626f43f3e82a5c5b946742687'
}

BLENDER_ID_TEST_USERID = 1533
EXAMPLE_USER = {'_id': EXAMPLE_PROJECT_OWNER_ID,
                'username': 'sybren+unittests@blender.studio',
                'groups': [],
                'auth': [{
                    'provider': 'blender-id',
                    'token': '',
                    'user_id': str(BLENDER_ID_TEST_USERID),
                }],
                'full_name': 'sybren+unittest@blender.studio',
                'settings': {'email_communications': 1},
                '_updated': datetime.datetime(2016, 8, 5, 18, 19, 29),
                '_etag': '25a6a90781bf27333218fbbf33b3e8d53e37b1cb',
                '_created': datetime.datetime(2016, 8, 5, 18, 19, 29),
                'email': 'sybren+unittests@blender.studio'}
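One note on using these documents: they are shared, module-level constants, which is why the test helpers above always copy.deepcopy() them before applying overrides. A minimal sketch of the safe pattern:

import copy

import pillar.tests.common_test_data as ctd

# Never mutate ctd.EXAMPLE_PROJECT in place; other tests share the same dict.
project = copy.deepcopy(ctd.EXAMPLE_PROJECT)
project[u'name'] = u'Mutated for one test only'  # affects only the copy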
@@ -1,6 +1,6 @@
from settings import *
from pillar.api.eve_settings import *

from eve.tests.test_settings import MONGO_DBNAME
MONGO_DBNAME = 'pillar_test'


def override_eve():
@@ -9,5 +9,6 @@ def override_eve():

    test_settings.MONGO_HOST = MONGO_HOST
    test_settings.MONGO_PORT = MONGO_PORT
    test_settings.MONGO_DBNAME = MONGO_DBNAME
    tests.MONGO_HOST = MONGO_HOST
    tests.MONGO_PORT = MONGO_PORT
    tests.MONGO_DBNAME = MONGO_DBNAME
pillar/web/__init__.py (Normal file, 9 lines added)
@@ -0,0 +1,9 @@
def setup_app(app):
    from . import main, users, projects, nodes, notifications, redirects, subquery
    main.setup_app(app, url_prefix=None)
    users.setup_app(app, url_prefix=None)
    redirects.setup_app(app, url_prefix='/r')
    projects.setup_app(app, url_prefix='/p')
    nodes.setup_app(app, url_prefix='/nodes')
    notifications.setup_app(app, url_prefix='/notifications')
    subquery.setup_app(app)
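The url_prefix values above define the public URL layout: redirects live under /r, projects under /p, nodes under /nodes, notifications under /notifications. A hedged sketch of inspecting the result, assuming a bare Flask app is enough to register these blueprints (in Pillar itself the PillarServer application presumably does this wiring):

import flask

import pillar.web

app = flask.Flask(__name__)
pillar.web.setup_app(app)

# Every registered rule now carries its blueprint's prefix, e.g.
# '/r/...' for redirects, '/p/...' for projects, '/nodes/...' for nodes.
for rule in app.url_map.iter_rules():
    print rule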
pillar/web/jinja.py (Normal file, 152 lines added)
@@ -0,0 +1,152 @@
"""Our custom Jinja filters and other template stuff."""

from __future__ import absolute_import

import logging

import flask
import jinja2.filters
import jinja2.utils
import werkzeug.exceptions as wz_exceptions

import pillar.api.utils
from pillar.web.utils import pretty_date
from pillar.web.nodes.routes import url_for_node
import pillar.markdown

log = logging.getLogger(__name__)


def format_pretty_date(d):
    return pretty_date(d)


def format_pretty_date_time(d):
    return pretty_date(d, detail=True)


def format_undertitle(s):
    """Underscore-replacing title filter.

    Replaces underscores with spaces, and then applies Jinja2's own title filter.
    """

    # Just keep empty strings and Nones as they are.
    if not s:
        return s

    return jinja2.filters.do_title(s.replace('_', ' '))


def do_hide_none(s):
    """Returns the input, or an empty string if the input is None."""

    if s is None:
        return ''
    return s


# Source: Django, django/template/defaultfilters.py
def do_pluralize(value, arg='s'):
    """
    Returns a plural suffix if the value is not 1. By default, 's' is used as
    the suffix:

    * If value is 0, vote{{ value|pluralize }} displays "0 votes".
    * If value is 1, vote{{ value|pluralize }} displays "1 vote".
    * If value is 2, vote{{ value|pluralize }} displays "2 votes".

    If an argument is provided, that string is used instead:

    * If value is 0, class{{ value|pluralize:"es" }} displays "0 classes".
    * If value is 1, class{{ value|pluralize:"es" }} displays "1 class".
    * If value is 2, class{{ value|pluralize:"es" }} displays "2 classes".

    If the provided argument contains a comma, the text before the comma is
    used for the singular case and the text after the comma is used for the
    plural case:

    * If value is 0, cand{{ value|pluralize:"y,ies" }} displays "0 candies".
    * If value is 1, cand{{ value|pluralize:"y,ies" }} displays "1 candy".
    * If value is 2, cand{{ value|pluralize:"y,ies" }} displays "2 candies".
    """

    if ',' not in arg:
        arg = ',' + arg
    bits = arg.split(',')
    if len(bits) > 2:
        return ''
    singular_suffix, plural_suffix = bits[:2]

    try:
        if float(value) != 1:
            return plural_suffix
    except ValueError:  # Invalid string that's not a number.
        pass
    except TypeError:  # Value isn't a string or a number; maybe it's a list?
        try:
            if len(value) != 1:
                return plural_suffix
        except TypeError:  # len() of unsized object.
            pass
    return singular_suffix


def do_markdown(s):
    # FIXME: get rid of this filter altogether and cache HTML of comments.
    safe_html = pillar.markdown.markdown(s)
    return jinja2.utils.Markup(safe_html)


def do_url_for_node(node_id=None, node=None):
    try:
        return url_for_node(node_id=node_id, node=node)
    except wz_exceptions.NotFound:
        log.info('%s: do_url_for_node(node_id=%r, ...) called for non-existing node.',
                 flask.request.url, node_id)
        return None


# Source: Django 1.9 defaultfilters.py
def do_yesno(value, arg=None):
    """
    Given a string mapping values for true, false and (optionally) None,
    returns one of those strings according to the value:

    ==========  ======================  ==================================
    Value       Argument                Outputs
    ==========  ======================  ==================================
    ``True``    ``"yeah,no,maybe"``     ``yeah``
    ``False``   ``"yeah,no,maybe"``     ``no``
    ``None``    ``"yeah,no,maybe"``     ``maybe``
    ``None``    ``"yeah,no"``           ``"no"`` (converts None to False
                                        if no mapping for None is given).
    ==========  ======================  ==================================
    """
    if arg is None:
        arg = 'yes,no,maybe'
    bits = arg.split(',')
    if len(bits) < 2:
        return value  # Invalid arg.
    try:
        yes, no, maybe = bits
    except ValueError:
        # Unpack list of wrong size (no "maybe" value provided).
        yes, no, maybe = bits[0], bits[1], bits[1]
    if value is None:
        return maybe
    if value:
        return yes
    return no


def setup_jinja_env(jinja_env):
    jinja_env.filters['pretty_date'] = format_pretty_date
    jinja_env.filters['pretty_date_time'] = format_pretty_date_time
    jinja_env.filters['undertitle'] = format_undertitle
    jinja_env.filters['hide_none'] = do_hide_none
    jinja_env.filters['pluralize'] = do_pluralize
    jinja_env.filters['gravatar'] = pillar.api.utils.gravatar
    jinja_env.filters['markdown'] = do_markdown
    jinja_env.filters['yesno'] = do_yesno
    jinja_env.globals['url_for_node'] = do_url_for_node
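Once setup_jinja_env() has been run against an app's jinja_env, the filters above are usable from any template. A minimal sketch, assuming a bare Flask app (the app setup is illustrative, not part of this diff):

import flask

from pillar.web.jinja import setup_jinja_env

app = flask.Flask(__name__)
setup_jinja_env(app.jinja_env)

with app.app_context():
    # Exercises the pluralize, undertitle and hide_none filters.
    print flask.render_template_string(
        u'{{ 3|pluralize }} / {{ "group_texture"|undertitle }} / {{ none|hide_none }}')
    # prints: s / Group Texture /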
pillar/web/main/__init__.py (Normal file, 5 lines added)
@@ -0,0 +1,5 @@
from .routes import blueprint


def setup_app(app, url_prefix):
    app.register_blueprint(blueprint, url_prefix=url_prefix)
pillar/web/main/routes.py (Normal file, 320 lines added)
@@ -0,0 +1,320 @@
import itertools
import logging

from pillarsdk import Node
from pillarsdk import Project
from pillarsdk.exceptions import ResourceNotFound
from flask import abort
from flask import Blueprint
from flask import current_app
from flask import render_template
from flask import redirect
from flask import request
from flask_login import current_user
from werkzeug.contrib.atom import AtomFeed

from pillar.web.utils import system_util
from pillar.web.nodes.routes import url_for_node
from pillar.web.nodes.custom.posts import posts_view
from pillar.web.nodes.custom.posts import posts_create
from pillar.web.utils import attach_project_pictures
from pillar.web.utils import current_user_is_authenticated
from pillar.web.utils import get_file

blueprint = Blueprint('main', __name__)
log = logging.getLogger(__name__)


@blueprint.route('/')
def homepage():
    # Workaround to cache rendering of a page if user not logged in
    @current_app.cache.cached(timeout=3600)
    def render_page():
        return render_template('join.html')

    if current_user.is_anonymous:
        return render_page()

    # Get latest blog posts
    api = system_util.pillar_api()
    latest_posts = Node.all({
        'projection': {'name': 1, 'project': 1, 'node_type': 1,
                       'picture': 1, 'properties.status': 1, 'properties.url': 1},
        'where': {'node_type': 'post', 'properties.status': 'published'},
        'embedded': {'project': 1},
        'sort': '-_created',
        'max_results': '5'
    }, api=api)

    # Append picture files to latest_posts
    for post in latest_posts._items:
        post.picture = get_file(post.picture, api=api)

    # Get latest assets added to any project
    latest_assets = Node.latest('assets', api=api)

    # Append picture files to latest_assets
    for asset in latest_assets._items:
        asset.picture = get_file(asset.picture, api=api)

    # Get latest comments to any node
    latest_comments = Node.latest('comments', api=api)

    # Get a list of random featured assets
    random_featured = get_random_featured_nodes()

    # Parse results for replies
    to_remove = []
    for idx, comment in enumerate(latest_comments._items):
        if comment.properties.is_reply:
            try:
                comment.attached_to = Node.find(comment.parent.parent,
                                                {'projection': {
                                                    '_id': 1,
                                                    'name': 1,
                                                }},
                                                api=api)
            except ResourceNotFound:
                # Remove this comment
                to_remove.append(idx)
        else:
            comment.attached_to = comment.parent

    for idx in reversed(to_remove):
        del latest_comments._items[idx]

    main_project = Project.find(current_app.config['MAIN_PROJECT_ID'], api=api)
    main_project.picture_header = get_file(main_project.picture_header, api=api)

    # Merge latest assets and comments into one activity stream.
    def sort_key(item):
        return item._created

    activities = itertools.chain(latest_assets._items,
                                 latest_comments._items)
    activity_stream = sorted(activities, key=sort_key, reverse=True)

    return render_template(
        'homepage.html',
        main_project=main_project,
        latest_posts=latest_posts._items,
        activity_stream=activity_stream,
        random_featured=random_featured,
        api=api)


# @blueprint.errorhandler(500)
# def error_500(e):
#     return render_template('errors/500.html'), 500
#
#
# @blueprint.errorhandler(404)
# def error_404(e):
#     return render_template('errors/404.html'), 404
#
#
# @blueprint.errorhandler(403)
# def error_404(e):
#     return render_template('errors/403_embed.html'), 403
#

@blueprint.route('/join')
def join():
    """Join page"""
    return redirect('https://store.blender.org/product/membership/')


@blueprint.route('/services')
def services():
    """Services page"""
    return render_template('services.html')


@blueprint.route('/blog/')
@blueprint.route('/blog/<url>')
def main_blog(url=None):
    """Blog with project news"""
    project_id = current_app.config['MAIN_PROJECT_ID']
    return posts_view(project_id, url=url)


@blueprint.route('/blog/create')
def main_posts_create():
    project_id = current_app.config['MAIN_PROJECT_ID']
    return posts_create(project_id)


@blueprint.route('/p/<project_url>/blog/')
@blueprint.route('/p/<project_url>/blog/<url>')
def project_blog(project_url, url=None):
    """View project blog"""
    return posts_view(project_url=project_url, url=url)


def get_projects(category):
    """Utility to get projects based on category. Should be moved to the API
    and improved with more extensive filtering capabilities.
    """
    api = system_util.pillar_api()
    projects = Project.all({
        'where': {
            'category': category,
            'is_private': False},
        'sort': '-_created',
    }, api=api)
    for project in projects._items:
        attach_project_pictures(project, api)
    return projects


def get_random_featured_nodes():

    import random

    api = system_util.pillar_api()
    projects = Project.all({
        'projection': {'nodes_featured': 1},
        'where': {'is_private': False},
        'max_results': '15'
    }, api=api)

    featured_nodes = (p.nodes_featured for p in projects._items if p.nodes_featured)
    featured_nodes = [item for sublist in featured_nodes for item in sublist]
    if len(featured_nodes) > 3:
        featured_nodes = random.sample(featured_nodes, 3)

    featured_node_documents = []

    for node in featured_nodes:
        node_document = Node.find(node, {
            'projection': {'name': 1, 'project': 1, 'picture': 1,
                           'properties.content_type': 1, 'properties.url': 1},
            'embedded': {'project': 1}
        }, api=api)

        node_document.picture = get_file(node_document.picture, api=api)
        featured_node_documents.append(node_document)

    return featured_node_documents


@blueprint.route('/open-projects')
def open_projects():
    @current_app.cache.cached(timeout=3600, unless=current_user_is_authenticated)
    def render_page():
        projects = get_projects('film')
        return render_template(
            'projects/index_collection.html',
            title='open-projects',
            projects=projects._items,
            api=system_util.pillar_api())

    return render_page()


@blueprint.route('/training')
def training():
    @current_app.cache.cached(timeout=3600, unless=current_user_is_authenticated)
    def render_page():
        projects = get_projects('training')
        return render_template(
            'projects/index_collection.html',
            title='training',
            projects=projects._items,
            api=system_util.pillar_api())

    return render_page()


@blueprint.route('/gallery')
def gallery():
    return redirect('/p/gallery')


@blueprint.route('/textures')
def redir_textures():
    return redirect('/p/textures')


@blueprint.route('/hdri')
def redir_hdri():
    return redirect('/p/hdri')


@blueprint.route('/caminandes')
def caminandes():
    return redirect('/p/caminandes-3')


@blueprint.route('/cf2')
def cf2():
    return redirect('/p/creature-factory-2')


@blueprint.route('/characters')
def redir_characters():
    return redirect('/p/characters')


@blueprint.route('/vrview')
def vrview():
    """Call this from iframes to render spherical content (video and images)"""
    if 'image' not in request.args:
        return redirect('/')
    return render_template('vrview.html')


@blueprint.route('/403')
def error_403():
    """Custom entry point to display the not allowed template"""
    return render_template('errors/403_embed.html')


@blueprint.route('/join-agent')
def join_agent():
    """Custom page to support Agent 327 barbershop campaign"""
    return render_template('join_agent.html')


# Shameful redirects
@blueprint.route('/p/blender-cloud/')
def redirect_cloud_blog():
    return redirect('/blog')


@blueprint.route('/feeds/blogs.atom')
def feeds_blogs():
    """Global feed generator for latest blogposts across all projects"""
    @current_app.cache.cached(60*5)
    def render_page():
        feed = AtomFeed('Blender Cloud - Latest updates',
                        feed_url=request.url, url=request.url_root)
        # Get latest blog posts
        api = system_util.pillar_api()
        latest_posts = Node.all({
            'where': {'node_type': 'post', 'properties.status': 'published'},
            'embedded': {'user': 1},
            'sort': '-_created',
            'max_results': '15'
        }, api=api)

        # Populate the feed
        for post in latest_posts._items:
            author = post.user.fullname
            updated = post._updated if post._updated else post._created
            url = url_for_node(node=post)
            content = post.properties.content[:500]
            content = u'<p>{0}... <a href="{1}">Read more</a></p>'.format(content, url)
            feed.add(post.name, unicode(content),
                     content_type='html',
                     author=author,
                     url=url,
                     updated=updated,
                     published=post._created)
        return feed.get_response()
    return render_page()


@blueprint.route('/search')
def nodes_search_index():
    return render_template('nodes/search.html')
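Note the idiom shared by homepage(), open_projects() and training() above: the cache decorator wraps an inner closure instead of the view function itself, so anonymous visitors share one cached rendering while authenticated users bypass the cache. A stripped-down sketch of the pattern, reusing this module's imports; the route and template names are hypothetical:

@blueprint.route('/example')
def example_view():
    # Caching the inner function, not the view: the `unless` callable is
    # evaluated on every request, so logged-in users always get a fresh render
    # while anonymous visitors share a single cached page.
    @current_app.cache.cached(timeout=3600, unless=current_user_is_authenticated)
    def render_page():
        return render_template('example.html')

    return render_page()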
pillar/web/nodes/__init__.py (Normal file, 8 lines added)
@@ -0,0 +1,8 @@
from .routes import blueprint


def setup_app(app, url_prefix=None):
    from . import custom

    custom.setup_app(app)
    app.register_blueprint(blueprint, url_prefix=url_prefix)
pillar/web/nodes/attachments.py (Normal file, 161 lines added)
@@ -0,0 +1,161 @@
import logging
import re

from bson import ObjectId
import flask
import pillarsdk
import wtforms

from pillar.api.node_types import ATTACHMENT_SLUG_REGEX
from pillar.web.utils import system_util
from pillar.web.utils.forms import build_file_select_form, CustomFormField

shortcode_re = re.compile(r'@\[(%s)\]' % ATTACHMENT_SLUG_REGEX)
log = logging.getLogger(__name__)


def render_attachments(node, field_value):
    """Renders attachments referenced in the field value.

    Returns the rendered field.
    """

    # TODO: cache this based on the node's etag and attachment links expiry.

    node_attachments = node.properties.attachments or {}
    if isinstance(node_attachments, list):
        log.warning('Old-style attachments property found on node %s. Ignoring them, '
                    'will result in attachments not being found.', node[u'_id'])
        return field_value

    if not node_attachments:
        return field_value

    def replace(match):
        slug = match.group(1)

        try:
            att = node_attachments[slug]
        except KeyError:
            return u'[attachment "%s" not found]' % slug
        return render_attachment(att)

    return shortcode_re.sub(replace, field_value)


def render_attachment(attachment):
    """Renders an attachment as HTML"""

    oid = ObjectId(attachment[u'oid'])
    collection = attachment.collection or u'files'

    renderers = {
        'files': render_attachment_file
    }

    try:
        renderer = renderers[collection]
    except KeyError:
        log.error(u'Unable to render attachment from collection %s', collection)
        return u'Unable to render attachment'

    return renderer(attachment)


def render_attachment_file(attachment):
    """Renders a file attachment."""

    api = system_util.pillar_api()
    sdk_file = pillarsdk.File.find(attachment[u'oid'], api=api)

    file_renderers = {
        'image': render_attachment_file_image
    }

    mime_type_cat, _ = sdk_file.content_type.split('/', 1)
    try:
        renderer = file_renderers[mime_type_cat]
    except KeyError:
        return flask.render_template('nodes/attachments/file_generic.html', file=sdk_file)

    return renderer(sdk_file, attachment)


def render_attachment_file_image(sdk_file, attachment):
    """Renders an image file."""

    variations = {var.size: var for var in sdk_file.variations}
    return flask.render_template('nodes/attachments/file_image.html',
                                 file=sdk_file, vars=variations, attachment=attachment)


def attachment_form_group_create(schema_prop):
    """Creates a wtforms.FieldList for attachments."""

    file_select_form_group = _attachment_build_single_field(schema_prop)
    field = wtforms.FieldList(CustomFormField(file_select_form_group), min_entries=1)

    return field


def _attachment_build_single_field(schema_prop):
    # Ugly hard-coded schema.
    fake_schema = {
        'slug': schema_prop['propertyschema'],
        'oid': schema_prop['valueschema']['schema']['oid'],
        'link': schema_prop['valueschema']['schema']['link'],
        'link_custom': schema_prop['valueschema']['schema']['link_custom'],
    }
    file_select_form_group = build_file_select_form(fake_schema)
    return file_select_form_group


def attachment_form_group_set_data(db_prop_value, schema_prop, field_list):
    """Populates the attachment form group with data from MongoDB."""

    assert isinstance(db_prop_value, dict)

    # Extra entries are caused by min_entries=1 in the form creation.
    while len(field_list):
        field_list.pop_entry()

    for slug, att_data in sorted(db_prop_value.iteritems()):
        file_select_form_group = _attachment_build_single_field(schema_prop)
        subform = file_select_form_group()

        # Even uglier hard-coded fields.
        subform.slug = slug
        subform.oid = att_data['oid']
        subform.link = 'self'
        subform.link_custom = None
        if 'link' in att_data:
            subform.link = att_data['link']
        if 'link_custom' in att_data:
            subform.link_custom = att_data['link_custom']
        field_list.append_entry(subform)


def attachment_form_parse_post_data(data):
    """Returns a dict that can be stored in node.properties.attachments."""

    attachments = {}

    # 'allprops' contains all properties, including the slug (which should be a key).
    for allprops in data:
        oid = allprops['oid']
        slug = allprops['slug']
        link = allprops['link']
        link_custom = allprops['link_custom']

        if not allprops['slug'] and not oid:
            continue

        if slug in attachments:
            raise ValueError('Slug "%s" is used more than once' % slug)
        attachments[slug] = {'oid': oid}
        attachments[slug]['link'] = link

        if link == 'custom':
            attachments[slug]['link_custom'] = link_custom

    return attachments
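The shortcode syntax that shortcode_re matches is @[slug]: render_attachments() swaps each marker for the rendered attachment, or a "not found" notice when the slug is missing from node.properties.attachments. A self-contained sketch of just the substitution step, using a simplified stand-in for ATTACHMENT_SLUG_REGEX:

import re

# Simplified stand-in for ATTACHMENT_SLUG_REGEX, for illustration only.
SLUG = r'[a-zA-Z0-9_\-]+'
shortcode_re = re.compile(r'@\[(%s)\]' % SLUG)

attachments = {u'header-img': u'<img src="/files/header.jpg">'}

def replace(match):
    slug = match.group(1)
    try:
        return attachments[slug]
    except KeyError:
        return u'[attachment "%s" not found]' % slug

print shortcode_re.sub(replace, u'Intro @[header-img] and @[missing].')
# prints: Intro <img src="/files/header.jpg"> and [attachment "missing" not found].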
pillar/web/nodes/custom/__init__.py (Normal file, 8 lines added)
@@ -0,0 +1,8 @@
def append_custom_node_endpoints():
    pass


def setup_app(app):
    from . import posts

    posts.setup_app(app)
pillar/web/nodes/custom/comments.py (Normal file, 262 lines added)
@@ -0,0 +1,262 @@
import logging
import warnings

from flask import current_app
from flask import request
from flask import jsonify
from flask import render_template
from flask_login import login_required, current_user
from pillarsdk import Node
from pillarsdk import Project
import werkzeug.exceptions as wz_exceptions

from pillar.web import subquery
from pillar.web.nodes.routes import blueprint
from pillar.web.utils import gravatar
from pillar.web.utils import pretty_date, datetime_now
from pillar.web.utils import system_util

log = logging.getLogger(__name__)


@blueprint.route('/comments/create', methods=['POST'])
@login_required
def comments_create():
    content = request.form['content']
    parent_id = request.form.get('parent_id')

    if not parent_id:
        log.warning('User %s tried to create comment without parent_id', current_user.objectid)
        raise wz_exceptions.UnprocessableEntity()

    api = system_util.pillar_api()
    parent_node = Node.find(parent_id, api=api)
    if not parent_node:
        log.warning('Unable to create comment for user %s, parent node %r not found',
                    current_user.objectid, parent_id)
        raise wz_exceptions.UnprocessableEntity()

    log.info('Creating comment for user %s on parent node %r',
             current_user.objectid, parent_id)

    comment_props = dict(
        project=parent_node.project,
        name='Comment',
        user=current_user.objectid,
        node_type='comment',
        properties=dict(
            content=content,
            status='published',
            confidence=0,
            rating_positive=0,
            rating_negative=0))

    if parent_id:
        comment_props['parent'] = parent_id

        # Get the parent node and check if it's a comment. In which case we flag
        # the current comment as a reply.
        parent_node = Node.find(parent_id, api=api)
        if parent_node.node_type == 'comment':
            comment_props['properties']['is_reply'] = True

    comment = Node(comment_props)
    comment.create(api=api)

    return jsonify({'node_id': comment._id}), 201


@blueprint.route('/comments/<string(length=24):comment_id>', methods=['POST'])
@login_required
def comment_edit(comment_id):
    """Allows a user to edit their comment."""

    api = system_util.pillar_api()

    comment = Node({'_id': comment_id})
    result = comment.patch({'op': 'edit', 'content': request.form['content']}, api=api)
    assert result['_status'] == 'OK'

    return jsonify({
        'status': 'success',
        'data': {
            'content_html': result.properties.content_html,
        }})


def format_comment(comment, is_reply=False, is_team=False, replies=None):
    """Format a comment node into a simpler dictionary.

    :param comment: the comment object
    :param is_reply: True if the comment is a reply to another comment
    :param is_team: True if the author belongs to the group that owns the node
    :param replies: list of replies (formatted with this function)
    """
    try:
        is_own = (current_user.objectid == comment.user._id) \
            if current_user.is_authenticated else False
    except AttributeError:
        current_app.bugsnag.notify(Exception(
            'Missing user for embedded user ObjectId'),
            meta_data={'nodes_info': {'node_id': comment['_id']}})
        return
    is_rated = False
    is_rated_positive = None
    if comment.properties.ratings:
        for rating in comment.properties.ratings:
            if current_user.is_authenticated and rating.user == current_user.objectid:
                is_rated = True
                is_rated_positive = rating.is_positive
                break

    return dict(_id=comment._id,
                gravatar=gravatar(comment.user.email, size=32),
                time_published=pretty_date(comment._created or datetime_now(), detail=True),
                rating=comment.properties.rating_positive - comment.properties.rating_negative,
                author=comment.user.full_name,
                author_username=comment.user.username,
                content=comment.properties.content,
                is_reply=is_reply,
                is_own=is_own,
                is_rated=is_rated,
                is_rated_positive=is_rated_positive,
                is_team=is_team,
                replies=replies)


@blueprint.route("/comments/")
def comments_index():
    warnings.warn('comments_index() is deprecated in favour of comments_for_node()')

    parent_id = request.args.get('parent_id')
    # Get data only if we format it
    api = system_util.pillar_api()
    if request.args.get('format') == 'json':
        nodes = Node.all({
            'where': '{"node_type" : "comment", "parent": "%s"}' % (parent_id),
            'embedded': '{"user":1}'}, api=api)

        comments = []
        for comment in nodes._items:
            # Query for first level children (comment replies)
            replies = Node.all({
                'where': '{"node_type" : "comment", "parent": "%s"}' % (comment._id),
                'embedded': '{"user":1}'}, api=api)
            replies = replies._items if replies._items else None
            if replies:
                replies = [format_comment(reply, is_reply=True) for reply in replies]

            comments.append(
                format_comment(comment, is_reply=False, replies=replies))

        return_content = jsonify(items=[c for c in comments if c is not None])
    else:
        parent_node = Node.find(parent_id, api=api)
        project = Project({'_id': parent_node.project})
        has_method_POST = project.node_type_has_method('comment', 'POST', api=api)
        # Data will be requested via javascript
        return_content = render_template('nodes/custom/_comments.html',
                                         parent_id=parent_id,
                                         has_method_POST=has_method_POST)
    return return_content


@blueprint.route('/<string(length=24):node_id>/comments')
def comments_for_node(node_id):
    """Shows the comments attached to the given node."""

    api = system_util.pillar_api()

    node = Node.find(node_id, api=api)
    project = Project({'_id': node.project})
    can_post_comments = project.node_type_has_method('comment', 'POST', api=api)
    can_comment_override = request.args.get('can_comment', 'True') == 'True'
    can_post_comments = can_post_comments and can_comment_override

    # Query for all children, i.e. comments on the node.
    comments = Node.all({
        'where': {'node_type': 'comment', 'parent': node_id},
    }, api=api)

    def enrich(some_comment):
        some_comment['_user'] = subquery.get_user_info(some_comment['user'])
        some_comment['_is_own'] = some_comment['user'] == current_user.objectid
        some_comment['_current_user_rating'] = None  # tri-state boolean
        some_comment['_rating'] = some_comment.properties.rating_positive - some_comment.properties.rating_negative

        if current_user.is_authenticated:
            for rating in some_comment.properties.ratings or ():
                if rating.user != current_user.objectid:
                    continue

                some_comment['_current_user_rating'] = rating.is_positive

    for comment in comments['_items']:
        # Query for all grandchildren, i.e. replies to comments on the node.
        comment['_replies'] = Node.all({
            'where': {'node_type': 'comment', 'parent': comment['_id']},
        }, api=api)

        enrich(comment)
        for reply in comment['_replies']['_items']:
            enrich(reply)

    nr_of_comments = sum(1 + comment['_replies']['_meta']['total']
                         for comment in comments['_items'])

    return render_template('nodes/custom/comment/list_embed.html',
                           node_id=node_id,
                           comments=comments,
                           nr_of_comments=nr_of_comments,
                           show_comments=True,
                           can_post_comments=can_post_comments)


@blueprint.route('/<string(length=24):node_id>/commentform')
def commentform_for_node(node_id):
    """Shows only the comment form for comments attached to the given node.

    i.e. does not show the comments themselves, just the form to post a new comment.
    """

    api = system_util.pillar_api()

    node = Node.find(node_id, api=api)
    project = Project({'_id': node.project})
    can_post_comments = project.node_type_has_method('comment', 'POST', api=api)

    return render_template('nodes/custom/comment/list_embed.html',
                           node_id=node_id,
                           show_comments=False,
                           can_post_comments=can_post_comments)


@blueprint.route("/comments/<comment_id>/rate/<operation>", methods=['POST'])
@login_required
def comments_rate(comment_id, operation):
    """Comment rating function

    :param comment_id: the comment id
    :type comment_id: str
    :param operation: the rating operation: 'revoke', 'upvote' or 'downvote'
    :type operation: str
    """

    if operation not in {u'revoke', u'upvote', u'downvote'}:
        raise wz_exceptions.BadRequest('Invalid operation')

    api = system_util.pillar_api()

    # PATCH the node and return the result.
    comment = Node({'_id': comment_id})
    result = comment.patch({'op': operation}, api=api)
    assert result['_status'] == 'OK'

    return jsonify({
        'status': 'success',
        'data': {
            'op': operation,
            'rating_positive': result.properties.rating_positive,
            'rating_negative': result.properties.rating_negative,
        }})
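The rating endpoint above implements a small JSON protocol: POST to /comments/<comment_id>/rate/<operation> with operation being revoke, upvote or downvote, and the response echoes the operation plus the updated totals. A hedged sketch of calling it from a test, assuming the nodes blueprint is mounted under /nodes as in pillar/web/__init__.py and the request is authenticated (the view is @login_required):

def upvote_comment(client, comment_id):
    # `client` is assumed to be a Flask test client for a fully set-up app;
    # this helper is illustrative and not part of the diff.
    resp = client.post('/nodes/comments/%s/rate/upvote' % comment_id)
    assert resp.status_code == 200
    # Response body: {"status": "success",
    #                 "data": {"op": "upvote",
    #                          "rating_positive": ..., "rating_negative": ...}}
    return resp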
pillar/web/nodes/custom/groups.py (Normal file, 35 lines added)
@@ -0,0 +1,35 @@
from flask import request
from flask import jsonify
from flask_login import login_required, current_user
from pillarsdk import Node
from pillar.web.utils import system_util
from ..routes import blueprint


@blueprint.route('/groups/create', methods=['POST'])
@login_required
def groups_create():
    # Use current_project_id from the session instead of the cookie
    name = request.form['name']
    project_id = request.form['project_id']
    parent_id = request.form.get('parent_id')

    api = system_util.pillar_api()
    # We will create the Node object later on, after creating the file object
    node_asset_props = dict(
        name=name,
        user=current_user.objectid,
        node_type='group',
        project=project_id,
        properties=dict(
            status='published'))
    # Add parent_id only if provided (we do not provide it when creating groups
    # at the Project root)
    if parent_id:
        node_asset_props['parent'] = parent_id

    node_asset = Node(node_asset_props)
    node_asset.create(api=api)
    return jsonify(
        status='success',
        data=dict(name=name, asset_id=node_asset._id))
Some files were not shown because too many files have changed in this diff.