Introducing Pillar Framework

Refactor of pillar-server and pillar-web into a single python package. This
simplifies the overall architecture of pillar applications.

Special thanks to @sybren and @venomgfx
This commit is contained in:
Francesco Siddi 2016-08-19 09:19:06 +02:00
parent a5e92e1d87
commit 2c5dc34ea2
232 changed files with 79508 additions and 2232 deletions

18
.gitignore vendored
View File

@ -6,14 +6,24 @@
*.ropeproject*
*.swp
/pillar/config_local.py
config_local.py
.ropeproject/*
/pillar/application/static/storage/
/build
/.cache
/pillar/pillar.egg-info/
/pillar/google_app.json
/*.egg-info/
profile.stats
/dump/
/.eggs
/node_modules
/.sass-cache
*.css.map
*.js.map
pillar/web/static/assets/css/*.css
pillar/web/static/assets/js/*.min.js
pillar/web/static/storage/
pillar/web/static/uploads/
pillar/web/templates/

View File

@ -1,57 +0,0 @@
#!/bin/bash
# Deploys the current production branch to the production machine.
#
# Use 'set' rather than shebang flags: '#!/bin/bash -e' is lost when the
# script is run as 'bash deploy.sh'.
set -euo pipefail

PROJECT_NAME="pillar"
DOCKER_NAME="pillar"
REMOTE_ROOT="/data/git/${PROJECT_NAME}"
# Keep the ssh command in an array so its words expand separately without
# relying on unquoted word-splitting.
SSH=(ssh -o ClearAllForwardings=yes cloud.blender.org)

ROOT="$(dirname "$(readlink -f "$0")")"
cd "${ROOT}"

# Check that we're on production branch.
if [[ "$(git rev-parse --abbrev-ref HEAD)" != "production" ]]; then
    echo "You are NOT on the production branch, refusing to deploy." >&2
    exit 1
fi

# Check that production branch has been pushed.
if [[ -n "$(git log origin/production..production --oneline)" ]]; then
    echo "WARNING: not all changes to the production branch have been pushed."
    echo "Press [ENTER] to continue deploying current origin/production, CTRL+C to abort."
    read -r dummy
fi

# SSH to cloud to pull all files in.
echo "==================================================================="
echo "UPDATING FILES ON ${PROJECT_NAME}"
"${SSH[@]}" git -C "${REMOTE_ROOT}" fetch origin production
"${SSH[@]}" git -C "${REMOTE_ROOT}" log origin/production..production --oneline
"${SSH[@]}" git -C "${REMOTE_ROOT}" merge --ff-only origin/production

# Update the virtualenv.
"${SSH[@]}" -t docker exec "${DOCKER_NAME}" /data/venv/bin/pip install -U -r "${REMOTE_ROOT}/requirements.txt" --exists-action w

# Notify Bugsnag of this new deploy.
echo
echo "==================================================================="
GIT_REVISION="$("${SSH[@]}" git -C "${REMOTE_ROOT}" describe --always)"
echo "Notifying Bugsnag of this new deploy of revision ${GIT_REVISION}."
BUGSNAG_API_KEY="$("${SSH[@]}" python -c "\"import sys; sys.path.append('${REMOTE_ROOT}/${PROJECT_NAME}'); import config_local; print(config_local.BUGSNAG_API_KEY)\"")"
curl --data "apiKey=${BUGSNAG_API_KEY}&revision=${GIT_REVISION}" https://notify.bugsnag.com/deploy
echo

# Wait for [ENTER] to restart the server.
echo
echo "==================================================================="
echo "NOTE: If you want to edit config_local.py on the server, do so now."
echo "NOTE: Press [ENTER] to continue and restart the server process."
read -r dummy
"${SSH[@]}" docker exec "${DOCKER_NAME}" kill -HUP 1
echo "Server process restarted"
echo
echo "==================================================================="
echo "Deploy of ${PROJECT_NAME} is done."
echo "==================================================================="

View File

@ -1,17 +0,0 @@
#!/usr/bin/env bash
# Builds the armadillica/pillar_{pro,dev} Docker image.
# Usage: build.sh pro|dev
set -euo pipefail

# Directory containing this script, so the relative paths below work from
# any working directory (the original computed DIR but never used it).
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
echo "$DIR"

if [[ "${1:-}" == 'pro' || "${1:-}" == 'dev' ]]; then
    # Copy requirements.txt into pro folder
    cp "$DIR/../requirements.txt" "$DIR/$1/requirements.txt"
    # Build image
    docker build -t "armadillica/pillar_$1" "$DIR/$1"
    # Remove requirements.txt
    rm "$DIR/$1/requirements.txt"
else
    # Signal the bad invocation with a non-zero exit status.
    echo "POS. Your options are 'pro' or 'dev'" >&2
    exit 1
fi

View File

@ -1,48 +0,0 @@
FROM ubuntu:14.04
MAINTAINER Francesco Siddi <francesco@blender.org>

# System packages needed to build the Python requirements and run the app.
RUN apt-get update && apt-get install -y \
python \
python-dev \
python-pip \
vim \
nano \
zlib1g-dev \
libjpeg-dev \
python-crypto \
python-openssl \
libssl-dev \
libffi-dev \
software-properties-common \
git

# ffmpeg is not available in the stock Trusty repositories.
RUN add-apt-repository ppa:mc3man/trusty-media \
&& apt-get update && apt-get install -y \
ffmpeg

RUN mkdir -p /data/git/pillar \
&& mkdir -p /data/storage/shared \
&& mkdir -p /data/storage/pillar \
&& mkdir -p /data/config \
&& mkdir -p /data/storage/logs

RUN pip install virtualenv \
&& virtualenv /data/venv

# Bump this value to bust the Docker build cache for the pip install below.
# NOTE: the original 'ENV PIP_PACKAGES_VERSION = 2' set the variable to the
# string '= 2'; the key=value form below sets it to '2' as intended.
ENV PIP_PACKAGES_VERSION=2
ADD requirements.txt /requirements.txt
RUN . /data/venv/bin/activate && pip install -r /requirements.txt

VOLUME /data/git/pillar
VOLUME /data/config
VOLUME /data/storage/shared
VOLUME /data/storage/pillar

ENV MONGO_HOST=mongo_pillar

EXPOSE 5000
ADD runserver.sh /runserver.sh
ENTRYPOINT ["bash", "/runserver.sh"]

View File

@ -1,3 +0,0 @@
#!/bin/bash
# Container entry point: activate the app's virtualenv, then start the
# Flask development server via manage.py.
. /data/venv/bin/activate && python /data/git/pillar/pillar/manage.py runserver

View File

@ -1,47 +0,0 @@
<VirtualHost *:80>
	# The ServerName directive sets the request scheme, hostname and port that
	# the server uses to identify itself. This is used when creating
	# redirection URLs. In the context of virtual hosts, the ServerName
	# specifies what hostname must appear in the request's Host: header to
	# match this virtual host. For the default virtual host (this file) this
	# value is not decisive as it is used as a last resort host regardless.
	# However, you must set it for any further virtual host explicitly.
	#ServerName 127.0.0.1

	# EnableSendfile on
	# Serve files the application hands off via the X-Sendfile header,
	# restricted to the storage directory.
	XSendFile on
	XSendFilePath /data/storage/pillar

	ServerAdmin webmaster@localhost
	DocumentRoot /var/www/html

	# Available loglevels: trace8, ..., trace1, debug, info, notice, warn,
	# error, crit, alert, emerg.
	# It is also possible to configure the loglevel for particular
	# modules, e.g.
	#LogLevel info ssl:warn

	ErrorLog ${APACHE_LOG_DIR}/error.log
	CustomLog ${APACHE_LOG_DIR}/access.log combined

	# For most configuration files from conf-available/, which are
	# enabled or disabled at a global level, it is possible to
	# include a line for only one particular virtual host. For example the
	# following line enables the CGI configuration for this host only
	# after it has been globally disabled with "a2disconf".
	#Include conf-available/serve-cgi-bin.conf

	# Run Pillar in a dedicated mod_wsgi daemon process; pass the HTTP
	# Authorization header through to the application.
	WSGIDaemonProcess pillar
	WSGIPassAuthorization On
	WSGIScriptAlias / /data/git/pillar/pillar/runserver.wsgi \
	    process-group=pillar application-group=%{GLOBAL}

	<Directory /data/git/pillar/pillar>
	    <Files runserver.wsgi>
	        Require all granted
	    </Files>
	</Directory>
</VirtualHost>

# vim: syntax=apache ts=4 sw=4 sts=4 sr noet

View File

@ -1,61 +0,0 @@
FROM ubuntu:14.04
MAINTAINER Francesco Siddi <francesco@blender.org>

# System packages: Python build deps plus Apache, mod_wsgi and mod_xsendfile.
RUN apt-get update && apt-get install -y \
python \
python-dev \
python-pip \
vim \
nano \
zlib1g-dev \
libjpeg-dev \
python-crypto \
python-openssl \
libssl-dev \
libffi-dev \
software-properties-common \
apache2-mpm-event \
libapache2-mod-wsgi \
libapache2-mod-xsendfile \
git

# ffmpeg is not available in the stock Trusty repositories.
RUN add-apt-repository ppa:mc3man/trusty-media \
&& apt-get update && apt-get install -y \
ffmpeg

RUN mkdir -p /data/git/pillar \
&& mkdir -p /data/storage/shared \
&& mkdir -p /data/storage/pillar \
&& mkdir -p /data/config \
&& mkdir -p /data/storage/logs

# Environment normally provided by apache2ctl / envvars.
ENV APACHE_RUN_USER=www-data
ENV APACHE_RUN_GROUP=www-data
ENV APACHE_LOG_DIR=/var/log/apache2
ENV APACHE_PID_FILE=/var/run/apache2.pid
ENV APACHE_RUN_DIR=/var/run/apache2
ENV APACHE_LOCK_DIR=/var/lock/apache2

RUN mkdir -p $APACHE_RUN_DIR $APACHE_LOCK_DIR $APACHE_LOG_DIR

RUN pip install virtualenv \
&& virtualenv /data/venv

# Bump this value to bust the Docker build cache for the pip install below.
# NOTE: the original 'ENV PIP_PACKAGES_VERSION = 2' set the variable to the
# string '= 2'; the key=value form below sets it to '2' as intended.
ENV PIP_PACKAGES_VERSION=2
ADD requirements.txt /requirements.txt
RUN . /data/venv/bin/activate \
&& pip install -r /requirements.txt

VOLUME /data/git/pillar
VOLUME /data/config
VOLUME /data/storage/shared
VOLUME /data/storage/pillar

ENV MONGO_HOST=mongo_pillar

EXPOSE 80
ADD 000-default.conf /etc/apache2/sites-available/000-default.conf
CMD ["/usr/sbin/apache2", "-D", "FOREGROUND"]

104
gulpfile.js Normal file
View File

@ -0,0 +1,104 @@
var argv = require('minimist')(process.argv.slice(2));
var autoprefixer = require('gulp-autoprefixer');
var chmod = require('gulp-chmod');
var concat = require('gulp-concat');
var gulp = require('gulp');
var gulpif = require('gulp-if');
var jade = require('gulp-jade');
var livereload = require('gulp-livereload');
var plumber = require('gulp-plumber');
var rename = require('gulp-rename');
var sass = require('gulp-sass');
var sourcemaps = require('gulp-sourcemaps');
var uglify = require('gulp-uglify');

// Build toggles: pass --production for minified, sourcemapped output that
// fails hard on errors; without it, output is pretty and livereload is on.
var enabled = {
    uglify: argv.production,
    maps: argv.production,
    failCheck: argv.production,
    prettyPug: !argv.production,
    liveReload: !argv.production
};

/* CSS */
gulp.task('styles', function() {
    // Return the stream so gulp 3 knows when this async task has finished;
    // without the return, dependent tasks may run before files are written
    // and stream errors are not propagated to the task runner.
    return gulp.src('src/styles/**/*.sass')
        .pipe(gulpif(enabled.failCheck, plumber()))
        .pipe(gulpif(enabled.maps, sourcemaps.init()))
        .pipe(sass({
            outputStyle: 'compressed'}
        ))
        .pipe(autoprefixer("last 3 versions"))
        .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
        .pipe(gulp.dest('pillar/web/static/assets/css'))
        .pipe(gulpif(enabled.liveReload, livereload()));
});

/* Templates - Jade */
gulp.task('templates', function() {
    return gulp.src('src/templates/**/*.jade')
        .pipe(gulpif(enabled.failCheck, plumber()))
        .pipe(jade({
            pretty: enabled.prettyPug
        }))
        .pipe(gulp.dest('pillar/web/templates/'))
        .pipe(gulpif(enabled.liveReload, livereload()));
});

/* Individual Uglified Scripts */
gulp.task('scripts', function() {
    return gulp.src('src/scripts/*.js')
        .pipe(gulpif(enabled.failCheck, plumber()))
        .pipe(gulpif(enabled.maps, sourcemaps.init()))
        .pipe(gulpif(enabled.uglify, uglify()))
        .pipe(rename({suffix: '.min'}))
        .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
        // NOTE(review): chmod(644) passes a decimal number; gulp-chmod
        // expects an octal mode — confirm the intended permissions.
        .pipe(chmod(644))
        .pipe(gulp.dest('pillar/web/static/assets/js/'))
        .pipe(gulpif(enabled.liveReload, livereload()));
});

/* Collection of scripts in src/scripts/tutti/ to merge into tutti.min.js */
/* Since it's always loaded, it's only for functions that we want site-wide */
gulp.task('scripts_concat_tutti', function() {
    return gulp.src('src/scripts/tutti/**/*.js')
        .pipe(gulpif(enabled.failCheck, plumber()))
        .pipe(gulpif(enabled.maps, sourcemaps.init()))
        .pipe(concat("tutti.min.js"))
        .pipe(gulpif(enabled.uglify, uglify()))
        .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
        .pipe(chmod(644))
        .pipe(gulp.dest('pillar/web/static/assets/js/'))
        .pipe(gulpif(enabled.liveReload, livereload()));
});

gulp.task('scripts_concat_markdown', function() {
    return gulp.src('src/scripts/markdown/**/*.js')
        .pipe(gulpif(enabled.failCheck, plumber()))
        .pipe(gulpif(enabled.maps, sourcemaps.init()))
        .pipe(concat("markdown.min.js"))
        .pipe(gulpif(enabled.uglify, uglify()))
        .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
        .pipe(chmod(644))
        .pipe(gulp.dest('pillar/web/static/assets/js/'))
        .pipe(gulpif(enabled.liveReload, livereload()));
});

// While developing, run 'gulp watch'
gulp.task('watch', function() {
    livereload.listen();

    gulp.watch('src/styles/**/*.sass', ['styles']);
    gulp.watch('src/templates/**/*.jade', ['templates']);
    gulp.watch('src/scripts/*.js', ['scripts']);
    gulp.watch('src/scripts/tutti/**/*.js', ['scripts_concat_tutti']);
    gulp.watch('src/scripts/markdown/**/*.js', ['scripts_concat_markdown']);
});

// Run 'gulp' to build everything at once
gulp.task('default', ['styles', 'templates', 'scripts', 'scripts_concat_tutti', 'scripts_concat_markdown']);

430
pillar/manage.py → old-src/manage.py Executable file → Normal file
View File

@ -1,36 +1,33 @@
#!/usr/bin/env python
from __future__ import print_function
from __future__ import division
from __future__ import print_function
import copy
import os
import logging
from bson.objectid import ObjectId, InvalidId
from eve.methods.put import put_internal
import os
from bson.objectid import ObjectId
from eve.methods.post import post_internal
from eve.methods.put import put_internal
from flask.ext.script import Manager
# Use a sensible default when running manage.py commands.
if not os.environ.get('EVE_SETTINGS'):
settings_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'settings.py')
'pillar', 'eve_settings.py')
os.environ['EVE_SETTINGS'] = settings_path
from application import app
from application.utils.gcs import GoogleCloudStorageBucket
from manage_extra.node_types.asset import node_type_asset
from manage_extra.node_types.blog import node_type_blog
from manage_extra.node_types.comment import node_type_comment
from manage_extra.node_types.group import node_type_group
from manage_extra.node_types.post import node_type_post
from manage_extra.node_types.project import node_type_project
from manage_extra.node_types.storage import node_type_storage
from manage_extra.node_types.texture import node_type_texture
from manage_extra.node_types.group_texture import node_type_group_texture
# from pillar import app
from pillar.api.node_types.asset import node_type_asset
from pillar.api.node_types import node_type_blog
from pillar.api.node_types.comment import node_type_comment
from pillar.api.node_types.group import node_type_group
from pillar.api.node_types.post import node_type_post
from pillar.api.node_types import node_type_storage
from pillar.api.node_types.texture import node_type_texture
manager = Manager(app)
manager = Manager()
log = logging.getLogger('manage')
log.setLevel(logging.INFO)
@ -132,7 +129,7 @@ def setup_db(admin_email):
# Create a default project by faking a POST request.
with app.test_request_context(data={'project_name': u'Default Project'}):
from flask import g
from application.modules import projects
from pillar.api import projects
g.current_user = {'user_id': user['_id'],
'groups': user['groups'],
@ -141,29 +138,6 @@ def setup_db(admin_email):
projects.create_project(overrides={'url': 'default-project',
'is_private': False})
@manager.command
def setup_db_indices():
"""Adds missing database indices."""
from application import setup_db_indices
import pymongo
log.info('Adding missing database indices.')
log.warning('This does NOT drop and recreate existing indices, '
'nor does it reconfigure existing indices. '
'If you want that, drop them manually first.')
setup_db_indices()
coll_names = db.collection_names(include_system_collections=False)
for coll_name in sorted(coll_names):
stats = db.command('collStats', coll_name)
log.info('Collection %25s takes up %.3f MiB index space',
coll_name, stats['totalIndexSize'] / 2 ** 20)
def _default_permissions():
"""Returns a dict of default permissions.
@ -172,7 +146,7 @@ def _default_permissions():
:rtype: dict
"""
from application.modules.projects import DEFAULT_ADMIN_GROUP_PERMISSIONS
from pillar.api.projects import DEFAULT_ADMIN_GROUP_PERMISSIONS
groups_collection = app.data.driver.db['groups']
admin_group = groups_collection.find_one({'name': 'admin'})
@ -200,9 +174,9 @@ def setup_for_attract(project_uuid, replace=False):
:type replace: bool
"""
from manage_extra.node_types.act import node_type_act
from manage_extra.node_types.scene import node_type_scene
from manage_extra.node_types.shot import node_type_shot
from pillar.api.node_types import node_type_act
from pillar.api.node_types.scene import node_type_scene
from pillar.api.node_types import node_type_shot
# Copy permissions from the project, then give everyone with PUT
# access also DELETE access.
@ -274,7 +248,7 @@ def _update_project(project_uuid, project):
:rtype: dict
"""
from application.utils import remove_private_keys
from pillar.api.utils import remove_private_keys
project_id = ObjectId(project_uuid)
project = remove_private_keys(project)
@ -289,7 +263,7 @@ def _update_project(project_uuid, project):
def refresh_project_permissions():
"""Replaces the admin group permissions of each project with the defaults."""
from application.modules.projects import DEFAULT_ADMIN_GROUP_PERMISSIONS
from pillar.api.projects import DEFAULT_ADMIN_GROUP_PERMISSIONS
proj_coll = app.data.driver.db['projects']
result = proj_coll.update_many({}, {'$set': {
@ -306,8 +280,8 @@ def refresh_home_project_permissions():
proj_coll = app.data.driver.db['projects']
from application.modules.blender_cloud import home_project
from application.modules import service
from pillar.api.blender_cloud import home_project
from pillar.api import service
service.fetch_role_to_group_id_map()
@ -398,7 +372,7 @@ def set_attachment_names():
"""Loop through all existing nodes and assign proper ContentDisposition
metadata to referenced files that are using GCS.
"""
from application.utils.gcs import update_file_name
from pillar.api.utils.gcs import update_file_name
nodes_collection = app.data.driver.db['nodes']
for n in nodes_collection.find():
print("Updating node {0}".format(n['_id']))
@ -496,7 +470,7 @@ def test_post_internal(node_id):
@manager.command
def algolia_push_users():
"""Loop through all users and push them to Algolia"""
from application.utils.algolia import algolia_index_user_save
from pillar.api.utils.algolia import algolia_index_user_save
users_collection = app.data.driver.db['users']
for user in users_collection.find():
print("Pushing {0}".format(user['username']))
@ -506,7 +480,7 @@ def algolia_push_users():
@manager.command
def algolia_push_nodes():
"""Loop through all nodes and push them to Algolia"""
from application.utils.algolia import algolia_index_node_save
from pillar.api.utils.algolia import algolia_index_node_save
nodes_collection = app.data.driver.db['nodes']
for node in nodes_collection.find():
print(u"Pushing {0}: {1}".format(node['_id'], node['name'].encode(
@ -520,7 +494,7 @@ def files_make_public_t():
public
"""
from gcloud.exceptions import InternalServerError
from application.utils.gcs import GoogleCloudStorageBucket
from pillar.api.utils.gcs import GoogleCloudStorageBucket
files_collection = app.data.driver.db['files']
for f in files_collection.find({'backend': 'gcs'}):
@ -550,7 +524,7 @@ def subscribe_node_owners():
"""Automatically subscribe node owners to notifications for items created
in the past.
"""
from application.modules.nodes import after_inserting_nodes
from pillar.api.nodes import after_inserting_nodes
nodes_collection = app.data.driver.db['nodes']
for n in nodes_collection.find():
if 'parent' in n:
@ -563,65 +537,19 @@ def refresh_project_links(project, chunk_size=50, quiet=False):
if quiet:
import logging
from application import log
from pillar import log
logging.getLogger().setLevel(logging.WARNING)
log.setLevel(logging.WARNING)
chunk_size = int(chunk_size) # CLI parameters are passed as strings
from application.modules import file_storage
from pillar.api import file_storage
file_storage.refresh_links_for_project(project, chunk_size, 2 * 3600)
@manager.command
@manager.option('-c', '--chunk', dest='chunk_size', default=50)
@manager.option('-q', '--quiet', dest='quiet', action='store_true', default=False)
@manager.option('-w', '--window', dest='window', default=12)
def refresh_backend_links(backend_name, chunk_size=50, quiet=False, window=12):
    """Refreshes all file links that are using a certain storage backend."""
    # CLI parameters arrive as strings; coerce before arithmetic below.
    chunk_size = int(chunk_size)
    window = int(window)
    if quiet:
        import logging
        from application import log
        # Silence both the root logger and the application logger.
        logging.getLogger().setLevel(logging.WARNING)
        log.setLevel(logging.WARNING)
    # NOTE(review): chunk_size was already converted above; this repeat is
    # harmless but redundant.
    chunk_size = int(chunk_size)  # CLI parameters are passed as strings
    from application.modules import file_storage
    # 'window' is multiplied by 3600 — presumably hours converted to the
    # seconds that refresh_links_for_backend expects; confirm in that module.
    file_storage.refresh_links_for_backend(backend_name, chunk_size, window * 3600)
@manager.command
def expire_all_project_links(project_uuid):
    """Expires all file links for a certain project without refreshing.

    This is just for testing.
    """
    import datetime
    import bson.tz_util

    # A timestamp one day in the past (UTC) marks every link as expired.
    expiry_timestamp = (datetime.datetime.now(tz=bson.tz_util.utc)
                        - datetime.timedelta(days=1))

    files_coll = app.data.driver.db['files']
    update_result = files_coll.update_many(
        {'project': ObjectId(project_uuid)},
        {'$set': {'link_expires': expiry_timestamp}})

    print('Expired %i links' % update_result.matched_count)
@manager.command
def register_local_user(email, password):
from application.modules.local_auth import create_local_user
from pillar.api.local_auth import create_local_user
create_local_user(email, password)
@ -687,7 +615,7 @@ def add_license_props():
def refresh_file_sizes():
"""Computes & stores the 'length_aggregate_in_bytes' fields of all files."""
from application.modules import file_storage
from pillar.api import file_storage
matched = 0
unmatched = 0
@ -721,7 +649,7 @@ def project_stats():
from collections import defaultdict
from functools import partial
from application.modules import projects
from pillar.api import projects
proj_coll = app.data.driver.db['projects']
nodes = app.data.driver.db['nodes']
@ -794,9 +722,9 @@ def project_stats():
@manager.command
def add_node_types():
"""Add texture and group_texture node types to all projects"""
from manage_extra.node_types.texture import node_type_texture
from manage_extra.node_types.group_texture import node_type_group_texture
from application.utils import project_get_node_type
from pillar.api.node_types.texture import node_type_texture
from pillar.api.node_types.group_texture import node_type_group_texture
from pillar.api.utils import project_get_node_type
projects_collections = app.data.driver.db['projects']
for project in projects_collections.find():
print("Processing {}".format(project['_id']))
@ -851,291 +779,5 @@ def update_texture_nodes_maps():
print("Skipping {}".format(v['map_type']))
nodes_collection.update({'_id': node['_id']}, node)
def _create_service_account(email, service_roles, service_definition):
    """Creates the service account and prints it together with its token."""
    from application.modules import service
    from application.utils import dumps

    account, token = service.create_service_account(email,
                                                    service_roles,
                                                    service_definition)

    # Report the newly created account and its access token on stdout.
    for line in ('Account created:',
                 dumps(account, indent=4, sort_keys=True),
                 '',
                 'Access token: %s' % token['token'],
                 ' expires on: %s' % token['expire_time']):
        print(line)
@manager.command
def create_badger_account(email, badges):
    """
    Creates a new service account that can give badges (i.e. roles).

    :param email: email address associated with the account
    :param badges: single space-separated argument containing the roles
        this account can assign and revoke.
    """

    # The badge list is stored in the 'badger' service definition.
    _create_service_account(email, [u'badger'], {'badger': badges.strip().split()})
@manager.command
def create_urler_account(email):
    """Creates a new service account that can fetch all project URLs."""

    # The 'urler' role needs no extra service definition, hence the empty dict.
    _create_service_account(email, [u'urler'], {})
@manager.command
def find_duplicate_users():
    """Finds users that have the same BlenderID user_id."""

    from collections import defaultdict

    users_coll = app.data.driver.db['users']
    nodes_coll = app.data.driver.db['nodes']
    projects_coll = app.data.driver.db['projects']

    # Maps BlenderID user_id -> list of Pillar user documents using it.
    found_users = defaultdict(list)

    for user in users_coll.find():
        # NOTE(review): assumes every user document has an 'auth' list;
        # a KeyError here would abort the whole scan — confirm the schema.
        blender_ids = [auth['user_id'] for auth in user['auth']
                       if auth['provider'] == 'blender-id']
        if not blender_ids:
            continue
        blender_id = blender_ids[0]
        found_users[blender_id].append(user)

    # Report only BlenderIDs shared by more than one user. iteritems() is
    # Python 2 only, consistent with the rest of this file.
    for blender_id, users in found_users.iteritems():
        if len(users) == 1:
            continue

        usernames = ', '.join(user['username'] for user in users)
        print('Blender ID: %5s has %i users: %s' % (
            blender_id, len(users), usernames))

        for user in users:
            print(' %s owns %i nodes and %i projects' % (
                user['username'],
                nodes_coll.count({'user': user['_id']}),
                projects_coll.count({'user': user['_id']}),
            ))
@manager.command
def sync_role_groups(do_revoke_groups):
    """For each user, synchronizes roles and group membership.

    This ensures that everybody with the 'subscriber' role is also member of the 'subscriber'
    group, and people without the 'subscriber' role are not member of that group. Same for
    admin and demo groups.

    When do_revoke_groups=False (the default), people are only added to groups.
    when do_revoke_groups=True, people are also removed from groups.
    """

    from application.modules import service

    # The CLI passes the flag as a literal string; reject anything else.
    if do_revoke_groups not in {'true', 'false'}:
        print('Use either "true" or "false" as first argument.')
        print('When passing "false", people are only added to groups.')
        print('when passing "true", people are also removed from groups.')
        raise SystemExit()
    do_revoke_groups = do_revoke_groups == 'true'

    service.fetch_role_to_group_id_map()

    users_coll = app.data.driver.db['users']
    groups_coll = app.data.driver.db['groups']

    # Cache of group ObjectId -> group name, filled lazily by gname().
    group_names = {}

    def gname(gid):
        # Resolve a group id to its name, caching the DB lookup.
        try:
            return group_names[gid]
        except KeyError:
            name = groups_coll.find_one(gid, projection={'name': 1})['name']
            name = str(name)
            group_names[gid] = name
            return name

    ok_users = bad_users = 0
    for user in users_coll.find():
        grant_groups = set()
        revoke_groups = set()
        current_groups = set(user.get('groups', []))
        user_roles = user.get('roles', set())

        # Ask the service module what each role-linked group should look like.
        for role in service.ROLES_WITH_GROUPS:
            action = 'grant' if role in user_roles else 'revoke'
            groups = service.manage_user_group_membership(user, role, action)

            if groups is None:
                # No changes required
                continue
            if groups == current_groups:
                continue
            grant_groups.update(groups.difference(current_groups))
            revoke_groups.update(current_groups.difference(groups))

        if grant_groups or revoke_groups:
            bad_users += 1

            expected_groups = current_groups.union(grant_groups).difference(revoke_groups)

            print('Discrepancy for user %s/%s:' % (user['_id'], user['full_name'].encode('utf8')))
            print(' - actual groups :', sorted(gname(gid) for gid in user.get('groups')))
            print(' - expected groups:', sorted(gname(gid) for gid in expected_groups))
            print(' - will grant :', sorted(gname(gid) for gid in grant_groups))

            if do_revoke_groups:
                label = 'WILL REVOKE '
            else:
                label = 'could revoke'
            print(' - %s :' % label, sorted(gname(gid) for gid in revoke_groups))

            if grant_groups and revoke_groups:
                print(' ------ CAREFUL this one has BOTH grant AND revoke -----')

            # Determine which changes we'll apply
            final_groups = current_groups.union(grant_groups)
            if do_revoke_groups:
                final_groups.difference_update(revoke_groups)
            print(' - final groups :', sorted(gname(gid) for gid in final_groups))

            # Perform the actual update
            users_coll.update_one({'_id': user['_id']},
                                  {'$set': {'groups': list(final_groups)}})
        else:
            ok_users += 1

    print('%i bad and %i ok users seen.' % (bad_users, ok_users))
@manager.command
def sync_project_groups(user_email, fix):
    """Gives the user access to their self-created projects."""

    # The CLI passes 'fix' as a literal string; reject anything else.
    if fix.lower() not in {'true', 'false'}:
        print('Use either "true" or "false" as second argument.')
        print('When passing "false", only a report is produced.')
        print('when passing "true", group membership is fixed.')
        raise SystemExit()
    fix = fix.lower() == 'true'

    users_coll = app.data.driver.db['users']
    proj_coll = app.data.driver.db['projects']
    groups_coll = app.data.driver.db['groups']

    # Find by email or by user ID
    if '@' in user_email:
        where = {'email': user_email}
    else:
        try:
            where = {'_id': ObjectId(user_email)}
        except InvalidId:
            log.warning('Invalid ObjectID: %s', user_email)
            return

    user = users_coll.find_one(where, projection={'_id': 1, 'groups': 1})
    if user is None:
        log.error('User %s not found', where)
        raise SystemExit()

    user_groups = set(user['groups'])
    user_id = user['_id']
    log.info('Updating projects for user %s', user_id)

    ok_groups = missing_groups = 0
    for proj in proj_coll.find({'user': user_id}):
        project_id = proj['_id']
        log.info('Investigating project %s (%s)', project_id, proj['name'])

        # Find the admin group; its name is the stringified project id.
        admin_group = groups_coll.find_one({'name': str(project_id)}, projection={'_id': 1})
        if admin_group is None:
            log.warning('No admin group for project %s', project_id)
            continue
        group_id = admin_group['_id']

        # Check membership
        if group_id not in user_groups:
            log.info('Missing group membership')
            missing_groups += 1
            user_groups.add(group_id)
        else:
            ok_groups += 1

    log.info('User %s was missing %i group memberships; %i projects were ok.',
             user_id, missing_groups, ok_groups)

    # Only write to the database when asked to, and when there is a change.
    if missing_groups > 0 and fix:
        log.info('Updating database.')
        result = users_coll.update_one({'_id': user_id},
                                       {'$set': {'groups': list(user_groups)}})
        log.info('Updated %i user.', result.modified_count)
@manager.command
def badger(action, user_email, role):
    """Performs a badger action (role grant/revoke) for the given user."""
    from application.modules import service

    with app.app_context():
        service.fetch_role_to_group_id_map()
        response, status = service.do_badger(action, user_email, role)

    # 204 is the service's success status; anything else gets dumped.
    if status != 204:
        log.info('Response: %s', response)
        log.info('Status : %i', status)
    else:
        log.info('Done.')
@manager.command
def hdri_sort(project_url):
    """Sorts HDRi images by image resolution."""

    proj_coll = app.data.driver.db['projects']
    nodes_coll = app.data.driver.db['nodes']
    files_coll = app.data.driver.db['files']

    proj = proj_coll.find_one({'url': project_url})
    if not proj:
        log.warning('Project url=%r not found.' % project_url)
        return

    proj_id = proj['_id']
    log.info('Processing project %r', proj_id)

    nodes = nodes_coll.find({'project': proj_id, 'node_type': 'hdri'})
    if nodes.count() == 0:
        log.warning('Project has no hdri nodes')
        return

    for node in nodes:
        log.info('Processing node %s', node['name'])

        def sort_key(file_ref):
            # NOTE(review): one DB lookup per file; fine for small file lists
            # but O(n) queries per node.
            file_doc = files_coll.find_one(file_ref['file'], projection={'length': 1})
            return file_doc['length']

        # Sort the node's file references by the referenced file's byte size.
        files = sorted(node['properties']['files'], key=sort_key)
        log.info('Files pre-sort: %s',
                 [file['resolution'] for file in node['properties']['files']])
        log.info('Files post-sort: %s',
                 [file['resolution'] for file in files])

        result = nodes_coll.update_one({'_id': node['_id']},
                                       {'$set': {'properties.files': files}})
        # Abort on the first unexpected update result rather than continuing
        # with possibly inconsistent data.
        if result.matched_count != 1:
            log.warning('Matched count = %i, expected 1, aborting', result.matched_count)
            return
if __name__ == '__main__':
manager.run()

24
package.json Normal file
View File

@ -0,0 +1,24 @@
{
"name": "pillar",
"repository": {
"type": "git",
"url": "https://github.com/armadillica/pillar.git"
},
"author": "Blender Institute",
"license": "GPL",
"devDependencies": {
"gulp": "~3.9.1",
"gulp-sass": "~2.3.1",
"gulp-autoprefixer": "~2.3.1",
"gulp-if": "^2.0.1",
"gulp-jade": "~1.1.0",
"gulp-sourcemaps": "~1.6.0",
"gulp-plumber": "~1.1.0",
"gulp-livereload": "~3.8.1",
"gulp-concat": "~2.6.0",
"gulp-uglify": "~1.5.3",
"gulp-rename": "~1.2.2",
"gulp-chmod": "~1.3.0",
"minimist": "^1.2.0"
}
}

374
pillar/__init__.py Normal file
View File

@ -0,0 +1,374 @@
"""Pillar server."""
import copy
import logging
import logging.config
import subprocess
import tempfile
import jinja2
import os
import os.path
from eve import Eve
from pillar.api import custom_field_validation
from pillar.api.utils import authentication
from pillar.api.utils import gravatar
from pillar.web.utils import pretty_date
from pillar.web.nodes.routes import url_for_node
from . import api
from . import web
from . import auth
empty_settings = {
# Use a random URL prefix when booting Eve, to ensure that any
# Flask route that's registered *before* we load our own config
# won't interfere with Pillar itself.
'URL_PREFIX': 'pieQui4vah9euwieFai6naivaV4thahchoochiiwazieBe5o',
'DOMAIN': {},
}
class PillarServer(Eve):
def __init__(self, app_root, **kwargs):
    """Sets up the Pillar Eve/Flask application.

    :param app_root: filesystem root of the deployment; used to locate
        per-deployment config.py / config_local.py files.
    """
    kwargs.setdefault('validator', custom_field_validation.ValidateCustomFields)
    # Boot Eve with placeholder settings; the real config is loaded below.
    super(PillarServer, self).__init__(settings=empty_settings, **kwargs)

    self.app_root = os.path.abspath(app_root)
    # Config and logging must be set up before anything that logs.
    self._load_flask_config()
    self._config_logging()

    self.log = logging.getLogger('%s.%s' % (__name__, self.__class__.__name__))
    self.log.info('Creating new instance from %r', self.app_root)

    self._config_tempdirs()
    self._config_git()
    self._config_bugsnag()
    self._config_google_cloud_storage()

    self.algolia_index_users = None
    self.algolia_index_nodes = None
    self.algolia_client = None
    self._config_algolia()

    self.encoding_service_client = None
    self._config_encoding_backend()

    # Eve settings: the EVE_SETTINGS envvar wins; otherwise use the
    # eve_settings.py bundled with the package.
    try:
        self.settings = os.environ['EVE_SETTINGS']
    except KeyError:
        self.settings = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                     'api', 'eve_settings.py')
    # self.settings = self.config['EVE_SETTINGS_PATH']
    self.load_config()

    # Configure authentication
    self._login_manager = auth.config_login_manager(self)
    self.oauth_blender_id = auth.config_oauth_login(self)

    self._config_caching()

    # Defer index creation until the app actually serves a request.
    self.before_first_request(self.setup_db_indices)
def _load_flask_config(self):
    """Loads Flask configuration from the package defaults, the app root,
    and optionally the file named by the PILLAR_CONFIG environment variable.

    Later sources override earlier ones, to make it easy to override
    settings with secrets, as well as for development & testing.
    """
    # (path, may_be_missing) pairs, applied in order of increasing priority.
    config_sources = [
        (os.path.join(os.path.dirname(__file__), 'config.py'), False),
        (os.path.join(self.app_root, 'config.py'), True),
        (os.path.join(self.app_root, 'config_local.py'), True),
    ]
    for config_path, may_be_missing in config_sources:
        self.config.from_pyfile(config_path, silent=may_be_missing)

    # Don't use from_envvar, as we want different behaviour. If the envvar
    # is not set, it's fine (i.e. silent=True), but if it is set and the
    # configfile doesn't exist, it should error out (i.e. silent=False).
    envvar_path = os.environ.get('PILLAR_CONFIG')
    if envvar_path:
        self.config.from_pyfile(envvar_path, silent=False)
def _config_logging(self):
# Configure logging
logging.config.dictConfig(self.config['LOGGING'])
log = logging.getLogger(__name__)
if self.config['DEBUG']:
log.info('Pillar starting, debug=%s', self.config['DEBUG'])
def _config_tempdirs(self):
storage_dir = self.config['STORAGE_DIR']
if not os.path.exists(storage_dir):
self.log.info('Creating storage directory %r', storage_dir)
os.makedirs(storage_dir)
# Set the TMP environment variable to manage where uploads are stored.
# These are all used by tempfile.mkstemp(), but we don't knwow in whic
# order. As such, we remove all used variables but the one we set.
tempfile.tempdir = storage_dir
os.environ['TMP'] = storage_dir
os.environ.pop('TEMP', None)
os.environ.pop('TMPDIR', None)
def _config_git(self):
# Get the Git hash
try:
git_cmd = ['git', '-C', self.app_root, 'describe', '--always']
description = subprocess.check_output(git_cmd)
self.config['GIT_REVISION'] = description.strip()
except (subprocess.CalledProcessError, OSError) as ex:
self.log.warning('Unable to run "git describe" to get git revision: %s', ex)
self.config['GIT_REVISION'] = 'unknown'
self.log.info('Git revision %r', self.config['GIT_REVISION'])
def _config_bugsnag(self):
# Configure Bugsnag
if self.config.get('TESTING') or not self.config.get('BUGSNAG_API_KEY'):
self.log.info('Bugsnag NOT configured.')
return
import bugsnag
from bugsnag.flask import handle_exceptions
from bugsnag.handlers import BugsnagHandler
bugsnag.configure(
api_key=self.config['BUGSNAG_API_KEY'],
project_root="/data/git/pillar/pillar",
)
handle_exceptions(self)
bs_handler = BugsnagHandler()
bs_handler.setLevel(logging.ERROR)
self.log.addHandler(bs_handler)
def _config_google_cloud_storage(self):
# Google Cloud project
try:
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = \
self.config['GCLOUD_APP_CREDENTIALS']
except KeyError:
raise SystemExit('GCLOUD_APP_CREDENTIALS configuration is missing')
# Storage backend (GCS)
try:
os.environ['GCLOUD_PROJECT'] = self.config['GCLOUD_PROJECT']
except KeyError:
raise SystemExit('GCLOUD_PROJECT configuration value is missing')
def _config_algolia(self):
# Algolia search
if self.config['SEARCH_BACKEND'] != 'algolia':
return
from algoliasearch import algoliasearch
client = algoliasearch.Client(self.config['ALGOLIA_USER'],
self.config['ALGOLIA_API_KEY'])
self.algolia_client = client
self.algolia_index_users = client.init_index(self.config['ALGOLIA_INDEX_USERS'])
self.algolia_index_nodes = client.init_index(self.config['ALGOLIA_INDEX_NODES'])
def _config_encoding_backend(self):
# Encoding backend
if self.config['ENCODING_BACKEND'] != 'zencoder':
return
from zencoder import Zencoder
self.encoding_service_client = Zencoder(self.config['ZENCODER_API_KEY'])
    def _config_caching(self):
        # Flask-Cache reads its backend & settings (CACHE_TYPE etc.) from
        # self.config; the cache is exposed as app.cache.
        from flask_cache import Cache
        self.cache = Cache(self)
    def load_extension(self, pillar_extension, url_prefix):
        """Loads a PillarExtension into this application.

        Merges the extension's Flask config (without overriding existing
        keys), registers its blueprints under ``url_prefix``, and merges its
        Eve DOMAIN into ours, namespaced by the extension name.
        """
        from .extension import PillarExtension

        self.log.info('Initialising extension %r', pillar_extension)
        assert isinstance(pillar_extension, PillarExtension)

        # Load extension Flask configuration
        # NOTE(review): this unpacks two-tuples, so flask_config() must
        # return an iterable of (key, value) pairs (e.g. dict.items()), not
        # a plain dict -- confirm against the PillarExtension interface.
        for key, value in pillar_extension.flask_config():
            self.config.setdefault(key, value)

        # Load extension blueprint(s)
        for blueprint in pillar_extension.blueprints():
            self.register_blueprint(blueprint, url_prefix=url_prefix)

        # Load extension Eve settings
        eve_settings = pillar_extension.eve_settings()

        for key, collection in eve_settings['DOMAIN'].items():
            # Prefix the collection's datasource & URL with the extension
            # name, so extension collections can't clash with core ones.
            source = '%s.%s' % (pillar_extension.name, key)
            url = '%s/%s' % (pillar_extension.name, key)

            collection.setdefault('datasource', {}).setdefault('source', source)
            collection.setdefault('url', url)

        self.config['DOMAIN'].update(eve_settings['DOMAIN'])
def _config_jinja_env(self):
pillar_dir = os.path.dirname(os.path.realpath(__file__))
parent_theme_path = os.path.join(pillar_dir, 'web', 'templates')
current_path = os.path.join(self.app_root, 'templates')
paths_list = [
jinja2.FileSystemLoader(current_path),
jinja2.FileSystemLoader(parent_theme_path),
self.jinja_loader
]
# Set up a custom loader, so that Jinja searches for a theme file first
# in the current theme dir, and if it fails it searches in the default
# location.
custom_jinja_loader = jinja2.ChoiceLoader(paths_list)
self.jinja_loader = custom_jinja_loader
def format_pretty_date(d):
return pretty_date(d)
def format_pretty_date_time(d):
return pretty_date(d, detail=True)
self.jinja_env.filters['pretty_date'] = format_pretty_date
self.jinja_env.filters['pretty_date_time'] = format_pretty_date_time
self.jinja_env.globals['url_for_node'] = url_for_node
def _config_static_dirs(self):
pillar_dir = os.path.dirname(os.path.realpath(__file__))
# Setup static folder for the instanced app
self.static_folder = os.path.join(self.app_root, 'static')
# Setup static folder for Pillar
self.pillar_static_folder = os.path.join(pillar_dir, 'web', 'static')
from flask.views import MethodView
from flask import send_from_directory
from flask import current_app
class PillarStaticFile(MethodView):
def get(self, filename):
return send_from_directory(current_app.pillar_static_folder,
filename)
self.add_url_rule('/static/pillar/<path:filename>',
view_func=PillarStaticFile.as_view('static_pillar'))
    def process_extensions(self):
        """Re-initialises Eve after Pillar submodules/extensions were loaded.

        Eve builds its resource endpoints in Eve.__init__(), which runs
        before extensions can add to the DOMAIN; this replays the relevant
        part of that initialisation so late additions are picked up, then
        runs finish_startup().
        """
        # Re-initialise Eve after we allowed Pillar submodules to be loaded.
        # EVIL STARTS HERE. It just copies part of the Eve.__init__() method.
        self.set_defaults()
        self.validate_config()
        self.validate_domain_struct()

        self._init_url_rules()
        self._init_media_endpoint()
        self._init_schema_endpoint()

        if self.config['OPLOG'] is True:
            self._init_oplog()

        # Deep-copied, presumably so register_resource() cannot mutate our
        # DOMAIN configuration -- TODO confirm against Eve's implementation.
        domain_copy = copy.deepcopy(self.config['DOMAIN'])
        for resource, settings in domain_copy.items():
            self.register_resource(resource, settings)

        self.register_error_handlers()
        # EVIL ENDS HERE. No guarantees, though.

        self.finish_startup()
    def finish_startup(self):
        """Performs final setup: API, web, auth, Jinja env and static dirs.

        Call order matters: the api/web/authentication modules register
        blueprints and event hooks before the Jinja/static configuration.
        """
        self.log.info('Using MongoDB database %r', self.config['MONGO_DBNAME'])

        api.setup_app(self)
        web.setup_app(self)
        authentication.setup_app(self)

        self._config_jinja_env()
        self._config_static_dirs()

        # Only enable this when debugging.
        # self._list_routes()
def setup_db_indices(self):
"""Adds missing database indices.
This does NOT drop and recreate existing indices,
nor does it reconfigure existing indices.
If you want that, drop them manually first.
"""
self.log.debug('Adding any missing database indices.')
import pymongo
db = self.data.driver.db
coll = db['tokens']
coll.create_index([('user', pymongo.ASCENDING)])
coll.create_index([('token', pymongo.ASCENDING)])
coll = db['notifications']
coll.create_index([('user', pymongo.ASCENDING)])
coll = db['activities-subscriptions']
coll.create_index([('context_object', pymongo.ASCENDING)])
coll = db['nodes']
# This index is used for queries on project, and for queries on
# the combination (project, node type).
coll.create_index([('project', pymongo.ASCENDING),
('node_type', pymongo.ASCENDING)])
coll.create_index([('parent', pymongo.ASCENDING)])
coll.create_index([('short_code', pymongo.ASCENDING)],
sparse=True, unique=True)
def register_api_blueprint(self, blueprint, url_prefix):
# TODO: use Eve config variable instead of hard-coded '/api'
self.register_blueprint(blueprint, url_prefix='/api' + url_prefix)
def make_header(self, username, subclient_id=''):
"""Returns a Basic HTTP Authentication header value."""
import base64
return 'basic ' + base64.b64encode('%s:%s' % (username, subclient_id))
def post_internal(self, resource, payl=None, skip_validation=False):
"""Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
from eve.methods.post import post_internal
with self.test_request_context(method='POST', path='%s/%s' % (self.api_prefix, resource)):
return post_internal(resource, payl=payl, skip_validation=skip_validation)
def put_internal(self, resource, payload=None, concurrency_check=False,
skip_validation=False, **lookup):
"""Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
from eve.methods.put import put_internal
path = '%s/%s/%s' % (self.api_prefix, resource, lookup['_id'])
with self.test_request_context(method='PUT', path=path):
return put_internal(resource, payload=payload, concurrency_check=concurrency_check,
skip_validation=skip_validation, **lookup)
def patch_internal(self, resource, payload=None, concurrency_check=False,
skip_validation=False, **lookup):
"""Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
from eve.methods.patch import patch_internal
path = '%s/%s/%s' % (self.api_prefix, resource, lookup['_id'])
with self.test_request_context(method='PATCH', path=path):
return patch_internal(resource, payload=payload, concurrency_check=concurrency_check,
skip_validation=skip_validation, **lookup)
def _list_routes(self):
from pprint import pprint
from flask import url_for
def has_no_empty_params(rule):
defaults = rule.defaults if rule.defaults is not None else ()
arguments = rule.arguments if rule.arguments is not None else ()
return len(defaults) >= len(arguments)
links = []
with self.test_request_context():
for rule in self.url_map.iter_rules():
# Filter out rules we can't navigate to in a browser
# and rules that require parameters
if "GET" in rule.methods and has_no_empty_params(rule):
url = url_for(rule.endpoint, **(rule.defaults or {}))
links.append((url, rule.endpoint))
links.sort(key=lambda t: len(t[0]) + 100 * ('/api/' in t[0]))
pprint(links)

15
pillar/api/__init__.py Normal file
View File

@ -0,0 +1,15 @@
def setup_app(app):
    """Registers all of Pillar's API submodules on the application.

    Each submodule hooks its blueprint(s) and/or Eve event handlers into
    ``app``; the prefixes below define the public URL layout of the API.
    """
    from . import encoding, blender_id, projects, local_auth, file_storage
    from . import users, nodes, latest, blender_cloud, service, activities

    encoding.setup_app(app, url_prefix='/encoding')
    blender_id.setup_app(app, url_prefix='/blender_id')
    projects.setup_app(app, api_prefix='/p')
    local_auth.setup_app(app, url_prefix='/auth')
    file_storage.setup_app(app, url_prefix='/storage')
    latest.setup_app(app, url_prefix='/latest')
    blender_cloud.setup_app(app, url_prefix='/bcloud')
    users.setup_app(app, api_prefix='/users')
    service.setup_app(app, api_prefix='/service')
    nodes.setup_app(app, url_prefix='/nodes')
    activities.setup_app(app)

View File

@ -1,7 +1,5 @@
from flask import g
from flask import current_app
from eve.methods.post import post_internal
from application.modules.users import gravatar
from flask import g, request, current_app
from pillar.api.utils import gravatar
def notification_parse(notification):
@ -111,7 +109,7 @@ def activity_subscribe(user_id, context_object_type, context_object_id):
# If no subscription exists, we create one
if not subscription:
post_internal('activities-subscriptions', lookup)
current_app.post_internal('activities-subscriptions', lookup)
def activity_object_add(actor_user_id, verb, object_type, object_id,
@ -143,7 +141,7 @@ def activity_object_add(actor_user_id, verb, object_type, object_id,
context_object=context_object_id
)
activity = post_internal('activities', activity)
activity = current_app.post_internal('activities', activity)
if activity[3] != 201:
# If creation failed for any reason, do not create a any notifcation
return
@ -151,4 +149,20 @@ def activity_object_add(actor_user_id, verb, object_type, object_id,
notification = dict(
user=subscription['user'],
activity=activity[0]['_id'])
post_internal('notifications', notification)
current_app.post_internal('notifications', notification)
def before_returning_item_notifications(response):
    # Only expand the notification when the client asks for it via ?parse.
    if request.args.get('parse'):
        notification_parse(response)


def before_returning_resource_notifications(response):
    # Same as the single-item hook, applied to every item in the collection.
    for item in response['_items']:
        if request.args.get('parse'):
            notification_parse(item)


def setup_app(app):
    # Hook the parsers into Eve's fetch events for the notifications resource.
    app.on_fetched_item_notifications += before_returning_item_notifications
    app.on_fetched_resource_notifications += before_returning_resource_notifications

View File

@ -1,17 +1,15 @@
import copy
import logging
import datetime
import datetime
from bson import ObjectId, tz_util
from eve.methods.post import post_internal
from eve.methods.put import put_internal
from eve.methods.get import get
from flask import Blueprint, g, current_app, request
from pillar.api import utils
from pillar.api.utils import authentication, authorization
from werkzeug import exceptions as wz_exceptions
from application.modules import projects
from application import utils
from application.utils import authentication, authorization
from pillar.api.projects import utils as proj_utils
blueprint = Blueprint('blender_cloud.home_project', __name__)
log = logging.getLogger(__name__)
@ -73,7 +71,7 @@ def create_blender_sync_node(project_id, admin_group_id, user_id):
}
}
r, _, _, status = post_internal('nodes', node)
r, _, _, status = current_app.post_internal('nodes', node)
if status != 201:
log.warning('Unable to create Blender Sync node for home project %s: %s',
project_id, r)
@ -109,7 +107,7 @@ def create_home_project(user_id, write_access):
project = deleted_proj
else:
log.debug('User %s does not have a deleted project', user_id)
project = projects.create_new_project(project_name='Home',
project = proj_utils.create_new_project(project_name='Home',
user_id=ObjectId(user_id),
overrides=overrides)
@ -124,10 +122,10 @@ def create_home_project(user_id, write_access):
# Set up the correct node types. No need to set permissions for them,
# as the inherited project permissions are fine.
from manage_extra.node_types.group import node_type_group
from manage_extra.node_types.asset import node_type_asset
# from manage_extra.node_types.text import node_type_text
from manage_extra.node_types.comment import node_type_comment
from pillar.api.node_types.group import node_type_group
from pillar.api.node_types.asset import node_type_asset
# from pillar.api.node_types.text import node_type_text
from pillar.api.node_types.comment import node_type_comment
# For non-subscribers: take away write access from the admin group,
# and grant it to certain node types.
@ -147,7 +145,7 @@ def create_home_project(user_id, write_access):
node_type_comment,
]
result, _, _, status = put_internal('projects', utils.remove_private_keys(project),
result, _, _, status = current_app.put_internal('projects', utils.remove_private_keys(project),
_id=project['_id'])
if status != 200:
log.error('Unable to update home project %s for user %s: %s',
@ -166,7 +164,7 @@ def create_home_project(user_id, write_access):
def assign_permissions(node_type, subscriber_methods, world_methods):
"""Assigns permissions to the node type object.
:param node_type: a node type from manage_extra.node_types.
:param node_type: a node type from pillar.api.node_types.
:type node_type: dict
:param subscriber_methods: allowed HTTP methods for users of role 'subscriber',
'demo' and 'admin'.
@ -177,7 +175,7 @@ def assign_permissions(node_type, subscriber_methods, world_methods):
:rtype: dict
"""
from application.modules import service
from pillar.api import service
nt_with_perms = copy.deepcopy(node_type)
@ -391,7 +389,7 @@ def user_changed_role(sender, user):
user_id = user['_id']
if not has_home_project(user_id):
log.debug('User %s does not have a home project', user_id)
log.debug('User %s does not have a home project, not changing access permissions', user_id)
return
proj_coll = current_app.data.driver.db['projects']
@ -414,12 +412,12 @@ def user_changed_role(sender, user):
def setup_app(app, url_prefix):
app.register_blueprint(blueprint, url_prefix=url_prefix)
app.register_api_blueprint(blueprint, url_prefix=url_prefix)
app.on_insert_nodes += check_home_project_nodes_permissions
app.on_inserted_nodes += mark_parents_as_updated
app.on_updated_nodes += mark_parent_as_updated
app.on_replaced_nodes += mark_parent_as_updated
from application.modules import service
from pillar.api import service
service.signal_user_changed_role.connect(user_changed_role)

View File

@ -1,16 +1,15 @@
import functools
import logging
from flask import Blueprint, request, current_app, g
from eve.methods.get import get
from eve.utils import config as eve_config
from flask import Blueprint, request, current_app, g
from pillar.api import utils
from pillar.api.utils.authentication import current_user_id
from pillar.api.utils.authorization import require_login
from werkzeug.datastructures import MultiDict
from werkzeug.exceptions import InternalServerError
from application import utils
from application.utils.authentication import current_user_id
from application.utils.authorization import require_login
FIRST_ADDON_VERSION_WITH_HDRI = (1, 4, 0)
TL_PROJECTION = utils.dumps({'name': 1, 'url': 1, 'permissions': 1,})
TL_SORT = utils.dumps([('name', 1)])
@ -144,4 +143,4 @@ def setup_app(app, url_prefix):
app.on_replace_nodes += sort_by_image_width
app.on_insert_nodes += sort_nodes_by_image_width
app.register_blueprint(blueprint, url_prefix=url_prefix)
app.register_api_blueprint(blueprint, url_prefix=url_prefix)

View File

@ -5,18 +5,15 @@ with Blender ID.
"""
import logging
import datetime
from bson import tz_util
import requests
from bson import tz_util
from flask import Blueprint, request, current_app, jsonify
from pillar.api.utils import authentication, remove_private_keys
from requests.adapters import HTTPAdapter
from flask import Blueprint, request, current_app, abort, jsonify
from eve.methods.post import post_internal
from eve.methods.put import put_internal
from werkzeug import exceptions as wz_exceptions
from application.utils import authentication, remove_private_keys
blender_id = Blueprint('blender_id', __name__)
log = logging.getLogger(__name__)
@ -99,7 +96,7 @@ def upsert_user(db_user, blender_id_user_id):
# Update the existing user
attempted_eve_method = 'PUT'
db_id = db_user['_id']
r, _, _, status = put_internal('users', remove_private_keys(db_user),
r, _, _, status = current_app.put_internal('users', remove_private_keys(db_user),
_id=db_id)
if status == 422:
log.error('Status %i trying to PUT user %s with values %s, should not happen! %s',
@ -107,7 +104,7 @@ def upsert_user(db_user, blender_id_user_id):
else:
# Create a new user, retry for non-unique usernames.
attempted_eve_method = 'POST'
r, _, _, status = post_internal('users', db_user)
r, _, _, status = current_app.post_internal('users', db_user)
if status not in {200, 201}:
log.error('Status %i trying to create user for BlenderID %s with values %s: %s',
@ -174,8 +171,7 @@ def validate_token(user_id, token, oauth_subclient_id):
# POST to Blender ID, handling errors as negative verification results.
try:
r = s.post(url, data=payload, timeout=5,
verify=current_app.config['TLS_CERT_FILE'])
r = s.post(url, data=payload, timeout=5)
except requests.exceptions.ConnectionError as e:
log.error('Connection error trying to POST to %s, handling as invalid token.', url)
return None, None
@ -238,3 +234,7 @@ def find_user_in_db(blender_id_user_id, user_info):
db_user['full_name'] = db_user['username']
return db_user
def setup_app(app, url_prefix):
app.register_api_blueprint(blender_id, url_prefix=url_prefix)

View File

@ -0,0 +1,82 @@
import logging
from bson import ObjectId
from datetime import datetime
from eve.io.mongo import Validator
from flask import current_app
log = logging.getLogger(__name__)
class ValidateCustomFields(Validator):
    """Eve/Cerberus validator that understands Pillar's dynamic node schemas."""

    def convert_properties(self, properties, node_schema):
        """Converts string values in ``properties`` to their schema types.

        Datetime strings are parsed with the app's RFC1123 format, objectid
        strings become ObjectId instances, and dict/list values are converted
        recursively. The ``properties`` mapping is modified in place.

        :param properties: the (partially string-typed) document properties.
        :param node_schema: the dyn_schema of the node type.
        :returns: the converted properties.
        """
        date_format = current_app.config['RFC1123_DATE_FORMAT']

        for prop in node_schema:
            if prop not in properties:
                continue
            schema_prop = node_schema[prop]
            prop_type = schema_prop['type']

            if prop_type == 'dict':
                properties[prop] = self.convert_properties(
                    properties[prop], schema_prop['schema'])

            if prop_type == 'list':
                # Form submissions may encode an empty list as '' or '[]'.
                if properties[prop] in ['', '[]']:
                    properties[prop] = []
                for k, val in enumerate(properties[prop]):
                    if 'schema' not in schema_prop:
                        continue
                    # Wrap each item so it can be converted with the same
                    # dict-based recursion, then unwrap the result.
                    item_schema = {'item': schema_prop['schema']}
                    item_prop = {'item': properties[prop][k]}
                    properties[prop][k] = self.convert_properties(
                        item_prop, item_schema)['item']

            # Convert datetime string to RFC1123 datetime
            elif prop_type == 'datetime':
                prop_val = properties[prop]
                properties[prop] = datetime.strptime(prop_val, date_format)

            elif prop_type == 'objectid':
                prop_val = properties[prop]
                if prop_val:
                    properties[prop] = ObjectId(prop_val)
                else:
                    properties[prop] = None

        return properties

    def _validate_valid_properties(self, valid_properties, field, value):
        """Cerberus rule: validates ``value`` against the node type's dyn_schema.

        Looks up the node's project and node type, converts the property
        values, then runs a nested Validator over them. Errors are reported
        through self._error().
        """
        from pillar.api.utils import project_get_node_type

        projects_collection = current_app.data.driver.db['projects']
        lookup = {'_id': ObjectId(self.document['project'])}

        project = projects_collection.find_one(lookup, {
            'node_types.name': 1,
            'node_types.dyn_schema': 1,
        })

        if project is None:
            log.warning('Unknown project %s, declared by node %s',
                        lookup, self.document.get('_id'))
            self._error(field, 'Unknown project')
            return False

        node_type_name = self.document['node_type']
        node_type = project_get_node_type(project, node_type_name)
        if node_type is None:
            log.warning('Project %s has no node type %s, declared by node %s',
                        project, node_type_name, self.document.get('_id'))
            self._error(field, 'Unknown node type')
            return False

        try:
            value = self.convert_properties(value, node_type['dyn_schema'])
        except Exception:
            # NOTE(review): conversion errors are logged but deliberately not
            # fatal; validation below then runs on the unconverted value --
            # confirm this best-effort behaviour is intended.
            log.warning("Error converting form properties", exc_info=True)

        v = Validator(node_type['dyn_schema'])
        val = v.validate(value)

        if val:
            return True

        log.warning('Error validating properties for node %s: %s',
                    self.document, v.errors)
        self._error(field, "Error validating properties")

View File

@ -2,16 +2,14 @@ import logging
import datetime
import os
from bson import ObjectId, tz_util
from eve.methods.put import put_internal
from flask import Blueprint
from flask import abort
from flask import request
from flask import current_app
from application import utils
from application.utils import skip_when_testing
from application.utils.gcs import GoogleCloudStorageBucket
from flask import request
from pillar.api import utils
from pillar.api.utils.gcs import GoogleCloudStorageBucket
from pillar.api.utils import skip_when_testing
encoding = Blueprint('encoding', __name__)
log = logging.getLogger(__name__)
@ -115,7 +113,7 @@ def zencoder_notifications():
log.info(' %s: %s', key, output[key])
file_doc['status'] = 'failed'
put_internal('files', file_doc, _id=file_id)
current_app.put_internal('files', file_doc, _id=file_id)
return "You failed, but that's okay.", 200
log.info('Zencoder job %s for file %s completed with status %s.', zencoder_job_id, file_id,
@ -171,6 +169,10 @@ def zencoder_notifications():
# Force an update of the links on the next load of the file.
file_doc['link_expires'] = datetime.datetime.now(tz=tz_util.utc) - datetime.timedelta(days=1)
put_internal('files', file_doc, _id=file_id)
current_app.put_internal('files', file_doc, _id=file_id)
return '', 204
def setup_app(app, url_prefix):
app.register_api_blueprint(encoding, url_prefix=url_prefix)

View File

@ -1,5 +1,7 @@
import os
URL_PREFIX = 'api'
# Enable reads (GET), inserts (POST) and DELETE for resources/collections
# (if you omit this line, the API will default to ['GET'] and provide
# read-only access to the endpoint).
@ -375,14 +377,15 @@ files_schema = {
},
'length_aggregate_in_bytes': { # Size of file + all variations
'type': 'integer',
'required': False, # it's computed on the fly anyway, so clients don't need to provide it.
'required': False,
# it's computed on the fly anyway, so clients don't need to provide it.
},
'md5': {
'type': 'string',
'required': True,
},
# Original filename as given by the user, possibly cleaned-up to make it safe.
# Original filename as given by the user, cleaned-up to make it safe.
'filename': {
'type': 'string',
'required': True,
@ -692,7 +695,7 @@ users = {
'cache_expires': 10,
'resource_methods': ['GET'],
'item_methods': ['GET', 'PUT'],
'item_methods': ['GET', 'PUT', 'PATCH'],
'public_item_methods': ['GET'],
# By default don't include the 'auth' field. It can still be obtained
@ -713,6 +716,7 @@ tokens = {
files = {
'resource_methods': ['GET', 'POST'],
'item_methods': ['GET', 'PATCH'],
'public_methods': ['GET'],
'public_item_methods': ['GET'],
'schema': files_schema
@ -763,9 +767,9 @@ DOMAIN = {
'notifications': notifications
}
MONGO_HOST = os.environ.get('MONGO_HOST', 'localhost')
MONGO_PORT = os.environ.get('MONGO_PORT', 27017)
MONGO_DBNAME = os.environ.get('MONGO_DBNAME', 'eve')
MONGO_HOST = os.environ.get('PILLAR_MONGO_HOST', 'localhost')
MONGO_PORT = int(os.environ.get('PILLAR_MONGO_PORT', 27017))
MONGO_DBNAME = os.environ.get('PILLAR_MONGO_DBNAME', 'eve')
CACHE_EXPIRES = 60
HATEOAS = False
UPSET_ON_PUT = False # do not create new document on PUT of non-existant URL.

View File

@ -1,37 +1,32 @@
import datetime
import io
import logging
import mimetypes
import os
import tempfile
import uuid
import io
from hashlib import md5
import bson.tz_util
import datetime
import eve.utils
import os
import pymongo
import werkzeug.exceptions as wz_exceptions
from bson import ObjectId
from bson.errors import InvalidId
from eve.methods.patch import patch_internal
from eve.methods.post import post_internal
from eve.methods.put import put_internal
from flask import Blueprint
from flask import current_app
from flask import g
from flask import jsonify
from flask import request
from flask import send_from_directory
from flask import url_for, helpers
from flask import current_app
from flask import g
from flask import make_response
import werkzeug.exceptions as wz_exceptions
from application import utils
from application.utils import remove_private_keys, authentication
from application.utils.authorization import require_login, user_has_role, user_matches_roles
from application.utils.cdn import hash_file_path
from application.utils.encoding import Encoder
from application.utils.gcs import GoogleCloudStorageBucket
from application.utils.imaging import generate_local_thumbnails
from pillar.api import utils
from pillar.api.utils.imaging import generate_local_thumbnails
from pillar.api.utils import remove_private_keys, authentication
from pillar.api.utils.authorization import require_login, user_has_role, \
user_matches_roles
from pillar.api.utils.cdn import hash_file_path
from pillar.api.utils.encoding import Encoder
from pillar.api.utils.gcs import GoogleCloudStorageBucket
log = logging.getLogger(__name__)
@ -39,15 +34,9 @@ file_storage = Blueprint('file_storage', __name__,
template_folder='templates',
static_folder='../../static/storage', )
# Overrides for browser-specified mimetypes
OVERRIDE_MIMETYPES = {
# We don't want to thumbnail EXR files right now, so don't handle as image/...
'image/x-exr': 'application/x-exr',
}
# Add our own extensions to the mimetypes package
mimetypes.add_type('application/x-blender', '.blend')
mimetypes.add_type('application/x-radiance-hdr', '.hdr')
mimetypes.add_type('application/x-exr', '.exr')
@file_storage.route('/gcs/<bucket_name>/<subdir>/')
@ -93,7 +82,8 @@ def index(file_name=None):
# Determine & create storage directory
folder_name = file_name[:2]
file_folder_path = helpers.safe_join(current_app.config['STORAGE_DIR'], folder_name)
file_folder_path = helpers.safe_join(current_app.config['STORAGE_DIR'],
folder_name)
if not os.path.exists(file_folder_path):
log.info('Creating folder path %r', file_folder_path)
os.mkdir(file_folder_path)
@ -121,8 +111,8 @@ def _process_image(gcs, file_id, local_file, src_file):
local_file.name)
# Send those previews to Google Cloud Storage.
log.info('Uploading %i thumbnails for file %s to Google Cloud Storage (GCS)',
len(src_file['variations']), file_id)
log.info('Uploading %i thumbnails for file %s to Google Cloud Storage '
'(GCS)', len(src_file['variations']), file_id)
# TODO: parallelize this at some point.
for variation in src_file['variations']:
@ -141,8 +131,8 @@ def _process_image(gcs, file_id, local_file, src_file):
try:
os.unlink(variation['local_path'])
except OSError:
log.warning('Unable to unlink %s, ignoring this but it will need cleanup later.',
variation['local_path'])
log.warning('Unable to unlink %s, ignoring this but it will need '
'cleanup later.', variation['local_path'])
del variation['local_path']
@ -177,17 +167,19 @@ def _process_video(gcs, file_id, local_file, src_file):
src_file['variations'].append(file_variation)
if current_app.config['TESTING']:
log.warning('_process_video: NOT sending out encoding job due to TESTING=%r',
current_app.config['TESTING'])
log.warning('_process_video: NOT sending out encoding job due to '
'TESTING=%r', current_app.config['TESTING'])
j = type('EncoderJob', (), {'process_id': 'fake-process-id',
'backend': 'fake'})
else:
j = Encoder.job_create(src_file)
if j is None:
log.warning('_process_video: unable to create encoder job for file %s.', file_id)
log.warning('_process_video: unable to create encoder job for file '
'%s.', file_id)
return
log.info('Created asynchronous Zencoder job %s for file %s', j['process_id'], file_id)
log.info('Created asynchronous Zencoder job %s for file %s',
j['process_id'], file_id)
# Add the processing status to the file object
src_file['processing'] = {
@ -201,7 +193,8 @@ def process_file(gcs, file_id, local_file):
:param file_id: '_id' key of the file
:type file_id: ObjectId or str
:param local_file: locally stored file, or None if no local processing is needed.
:param local_file: locally stored file, or None if no local processing is
needed.
:type local_file: file
"""
@ -239,26 +232,30 @@ def process_file(gcs, file_id, local_file):
try:
processor = processors[mime_category]
except KeyError:
log.info("POSTed file %s was of type %r, which isn't thumbnailed/encoded.", file_id,
log.info("POSTed file %s was of type %r, which isn't "
"thumbnailed/encoded.", file_id,
mime_category)
src_file['status'] = 'complete'
else:
log.debug('process_file(%s): marking file status as "processing"', file_id)
log.debug('process_file(%s): marking file status as "processing"',
file_id)
src_file['status'] = 'processing'
update_file_doc(file_id, status='processing')
try:
processor(gcs, file_id, local_file, src_file)
except Exception:
log.warning('process_file(%s): error when processing file, resetting status to '
log.warning('process_file(%s): error when processing file, '
'resetting status to '
'"queued_for_processing"', file_id, exc_info=True)
update_file_doc(file_id, status='queued_for_processing')
return
# Update the original file with additional info, e.g. image resolution
r, _, _, status = put_internal('files', src_file, _id=file_id)
r, _, _, status = current_app.put_internal('files', src_file, _id=file_id)
if status not in (200, 201):
log.warning('process_file(%s): status %i when saving processed file info to MongoDB: %s',
log.warning('process_file(%s): status %i when saving processed file '
'info to MongoDB: %s',
file_id, status, r)
@ -296,6 +293,11 @@ def generate_link(backend, file_path, project_id=None, is_public=False):
"""
if backend == 'gcs':
if current_app.config['TESTING']:
log.info('Skipping GCS link generation, and returning a fake link '
'instead.')
return '/path/to/testing/gcs/%s' % file_path
storage = GoogleCloudStorageBucket(project_id)
blob = storage.Get(file_path)
if blob is None:
@ -306,8 +308,8 @@ def generate_link(backend, file_path, project_id=None, is_public=False):
return blob['signed_url']
if backend == 'pillar':
return url_for('file_storage.index', file_name=file_path, _external=True,
_scheme=current_app.config['SCHEME'])
return url_for('file_storage.index', file_name=file_path,
_external=True, _scheme=current_app.config['SCHEME'])
if backend == 'cdnsun':
return hash_file_path(file_path, None)
if backend == 'unittest':
@ -319,7 +321,8 @@ def generate_link(backend, file_path, project_id=None, is_public=False):
def before_returning_file(response):
ensure_valid_link(response)
# Enable this call later, when we have implemented the is_public field on files.
# Enable this call later, when we have implemented the is_public field on
# files.
# strip_link_and_variations(response)
@ -352,7 +355,7 @@ def ensure_valid_link(response):
"""Ensures the file item has valid file links using generate_link(...)."""
# Log to function-specific logger, so we can easily turn it off.
log = logging.getLogger('%s.ensure_valid_link' % __name__)
log_link = logging.getLogger('%s.ensure_valid_link' % __name__)
# log.debug('Inspecting link for file %s', response['_id'])
# Check link expiry.
@ -361,13 +364,14 @@ def ensure_valid_link(response):
link_expires = response['link_expires']
if now < link_expires:
# Not expired yet, so don't bother regenerating anything.
log.debug('Link expires at %s, which is in the future, so not generating new link',
link_expires)
log_link.debug('Link expires at %s, which is in the future, so not '
'generating new link', link_expires)
return
log.debug('Link expired at %s, which is in the past; generating new link', link_expires)
log_link.debug('Link expired at %s, which is in the past; generating '
'new link', link_expires)
else:
log.debug('No expiry date for link; generating new link')
log_link.debug('No expiry date for link; generating new link')
_generate_all_links(response, now)
@ -380,14 +384,16 @@ def _generate_all_links(response, now):
"""
project_id = str(
response['project']) if 'project' in response else None # TODO: add project id to all files
response['project']) if 'project' in response else None
# TODO: add project id to all files
backend = response['backend']
response['link'] = generate_link(backend, response['file_path'], project_id)
variations = response.get('variations')
if variations:
for variation in variations:
variation['link'] = generate_link(backend, variation['file_path'], project_id)
variation['link'] = generate_link(backend, variation['file_path'],
project_id)
# Construct the new expiry datetime.
validity_secs = current_app.config['FILE_LINK_VALIDITY'][backend]
@ -395,16 +401,19 @@ def _generate_all_links(response, now):
patch_info = remove_private_keys(response)
file_id = ObjectId(response['_id'])
(patch_resp, _, _, _) = patch_internal('files', patch_info, _id=file_id)
(patch_resp, _, _, _) = current_app.patch_internal('files', patch_info,
_id=file_id)
if patch_resp.get('_status') == 'ERR':
log.warning('Unable to save new links for file %s: %r', response['_id'], patch_resp)
log.warning('Unable to save new links for file %s: %r',
response['_id'], patch_resp)
# TODO: raise a snag.
response['_updated'] = now
else:
response['_updated'] = patch_resp['_updated']
# Be silly and re-fetch the etag ourselves. TODO: handle this better.
etag_doc = current_app.data.driver.db['files'].find_one({'_id': file_id}, {'_etag': 1})
etag_doc = current_app.data.driver.db['files'].find_one({'_id': file_id},
{'_etag': 1})
response['_etag'] = etag_doc['_etag']
@ -413,7 +422,8 @@ def before_deleting_file(item):
def on_pre_get_files(_, lookup):
# Override the HTTP header, we always want to fetch the document from MongoDB.
# Override the HTTP header, we always want to fetch the document from
# MongoDB.
parsed_req = eve.utils.parse_request('files')
parsed_req.if_modified_since = None
@ -430,7 +440,8 @@ def on_pre_get_files(_, lookup):
def refresh_links_for_project(project_uuid, chunk_size, expiry_seconds):
if chunk_size:
log.info('Refreshing the first %i links for project %s', chunk_size, project_uuid)
log.info('Refreshing the first %i links for project %s',
chunk_size, project_uuid)
else:
log.info('Refreshing all links for project %s', project_uuid)
@ -470,9 +481,11 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
to_refresh = files_collection.find(
{'$or': [{'backend': backend_name, 'link_expires': None},
{'backend': backend_name, 'link_expires': {'$lt': expire_before}},
{'backend': backend_name, 'link_expires': {
'$lt': expire_before}},
{'backend': backend_name, 'link': None}]
}).sort([('link_expires', pymongo.ASCENDING)]).limit(chunk_size).batch_size(5)
}).sort([('link_expires', pymongo.ASCENDING)]).limit(
chunk_size).batch_size(5)
if to_refresh.count() == 0:
log.info('No links to refresh.')
@ -493,11 +506,13 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
]})
if count == 0:
log.debug('Skipping file %s, project %s does not exist.', file_id, project_id)
log.debug('Skipping file %s, project %s does not exist.',
file_id, project_id)
continue
if 'file_path' not in file_doc:
log.warning("Skipping file %s, missing 'file_path' property.", file_id)
log.warning("Skipping file %s, missing 'file_path' property.",
file_id)
continue
log.debug('Refreshing links for file %s', file_id)
@ -505,21 +520,21 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
try:
_generate_all_links(file_doc, now)
except gcloud.exceptions.Forbidden:
log.warning('Skipping file %s, GCS forbids us access to project %s bucket.',
file_id, project_id)
log.warning('Skipping file %s, GCS forbids us access to '
'project %s bucket.', file_id, project_id)
continue
refreshed += 1
except KeyboardInterrupt:
log.warning('Aborting due to KeyboardInterrupt after refreshing %i links',
refreshed)
log.warning('Aborting due to KeyboardInterrupt after refreshing %i '
'links', refreshed)
return
log.info('Refreshed %i links', refreshed)
@require_login()
def create_file_doc(name, filename, content_type, length, project, backend='gcs',
**extra_fields):
def create_file_doc(name, filename, content_type, length, project,
backend='gcs', **extra_fields):
"""Creates a minimal File document for storage in MongoDB.
Doesn't save it to MongoDB yet.
@ -550,12 +565,6 @@ def override_content_type(uploaded_file):
# Possibly use the browser-provided mime type
mimetype = uploaded_file.mimetype
try:
mimetype = OVERRIDE_MIMETYPES[mimetype]
except KeyError:
pass
if '/' in mimetype:
mimecat = mimetype.split('/')[0]
if mimecat in {'video', 'audio', 'image'}:
@ -571,7 +580,8 @@ def override_content_type(uploaded_file):
# content_type property can't be set directly
uploaded_file.headers['content-type'] = mimetype
# It has this, because we used uploaded_file.mimetype earlier this function.
# It has this, because we used uploaded_file.mimetype earlier this
# function.
del uploaded_file._parsed_content_type
@ -590,10 +600,13 @@ def assert_file_size_allowed(file_size):
return
filesize_limit_mb = filesize_limit / 2.0 ** 20
log.info('User %s tried to upload a %.3f MiB file, but is only allowed %.3f MiB.',
authentication.current_user_id(), file_size / 2.0 ** 20, filesize_limit_mb)
log.info('User %s tried to upload a %.3f MiB file, but is only allowed '
'%.3f MiB.',
authentication.current_user_id(), file_size / 2.0 ** 20,
filesize_limit_mb)
raise wz_exceptions.RequestEntityTooLarge(
'To upload files larger than %i MiB, subscribe to Blender Cloud' % filesize_limit_mb)
'To upload files larger than %i MiB, subscribe to Blender Cloud' %
filesize_limit_mb)
@file_storage.route('/stream/<string:project_id>', methods=['POST', 'OPTIONS'])
@ -613,10 +626,10 @@ def stream_to_gcs(project_id):
uploaded_file = request.files['file']
# Not every upload has a Content-Length header. If it was passed, we might as
# well check for its value before we require the user to upload the entire file.
# (At least I hope that this part of the code is processed before the body is
# read in its entirety)
# Not every upload has a Content-Length header. If it was passed, we might
# as well check for its value before we require the user to upload the
# entire file. (At least I hope that this part of the code is processed
# before the body is read in its entirety)
if uploaded_file.content_length:
assert_file_size_allowed(uploaded_file.content_length)
@ -756,10 +769,10 @@ def create_file_doc_for_upload(project_id, uploaded_file):
if file_doc is None:
# Create a file document on MongoDB for this file.
file_doc = create_file_doc(name=internal_filename, **new_props)
file_fields, _, _, status = post_internal('files', file_doc)
file_fields, _, _, status = current_app.post_internal('files', file_doc)
else:
file_doc.update(new_props)
file_fields, _, _, status = put_internal('files', remove_private_keys(file_doc))
file_fields, _, _, status = current_app.put_internal('files', remove_private_keys(file_doc))
if status not in (200, 201):
log.error('Unable to create new file document in MongoDB, status=%i: %s',
@ -799,4 +812,4 @@ def setup_app(app, url_prefix):
app.on_replace_files += compute_aggregate_length
app.on_insert_files += compute_aggregate_length_items
app.register_blueprint(file_storage, url_prefix=url_prefix)
app.register_api_blueprint(file_storage, url_prefix=url_prefix)

View File

@ -3,12 +3,13 @@ import itertools
import pymongo
from flask import Blueprint, current_app
from application.utils import jsonify
from pillar.api.utils import jsonify
blueprint = Blueprint('latest', __name__)
def keep_fetching(collection, db_filter, projection, sort, py_filter, batch_size=12):
def keep_fetching(collection, db_filter, projection, sort, py_filter,
batch_size=12):
"""Yields results for which py_filter returns True"""
projection['_deleted'] = 1
@ -47,7 +48,7 @@ def has_public_project(node_doc):
return is_project_public(project_id)
# TODO: cache result, at least for a limited amt. of time, or for this HTTP request.
# TODO: cache result, for a limited amt. of time, or for this HTTP request.
def is_project_public(project_id):
"""Returns True iff the project is public."""
@ -60,7 +61,8 @@ def is_project_public(project_id):
@blueprint.route('/assets')
def latest_assets():
latest = latest_nodes({'node_type': 'asset', 'properties.status': 'published'},
latest = latest_nodes({'node_type': 'asset',
'properties.status': 'published'},
{'name': 1, 'project': 1, 'user': 1, 'node_type': 1,
'parent': 1, 'picture': 1, 'properties.status': 1,
'properties.content_type': 1,
@ -78,8 +80,8 @@ def embed_user(latest):
for comment in latest:
user_id = comment['user']
comment['user'] = users.find_one(user_id, {'auth': 0, 'groups': 0, 'roles': 0,
'settings': 0, 'email': 0,
comment['user'] = users.find_one(user_id, {
'auth': 0, 'groups': 0, 'roles': 0, 'settings': 0, 'email': 0,
'_created': 0, '_updated': 0, '_etag': 0})
@ -88,14 +90,17 @@ def embed_project(latest):
for comment in latest:
project_id = comment['project']
comment['project'] = projects.find_one(project_id, {'_id': 1, 'name': 1, 'url': 1})
comment['project'] = projects.find_one(project_id, {'_id': 1, 'name': 1,
'url': 1})
@blueprint.route('/comments')
def latest_comments():
latest = latest_nodes({'node_type': 'comment', 'properties.status': 'published'},
latest = latest_nodes({'node_type': 'comment',
'properties.status': 'published'},
{'project': 1, 'parent': 1, 'user': 1,
'properties.content': 1, 'node_type': 1, 'properties.status': 1,
'properties.content': 1, 'node_type': 1,
'properties.status': 1,
'properties.is_reply': 1},
has_public_project, 6)
@ -120,4 +125,4 @@ def latest_comments():
def setup_app(app, url_prefix):
app.register_blueprint(blueprint, url_prefix=url_prefix)
app.register_api_blueprint(blueprint, url_prefix=url_prefix)

View File

@ -1,17 +1,15 @@
import base64
import datetime
import hashlib
import logging
import rsa.randnum
import bcrypt
import datetime
import rsa.randnum
from bson import tz_util
from eve.methods.post import post_internal
from flask import abort, Blueprint, current_app, jsonify, request
from application.utils.authentication import store_token
from application.utils.authentication import create_new_user_document
from application.utils.authentication import make_unique_username
from pillar.api.utils.authentication import create_new_user_document
from pillar.api.utils.authentication import make_unique_username
from pillar.api.utils.authentication import store_token
blueprint = Blueprint('authentication', __name__)
log = logging.getLogger(__name__)
@ -31,7 +29,7 @@ def create_local_user(email, password):
# Make username unique
db_user['username'] = make_unique_username(email)
# Create the user
r, _, _, status = post_internal('users', db_user)
r, _, _, status = current_app.post_internal('users', db_user)
if status != 201:
log.error('internal response: %r %r', status, r)
return abort(500)
@ -96,4 +94,4 @@ def hash_password(password, salt):
def setup_app(app, url_prefix):
app.register_blueprint(blueprint, url_prefix=url_prefix)
app.register_api_blueprint(blueprint, url_prefix=url_prefix)

View File

@ -1,4 +1,4 @@
from manage_extra.node_types import _file_embedded_schema
from pillar.api.node_types import _file_embedded_schema
node_type_asset = {
'name': 'asset',

View File

@ -1,4 +1,4 @@
from manage_extra.node_types import _file_embedded_schema
from pillar.api.node_types import _file_embedded_schema
node_type_hdri = {
# When adding this node type, make sure to enable CORS from * on the GCS

View File

@ -1,4 +1,4 @@
from manage_extra.node_types import _file_embedded_schema
from pillar.api.node_types import _file_embedded_schema
node_type_page = {
'name': 'page',

View File

@ -1,4 +1,4 @@
from manage_extra.node_types import _file_embedded_schema
from pillar.api.node_types import _file_embedded_schema
node_type_post = {
'name': 'post',

View File

@ -1,4 +1,4 @@
from manage_extra.node_types import _file_embedded_schema
from pillar.api.node_types import _file_embedded_schema
node_type_project = {
'name': 'project',

View File

@ -1,4 +1,4 @@
from manage_extra.node_types import _file_embedded_schema
from pillar.api.node_types import _file_embedded_schema
node_type_texture = {
'name': 'texture',

View File

@ -4,20 +4,19 @@ import urlparse
import pymongo.errors
import rsa.randnum
import werkzeug.exceptions as wz_exceptions
from bson import ObjectId
from flask import current_app, g, Blueprint, request
import werkzeug.exceptions as wz_exceptions
from application.modules import file_storage
from application.utils import str2id, jsonify
from application.utils.authorization import check_permissions, require_login
from application.utils.gcs import update_file_name
from application.utils.activities import activity_subscribe, activity_object_add
from application.utils.algolia import algolia_index_node_delete
from application.utils.algolia import algolia_index_node_save
from pillar.api import file_storage
from pillar.api.activities import activity_subscribe, activity_object_add
from pillar.api.utils.algolia import algolia_index_node_delete
from pillar.api.utils.algolia import algolia_index_node_save
from pillar.api.utils import str2id, jsonify
from pillar.api.utils.authorization import check_permissions, require_login
from pillar.api.utils.gcs import update_file_name
log = logging.getLogger(__name__)
blueprint = Blueprint('nodes', __name__)
blueprint = Blueprint('nodes_api', __name__)
ROLES_FOR_SHARING = {u'subscriber', u'demo'}
@ -415,4 +414,4 @@ def setup_app(app, url_prefix):
app.on_deleted_item_nodes += after_deleting_node
app.register_blueprint(blueprint, url_prefix=url_prefix)
app.register_api_blueprint(blueprint, url_prefix=url_prefix)

View File

@ -1,10 +1,10 @@
"""PATCH support for comment nodes."""
import logging
from flask import current_app
import werkzeug.exceptions as wz_exceptions
from flask import current_app
from pillar.api.utils import authorization, authentication, jsonify
from application.utils import authorization, authentication, jsonify
from . import register_patch_handler
log = logging.getLogger(__name__)

View File

@ -5,11 +5,11 @@ Depends on node_type-specific patch handlers in submodules.
import logging
from flask import Blueprint, request
import werkzeug.exceptions as wz_exceptions
from application.utils import str2id
from application.utils import authorization, mongo, authentication
from flask import Blueprint, request
from pillar.api.utils import mongo
from pillar.api.utils import authorization, authentication
from pillar.api.utils import str2id
from . import custom
@ -48,4 +48,4 @@ def patch_node(node_id):
def setup_app(app, url_prefix):
app.register_blueprint(blueprint, url_prefix=url_prefix)
app.register_api_blueprint(blueprint, url_prefix=url_prefix)

View File

@ -0,0 +1,22 @@
from . import hooks
from .routes import blueprint_api
def setup_app(app, api_prefix):
    """Registers the projects Eve hooks and the projects API blueprint.

    :param app: the Pillar application to attach the hooks to.
    :param api_prefix: URL prefix under which the projects blueprint is
        registered.
    """

    # 'is_private' is derived from the world permissions, so it is
    # recomputed before every replace/update/insert.
    app.on_replace_projects += hooks.override_is_private_field
    app.on_replace_projects += hooks.before_edit_check_permissions
    app.on_replace_projects += hooks.protect_sensitive_fields
    app.on_update_projects += hooks.override_is_private_field
    app.on_update_projects += hooks.before_edit_check_permissions
    app.on_update_projects += hooks.protect_sensitive_fields
    app.on_delete_item_projects += hooks.before_delete_project
    app.on_insert_projects += hooks.before_inserting_override_is_private_field
    app.on_insert_projects += hooks.before_inserting_projects
    app.on_inserted_projects += hooks.after_inserting_projects

    # Post-fetch hooks: permission checks and allowed-methods annotation.
    app.on_fetched_item_projects += hooks.before_returning_project_permissions
    app.on_fetched_resource_projects += hooks.before_returning_project_resource_permissions
    app.on_fetched_item_projects += hooks.project_node_type_has_method
    app.on_fetched_resource_projects += hooks.projects_node_type_has_method

    app.register_api_blueprint(blueprint_api, url_prefix=api_prefix)

View File

@ -0,0 +1,246 @@
import copy
import logging
from flask import request, abort, current_app
from gcloud import exceptions as gcs_exceptions
from pillar.api.node_types.asset import node_type_asset
from pillar.api.node_types.comment import node_type_comment
from pillar.api.node_types.group import node_type_group
from pillar.api.node_types.group_texture import node_type_group_texture
from pillar.api.node_types.texture import node_type_texture
from pillar.api.utils.gcs import GoogleCloudStorageBucket
from pillar.api.utils import authorization, authentication
from pillar.api.utils import remove_private_keys
from pillar.api.utils.authorization import user_has_role, check_permissions
from .utils import abort_with_error
log = logging.getLogger(__name__)
# Default project permissions for the admin group.
DEFAULT_ADMIN_GROUP_PERMISSIONS = ['GET', 'PUT', 'POST', 'DELETE']
def before_inserting_projects(items):
    """Pre-insert hook for projects.

    Strips properties that will be assigned after creation (currently just
    the 'url' field). Admin users keep whatever they submitted.

    :param items: list of project documents about to be inserted
        (normally just one).
    """

    # Admins may submit any fields they like, including a custom URL.
    if user_has_role(u'admin'):
        return

    for project_doc in items:
        project_doc.pop('url', None)
def override_is_private_field(project, original):
    """Derives the 'is_private' flag from the project's world permissions.

    A project is public exactly when anonymous users ('world') are allowed
    to GET it; any other configuration, including a missing permissions
    block, makes it private. Mutates ``project`` in place.

    :param project: the project document, which will be updated.
    :param original: the pre-update document (unused; part of the Eve hook
        signature).
    """

    if 'permissions' not in project:
        # No permissions block at all means nobody has access.
        project['is_private'] = True
        return

    world_methods = project['permissions'].get('world', [])
    project['is_private'] = 'GET' not in world_methods
def before_inserting_override_is_private_field(projects):
    """Recomputes 'is_private' for every project about to be inserted."""

    for project_doc in projects:
        override_is_private_field(project_doc, None)
def before_edit_check_permissions(document, original):
    """Eve pre-replace/update hook: aborts unless the user may edit.

    :param document: the incoming (edited) project document.
    :param original: the stored project; permissions are checked against
        this version, not the incoming edit.
    """

    # Allow admin users to do whatever they want.
    # TODO: possibly move this into the check_permissions function.
    if user_has_role(u'admin'):
        return

    check_permissions('projects', original, request.method)
def before_delete_project(document):
    """Checks permissions before we allow deletion.

    :param document: the project document about to be deleted.
    """

    # Allow admin users to do whatever they want.
    # TODO: possibly move this into the check_permissions function.
    if user_has_role(u'admin'):
        return

    check_permissions('projects', document, request.method)
def protect_sensitive_fields(document, original):
    """When not logged in as admin, prevents update to certain fields.

    Restores 'status', 'category' and 'user' (and 'url', when it existed
    before) from the original document, discarding the user's edits.
    """

    # Admins are allowed to change anything.
    if user_has_role(u'admin'):
        return

    def revert(field):
        # Restore the stored value, or drop the field entirely when it
        # was not present before this edit.
        if field in original:
            document[field] = original[field]
        else:
            document.pop(field, None)

    for field in ('status', 'category', 'user'):
        revert(field)

    # A 'url' that did not exist before may be set by this edit; only
    # revert it when the original had one.
    if 'url' in original:
        revert('url')
def after_inserting_projects(projects):
    """After inserting a project in the collection we do some processing such as:

    - apply the right permissions
    - define basic node types
    - optionally generate a url
    - initialize storage space

    :param projects: List of project docs that have been inserted (normally one)
    """

    users_collection = current_app.data.driver.db['users']

    for project in projects:
        # Look up the owner's user document; after_inserting_project adds
        # the owner to the project's admin group.
        owner_id = project.get('user', None)
        owner = users_collection.find_one(owner_id)
        after_inserting_project(project, owner)
def after_inserting_project(project, db_user):
    """Performs the per-project setup after insertion.

    Creates a project-specific admin group, adds the owner to it, assigns
    default permissions and node types, generates a URL, creates the GCS
    bucket, and writes the updated project straight to MongoDB.

    :param project: the freshly inserted project document.
    :param db_user: the owner's user document; mutated (its 'groups' list
        gains the new admin group) and persisted via patch_internal.
    """
    project_id = project['_id']
    user_id = db_user['_id']

    # Create a project-specific admin group (with name matching the project id)
    result, _, _, status = current_app.post_internal('groups', {'name': str(project_id)})
    if status != 201:
        log.error('Unable to create admin group for new project %s: %s',
                  project_id, result)
        return abort_with_error(status)

    admin_group_id = result['_id']
    log.debug('Created admin group %s for project %s', admin_group_id, project_id)

    # Assign the current user to the group
    db_user.setdefault('groups', []).append(admin_group_id)

    result, _, _, status = current_app.patch_internal('users', {'groups': db_user['groups']},
                                                      _id=user_id)
    if status != 200:
        log.error('Unable to add user %s as member of admin group %s for new project %s: %s',
                  user_id, admin_group_id, project_id, result)
        return abort_with_error(status)
    log.debug('Made user %s member of group %s', user_id, admin_group_id)

    # Assign the group to the project with admin rights
    is_admin = authorization.is_admin(db_user)
    # Projects created by admins are world-readable by default.
    world_permissions = ['GET'] if is_admin else []
    permissions = {
        'world': world_permissions,
        'users': [],
        'groups': [
            {'group': admin_group_id,
             'methods': DEFAULT_ADMIN_GROUP_PERMISSIONS[:]},
        ]
    }

    def with_permissions(node_type):
        # Deep-copy so the module-level node type definitions stay pristine.
        copied = copy.deepcopy(node_type)
        copied['permissions'] = permissions
        return copied

    # Assign permissions to the project itself, as well as to the node_types
    project['permissions'] = permissions
    project['node_types'] = [
        with_permissions(node_type_group),
        with_permissions(node_type_asset),
        with_permissions(node_type_comment),
        with_permissions(node_type_texture),
        with_permissions(node_type_group_texture),
    ]

    # Allow admin users to use whatever url they want.
    if not is_admin or not project.get('url'):
        if project.get('category', '') == 'home':
            project['url'] = 'home'
        else:
            project['url'] = "p-{!s}".format(project_id)

    # Initialize storage page (defaults to GCS)
    if current_app.config.get('TESTING'):
        log.warning('Not creating Google Cloud Storage bucket while running unit tests!')
    else:
        try:
            gcs_storage = GoogleCloudStorageBucket(str(project_id))
            if gcs_storage.bucket.exists():
                log.info('Created GCS instance for project %s', project_id)
            else:
                log.warning('Unable to create GCS instance for project %s', project_id)
        except gcs_exceptions.Forbidden as ex:
            log.warning('GCS forbids me to create CGS instance for project %s: %s', project_id, ex)

    # Commit the changes directly to the MongoDB; a PUT is not allowed yet,
    # as the project doesn't have a valid permission structure.
    projects_collection = current_app.data.driver.db['projects']
    result = projects_collection.update_one({'_id': project_id},
                                            {'$set': remove_private_keys(project)})
    if result.matched_count != 1:
        log.warning('Unable to update project %s: %s', project_id, result.raw_result)
        abort_with_error(500)
def before_returning_project_permissions(response):
    """Checks GET access and appends allowed_methods to the project.

    :param response: the fetched project document.
    """

    # Run validation process, since GET on nodes entry point is public
    check_permissions('projects', response, 'GET', append_allowed_methods=True)
def before_returning_project_resource_permissions(response):
    """Filters the fetched project list down to those the user may GET.

    :param response: the Eve resource response; its '_items' list is
        replaced in place.
    """

    visible_projects = []
    for project_doc in response['_items']:
        has_access = authorization.has_permissions(
            'projects', project_doc, 'GET', append_allowed_methods=True)
        if not has_access:
            log.debug('User %s requested project %s, but has no access to it; filtered out.',
                      authentication.current_user_id(), project_doc['_id'])
            continue
        visible_projects.append(project_doc)

    response['_items'] = visible_projects
def project_node_type_has_method(response):
    """When a 'node_type' request arg is present, checks permissions and
    appends the allowed_methods list for that node type to the project.

    Aborts with 404 when the project defines no node type of that name.

    :param response: the fetched project document.
    """

    requested_type = request.args.get('node_type', '')
    if not requested_type:
        # No node type requested; nothing to compute.
        return

    # The node type must exist in the project document.
    known_names = (nt.get('name') for nt in response['node_types'])
    if requested_type not in known_names:
        return abort(404)

    # The permission check also appends allowed_methods to the node type.
    check_permissions('projects', response, 'GET', append_allowed_methods=True,
                      check_node_type=requested_type)
def projects_node_type_has_method(response):
    """Applies the node_type allowed-methods logic to each fetched project."""

    for project_doc in response['_items']:
        project_node_type_has_method(project_doc)

View File

@ -0,0 +1,138 @@
import json
import logging
from bson import ObjectId
from flask import Blueprint, g, request, current_app, make_response, url_for
from pillar.api.utils import authorization, jsonify, str2id
from pillar.api.utils import mongo
from pillar.api.utils.authorization import require_login, check_permissions
from werkzeug import exceptions as wz_exceptions
from . import utils
log = logging.getLogger(__name__)
blueprint_api = Blueprint('projects_api', __name__)
@blueprint_api.route('/create', methods=['POST'])
@authorization.require_login(require_roles={u'admin', u'subscriber', u'demo'})
def create_project(overrides=None):
    """Creates a new project owned by the currently logged-in user.

    The project name is taken from the JSON body ('name') or, for form
    posts, from the 'project_name' form field.

    :param overrides: optional dict of properties overriding the defaults
        of the new project document.
    :returns: the created project as JSON, status 201, with a Location
        header pointing at the new project.
    """

    if request.mimetype == 'application/json':
        project_name = request.json['name']
    else:
        project_name = request.form['project_name']
    user_id = g.current_user['user_id']

    project = utils.create_new_project(project_name, user_id, overrides)

    # Return the project in the response.
    loc = url_for('projects|item_lookup', _id=project['_id'])
    return jsonify(project, status=201, headers={'Location': loc})
@blueprint_api.route('/users', methods=['GET', 'POST'])
@authorization.require_login()
def project_manage_users():
    """Manage users of a project. In this initial implementation, we handle
    addition and removal of a user to the admin group of a project.
    No changes are done on the project itself.

    GET: returns the members of the project's admin group.
    POST: JSON body with 'project_id', 'user_id' and 'action'
    ('add' or 'remove').
    """

    projects_collection = current_app.data.driver.db['projects']
    users_collection = current_app.data.driver.db['users']

    # TODO: check if user is admin of the project before anything
    if request.method == 'GET':
        project_id = request.args['project_id']
        project = projects_collection.find_one({'_id': ObjectId(project_id)})
        # The admin group is the first group in the project's permissions.
        admin_group_id = project['permissions']['groups'][0]['group']

        users = users_collection.find(
            {'groups': {'$in': [admin_group_id]}},
            {'username': 1, 'email': 1, 'full_name': 1})
        return jsonify({'_status': 'OK', '_items': list(users)})

    # The request is not a form, since it comes from the API sdk
    data = json.loads(request.data)
    project_id = ObjectId(data['project_id'])
    target_user_id = ObjectId(data['user_id'])
    action = data['action']
    current_user_id = g.current_user['user_id']

    project = projects_collection.find_one({'_id': project_id})

    # Check if the current_user is owner of the project, or removing themselves.
    remove_self = target_user_id == current_user_id and action == 'remove'
    if project['user'] != current_user_id and not remove_self:
        utils.abort_with_error(403)

    admin_group = utils.get_admin_group(project)

    # Get the user and add the admin group to it
    if action == 'add':
        operation = '$addToSet'
        log.info('project_manage_users: Adding user %s to admin group of project %s',
                 target_user_id, project_id)
    elif action == 'remove':
        log.info('project_manage_users: Removing user %s from admin group of project %s',
                 target_user_id, project_id)
        operation = '$pull'
    else:
        log.warning('project_manage_users: Unsupported action %r called by user %s',
                    action, current_user_id)
        raise wz_exceptions.UnprocessableEntity()

    users_collection.update({'_id': target_user_id},
                            {operation: {'groups': admin_group['_id']}})

    # Re-fetch the user with a limited projection for the response.
    user = users_collection.find_one({'_id': target_user_id},
                                     {'username': 1, 'email': 1,
                                      'full_name': 1})

    if not user:
        return jsonify({'_status': 'ERROR'}), 404

    user['_status'] = 'OK'
    return jsonify(user)
@blueprint_api.route('/<string:project_id>/quotas')
@require_login()
def project_quotas(project_id):
    """Returns information about the project's limits.

    :param project_id: the project's ObjectId as a string.
    :returns: JSON with 'file_size_quota' (currently always None) and
        'file_size_used' (total bytes used by the project's files).
    """

    # Check that the user has GET permissions on the project itself.
    project = mongo.find_one_or_404('projects', project_id)
    check_permissions('projects', project, 'GET')

    file_size_used = utils.project_total_file_size(project_id)

    info = {
        'file_size_quota': None,  # TODO: implement this later.
        'file_size_used': file_size_used,
    }

    return jsonify(info)
@blueprint_api.route('/<project_id>/<node_type>', methods=['OPTIONS', 'GET'])
def get_allowed_methods(project_id=None, node_type=None):
    """Returns allowed methods to create a node of a certain type.

    Either project_id or parent_node_id must be given. If the latter is given,
    the former is deducted from it.

    NOTE(review): this route only accepts project_id and node_type; the
    parent_node_id mentioned above is not a parameter here — confirm the
    docstring against the intended API.

    :returns: an empty 204 response with the allowed methods in the
        'Allowed' header, comma-separated and sorted.
    """

    project = mongo.find_one_or_404('projects', str2id(project_id))
    proj_methods = authorization.compute_allowed_methods('projects', project, node_type)

    resp = make_response()
    resp.headers['Allowed'] = ', '.join(sorted(proj_methods))
    resp.status_code = 204

    return resp

View File

@ -0,0 +1,92 @@
import logging
from bson import ObjectId
from flask import current_app
from werkzeug import exceptions as wz_exceptions
from werkzeug.exceptions import abort
log = logging.getLogger(__name__)
def project_total_file_size(project_id):
    """Returns the total number of bytes used by files of this project.

    :param project_id: the project's ObjectId (or its string form).
    :returns: the sum of 'length_aggregate_in_bytes' over the project's
        files, or 0 when the project has no files.
    """

    files_coll = current_app.data.driver.db['files']
    pipeline = [
        {'$match': {'project': ObjectId(project_id)}},
        {'$project': {'length_aggregate_in_bytes': 1}},
        {'$group': {'_id': None,
                    'all_files': {'$sum': '$length_aggregate_in_bytes'}}},
    ]

    # The aggregation returns a cursor; an empty cursor means the project
    # has no files at all.
    for grouped in files_coll.aggregate(pipeline):
        return grouped['all_files']
    return 0
def get_admin_group(project):
    """Returns the admin group for the project.

    The admin group is the first group in the project's permissions; its
    name must equal the project ID as a string, otherwise the request is
    aborted with a 403.

    :raises ValueError: when the referenced group does not exist in the
        groups collection.
    """

    groups_collection = current_app.data.driver.db['groups']

    # TODO: search through all groups to find the one with the project ID as its name.
    admin_group_id = ObjectId(project['permissions']['groups'][0]['group'])
    group = groups_collection.find_one({'_id': admin_group_id})

    if group is None:
        raise ValueError('Unable to handle project without admin group.')

    if group['name'] != str(project['_id']):
        # abort_with_error() raises, so this 'return' never produces a value.
        return abort_with_error(403)

    return group
def abort_with_error(status):
    """Aborts with the given status, or 500 if the status doesn't indicate an error.

    If the status is < 400, status 500 is used instead.
    """

    effective_status = status if status // 100 >= 4 else 500
    abort(effective_status)

    # abort() always raises; reaching this line would indicate a bug.
    raise wz_exceptions.InternalServerError('abort() should have aborted!')
def create_new_project(project_name, user_id, overrides):
    """Creates a new project owned by the given user.

    :param project_name: human-readable name for the new project.
    :param user_id: ID of the owning user.
    :param overrides: optional dict of properties that override the
        defaults below before insertion; may be None.
    :returns: the created project document, refreshed from MongoDB.
    """

    log.info('Creating new project "%s" for user %s', project_name, user_id)

    # Create the project itself, the rest will be done by the after-insert hook.
    project = {'description': '',
               'name': project_name,
               'node_types': [],
               'status': 'published',
               'user': user_id,
               'is_private': True,
               'permissions': {},
               'url': '',
               'summary': '',
               'category': 'assets',  # TODO: allow the user to choose this.
               }
    if overrides is not None:
        project.update(overrides)

    result, _, _, status = current_app.post_internal('projects', project)
    if status != 201:
        log.error('Unable to create project "%s": %s', project_name, result)
        return abort_with_error(status)
    project.update(result)

    # Now re-fetch the project, as both the initial document and the returned
    # result do not contain the same etag as the database. This also updates
    # other fields set by hooks.
    document = current_app.data.driver.db['projects'].find_one(project['_id'])
    project.update(document)

    log.info('Created project %s for user %s', project['_id'], user_id)

    return project

View File

@ -3,12 +3,12 @@
import logging
import blinker
from flask import Blueprint, current_app, g, request
from flask import Blueprint, current_app, request
from pillar.api import local_auth
from pillar.api.utils import mongo
from pillar.api.utils import authorization, authentication, str2id, jsonify
from werkzeug import exceptions as wz_exceptions
from application.utils import authorization, authentication, str2id, mongo, jsonify
from application.modules import local_auth
blueprint = Blueprint('service', __name__)
log = logging.getLogger(__name__)
signal_user_changed_role = blinker.NamedSignal('badger:user_changed_role')
@ -172,7 +172,6 @@ def create_service_account(email, roles, service):
:type service: dict
:return: tuple (user doc, token doc)
"""
from eve.methods.post import post_internal
# Create a user with the correct roles.
roles = list(set(roles).union({u'service'}))
@ -184,7 +183,7 @@ def create_service_account(email, roles, service):
'full_name': email,
'email': email,
'service': service}
result, _, _, status = post_internal('users', user)
result, _, _, status = current_app.post_internal('users', user)
if status != 201:
raise SystemExit('Error creating user {}: {}'.format(email, result))
user.update(result)
@ -195,5 +194,5 @@ def create_service_account(email, roles, service):
return user, token
def setup_app(app, url_prefix):
app.register_blueprint(blueprint, url_prefix=url_prefix)
def setup_app(app, api_prefix):
app.register_api_blueprint(blueprint, url_prefix=api_prefix)

View File

@ -0,0 +1,15 @@
from . import hooks
from .routes import blueprint_api
def setup_app(app, api_prefix):
    """Wires up the users module: Eve event hooks and the API blueprint.

    :param app: the Pillar/Eve application object.
    :param api_prefix: URL prefix under which the users blueprint is mounted.
    """
    # NOTE(review): hook order appears intentional — check_put_access is
    # registered before before_replacing_user so access is verified first;
    # presumably Eve fires hooks in registration order — confirm.
    app.on_pre_GET_users += hooks.check_user_access
    app.on_post_GET_users += hooks.post_GET_user
    app.on_pre_PUT_users += hooks.check_put_access
    app.on_pre_PUT_users += hooks.before_replacing_user
    app.on_replaced_users += hooks.push_updated_user_to_algolia
    app.on_replaced_users += hooks.send_blinker_signal_roles_changed
    app.on_fetched_item_users += hooks.after_fetching_user
    app.on_fetched_resource_users += hooks.after_fetching_user_resource

    app.register_api_blueprint(blueprint_api, url_prefix=api_prefix)

View File

@ -1,45 +1,11 @@
import copy
import hashlib
import json
import logging
import urllib
from flask import g, current_app, Blueprint
from werkzeug.exceptions import Forbidden
from eve.utils import parse_request
from eve.methods.get import get
from application.utils.authorization import user_has_role, require_login
from application.utils import jsonify
log = logging.getLogger(__name__)
blueprint = Blueprint('users', __name__)
@blueprint.route('/me')
@require_login()
def my_info():
eve_resp, _, _, status, _ = get('users', {'_id': g.current_user['user_id']})
resp = jsonify(eve_resp['_items'][0], status=status)
return resp
def gravatar(email, size=64):
parameters = {'s': str(size), 'd': 'mm'}
return "https://www.gravatar.com/avatar/" + \
hashlib.md5(str(email)).hexdigest() + \
"?" + urllib.urlencode(parameters)
def post_GET_user(request, payload):
    """Post-processes GET responses on the users endpoint.

    Decodes the response payload and re-serialises it when it is a single
    user document (i.e. it has an '_id' key). Responses for the collection
    endpoint are left untouched.
    """
    user_doc = json.loads(payload.data)

    # Collection responses have no top-level '_id'; nothing to do for them.
    if user_doc.get('_id') is None:
        return

    # json_data['computed_permissions'] = \
    #     compute_permissions(json_data['_id'], app.data.driver)
    payload.data = json.dumps(user_doc)
from flask import current_app, g
from pillar.api.users.routes import log
from pillar.api.utils.authorization import user_has_role
from werkzeug.exceptions import Forbidden
def before_replacing_user(request, lookup):
@ -64,7 +30,7 @@ def push_updated_user_to_algolia(user, original):
"""Push an update to the Algolia index when a user item is updated"""
from algoliasearch.client import AlgoliaException
from application.utils.algolia import algolia_index_user_save
from pillar.api.utils.algolia import algolia_index_user_save
try:
algolia_index_user_save(user)
@ -79,7 +45,7 @@ def send_blinker_signal_roles_changed(user, original):
if user.get('roles') == original.get('roles'):
return
from application.modules.service import signal_user_changed_role
from pillar.api.service import signal_user_changed_role
log.info('User %s changed roles to %s, sending Blinker signal',
user.get('_id'), user.get('roles'))
@ -147,14 +113,11 @@ def after_fetching_user_resource(response):
after_fetching_user(user)
def setup_app(app, url_prefix):
app.on_pre_GET_users += check_user_access
app.on_post_GET_users += post_GET_user
app.on_pre_PUT_users += check_put_access
app.on_pre_PUT_users += before_replacing_user
app.on_replaced_users += push_updated_user_to_algolia
app.on_replaced_users += send_blinker_signal_roles_changed
app.on_fetched_item_users += after_fetching_user
app.on_fetched_resource_users += after_fetching_user_resource
app.register_blueprint(blueprint, url_prefix=url_prefix)
def post_GET_user(request, payload):
    """Post-processes GET responses on the users endpoint.

    Decodes the response payload and re-serialises it when it is a single
    user document (i.e. it has an '_id' key). Collection responses are
    left untouched.
    """
    # Local import: 'json' is not among this module's visible top-level
    # imports after the refactor, so import it here to keep the hook
    # self-contained and avoid a NameError at request time.
    import json

    json_data = json.loads(payload.data)

    # Check if we are querying the users endpoint (instead of the single user)
    if json_data.get('_id') is None:
        return
    # json_data['computed_permissions'] = \
    #     compute_permissions(json_data['_id'], app.data.driver)
    payload.data = json.dumps(json_data)

View File

@ -0,0 +1,19 @@
import logging
from eve.methods.get import get
from flask import g, Blueprint
from pillar.api.utils import jsonify
from pillar.api.utils.authorization import require_login
log = logging.getLogger(__name__)
blueprint_api = Blueprint('users_api', __name__)
@blueprint_api.route('/me')
@require_login()
def my_info():
    """Returns the current user's own user document as JSON.

    Performs an internal Eve GET on the users resource filtered by the
    logged-in user's id, and returns the single matching document with
    Eve's status code.
    """
    eve_resp, _, _, status, _ = get('users', {'_id': g.current_user['user_id']})
    resp = jsonify(eve_resp['_items'][0], status=status)
    return resp

View File

@ -1,5 +1,8 @@
import copy
import hashlib
import json
import urllib
import datetime
import functools
import logging
@ -104,3 +107,10 @@ def str2id(document_id):
except bson.objectid.InvalidId:
log.debug('str2id(%r): Invalid Object ID', document_id)
raise wz_exceptions.BadRequest('Invalid object ID %r' % document_id)
def gravatar(email, size=64):
    """Returns the Gravatar image URL for the given email address.

    :param email: the email address to look up.
    :param size: width/height of the avatar in pixels (Gravatar 's' param).
    :returns: the Gravatar URL as a string; 'd=mm' requests the
        "mystery man" fallback image for unknown addresses.
    """
    parameters = {'s': str(size), 'd': 'mm'}

    # Gravatar hashes the trimmed, lowercased address; without this
    # normalisation 'User@Example.com ' and 'user@example.com' would
    # resolve to different (and usually wrong) avatars.
    email_hash = hashlib.md5(str(email).strip().lower()).hexdigest()

    return "https://www.gravatar.com/avatar/" + \
           email_hash + \
           "?" + urllib.urlencode(parameters)

View File

@ -0,0 +1,98 @@
import logging
from bson import ObjectId
from flask import current_app
from pillar.api.file_storage import generate_link
from . import skip_when_testing
log = logging.getLogger(__name__)
INDEX_ALLOWED_USER_ROLES = {'admin', 'subscriber', 'demo'}
INDEX_ALLOWED_NODE_TYPES = {'asset', 'texture', 'group', 'hdri'}
@skip_when_testing
def algolia_index_user_save(user):
    """Creates or updates the user's document in the Algolia users index.

    Only roles listed in INDEX_ALLOWED_USER_ROLES are pushed to the index.
    A no-op when no Algolia users index is configured.

    :param user: the user document from MongoDB.
    """
    index = current_app.algolia_index_users
    if not index:
        # Covers both the None (unconfigured) and falsy cases; the original
        # code checked these twice, computing roles in between for nothing.
        return

    # Strip unneeded roles
    if 'roles' in user:
        roles = set(user['roles']).intersection(INDEX_ALLOWED_USER_ROLES)
    else:
        roles = set()

    # Create or update Algolia index for the user
    index.save_object({
        'objectID': user['_id'],
        'full_name': user['full_name'],
        'username': user['username'],
        'roles': list(roles),
        'groups': user['groups'],
        'email': user['email']
    })
@skip_when_testing
def algolia_index_node_save(node):
    """Creates or updates the node's document in the Algolia nodes index.

    Only published nodes whose node_type is in INDEX_ALLOWED_NODE_TYPES are
    indexed. The indexed document embeds the owning project and user, the
    optional description, tags, media type, a thumbnail link and an
    'is_free' flag derived from world GET permission.

    :param node: the node document from MongoDB.
    """
    if not current_app.algolia_index_nodes:
        return
    if node['node_type'] not in INDEX_ALLOWED_NODE_TYPES:
        return
    # If a node does not have status published, do not index.
    if node['properties'].get('status') != 'published':
        return

    # Resolve the referenced project and user so their names can be
    # denormalised into the search document.
    projects_collection = current_app.data.driver.db['projects']
    project = projects_collection.find_one({'_id': ObjectId(node['project'])})

    users_collection = current_app.data.driver.db['users']
    user = users_collection.find_one({'_id': ObjectId(node['user'])})

    node_ob = {
        'objectID': node['_id'],
        'name': node['name'],
        'project': {
            '_id': project['_id'],
            'name': project['name']
        },
        'created': node['_created'],
        'updated': node['_updated'],
        'node_type': node['node_type'],
        'user': {
            '_id': user['_id'],
            'full_name': user['full_name']
        },
    }
    if 'description' in node and node['description']:
        node_ob['description'] = node['description']
    if 'picture' in node and node['picture']:
        files_collection = current_app.data.driver.db['files']
        lookup = {'_id': ObjectId(node['picture'])}
        picture = files_collection.find_one(lookup)
        if picture['backend'] == 'gcs':
            # Use the 't' (thumbnail) variation as the search-result picture.
            variation_t = next((item for item in picture['variations']
                                if item['size'] == 't'), None)
            if variation_t:
                node_ob['picture'] = generate_link(picture['backend'],
                                                   variation_t['file_path'],
                                                   project_id=str(picture['project']),
                                                   is_public=True)
    # If the node has world permissions, compute the Free permission
    if 'permissions' in node and 'world' in node['permissions']:
        if 'GET' in node['permissions']['world']:
            node_ob['is_free'] = True
    # Append the media key if the node is of node_type 'asset'
    if node['node_type'] == 'asset':
        node_ob['media'] = node['properties']['content_type']
    # Add tags
    if 'tags' in node['properties']:
        node_ob['tags'] = node['properties']['tags']

    current_app.algolia_index_nodes.save_object(node_ob)
@skip_when_testing
def algolia_index_node_delete(node):
    """Removes the node's document from the Algolia nodes index.

    A no-op when no Algolia nodes index is configured.

    :param node: the node document from MongoDB; only '_id' is used.
    """
    index = current_app.algolia_index_nodes
    if index is None:
        return
    index.delete_object(node['_id'])

View File

@ -1,7 +1,7 @@
"""Generic authentication.
Contains functionality to validate tokens, create users and tokens, and make
unique usernames from emails. Calls out to the application.modules.blender_id
unique usernames from emails. Calls out to the pillar_server.modules.blender_id
module for Blender ID communication.
"""
@ -12,7 +12,6 @@ from bson import tz_util
from flask import g
from flask import request
from flask import current_app
from eve.methods.post import post_internal
log = logging.getLogger(__name__)
@ -28,21 +27,39 @@ def validate_token():
@returns True iff the user is logged in with a valid Blender ID token.
"""
# Default to no user at all.
g.current_user = None
if request.authorization:
token = request.authorization.username
oauth_subclient = request.authorization.password
else:
# Check the session, the user might be logged in through Flask-Login.
from pillar import auth
_delete_expired_tokens()
token = auth.get_blender_id_oauth_token()
if token and isinstance(token, (tuple, list)):
token = token[0]
oauth_subclient = None
if not request.authorization:
if not token:
# If no authorization headers are provided, we are getting a request
# from a non logged in user. Proceed accordingly.
log.debug('No authentication headers, so not logged in.')
g.current_user = None
return False
# Check the users to see if there is one with this Blender ID token.
token = request.authorization.username
oauth_subclient = request.authorization.password
return validate_this_token(token, oauth_subclient) is not None
def validate_this_token(token, oauth_subclient=None):
"""Validates a given token, and sets g.current_user.
:returns: the user in MongoDB, or None if not a valid token.
:rtype: dict
"""
g.current_user = None
_delete_expired_tokens()
# Check the users to see if there is one with this Blender ID token.
db_token = find_token(token, oauth_subclient)
if not db_token:
log.debug('Token %s not found in our local database.', token)
@ -51,7 +68,7 @@ def validate_token():
# request to the Blender ID server to verify the validity of the token
# passed via the HTTP header. We will get basic user info if the user
# is authorized, and we will store the token in our local database.
from application.modules import blender_id
from pillar.api import blender_id
db_user, status = blender_id.validate_create_user('', token, oauth_subclient)
else:
@ -61,13 +78,13 @@ def validate_token():
if db_user is None:
log.debug('Validation failed, user not logged in')
return False
return None
g.current_user = {'user_id': db_user['_id'],
'groups': db_user['groups'],
'roles': set(db_user.get('roles', []))}
return True
return db_user
def find_token(token, is_subclient_token=False, **extra_filters):
@ -91,6 +108,8 @@ def store_token(user_id, token, token_expiry, oauth_subclient_id=False):
:returns: the token document from MongoDB
"""
assert isinstance(token, (str, unicode)), 'token must be string type, not %r' % type(token)
token_data = {
'user': user_id,
'token': token,
@ -99,7 +118,7 @@ def store_token(user_id, token, token_expiry, oauth_subclient_id=False):
if oauth_subclient_id:
token_data['is_subclient_token'] = True
r, _, _, status = post_internal('tokens', token_data)
r, _, _, status = current_app.post_internal('tokens', token_data)
if status not in {200, 201}:
log.error('Unable to store authentication token: %s', r)
@ -119,7 +138,7 @@ def create_new_user(email, username, user_id):
"""
user_data = create_new_user_document(email, user_id, username)
r = post_internal('users', user_data)
r = current_app.post_internal('users', user_data)
user_id = r[0]['_id']
return user_id
@ -196,3 +215,10 @@ def current_user_id():
current_user = g.get('current_user') or {}
return current_user.get('user_id')
def setup_app(app):
@app.before_request
def validate_token_at_each_request():
validate_token()
return None

View File

@ -3,8 +3,6 @@ import os
from flask import current_app
from application import encoding_service_client
log = logging.getLogger(__name__)
@ -18,7 +16,7 @@ class Encoder:
"""Create an encoding job. Return the backend used as well as an id.
"""
if current_app.config['ENCODING_BACKEND'] != 'zencoder' or \
encoding_service_client is None:
current_app.encoding_service_client is None:
log.error('I can only work with Zencoder, check the config file.')
return None
@ -35,7 +33,7 @@ class Encoder:
outputs = [{'format': v['format'],
'url': os.path.join(storage_base, v['file_path'])}
for v in src_file['variations']]
r = encoding_service_client.job.create(file_input,
r = current_app.encoding_service_client.job.create(file_input,
outputs=outputs,
options=options)
if r.code != 201:
@ -47,8 +45,10 @@ class Encoder:
@staticmethod
def job_progress(job_id):
if isinstance(encoding_service_client, Zencoder):
r = encoding_service_client.job.progress(int(job_id))
from zencoder import Zencoder
if isinstance(current_app.encoding_service_client, Zencoder):
r = current_app.encoding_service_client.job.progress(int(job_id))
return r.body
else:
return None

View File

@ -16,8 +16,6 @@ def get_client():
"""Stores the GCS client on the global Flask object.
The GCS client is not user-specific anyway.
:rtype: Client
"""
_gcs = getattr(g, '_gcs_client', None)

View File

@ -1,8 +1,8 @@
import os
import subprocess
import os
from flask import current_app
from application.utils.gcs import GoogleCloudStorageBucket
from pillar.api.utils.gcs import GoogleCloudStorageBucket
def get_sizedata(filepath):

View File

@ -1,268 +0,0 @@
import logging.config
import os
import subprocess
import tempfile
from bson import ObjectId
from datetime import datetime
from flask import g
from flask import request
from flask import abort
from eve import Eve
from eve.auth import TokenAuth
from eve.io.mongo import Validator
from application.utils import project_get_node_type
RFC1123_DATE_FORMAT = '%a, %d %b %Y %H:%M:%S GMT'
class ValidateCustomFields(Validator):
    """Eve/Cerberus validator that understands Pillar's dynamic node schemas.

    Node documents carry a free-form 'properties' dict whose schema is
    defined per node type on the owning project ('dyn_schema'). This
    validator converts incoming string values to their proper types and
    validates the properties against that dynamic schema.
    """

    def convert_properties(self, properties, node_schema):
        """Converts form-submitted string values to their schema types.

        Recurses into 'dict' and 'list' properties, parses 'datetime'
        strings using RFC1123_DATE_FORMAT and converts 'objectid' strings
        to ObjectId (empty values become None).

        :param properties: the properties dict; modified in place.
        :param node_schema: the node type's dyn_schema for these properties.
        :returns: the (mutated) properties dict.
        """
        for prop in node_schema:
            if not prop in properties:
                continue
            schema_prop = node_schema[prop]
            prop_type = schema_prop['type']
            if prop_type == 'dict':
                properties[prop] = self.convert_properties(
                    properties[prop], schema_prop['schema'])
            if prop_type == 'list':
                # Form submissions may encode an empty list as '' or '[]'.
                if properties[prop] in ['', '[]']:
                    properties[prop] = []
                for k, val in enumerate(properties[prop]):
                    if not 'schema' in schema_prop:
                        continue
                    # Wrap each item so it can be converted through the same
                    # recursive machinery, then unwrap the result.
                    item_schema = {'item': schema_prop['schema']}
                    item_prop = {'item': properties[prop][k]}
                    properties[prop][k] = self.convert_properties(
                        item_prop, item_schema)['item']
            # Convert datetime string to RFC1123 datetime
            elif prop_type == 'datetime':
                prop_val = properties[prop]
                properties[prop] = datetime.strptime(prop_val, RFC1123_DATE_FORMAT)
            elif prop_type == 'objectid':
                prop_val = properties[prop]
                if prop_val:
                    properties[prop] = ObjectId(prop_val)
                else:
                    properties[prop] = None
        return properties

    def _validate_valid_properties(self, valid_properties, field, value):
        """Cerberus rule: validates 'properties' against the node type's
        dyn_schema, looked up via the document's project and node_type.

        Registers errors via self._error and returns False on lookup
        failures; returns True when validation passes.
        """
        projects_collection = app.data.driver.db['projects']
        lookup = {'_id': ObjectId(self.document['project'])}
        project = projects_collection.find_one(lookup, {
            'node_types.name': 1,
            'node_types.dyn_schema': 1,
        })
        if project is None:
            log.warning('Unknown project %s, declared by node %s',
                        lookup, self.document.get('_id'))
            self._error(field, 'Unknown project')
            return False

        node_type_name = self.document['node_type']
        node_type = project_get_node_type(project, node_type_name)
        if node_type is None:
            log.warning('Project %s has no node type %s, declared by node %s',
                        project, node_type_name, self.document.get('_id'))
            self._error(field, 'Unknown node type')
            return False

        try:
            value = self.convert_properties(value, node_type['dyn_schema'])
        except Exception as e:
            # Conversion failure is logged but not fatal; the unconverted
            # value is then validated as-is and will likely produce errors.
            log.warning("Error converting form properties", exc_info=True)

        v = Validator(node_type['dyn_schema'])
        val = v.validate(value)

        if val:
            return True

        log.warning('Error validating properties for node %s: %s', self.document, v.errors)
        self._error(field, "Error validating properties")
# We specify a settings.py file because when running on wsgi we can't detect it
# automatically. The default path (which works in Docker) can be overridden with
# an env variable.
settings_path = os.environ.get(
'EVE_SETTINGS', '/data/git/pillar/pillar/settings.py')
app = Eve(settings=settings_path, validator=ValidateCustomFields)
# Load configuration from three different sources, to make it easy to override
# settings with secrets, as well as for development & testing.
app_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
app.config.from_pyfile(os.path.join(app_root, 'config.py'), silent=False)
app.config.from_pyfile(os.path.join(app_root, 'config_local.py'), silent=True)
from_envvar = os.environ.get('PILLAR_CONFIG')
if from_envvar:
# Don't use from_envvar, as we want different behaviour. If the envvar
# is not set, it's fine (i.e. silent=True), but if it is set and the
# configfile doesn't exist, it should error out (i.e. silent=False).
app.config.from_pyfile(from_envvar, silent=False)
# Set the TMP environment variable to manage where uploads are stored.
# These are all used by tempfile.mkstemp(), but we don't knwow in whic
# order. As such, we remove all used variables but the one we set.
tempfile.tempdir = app.config['STORAGE_DIR']
os.environ['TMP'] = app.config['STORAGE_DIR']
os.environ.pop('TEMP', None)
os.environ.pop('TMPDIR', None)
# Configure logging
logging.config.dictConfig(app.config['LOGGING'])
log = logging.getLogger(__name__)
if app.config['DEBUG']:
log.info('Pillar starting, debug=%s', app.config['DEBUG'])
# Get the Git hash
try:
git_cmd = ['git', '-C', app_root, 'describe', '--always']
description = subprocess.check_output(git_cmd)
app.config['GIT_REVISION'] = description.strip()
except (subprocess.CalledProcessError, OSError) as ex:
log.warning('Unable to run "git describe" to get git revision: %s', ex)
app.config['GIT_REVISION'] = 'unknown'
log.info('Git revision %r', app.config['GIT_REVISION'])
# Configure Bugsnag
if not app.config.get('TESTING') and app.config.get('BUGSNAG_API_KEY'):
import bugsnag
import bugsnag.flask
import bugsnag.handlers
bugsnag.configure(
api_key=app.config['BUGSNAG_API_KEY'],
project_root="/data/git/pillar/pillar",
)
bugsnag.flask.handle_exceptions(app)
bs_handler = bugsnag.handlers.BugsnagHandler()
bs_handler.setLevel(logging.ERROR)
log.addHandler(bs_handler)
else:
log.info('Bugsnag NOT configured.')
# Google Cloud project
try:
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = \
app.config['GCLOUD_APP_CREDENTIALS']
except KeyError:
raise SystemExit('GCLOUD_APP_CREDENTIALS configuration is missing')
# Storage backend (GCS)
try:
os.environ['GCLOUD_PROJECT'] = app.config['GCLOUD_PROJECT']
except KeyError:
raise SystemExit('GCLOUD_PROJECT configuration value is missing')
# Algolia search
if app.config['SEARCH_BACKEND'] == 'algolia':
from algoliasearch import algoliasearch
client = algoliasearch.Client(
app.config['ALGOLIA_USER'],
app.config['ALGOLIA_API_KEY'])
algolia_index_users = client.init_index(app.config['ALGOLIA_INDEX_USERS'])
algolia_index_nodes = client.init_index(app.config['ALGOLIA_INDEX_NODES'])
else:
algolia_index_users = None
algolia_index_nodes = None
# Encoding backend
if app.config['ENCODING_BACKEND'] == 'zencoder':
from zencoder import Zencoder
encoding_service_client = Zencoder(app.config['ZENCODER_API_KEY'])
else:
encoding_service_client = None
from utils.authentication import validate_token
from utils.authorization import check_permissions
from utils.activities import notification_parse
from modules.projects import before_inserting_projects
from modules.projects import after_inserting_projects
@app.before_request
def validate_token_at_every_request():
validate_token()
def before_returning_item_notifications(response):
if request.args.get('parse'):
notification_parse(response)
def before_returning_resource_notifications(response):
for item in response['_items']:
if request.args.get('parse'):
notification_parse(item)
app.on_fetched_item_notifications += before_returning_item_notifications
app.on_fetched_resource_notifications += before_returning_resource_notifications
@app.before_first_request
def setup_db_indices():
"""Adds missing database indices.
This does NOT drop and recreate existing indices,
nor does it reconfigure existing indices.
If you want that, drop them manually first.
"""
log.debug('Adding missing database indices.')
import pymongo
db = app.data.driver.db
coll = db['tokens']
coll.create_index([('user', pymongo.ASCENDING)])
coll.create_index([('token', pymongo.ASCENDING)])
coll = db['notifications']
coll.create_index([('user', pymongo.ASCENDING)])
coll = db['activities-subscriptions']
coll.create_index([('context_object', pymongo.ASCENDING)])
coll = db['nodes']
# This index is used for queries on project, and for queries on
# the combination (project, node type).
coll.create_index([('project', pymongo.ASCENDING),
('node_type', pymongo.ASCENDING)])
coll.create_index([('parent', pymongo.ASCENDING)])
coll.create_index([('short_code', pymongo.ASCENDING)],
sparse=True, unique=True)
# The encoding module (receive notification and report progress)
from modules.encoding import encoding
from modules.blender_id import blender_id
from modules import projects
from modules import local_auth
from modules import file_storage
from modules import users
from modules import nodes
from modules import latest
from modules import blender_cloud
from modules import service
app.register_blueprint(encoding, url_prefix='/encoding')
app.register_blueprint(blender_id, url_prefix='/blender_id')
projects.setup_app(app, url_prefix='/p')
local_auth.setup_app(app, url_prefix='/auth')
file_storage.setup_app(app, url_prefix='/storage')
latest.setup_app(app, url_prefix='/latest')
blender_cloud.setup_app(app, url_prefix='/bcloud')
users.setup_app(app, url_prefix='/users')
service.setup_app(app, url_prefix='/service')
nodes.setup_app(app, url_prefix='/nodes')

View File

@ -1,472 +0,0 @@
import copy
import logging
import json
from bson import ObjectId
from eve.methods.post import post_internal
from eve.methods.patch import patch_internal
from flask import g, Blueprint, request, abort, current_app, make_response
from gcloud import exceptions as gcs_exceptions
from werkzeug import exceptions as wz_exceptions
from application.utils import remove_private_keys, jsonify, mongo, str2id
from application.utils import authorization, authentication
from application.utils.gcs import GoogleCloudStorageBucket
from application.utils.authorization import user_has_role, check_permissions, require_login
from manage_extra.node_types.asset import node_type_asset
from manage_extra.node_types.comment import node_type_comment
from manage_extra.node_types.group import node_type_group
from manage_extra.node_types.texture import node_type_texture
from manage_extra.node_types.group_texture import node_type_group_texture
log = logging.getLogger(__name__)
blueprint = Blueprint('projects', __name__)
# Default project permissions for the admin group.
DEFAULT_ADMIN_GROUP_PERMISSIONS = ['GET', 'PUT', 'POST', 'DELETE']
def before_inserting_projects(items):
    """Strips unwanted properties before project insertion.

    The 'url' field is assigned after creation by the after-insert hook,
    so non-admin users may not set it themselves.

    :param items: list of project docs about to be inserted (normally one).
    """
    # Allow admin users to do whatever they want.
    if user_has_role(u'admin'):
        return

    for project_doc in items:
        project_doc.pop('url', None)
def override_is_private_field(project, original):
    """Derives the 'is_private' flag from the project's world permissions.

    A project is public exactly when anonymous ('world') users have GET
    access; a project without a permissions block is always private.

    :param project: the project document; updated in place.
    :param original: unused, kept for Eve hook signature compatibility.
    """
    # No permissions, no access.
    if 'permissions' not in project:
        project['is_private'] = True
        return

    project['is_private'] = 'GET' not in project['permissions'].get('world', [])
def before_inserting_override_is_private_field(projects):
    """Recomputes 'is_private' for every project about to be inserted.

    :param projects: list of project docs (normally one); each is updated
        in place by override_is_private_field.
    """
    for project_doc in projects:
        override_is_private_field(project_doc, None)
def before_edit_check_permissions(document, original):
    """Eve pre-edit hook: verifies the user may edit this project.

    Checks permissions against the ORIGINAL document, since that is the
    authoritative permission structure; the edited document could grant
    the user rights it should not have.
    """
    # Allow admin users to do whatever they want.
    # TODO: possibly move this into the check_permissions function.
    if user_has_role(u'admin'):
        return

    check_permissions('projects', original, request.method)
def before_delete_project(document):
    """Checks permissions before we allow deletion.

    :param document: the project document about to be deleted.
    """
    # Allow admin users to do whatever they want.
    # TODO: possibly move this into the check_permissions function.
    if user_has_role(u'admin'):
        return

    check_permissions('projects', document, request.method)
def protect_sensitive_fields(document, original):
    """Prevents non-admin users from updating certain project fields.

    The 'status', 'category', 'user' and (once set) 'url' fields are
    restored from the original document, so any change to them by a
    non-admin is silently discarded.

    :param document: the incoming (edited) project document; mutated in place.
    :param original: the project document as currently stored.
    """
    # Allow admin users to do whatever they want.
    if user_has_role(u'admin'):
        return

    def revert(name):
        # Field absent from the original: strip it from the edit entirely.
        if name not in original:
            document.pop(name, None)
            return
        # Otherwise force the stored value back.
        document[name] = original[name]

    for field_name in ('status', 'category', 'user'):
        revert(field_name)

    # 'url' may be set once (by the after-insert hook); only protect it
    # after it exists.
    if 'url' in original:
        revert('url')
def after_inserting_projects(projects):
    """After inserting a project in the collection we do some processing such as:

    - apply the right permissions
    - define basic node types
    - optionally generate a url
    - initialize storage space

    :param projects: List of project docs that have been inserted (normally one)
    """
    users_collection = current_app.data.driver.db['users']

    for project in projects:
        # Look up the owner so after_inserting_project can add them to the
        # project's admin group.
        owner_id = project.get('user', None)
        owner = users_collection.find_one(owner_id)
        after_inserting_project(project, owner)
def after_inserting_project(project, db_user):
    """Finalises a freshly inserted project.

    Creates a project-specific admin group, adds the owner to it, assigns
    permissions to the project and its default node types, generates the
    project URL, creates the GCS storage bucket, and writes the result
    straight to MongoDB (bypassing Eve, as the document is not yet valid
    for a PUT).

    :param project: the just-inserted project document; mutated in place.
    :param db_user: the owning user's document from MongoDB.
    """
    project_id = project['_id']
    user_id = db_user['_id']

    # Create a project-specific admin group (with name matching the project id)
    result, _, _, status = post_internal('groups', {'name': str(project_id)})
    if status != 201:
        log.error('Unable to create admin group for new project %s: %s',
                  project_id, result)
        return abort_with_error(status)
    admin_group_id = result['_id']
    log.debug('Created admin group %s for project %s', admin_group_id, project_id)

    # Assign the current user to the group
    db_user.setdefault('groups', []).append(admin_group_id)
    result, _, _, status = patch_internal('users', {'groups': db_user['groups']}, _id=user_id)
    if status != 200:
        log.error('Unable to add user %s as member of admin group %s for new project %s: %s',
                  user_id, admin_group_id, project_id, result)
        return abort_with_error(status)
    log.debug('Made user %s member of group %s', user_id, admin_group_id)

    # Assign the group to the project with admin rights
    is_admin = authorization.is_admin(db_user)
    world_permissions = ['GET'] if is_admin else []
    permissions = {
        'world': world_permissions,
        'users': [],
        'groups': [
            {'group': admin_group_id,
             'methods': DEFAULT_ADMIN_GROUP_PERMISSIONS[:]},
        ]
    }

    def with_permissions(node_type):
        # Deep-copy so the shared node type templates are never mutated.
        copied = copy.deepcopy(node_type)
        copied['permissions'] = permissions
        return copied

    # Assign permissions to the project itself, as well as to the node_types
    project['permissions'] = permissions
    project['node_types'] = [
        with_permissions(node_type_group),
        with_permissions(node_type_asset),
        with_permissions(node_type_comment),
        with_permissions(node_type_texture),
        with_permissions(node_type_group_texture),
    ]

    # Allow admin users to use whatever url they want.
    if not is_admin or not project.get('url'):
        if project.get('category', '') == 'home':
            project['url'] = 'home'
        else:
            project['url'] = "p-{!s}".format(project_id)

    # Initialize storage page (defaults to GCS)
    if current_app.config.get('TESTING'):
        log.warning('Not creating Google Cloud Storage bucket while running unit tests!')
    else:
        try:
            gcs_storage = GoogleCloudStorageBucket(str(project_id))
            if gcs_storage.bucket.exists():
                log.info('Created GCS instance for project %s', project_id)
            else:
                log.warning('Unable to create GCS instance for project %s', project_id)
        except gcs_exceptions.Forbidden as ex:
            log.warning('GCS forbids me to create CGS instance for project %s: %s', project_id, ex)

    # Commit the changes directly to the MongoDB; a PUT is not allowed yet,
    # as the project doesn't have a valid permission structure.
    projects_collection = current_app.data.driver.db['projects']
    result = projects_collection.update_one({'_id': project_id},
                                            {'$set': remove_private_keys(project)})
    if result.matched_count != 1:
        log.warning('Unable to update project %s: %s', project_id, result.raw_result)
        abort_with_error(500)
def create_new_project(project_name, user_id, overrides):
    """Creates a new project owned by the given user.

    The project is inserted via Eve's post_internal; permissions, node
    types and the URL are then filled in by the after-insert hook.

    :param project_name: name of the new project.
    :param user_id: id of the owning user, stored in the project's 'user' field.
    :param overrides: optional dict of fields overriding the defaults below,
        or None for no overrides.
    :returns: the project document as stored in MongoDB (re-fetched after
        insertion so it includes hook-assigned fields and the correct etag).
    """
    log.info('Creating new project "%s" for user %s', project_name, user_id)

    # Create the project itself, the rest will be done by the after-insert hook.
    project = {'description': '',
               'name': project_name,
               'node_types': [],
               'status': 'published',
               'user': user_id,
               'is_private': True,
               'permissions': {},
               'url': '',
               'summary': '',
               'category': 'assets',  # TODO: allow the user to choose this.
               }
    if overrides is not None:
        project.update(overrides)

    result, _, _, status = post_internal('projects', project)
    if status != 201:
        log.error('Unable to create project "%s": %s', project_name, result)
        return abort_with_error(status)
    project.update(result)

    # Now re-fetch the project, as both the initial document and the returned
    # result do not contain the same etag as the database. This also updates
    # other fields set by hooks.
    document = current_app.data.driver.db['projects'].find_one(project['_id'])
    project.update(document)

    log.info('Created project %s for user %s', project['_id'], user_id)
    return project
@blueprint.route('/create', methods=['POST'])
@authorization.require_login(require_roles={u'admin', u'subscriber', u'demo'})
def create_project(overrides=None):
    """Creates a new project.

    Accepts the project name either as JSON ({'name': ...}) or as form
    data ('project_name'); the logged-in user becomes the owner.

    :param overrides: optional dict of project fields, passed through to
        create_new_project (None when invoked as a route).
    :returns: 201 response with the project JSON and a Location header.
    """
    if request.mimetype == 'application/json':
        project_name = request.json['name']
    else:
        project_name = request.form['project_name']
    user_id = g.current_user['user_id']

    project = create_new_project(project_name, user_id, overrides)

    # Return the project in the response.
    return jsonify(project, status=201, headers={'Location': '/projects/%s' % project['_id']})
@blueprint.route('/users', methods=['GET', 'POST'])
@authorization.require_login()
def project_manage_users():
    """Manage users of a project. In this initial implementation, we handle
    addition and removal of a user to the admin group of a project.

    No changes are done on the project itself.

    GET  ?project_id=...: lists the members of the project's admin group.
    POST {'project_id', 'user_id', 'action': 'add'|'remove'}: adds or
        removes the target user; only the project owner may do this, except
        that any user may remove themselves.
    """
    projects_collection = current_app.data.driver.db['projects']
    users_collection = current_app.data.driver.db['users']

    # TODO: check if user is admin of the project before anything
    if request.method == 'GET':
        project_id = request.args['project_id']
        project = projects_collection.find_one({'_id': ObjectId(project_id)})
        admin_group_id = project['permissions']['groups'][0]['group']

        users = users_collection.find(
            {'groups': {'$in': [admin_group_id]}},
            {'username': 1, 'email': 1, 'full_name': 1})
        return jsonify({'_status': 'OK', '_items': list(users)})

    # The request is not a form, since it comes from the API sdk
    data = json.loads(request.data)
    project_id = ObjectId(data['project_id'])
    target_user_id = ObjectId(data['user_id'])
    action = data['action']
    current_user_id = g.current_user['user_id']

    project = projects_collection.find_one({'_id': project_id})

    # Check if the current_user is owner of the project, or removing themselves.
    remove_self = target_user_id == current_user_id and action == 'remove'
    if project['user'] != current_user_id and not remove_self:
        return abort_with_error(403)

    admin_group = get_admin_group(project)

    # Get the user and add the admin group to it
    if action == 'add':
        operation = '$addToSet'
        log.info('project_manage_users: Adding user %s to admin group of project %s',
                 target_user_id, project_id)
    elif action == 'remove':
        log.info('project_manage_users: Removing user %s from admin group of project %s',
                 target_user_id, project_id)
        operation = '$pull'
    else:
        log.warning('project_manage_users: Unsupported action %r called by user %s',
                    action, current_user_id)
        raise wz_exceptions.UnprocessableEntity()

    users_collection.update({'_id': target_user_id},
                            {operation: {'groups': admin_group['_id']}})

    # Re-fetch the (projected) user document to return it in the response.
    user = users_collection.find_one({'_id': target_user_id},
                                     {'username': 1, 'email': 1,
                                      'full_name': 1})

    if not user:
        return jsonify({'_status': 'ERROR'}), 404

    user['_status'] = 'OK'
    return jsonify(user)
def get_admin_group(project):
    """Returns the admin group document for the given project.

    :raises ValueError: when the admin group does not exist in the database.
    """
    groups_coll = current_app.data.driver.db['groups']

    # TODO: search through all groups to find the one with the project ID as its name.
    admin_group_oid = ObjectId(project['permissions']['groups'][0]['group'])
    admin_group = groups_coll.find_one({'_id': admin_group_oid})

    if admin_group is None:
        raise ValueError('Unable to handle project without admin group.')

    # Sanity check: the admin group must be named after the project ID.
    if admin_group['name'] != str(project['_id']):
        return abort_with_error(403)

    return admin_group
def abort_with_error(status):
    """Aborts the request with the given HTTP status code.

    If the status is < 400 (i.e. not an error code), status 500 is used
    instead, so that callers cannot accidentally abort with a success code.
    """
    if status // 100 >= 4:
        abort(status)
    else:
        abort(500)
@blueprint.route('/<string:project_id>/quotas')
@require_login()
def project_quotas(project_id):
    """Returns information about the project's limits."""

    # Check that the user has GET permissions on the project itself;
    # this also 404s for unknown projects.
    project = mongo.find_one_or_404('projects', project_id)
    check_permissions('projects', project, 'GET')

    return jsonify({
        'file_size_quota': None,  # TODO: implement this later.
        'file_size_used': project_total_file_size(project_id),
    })
def project_total_file_size(project_id):
    """Returns the total number of bytes used by files of this project."""

    files_coll = current_app.data.driver.db['files']
    pipeline = [
        {'$match': {'project': ObjectId(project_id)}},
        {'$project': {'length_aggregate_in_bytes': 1}},
        {'$group': {'_id': None,
                    'all_files': {'$sum': '$length_aggregate_in_bytes'}}},
    ]

    # The aggregate function returns a cursor, not a document; an empty
    # cursor means the project has no files at all.
    for summary in files_coll.aggregate(pipeline):
        return summary['all_files']
    return 0
def before_returning_project_permissions(response):
    # Run validation process, since GET on nodes entry point is public.
    # append_allowed_methods=True also adds the current user's allowed
    # methods to the project document before it is returned.
    check_permissions('projects', response, 'GET', append_allowed_methods=True)
def before_returning_project_resource_permissions(response):
    """Filters the resource response to only those projects the user may GET."""

    accessible = []
    for proj in response['_items']:
        has_access = authorization.has_permissions(
            'projects', proj, 'GET', append_allowed_methods=True)
        if not has_access:
            log.debug('User %s requested project %s, but has no access to it; filtered out.',
                      authentication.current_user_id(), proj['_id'])
            continue
        accessible.append(proj)

    response['_items'] = accessible
def project_node_type_has_method(response):
    """Checks the 'node_type' request arg and, when present, generates the
    allowed_methods list for that node type on the project.
    """
    node_type_name = request.args.get('node_type', '')
    if not node_type_name:
        # Proceed only when a node_type has been requested.
        return

    # The requested node type must exist in the project document.
    known_names = (node_type.get('name') for node_type in response['node_types'])
    if node_type_name not in known_names:
        return abort(404)

    # Check permissions and append the allowed_methods to the node_type.
    check_permissions('projects', response, 'GET', append_allowed_methods=True,
                      check_node_type=node_type_name)
def projects_node_type_has_method(response):
    """Applies the per-project node_type permission check to every item."""
    for proj in response['_items']:
        project_node_type_has_method(proj)
@blueprint.route('/<project_id>/<node_type>', methods=['OPTIONS', 'GET'])
def get_allowed_methods(project_id=None, node_type=None):
    """Returns allowed methods to create a node of a certain type.

    The allowed methods are returned in the 'Allowed' response header;
    the response body is empty (status 204).
    """
    project = mongo.find_one_or_404('projects', str2id(project_id))
    proj_methods = authorization.compute_allowed_methods('projects', project, node_type)

    resp = make_response()
    # NOTE(review): the standard HTTP header is 'Allow'; 'Allowed' is kept
    # because clients may already rely on it -- confirm before renaming.
    resp.headers['Allowed'] = ', '.join(sorted(proj_methods))
    resp.status_code = 204

    return resp
def setup_app(app, url_prefix):
    """Hooks up the project event handlers and registers the blueprint."""

    # Replace/update: normalize is_private, check perms, protect fields.
    app.on_replace_projects += override_is_private_field
    app.on_replace_projects += before_edit_check_permissions
    app.on_replace_projects += protect_sensitive_fields

    app.on_update_projects += override_is_private_field
    app.on_update_projects += before_edit_check_permissions
    app.on_update_projects += protect_sensitive_fields

    app.on_delete_item_projects += before_delete_project

    app.on_insert_projects += before_inserting_override_is_private_field
    app.on_insert_projects += before_inserting_projects
    app.on_inserted_projects += after_inserting_projects

    # Fetch hooks: permission filtering and node-type allowed_methods.
    app.on_fetched_item_projects += before_returning_project_permissions
    app.on_fetched_resource_projects += before_returning_project_resource_permissions
    app.on_fetched_item_projects += project_node_type_has_method
    app.on_fetched_resource_projects += projects_node_type_has_method

    app.register_blueprint(blueprint, url_prefix=url_prefix)

View File

@ -1,3 +0,0 @@
# Ignore everything but self
*
!.gitignore

View File

@ -1,98 +0,0 @@
import logging
from bson import ObjectId
from flask import current_app
from application import algolia_index_users
from application import algolia_index_nodes
from application.modules.file_storage import generate_link
from . import skip_when_testing
log = logging.getLogger(__name__)
INDEX_ALLOWED_USER_ROLES = {'admin', 'subscriber', 'demo'}
INDEX_ALLOWED_NODE_TYPES = {'asset', 'texture', 'group', 'hdri'}
@skip_when_testing
def algolia_index_user_save(user):
    """Creates or updates the Algolia index entry for the given user.

    Only roles listed in INDEX_ALLOWED_USER_ROLES are stored in the index.
    No-op when the Algolia user index is not configured.
    """
    # A single guard suffices: the original checked both 'is None' and
    # truthiness of the same global, which was redundant.
    if not algolia_index_users:
        return

    # Strip unneeded roles before indexing.
    if 'roles' in user:
        roles = set(user['roles']).intersection(INDEX_ALLOWED_USER_ROLES)
    else:
        roles = set()

    # Create or update Algolia index for the user.
    algolia_index_users.save_object({
        'objectID': user['_id'],
        'full_name': user['full_name'],
        'username': user['username'],
        'roles': list(roles),
        'groups': user['groups'],
        'email': user['email']
    })
@skip_when_testing
def algolia_index_node_save(node):
    """Creates or updates the Algolia index entry for the given node.

    Only nodes of a type in INDEX_ALLOWED_NODE_TYPES are indexed, and only
    when their status (if any) is 'published'.
    """
    if node['node_type'] in INDEX_ALLOWED_NODE_TYPES and algolia_index_nodes:
        # If a node does not have status published, do not index.
        if 'status' in node['properties'] \
                and node['properties']['status'] != 'published':
            return

        projects_collection = current_app.data.driver.db['projects']
        project = projects_collection.find_one({'_id': ObjectId(node['project'])})

        users_collection = current_app.data.driver.db['users']
        user = users_collection.find_one({'_id': ObjectId(node['user'])})

        node_ob = {
            'objectID': node['_id'],
            'name': node['name'],
            'project': {
                '_id': project['_id'],
                'name': project['name']
            },
            'created': node['_created'],
            'updated': node['_updated'],
            'node_type': node['node_type'],
            'user': {
                '_id': user['_id'],
                'full_name': user['full_name']
            },
        }
        if 'description' in node and node['description']:
            node_ob['description'] = node['description']
        if 'picture' in node and node['picture']:
            files_collection = current_app.data.driver.db['files']
            lookup = {'_id': ObjectId(node['picture'])}
            picture = files_collection.find_one(lookup)
            # Only GCS-backed pictures get a public thumbnail link.
            if picture['backend'] == 'gcs':
                variation_t = next((item for item in picture['variations'] \
                                    if item['size'] == 't'), None)
                if variation_t:
                    node_ob['picture'] = generate_link(picture['backend'],
                                                       variation_t['file_path'], project_id=str(picture['project']),
                                                       is_public=True)
        # If the node has world permissions, compute the Free permission
        if 'permissions' in node and 'world' in node['permissions']:
            if 'GET' in node['permissions']['world']:
                node_ob['is_free'] = True
        # Append the media key if the node is of node_type 'asset'
        if node['node_type'] == 'asset':
            node_ob['media'] = node['properties']['content_type']
        # Add tags
        if 'tags' in node['properties']:
            node_ob['tags'] = node['properties']['tags']

        algolia_index_nodes.save_object(node_ob)
@skip_when_testing
def algolia_index_node_delete(node):
    """Deletes the node from the Algolia index, if indexing is configured."""
    if algolia_index_nodes is not None:
        algolia_index_nodes.delete_object(node['_id'])

104
pillar/auth/__init__.py Normal file
View File

@ -0,0 +1,104 @@
"""Authentication code common to the web and api modules."""
import logging
from flask import current_app, session
import flask_login
import flask_oauthlib.client
from ..api import utils, blender_id
from ..api.utils import authentication
log = logging.getLogger(__name__)
class UserClass(flask_login.UserMixin):
    """Flask-Login user object, identified by authentication token."""

    def __init__(self, token):
        # We store the Token instead of ID.
        self.id = token
        self.username = None
        self.full_name = None
        self.objectid = None
        self.gravatar = None
        self.email = None
        self.roles = []

    def has_role(self, *roles):
        """Returns True iff the user has one or more of the given roles."""
        if not self.roles:
            return False
        return not set(roles).isdisjoint(self.roles)
class AnonymousUser(flask_login.AnonymousUserMixin):
    # An anonymous user never has any role.
    def has_role(self, *roles):
        return False
def _load_user(token):
    """Loads a user by their token.

    :returns: a UserClass instance if logged in, or an AnonymousUser() if not.
    :rtype: UserClass
    """
    db_user = authentication.validate_this_token(token)
    if not db_user:
        return AnonymousUser()

    user = UserClass(token)
    user.email = db_user['email']
    user.objectid = unicode(db_user['_id'])
    user.username = db_user['username']
    user.gravatar = utils.gravatar(db_user['email'])
    user.roles = db_user.get('roles', [])
    # 'groups' may be None in the database; normalize to a list of strings.
    user.groups = [unicode(g) for g in db_user['groups'] or ()]
    user.full_name = db_user.get('full_name', '')

    return user
def config_login_manager(app):
    """Configures the Flask-Login manager, used for the web endpoints."""

    manager = flask_login.LoginManager()
    manager.init_app(app)
    manager.login_view = "users.login"
    manager.anonymous_user = AnonymousUser
    # noinspection PyTypeChecker
    manager.user_loader(_load_user)

    return manager
def get_blender_id_oauth_token():
    """Returns the Blender-ID OAuth token stored in the session.

    NOTE(review): registered as flask_oauthlib's tokengetter, which expects
    a (token, secret) tuple -- presumably that is what is stored in the
    session under this key; confirm at the place the session value is set.
    """
    return session.get('blender_id_oauth_token')
def config_oauth_login(app):
    """Configures OAuth login via Blender-ID, when SOCIAL_BLENDER_ID is set.

    :returns: the configured flask_oauthlib remote app, or None when OAuth
        login is not configured.
    """
    config = app.config

    # Fetch the setting once and reuse it (the original called
    # config.get('SOCIAL_BLENDER_ID') twice).
    social_blender_id = config.get('SOCIAL_BLENDER_ID')
    if not social_blender_id:
        log.info('OAuth Blender-ID login not setup.')
        return None

    oauth = flask_oauthlib.client.OAuth(app)
    oauth_blender_id = oauth.remote_app(
        'blender_id',
        consumer_key=social_blender_id['app_id'],
        consumer_secret=social_blender_id['app_secret'],
        request_token_params={'scope': 'email'},
        base_url=config['BLENDER_ID_OAUTH_URL'],
        request_token_url=None,
        access_token_url=config['BLENDER_ID_BASE_ACCESS_TOKEN_URL'],
        authorize_url=config['BLENDER_ID_AUTHORIZE_URL']
    )
    oauth_blender_id.tokengetter(get_blender_id_oauth_token)

    log.info('OAuth Blender-ID login setup as %s', social_blender_id['app_id'])
    return oauth_blender_id

354
pillar/cli.py Normal file
View File

@ -0,0 +1,354 @@
"""Commandline interface.
Run commands with 'flask <command>'
"""
from __future__ import print_function, division
import logging
from bson.objectid import ObjectId, InvalidId
from flask import current_app
from flask.ext.script import Manager
log = logging.getLogger(__name__)
manager = Manager(current_app)
@manager.command
def setup_db(admin_email):
    """Setup the database
    - Create admin, subscriber and demo Group collection
    - Create admin user (must use valid blender-id credentials)
    - Create one project
    """

    # Create default groups
    groups_list = []
    for group in ['admin', 'subscriber', 'demo']:
        g = {'name': group}
        g = current_app.post_internal('groups', g)
        groups_list.append(g[0]['_id'])
        print("Creating group {0}".format(group))

    # Create admin user
    user = {'username': admin_email,
            'groups': groups_list,
            'roles': ['admin', 'subscriber', 'demo'],
            'settings': {'email_communications': 1},
            'auth': [],
            'full_name': admin_email,
            'email': admin_email}
    result, _, _, status = current_app.post_internal('users', user)
    if status != 201:
        raise SystemExit('Error creating user {}: {}'.format(admin_email, result))
    user.update(result)
    print("Created user {0}".format(user['_id']))

    # Create a default project by faking a POST request, since
    # create_project() reads the current user and form data from the
    # request context.
    with current_app.test_request_context(data={'project_name': u'Default Project'}):
        from flask import g
        from pillar.api.projects import routes as proj_routes

        g.current_user = {'user_id': user['_id'],
                          'groups': user['groups'],
                          'roles': set(user['roles'])}

        proj_routes.create_project(overrides={'url': 'default-project',
                                              'is_private': False})
@manager.command
def find_duplicate_users():
    """Finds users that have the same BlenderID user_id."""

    from collections import defaultdict

    users_coll = current_app.data.driver.db['users']
    nodes_coll = current_app.data.driver.db['nodes']
    projects_coll = current_app.data.driver.db['projects']

    # Map from Blender-ID user_id to the list of local user documents.
    users_by_blender_id = defaultdict(list)
    for user in users_coll.find():
        blender_ids = [auth['user_id'] for auth in user['auth']
                       if auth['provider'] == 'blender-id']
        if not blender_ids:
            continue
        users_by_blender_id[blender_ids[0]].append(user)

    # Report only Blender IDs that map to more than one local user.
    for blender_id, users in users_by_blender_id.iteritems():
        if len(users) == 1:
            continue

        usernames = ', '.join(user['username'] for user in users)
        print('Blender ID: %5s has %i users: %s' % (
            blender_id, len(users), usernames))

        for user in users:
            print(' %s owns %i nodes and %i projects' % (
                user['username'],
                nodes_coll.count({'user': user['_id']}),
                projects_coll.count({'user': user['_id']}),
            ))
@manager.command
def sync_role_groups(do_revoke_groups):
    """For each user, synchronizes roles and group membership.

    This ensures that everybody with the 'subscriber' role is also member of the 'subscriber'
    group, and people without the 'subscriber' role are not member of that group. Same for
    admin and demo groups.

    When do_revoke_groups=False (the default), people are only added to groups.
    when do_revoke_groups=True, people are also removed from groups.
    """

    from pillar.api import service

    # The CLI passes arguments as strings; require an explicit choice.
    if do_revoke_groups not in {'true', 'false'}:
        print('Use either "true" or "false" as first argument.')
        print('When passing "false", people are only added to groups.')
        print('when passing "true", people are also removed from groups.')
        raise SystemExit()
    do_revoke_groups = do_revoke_groups == 'true'

    service.fetch_role_to_group_id_map()

    users_coll = current_app.data.driver.db['users']
    groups_coll = current_app.data.driver.db['groups']

    # Cache of group ObjectId -> group name, to avoid repeated lookups.
    group_names = {}

    def gname(gid):
        # Returns the (cached) name of the group with the given ID.
        try:
            return group_names[gid]
        except KeyError:
            name = groups_coll.find_one(gid, projection={'name': 1})['name']
            name = str(name)
            group_names[gid] = name
            return name

    ok_users = bad_users = 0
    for user in users_coll.find():
        grant_groups = set()
        revoke_groups = set()
        current_groups = set(user.get('groups', []))
        user_roles = user.get('roles', set())

        for role in service.ROLES_WITH_GROUPS:
            action = 'grant' if role in user_roles else 'revoke'
            groups = service.manage_user_group_membership(user, role, action)

            if groups is None:
                # No changes required
                continue

            if groups == current_groups:
                continue

            grant_groups.update(groups.difference(current_groups))
            revoke_groups.update(current_groups.difference(groups))

        if grant_groups or revoke_groups:
            bad_users += 1

            expected_groups = current_groups.union(grant_groups).difference(revoke_groups)

            print('Discrepancy for user %s/%s:' % (user['_id'], user['full_name'].encode('utf8')))
            print(' - actual groups :', sorted(gname(gid) for gid in user.get('groups')))
            print(' - expected groups:', sorted(gname(gid) for gid in expected_groups))
            print(' - will grant :', sorted(gname(gid) for gid in grant_groups))

            if do_revoke_groups:
                label = 'WILL REVOKE '
            else:
                label = 'could revoke'
            print(' - %s :' % label, sorted(gname(gid) for gid in revoke_groups))

            if grant_groups and revoke_groups:
                print(' ------ CAREFUL this one has BOTH grant AND revoke -----')

            # Determine which changes we'll apply
            final_groups = current_groups.union(grant_groups)
            if do_revoke_groups:
                final_groups.difference_update(revoke_groups)
            print(' - final groups :', sorted(gname(gid) for gid in final_groups))

            # Perform the actual update
            users_coll.update_one({'_id': user['_id']},
                                  {'$set': {'groups': list(final_groups)}})
        else:
            ok_users += 1

    print('%i bad and %i ok users seen.' % (bad_users, ok_users))
@manager.command
def sync_project_groups(user_email, fix):
    """Gives the user access to their self-created projects.

    :param user_email: email address or ObjectId string of the user.
    :param fix: 'true' to actually fix group membership, 'false' to only report.
    """

    if fix.lower() not in {'true', 'false'}:
        print('Use either "true" or "false" as second argument.')
        print('When passing "false", only a report is produced.')
        print('when passing "true", group membership is fixed.')
        raise SystemExit()
    fix = fix.lower() == 'true'

    users_coll = current_app.data.driver.db['users']
    proj_coll = current_app.data.driver.db['projects']
    groups_coll = current_app.data.driver.db['groups']

    # Find by email or by user ID
    if '@' in user_email:
        where = {'email': user_email}
    else:
        try:
            where = {'_id': ObjectId(user_email)}
        except InvalidId:
            log.warning('Invalid ObjectID: %s', user_email)
            return

    user = users_coll.find_one(where, projection={'_id': 1, 'groups': 1})
    if user is None:
        log.error('User %s not found', where)
        raise SystemExit()

    user_groups = set(user['groups'])
    user_id = user['_id']
    log.info('Updating projects for user %s', user_id)

    ok_groups = missing_groups = 0
    for proj in proj_coll.find({'user': user_id}):
        project_id = proj['_id']
        log.info('Investigating project %s (%s)', project_id, proj['name'])

        # Find the admin group; it is named after the project ID.
        admin_group = groups_coll.find_one({'name': str(project_id)}, projection={'_id': 1})
        if admin_group is None:
            log.warning('No admin group for project %s', project_id)
            continue
        group_id = admin_group['_id']

        # Check membership
        if group_id not in user_groups:
            log.info('Missing group membership')
            missing_groups += 1
            user_groups.add(group_id)
        else:
            ok_groups += 1

    log.info('User %s was missing %i group memberships; %i projects were ok.',
             user_id, missing_groups, ok_groups)

    if missing_groups > 0 and fix:
        log.info('Updating database.')
        result = users_coll.update_one({'_id': user_id},
                                       {'$set': {'groups': list(user_groups)}})
        log.info('Updated %i user.', result.modified_count)
@manager.command
def badger(action, user_email, role):
    """Grants or revokes a role for the given user via the badger service.

    NOTE(review): valid values of 'action' are whatever service.do_badger
    accepts -- presumably 'grant'/'revoke'; confirm in pillar.api.service.
    """
    from pillar.api import service

    with current_app.app_context():
        service.fetch_role_to_group_id_map()
        response, status = service.do_badger(action, user_email, role)

    # Status 204 means success with no content.
    if status == 204:
        log.info('Done.')
    else:
        log.info('Response: %s', response)
        log.info('Status : %i', status)
def _create_service_account(email, service_roles, service_definition):
    """Creates a service account and prints the account and its access token."""

    from pillar.api import service
    from pillar.api.utils import dumps

    account, token = service.create_service_account(
        email, service_roles, service_definition)

    print('Account created:')
    print(dumps(account, indent=4, sort_keys=True))
    print()
    print('Access token: %s' % token['token'])
    print(' expires on: %s' % token['expire_time'])
@manager.command
def create_badger_account(email, badges):
    """
    Creates a new service account that can give badges (i.e. roles).

    :param email: email address associated with the account
    :param badges: single space-separated argument containing the roles
        this account can assign and revoke.
    """
    badge_roles = badges.strip().split()
    _create_service_account(email, [u'badger'], {'badger': badge_roles})
@manager.command
def create_urler_account(email):
    """Creates a new service account that can fetch all project URLs."""
    # The 'urler' role needs no extra service definition.
    _create_service_account(email, [u'urler'], {})
@manager.command
def create_local_user_account(email, password):
    """Creates a user account with a local (email + password) login."""
    from pillar.api.local_auth import create_local_user
    create_local_user(email, password)
@manager.command
@manager.option('-c', '--chunk', dest='chunk_size', default=50)
@manager.option('-q', '--quiet', dest='quiet', action='store_true', default=False)
@manager.option('-w', '--window', dest='window', default=12)
def refresh_backend_links(backend_name, chunk_size=50, quiet=False, window=12):
    """Refreshes all file links that are using a certain storage backend.

    :param backend_name: name of the storage backend to refresh links for.
    :param chunk_size: number of links to refresh per batch.
    :param quiet: when True, only warnings and errors are logged.
    :param window: refresh window, in hours.
    """

    # CLI parameters are passed as strings; convert them once here
    # (the original converted chunk_size twice).
    chunk_size = int(chunk_size)
    window = int(window)

    if quiet:
        import logging
        from pillar import log

        logging.getLogger().setLevel(logging.WARNING)
        log.setLevel(logging.WARNING)

    from pillar.api import file_storage

    file_storage.refresh_links_for_backend(backend_name, chunk_size, window * 3600)
@manager.command
def expire_all_project_links(project_uuid):
    """Expires all file links for a certain project without refreshing.

    This is just for testing.
    """
    import datetime
    import bson.tz_util

    files_collection = current_app.data.driver.db['files']

    # Set the expiry timestamp to one day in the past.
    yesterday = datetime.datetime.now(tz=bson.tz_util.utc) - datetime.timedelta(days=1)
    result = files_collection.update_many(
        {'project': ObjectId(project_uuid)},
        {'$set': {'link_expires': yesterday}}
    )

    print('Expired %i links' % result.matched_count)

View File

@ -1,23 +1,23 @@
import os.path
from os import getenv
from collections import defaultdict
import requests.certs
# Certificate file for communication with other systems.
TLS_CERT_FILE = requests.certs.where()
print('Loading TLS certificates from %s' % TLS_CERT_FILE)
RFC1123_DATE_FORMAT = '%a, %d %b %Y %H:%M:%S GMT'
PILLAR_SERVER_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SCHEME = 'https'
STORAGE_DIR = '/data/storage/pillar'
SHARED_DIR = '/data/storage/shared'
STORAGE_DIR = getenv('PILLAR_STORAGE_DIR', '/data/storage/pillar')
PORT = 5000
HOST = '0.0.0.0'
DEBUG = False
SECRET_KEY = '123'
# Authentication settings
BLENDER_ID_ENDPOINT = 'http://blender_id:8000/'
PILLAR_SERVER_ENDPOINT = 'http://pillar:5001/api/'
CDN_USE_URL_SIGNING = True
CDN_SERVICE_DOMAIN_PROTOCOL = 'https'
CDN_SERVICE_DOMAIN = '-CONFIG-THIS-'
@ -44,7 +44,7 @@ BIN_FFMPEG = '/usr/bin/ffmpeg'
BIN_SSH = '/usr/bin/ssh'
BIN_RSYNC = '/usr/bin/rsync'
GCLOUD_APP_CREDENTIALS = os.path.join(os.path.dirname(__file__), 'google_app.json')
GCLOUD_APP_CREDENTIALS = 'google_app.json'
GCLOUD_PROJECT = '-SECRET-'
ADMIN_USER_GROUP = '5596e975ea893b269af85c0e'
@ -93,7 +93,7 @@ LOGGING = {
}
},
'loggers': {
'application': {'level': 'INFO'},
'pillar': {'level': 'INFO'},
'werkzeug': {'level': 'INFO'},
},
'root': {
@ -111,3 +111,29 @@ SHORT_CODE_LENGTH = 6 # characters
FILESIZE_LIMIT_BYTES_NONSUBS = 32 * 2 ** 20
# Unless they have one of those roles.
ROLES_FOR_UNLIMITED_UPLOADS = {u'subscriber', u'demo', u'admin'}
#############################################
# Old pillar-web config:
# Mapping from /{path} to URL to redirect to.
REDIRECTS = {}
GIT = 'git'
# Setting this to True can be useful for development.
# Note that it doesn't add the /p/home/{node-id} endpoint, so you will have to
# change the URL of the home project if you want to have direct access to nodes.
RENDER_HOME_AS_REGULAR_PROJECT = False
# Authentication token for the Urler service. If None, defaults
# to the authentication token of the current user.
URLER_SERVICE_AUTH_TOKEN = None
# Blender Cloud add-on version. This updates the value in all places in the
# front-end.
BLENDER_CLOUD_ADDON_VERSION = '1.4'
EXTERNAL_SUBSCRIPTIONS_MANAGEMENT_SERVER = 'https://store.blender.org/api/'

64
pillar/extension.py Normal file
View File

@ -0,0 +1,64 @@
"""Pillar extensions support.
Each Pillar extension should create a subclass of PillarExtension, which
can then be registered to the application at app creation time:
from pillar_server import PillarServer
from attract_server import AttractExtension
app = PillarServer('.')
app.load_extension(AttractExtension(), url_prefix='/attract')
app.process_extensions() # Always process extensions after the last one is loaded.
if __name__ == '__main__':
app.run('::0', 5000)
"""
import abc
class PillarExtension(object):
    """Abstract base class for Pillar extensions.

    Subclasses implement the abstract members below and are registered on
    the application with app.load_extension().
    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractproperty
    def name(self):
        """The name of this extension.

        The name determines the path at which Eve exposes the extension's
        resources (/{extension name}/{resource name}), as well as the
        MongoDB collection in which those resources are stored
        ({extensions name}.{resource name}).

        :rtype: unicode
        """

    @abc.abstractmethod
    def flask_config(self):
        """Returns extension-specific defaults for the Flask configuration.

        Use this to set sensible default values for configuration settings
        introduced by the extension.

        :rtype: dict
        """

    @abc.abstractmethod
    def blueprints(self):
        """Returns the list of top-level blueprints for the extension.

        These blueprints will be mounted at the url prefix given to
        app.load_extension().

        :rtype: list of flask.Blueprint objects.
        """

    @abc.abstractmethod
    def eve_settings(self):
        """Returns extensions to the Eve settings.

        Currently only the DOMAIN key is used to insert new resources into
        Eve's configuration.

        :rtype: dict
        """

View File

@ -1,182 +0,0 @@
def import_data(path):
    """Imports a JSON dump of files/nodes into the database (Python 2 code).

    NOTE(review): the code after the bare 'return' near the end (writing the
    updated dump to a '_'-prefixed file) is unreachable -- confirm whether
    the early return was intentional.
    """
    import json
    import pprint
    from bson import json_util
    if not os.path.isfile(path):
        return "File does not exist"
    with open(path, 'r') as infile:
        d = json.load(infile)

    def commit_object(collection, f, parent=None):
        # Posts document 'f' to 'collection', temporarily stripping the
        # legacy id fields that the API would reject, and restoring them
        # afterwards.  Returns 'f' with its new '_id' assigned.
        variation_id = f.get('variation_id')
        if variation_id:
            del f['variation_id']

        asset_id = f.get('asset_id')
        if asset_id:
            del f['asset_id']

        node_id = f.get('node_id')
        if node_id:
            del f['node_id']

        if parent:
            f['parent'] = parent
        else:
            if f.get('parent'):
                del f['parent']

        #r = [{'_status': 'OK', '_id': 'DRY-ID'}]
        r = post_item(collection, f)
        if r[0]['_status'] == 'ERR':
            print r[0]['_issues']
            print "Tried to commit the following object"
            pprint.pprint(f)

        # Assign the Mongo ObjectID
        f['_id'] = str(r[0]['_id'])

        # Restore variation_id
        if variation_id:
            f['variation_id'] = variation_id

        if asset_id:
            f['asset_id'] = asset_id

        if node_id:
            f['node_id'] = node_id

        try:
            print "{0} {1}".format(f['_id'], f['name'])
        except UnicodeEncodeError:
            print "{0}".format(f['_id'])

        return f

    # Build list of parent files
    parent_files = [f for f in d['files'] if 'parent_asset_id' in f]
    children_files = [f for f in d['files'] if 'parent_asset_id' not in f]

    for p in parent_files:
        # Store temp property
        parent_asset_id = p['parent_asset_id']
        # Remove from dict to prevent invalid submission
        del p['parent_asset_id']
        # Commit to database
        p = commit_object('files', p)
        # Restore temp property
        p['parent_asset_id'] = parent_asset_id

        # Find children of the current file
        children = [c for c in children_files if c['parent'] == p['variation_id']]
        for c in children:
            # Commit to database with parent id
            c = commit_object('files', c, p['_id'])

    # Merge the dicts and replace the original one
    d['files'] = parent_files + children_files

    # Files for picture previews of folders (groups)
    for f in d['files_group']:
        item_id = f['item_id']
        del f['item_id']
        f = commit_object('files', f)
        f['item_id'] = item_id

    # Files for picture previews of assets
    for f in d['files_asset']:
        item_id = f['item_id']
        del f['item_id']
        f = commit_object('files',f)
        f['item_id'] = item_id

    nodes_asset = [n for n in d['nodes'] if 'asset_id' in n]
    nodes_group = [n for n in d['nodes'] if 'node_id' in n]

    def get_parent(node_id):
        # Returns the group node with the given node_id, or None.
        #print "Searching for {0}".format(node_id)
        try:
            parent = [p for p in nodes_group if p['node_id'] == node_id][0]
        except IndexError:
            return None
        return parent

    def traverse_nodes(parent_id):
        # Walks up the parent chain and returns the list of ancestor
        # node_ids, ordered from root to immediate parent.
        parents_list = []
        while True:
            parent = get_parent(parent_id)
            #print parent
            if not parent:
                break
            else:
                parents_list.append(parent['node_id'])
                if parent.get('parent'):
                    parent_id = parent['parent']
                else:
                    break
        parents_list.reverse()
        return parents_list

    for n in nodes_asset:
        node_type_asset = db.node_types.find_one({"name": "asset"})
        if n.get('picture'):
            filename = os.path.splitext(n['picture'])[0]
            pictures = [p for p in d['files_asset'] if p['name'] == filename]
            if pictures:
                n['picture'] = pictures[0]['_id']
                print "Adding picture link {0}".format(n['picture'])
        n['node_type'] = node_type_asset['_id']
        # An asset node must have a parent
        # parent = [p for p in nodes_group if p['node_id'] == n['parent']][0]
        parents_list = traverse_nodes(n['parent'])

        tree_index = 0
        for node_id in parents_list:
            node = [p for p in nodes_group if p['node_id'] == node_id][0]

            # Commit group nodes that have not been committed yet.
            if node.get('_id') is None:
                node_type_group = db.node_types.find_one({"name": "group"})
                node['node_type'] = node_type_group['_id']
                # Assign picture to the node group
                if node.get('picture'):
                    filename = os.path.splitext(node['picture'])[0]
                    picture = [p for p in d['files_group'] if p['name'] == filename][0]
                    node['picture'] = picture['_id']
                    print "Adding picture link to node {0}".format(node['picture'])
                if tree_index == 0:
                    # We are at the root of the tree (so we link to the project)
                    node_type_project = db.node_types.find_one({"name": "project"})
                    node['node_type'] = node_type_project['_id']
                    parent = None
                    if node['properties'].get('picture_square'):
                        filename = os.path.splitext(node['properties']['picture_square'])[0]
                        picture = [p for p in d['files_group'] if p['name'] == filename][0]
                        node['properties']['picture_square'] = picture['_id']
                        print "Adding picture_square link to node"
                    if node['properties'].get('picture_header'):
                        filename = os.path.splitext(node['properties']['picture_header'])[0]
                        picture = [p for p in d['files_group'] if p['name'] == filename][0]
                        node['properties']['picture_header'] = picture['_id']
                        print "Adding picture_header link to node"
                else:
                    # Get the parent node id
                    parents_list_node_id = parents_list[tree_index - 1]
                    parent_node = [p for p in nodes_group if p['node_id'] == parents_list_node_id][0]
                    parent = parent_node['_id']
                print "About to commit Node"
                commit_object('nodes', node, parent)
            tree_index += 1

        # Commit the asset
        print "About to commit Asset {0}".format(n['asset_id'])
        parent_node = [p for p in nodes_group if p['node_id'] == parents_list[-1]][0]
        try:
            asset_file = [a for a in d['files'] if a['md5'] == n['properties']['file']][0]
            n['properties']['file'] = str(asset_file['_id'])
            commit_object('nodes', n, parent_node['_id'])
        except IndexError:
            pass

    return

    # New path with _  (unreachable -- see docstring)
    path = '_' + path
    with open(path, 'w') as outfile:
        json.dump(d, outfile, default=json_util.default)
    return

View File

@ -1,11 +0,0 @@
# FastCGI entry point (Python 2): activates the virtualenv in-process,
# then serves the Flask application via flup's WSGIServer.
import sys

activate_this = '/data/venv/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))

from flup.server.fcgi import WSGIServer

sys.path.append('/data/git/pillar/pillar/')
from application import app as application

if __name__ == '__main__':
    WSGIServer(application).run()

100
pillar/sdk.py Normal file
View File

@ -0,0 +1,100 @@
"""PillarSDK subclass for direct Flask-internal calls."""
import logging
import urlparse
from flask import current_app
import pillarsdk
from pillarsdk import exceptions
log = logging.getLogger(__name__)
class FlaskInternalApi(pillarsdk.Api):
    """SDK API subclass that calls Flask directly.

    Can only be used from the same Python process the Pillar server itself is
    running on.
    """

    def http_call(self, url, method, **kwargs):
        """Fakes a http call through Flask/Werkzeug.

        :returns: the parsed response content, or the raw response for
            OPTIONS requests.
        """
        client = current_app.test_client()
        self.requests_to_flask_kwargs(kwargs)

        # Split the URL so the query string can be passed separately.
        url = urlparse.urlsplit(url)
        path = url.scheme + "://" + url.netloc + url.path
        query = url.query

        try:
            response = client.open(path=path, query_string=query, method=method,
                                   **kwargs)
        except Exception as ex:
            log.warning('Error performing HTTP %s request to %s: %s', method,
                        url, str(ex))
            raise

        if method == 'OPTIONS':
            return response

        self.flask_to_requests_response(response)

        try:
            content = self.handle_response(response, response.data)
        except Exception:
            # Catch Exception, not a bare 'except:', so that SystemExit and
            # KeyboardInterrupt are not swallowed by the logging path.
            log.warning("%s: Response[%s]: %s", url, response.status_code,
                        response.data)
            raise

        return content

    def requests_to_flask_kwargs(self, kwargs):
        """Converts Requests arguments to Flask test client arguments (in place)."""

        # No network connection, so nothing to verify.
        kwargs.pop('verify', None)

        # Files to upload need to be sent in the 'data' kwarg instead of the
        # 'files' kwarg, and have a different order.
        if 'files' in kwargs:
            # By default, 'data' is there but None, so setdefault('data', {})
            # won't work.
            data = kwargs.get('data') or {}

            for file_name, file_value in kwargs['files'].items():
                # Fixed 'mimeytpe' typo; the mimetype is not used by the
                # Flask test client tuple format.
                fname, fobj, mimetype = file_value
                data[file_name] = (fobj, fname)

            del kwargs['files']
            kwargs['data'] = data

    def flask_to_requests_response(self, response):
        """Adds some properties to a Flask response object to mimick a Requests
        object.
        """
        # Our API always sends back UTF8, so we don't have to check headers for
        # that.
        if response.mimetype.startswith('text'):
            response.text = response.data.decode('utf8')
        else:
            response.text = None

    def OPTIONS(self, action, headers=None):
        """Make OPTIONS request.

        Contrary to other requests, this method returns the raw requests.Response object.

        :rtype: requests.Response
        """
        import os

        url = os.path.join(self.endpoint, action.strip('/'))
        response = self.request(url, 'OPTIONS', headers=headers)
        if 200 <= response.status_code <= 299:
            return response

        exception = exceptions.exception_for_status(response.status_code)
        if exception:
            raise exception(response, response.text)

        raise exceptions.ConnectionError(response, response.text,
                                         "Unknown response code: %s" % response.status_code)

View File

@ -1,12 +1,13 @@
# -*- encoding: utf-8 -*-
import json
import base64
import copy
import sys
import json
import logging
import datetime
import os
import base64
import sys
try:
from urllib.parse import urlencode
@ -16,16 +17,17 @@ except ImportError:
from bson import ObjectId, tz_util
# Override Eve settings before importing eve.tests.
import common_test_settings
from pillar.tests import eve_test_settings
common_test_settings.override_eve()
eve_test_settings.override_eve()
from eve.tests import TestMinimal
import pymongo.collection
from flask.testing import FlaskClient
import responses
from common_test_data import EXAMPLE_PROJECT, EXAMPLE_FILE
from pillar.tests.common_test_data import EXAMPLE_PROJECT, EXAMPLE_FILE
import pillar
# from six:
PY3 = sys.version_info[0] == 3
@ -49,32 +51,41 @@ BLENDER_ID_USER_RESPONSE = {'status': 'success',
'id': BLENDER_ID_TEST_USERID},
'token_expires': 'Mon, 1 Jan 2018 01:02:03 GMT'}
class TestPillarServer(pillar.PillarServer):
def _load_flask_config(self):
super(TestPillarServer, self)._load_flask_config()
pillar_config_file = os.path.join(MY_PATH, 'config_testing.py')
self.config.from_pyfile(pillar_config_file)
def _config_logging(self):
logging.basicConfig(
level=logging.DEBUG,
format='%(asctime)-15s %(levelname)8s %(name)s %(message)s')
logging.getLogger('').setLevel(logging.DEBUG)
logging.getLogger('pillar').setLevel(logging.DEBUG)
logging.getLogger('werkzeug').setLevel(logging.DEBUG)
logging.getLogger('eve').setLevel(logging.DEBUG)
class AbstractPillarTest(TestMinimal):
pillar_server_class = TestPillarServer
def setUp(self, **kwargs):
eve_settings_file = os.path.join(MY_PATH, 'common_test_settings.py')
pillar_config_file = os.path.join(MY_PATH, 'config_testing.py')
eve_settings_file = os.path.join(MY_PATH, 'eve_test_settings.py')
kwargs['settings_file'] = eve_settings_file
os.environ['EVE_SETTINGS'] = eve_settings_file
os.environ['PILLAR_CONFIG'] = pillar_config_file
super(AbstractPillarTest, self).setUp(**kwargs)
from application import app
logging.getLogger('').setLevel(logging.DEBUG)
logging.getLogger('application').setLevel(logging.DEBUG)
logging.getLogger('werkzeug').setLevel(logging.DEBUG)
logging.getLogger('eve').setLevel(logging.DEBUG)
from eve.utils import config
config.DEBUG = True
self.app = app
self.client = app.test_client()
self.app = self.pillar_server_class(os.path.dirname(os.path.dirname(__file__)))
self.app.process_extensions()
assert self.app.config['MONGO_DBNAME'] == 'pillar_test'
self.client = self.app.test_client()
assert isinstance(self.client, FlaskClient)
def tearDown(self):
@ -82,9 +93,9 @@ class AbstractPillarTest(TestMinimal):
# Not only delete self.app (like the superclass does),
# but also un-import the application.
del sys.modules['application']
del sys.modules['pillar']
remove = [modname for modname in sys.modules
if modname.startswith('application.')]
if modname.startswith('pillar.')]
for modname in remove:
del sys.modules[modname]
@ -126,7 +137,7 @@ class AbstractPillarTest(TestMinimal):
def create_user(self, user_id='cafef00dc379cf10c4aaceaf', roles=('subscriber',),
groups=None):
from application.utils.authentication import make_unique_username
from pillar.api.utils.authentication import make_unique_username
with self.app.test_request_context():
users = self.app.data.driver.db['users']
@ -154,12 +165,25 @@ class AbstractPillarTest(TestMinimal):
future = now + datetime.timedelta(days=1)
with self.app.test_request_context():
from application.utils import authentication as auth
from pillar.api.utils import authentication as auth
token_data = auth.store_token(user_id, token, future, None)
return token_data
def create_project_with_admin(self, user_id='cafef00dc379cf10c4aaceaf', roles=('subscriber', )):
"""Creates a project and a user that's member of the project's admin group.
:returns: (project_id, user_id)
:rtype: tuple
"""
project_id, proj = self.ensure_project_exists()
admin_group_id = proj['permissions']['groups'][0]['group']
user_id = self.create_user(user_id=user_id, roles=roles, groups=[admin_group_id])
return project_id, user_id
def badger(self, user_email, roles, action, srv_token=None):
"""Creates a service account, and uses it to grant or revoke a role to the user.
@ -174,7 +198,7 @@ class AbstractPillarTest(TestMinimal):
# Create a service account if needed.
if srv_token is None:
from application.modules.service import create_service_account
from pillar.api.service import create_service_account
with self.app.test_request_context():
_, srv_token_doc = create_service_account('service@example.com',
{'badger'},
@ -182,14 +206,12 @@ class AbstractPillarTest(TestMinimal):
srv_token = srv_token_doc['token']
for role in roles:
resp = self.client.post('/service/badger',
headers={'Authorization': self.make_header(srv_token),
'Content-Type': 'application/json'},
data=json.dumps({'action': action,
self.post('/api/service/badger',
auth_token=srv_token,
json={'action': action,
'role': role,
'user_email': user_email}))
self.assertEqual(204, resp.status_code, resp.data)
'user_email': user_email},
expected_status=204)
return srv_token
def mock_blenderid_validate_unhappy(self):
@ -218,7 +240,7 @@ class AbstractPillarTest(TestMinimal):
:returns: mapping from group name to group ID
"""
from application.modules import service
from pillar.api import service
with self.app.test_request_context():
group_ids = {}
@ -266,7 +288,7 @@ class AbstractPillarTest(TestMinimal):
data=None, headers=None, files=None, content_type=None):
"""Performs a HTTP request to the server."""
from application.utils import dumps
from pillar.api.utils import dumps
import json as mod_json
headers = headers or {}

View File

@ -1,6 +1,6 @@
from settings import *
from pillar.api.eve_settings import *
from eve.tests.test_settings import MONGO_DBNAME
MONGO_DBNAME = 'pillar_test'
def override_eve():
@ -9,5 +9,6 @@ def override_eve():
test_settings.MONGO_HOST = MONGO_HOST
test_settings.MONGO_PORT = MONGO_PORT
test_settings.MONGO_DBNAME = MONGO_DBNAME
tests.MONGO_HOST = MONGO_HOST
tests.MONGO_PORT = MONGO_PORT
tests.MONGO_DBNAME = MONGO_DBNAME

8
pillar/web/__init__.py Normal file
View File

@ -0,0 +1,8 @@
def setup_app(app):
    """Registers every pillar.web submodule's blueprint on the application."""
    from . import main, users, projects, nodes, notifications, redirects

    # Each submodule paired with the URL prefix it is mounted on
    # (None = application root). Registration order matches the original.
    mounts = [
        (main, None),
        (users, None),
        (redirects, '/r'),
        (projects, '/p'),
        (nodes, '/nodes'),
        (notifications, '/notifications'),
    ]
    for module, url_prefix in mounts:
        module.setup_app(app, url_prefix=url_prefix)

View File

@ -0,0 +1,5 @@
from .routes import blueprint
def setup_app(app, url_prefix):
    """Registers this module's blueprint on the Flask application.

    :param app: the Flask application
    :param url_prefix: URL prefix to mount the blueprint on, or None for
        the application root.
    """
    app.register_blueprint(blueprint, url_prefix=url_prefix)

324
pillar/web/main/routes.py Normal file
View File

@ -0,0 +1,324 @@
import itertools
import logging
from pillarsdk import Node
from pillarsdk import Project
from pillarsdk.exceptions import ResourceNotFound
from flask import abort
from flask import Blueprint
from flask import current_app
from flask import render_template
from flask import redirect
from flask import request
from flask.ext.login import current_user
from werkzeug.contrib.atom import AtomFeed
from pillar.web.utils import system_util
from pillar.web.nodes.routes import url_for_node
from pillar.web.nodes.custom.posts import posts_view
from pillar.web.nodes.custom.posts import posts_create
from pillar.web.utils import attach_project_pictures
from pillar.web.utils import current_user_is_authenticated
from pillar.web.utils import get_file
blueprint = Blueprint('main', __name__)
log = logging.getLogger(__name__)
@blueprint.route('/')
def homepage():
    """Renders the homepage.

    Anonymous visitors get a cached static 'join' page. Authenticated users
    get the latest posts, assets and comments merged into a single activity
    stream, plus a few random featured nodes.
    """
    # Workaround to cache rendering of a page if user not logged in
    @current_app.cache.cached(timeout=3600)
    def render_page():
        return render_template('join.html')

    if current_user.is_anonymous:
        return render_page()

    # Get latest blog posts
    api = system_util.pillar_api()
    latest_posts = Node.all({
        'projection': {'name': 1, 'project': 1, 'node_type': 1,
                       'picture': 1, 'properties.status': 1, 'properties.url': 1},
        'where': {'node_type': 'post', 'properties.status': 'published'},
        'embedded': {'project': 1},
        'sort': '-_created',
        'max_results': '5'
    }, api=api)

    # Append picture Files to last_posts
    for post in latest_posts._items:
        post.picture = get_file(post.picture, api=api)

    # Get latest assets added to any project
    latest_assets = Node.latest('assets', api=api)

    # Append picture Files to latest_assets
    for asset in latest_assets._items:
        asset.picture = get_file(asset.picture, api=api)

    # Get latest comments to any node
    latest_comments = Node.latest('comments', api=api)

    # Get a list of random featured assets
    random_featured = get_random_featured_nodes()

    # Parse results for replies: a reply's interesting ancestor is its
    # grandparent (the node the thread hangs off), not the parent comment.
    for comment in latest_comments._items:
        if comment.properties.is_reply:
            comment.attached_to = Node.find(comment.parent.parent,
                                            {'projection': {
                                                '_id': 1,
                                                'name': 1,
                                            }},
                                            api=api)
        else:
            comment.attached_to = comment.parent

    main_project = Project.find(current_app.config['MAIN_PROJECT_ID'], api=api)
    main_project.picture_header = get_file(main_project.picture_header, api=api)

    # Merge latest assets and comments into one activity stream.
    def sort_key(item):
        return item._created

    activities = itertools.chain(latest_posts._items,
                                 latest_assets._items,
                                 latest_comments._items)
    activity_stream = sorted(activities, key=sort_key, reverse=True)

    return render_template(
        'homepage.html',
        main_project=main_project,
        latest_posts=latest_posts._items,
        activity_stream=activity_stream,
        random_featured=random_featured,
        api=api)
# @blueprint.errorhandler(500)
# def error_500(e):
# return render_template('errors/500.html'), 500
#
#
# @blueprint.errorhandler(404)
# def error_404(e):
# return render_template('errors/404.html'), 404
#
#
# @blueprint.errorhandler(403)
# def error_404(e):
# return render_template('errors/403_embed.html'), 403
#
@blueprint.route('/join')
def join():
    """Join page"""
    # Membership signup is handled by the Blender Store, not by this app.
    store_membership_url = 'https://store.blender.org/product/membership/'
    return redirect(store_membership_url)
@blueprint.route('/services')
def services():
    """Services page (static template, no data fetched)."""
    return render_template('services.html')
@blueprint.route('/blog/')
@blueprint.route('/blog/<url>')
def main_blog(url=None):
    """Blog with project news.

    :param url: slug of an individual post, or None for the post index.
    """
    project_id = current_app.config['MAIN_PROJECT_ID']

    # Cache rendered posts per-URL for anonymous visitors only.
    @current_app.cache.memoize(timeout=3600, unless=current_user_is_authenticated)
    def cache_post_view(url):
        return posts_view(project_id, url)

    return cache_post_view(url)
@blueprint.route('/blog/create')
def main_posts_create():
    """Creates a new post on the main project's blog."""
    project_id = current_app.config['MAIN_PROJECT_ID']
    return posts_create(project_id)
@blueprint.route('/p/<project_url>/blog/')
@blueprint.route('/p/<project_url>/blog/<url>')
def project_blog(project_url, url=None):
    """View project blog.

    :param project_url: URL slug of the project whose blog to show.
    :param url: slug of an individual post, or None for the post index.
    """

    # Cache rendered posts for anonymous visitors only.
    @current_app.cache.memoize(timeout=3600,
                               unless=current_user_is_authenticated)
    def cache_post_view(project_url, url):
        api = system_util.pillar_api()
        try:
            # Use a dict for the 'where' filter (as the other queries in this
            # module do) instead of %-interpolating the URL into a JSON
            # string, so a crafted project URL cannot alter the filter.
            project = Project.find_one({
                'where': {'url': project_url}}, api=api)
            return posts_view(project._id, url=url)
        except ResourceNotFound:
            return abort(404)

    return cache_post_view(project_url, url)
def get_projects(category):
    """Utility to get projects based on category. Should be moved on the API
    and improved with more extensive filtering capabilities.

    :param category: project category to filter on (e.g. 'film', 'training').
    :returns: the pillarsdk result list of public projects in that category,
        newest first, with project pictures attached.
    """
    api = system_util.pillar_api()
    projects = Project.all({
        'where': {
            'category': category,
            'is_private': False},
        'sort': '-_created',
    }, api=api)
    for project in projects._items:
        attach_project_pictures(project, api)
    return projects
def get_random_featured_nodes():
    """Picks up to 3 featured nodes at random from public projects.

    :returns: list of node documents (name, project, picture, content type,
        url) with their picture files resolved.
    """
    import random

    api = system_util.pillar_api()
    projects = Project.all({
        'projection': {'nodes_featured': 1},
        'where': {'is_private': False},
        'max_results': '15'
    }, api=api)

    # Flatten the per-project featured-node lists into one list of node ids.
    featured_nodes = (p.nodes_featured for p in projects._items if p.nodes_featured)
    featured_nodes = [item for sublist in featured_nodes for item in sublist]
    if len(featured_nodes) > 3:
        featured_nodes = random.sample(featured_nodes, 3)

    featured_node_documents = []
    for node in featured_nodes:
        node_document = Node.find(node, {
            'projection': {'name': 1, 'project': 1, 'picture': 1,
                           'properties.content_type': 1, 'properties.url': 1},
            'embedded': {'project': 1}
        }, api=api)
        node_document.picture = get_file(node_document.picture, api=api)
        featured_node_documents.append(node_document)

    return featured_node_documents
@blueprint.route('/open-projects')
def open_projects():
    """Lists public film projects (cached for an hour for anonymous users)."""
    @current_app.cache.cached(timeout=3600, unless=current_user_is_authenticated)
    def render_page():
        projects = get_projects('film')
        return render_template(
            'projects/index_collection.html',
            title='open-projects',
            projects=projects._items,
            api=system_util.pillar_api())

    return render_page()
@blueprint.route('/training')
def training():
    """Lists public training projects (cached for an hour for anonymous users)."""
    @current_app.cache.cached(timeout=3600, unless=current_user_is_authenticated)
    def render_page():
        projects = get_projects('training')
        return render_template(
            'projects/index_collection.html',
            title='training',
            projects=projects._items,
            api=system_util.pillar_api())

    return render_page()
@blueprint.route('/gallery')
def gallery():
    """Redirects to the gallery project."""
    return redirect('/p/gallery')


@blueprint.route('/textures')
def redir_textures():
    """Redirects to the textures project."""
    return redirect('/p/textures')


@blueprint.route('/hdri')
def redir_hdri():
    """Redirects to the HDRI project."""
    return redirect('/p/hdri')


@blueprint.route('/caminandes')
def caminandes():
    """Redirects to the Caminandes 3 project."""
    return redirect('/p/caminandes-3')


@blueprint.route('/cf2')
def cf2():
    """Redirects to the Creature Factory 2 project."""
    return redirect('/p/creature-factory-2')


@blueprint.route('/characters')
def redir_characters():
    """Redirects to the characters project."""
    return redirect('/p/characters')


@blueprint.route('/vrview')
def vrview():
    """Call this from iframes to render spherical content (video and images)."""
    if 'image' not in request.args:
        return redirect('/')
    return render_template('vrview.html')


@blueprint.route('/403')
def error_403():
    """Custom entry point to display the not allowed template"""
    return render_template('errors/403_embed.html')


# Shameful redirects
@blueprint.route('/p/blender-cloud/')
def redirect_cloud_blog():
    """Redirects the old cloud-project blog URL to the main blog."""
    return redirect('/blog')
@blueprint.route('/feeds/blogs.atom')
def feeds_blogs():
    """Global feed generator for latest blogposts across all projects"""
    # Cache the rendered feed for 5 minutes for all visitors.
    @current_app.cache.cached(60*5)
    def render_page():
        feed = AtomFeed('Blender Cloud - Latest updates',
                        feed_url=request.url, url=request.url_root)
        # Get latest blog posts
        api = system_util.pillar_api()
        latest_posts = Node.all({
            'where': {'node_type': 'post', 'properties.status': 'published'},
            'embedded': {'user': 1},
            'sort': '-_created',
            'max_results': '15'
        }, api=api)

        # Populate the feed
        for post in latest_posts._items:
            author = post.user.fullname
            updated = post._updated if post._updated else post._created
            url = url_for_node(node=post)
            # Truncate the body to 500 chars and link to the full post.
            content = post.properties.content[:500]
            content = u'<p>{0}... <a href="{1}">Read more</a></p>'.format(content, url)
            feed.add(post.name, unicode(content),
                     content_type='html',
                     author=author,
                     url=url,
                     updated=updated,
                     published=post._created)
        return feed.get_response()
    return render_page()
@blueprint.route('/search')
def nodes_search_index():
    """Renders the node search page (search itself runs client-side)."""
    return render_template('nodes/search.html')

View File

@ -0,0 +1,5 @@
from .routes import blueprint
def setup_app(app, url_prefix=None):
    """Registers this module's blueprint on the Flask application.

    :param app: the Flask application
    :param url_prefix: URL prefix to mount the blueprint on; None mounts it
        at the application root.
    """
    app.register_blueprint(blueprint, url_prefix=url_prefix)

View File

@ -0,0 +1,2 @@
def append_custom_node_endpoints():
    """Placeholder for registering custom node endpoints; currently a no-op."""
    pass

View File

@ -0,0 +1,189 @@
import logging
from flask import current_app
from flask import request
from flask import jsonify
from flask import render_template
from flask.ext.login import login_required
from flask.ext.login import current_user
from pillarsdk import Node
from pillarsdk import Project
import werkzeug.exceptions as wz_exceptions
from pillar.web.nodes.routes import blueprint
from pillar.web.utils import gravatar
from pillar.web.utils import pretty_date
from pillar.web.utils import system_util
log = logging.getLogger(__name__)
@blueprint.route('/comments/create', methods=['POST'])
@login_required
def comments_create():
    """Creates a comment node attached to the parent node given in the form.

    :returns: JSON with the new comment's id and content.
    """
    content = request.form['content']
    parent_id = request.form.get('parent_id')

    api = system_util.pillar_api()
    parent_node = Node.find(parent_id, api=api)

    node_asset_props = dict(
        project=parent_node.project,
        name='Comment',
        user=current_user.objectid,
        node_type='comment',
        properties=dict(
            content=content,
            status='published',
            confidence=0,
            rating_positive=0,
            rating_negative=0))

    if parent_id:
        node_asset_props['parent'] = parent_id
        # Reuse the parent node fetched above (the original code fetched it a
        # second time here). If the parent is itself a comment, flag the
        # current comment as a reply.
        if parent_node.node_type == 'comment':
            node_asset_props['properties']['is_reply'] = True

    node_asset = Node(node_asset_props)
    node_asset.create(api=api)

    return jsonify(
        asset_id=node_asset._id,
        content=node_asset.properties.content)
@blueprint.route('/comments/<string(length=24):comment_id>', methods=['POST'])
@login_required
def comment_edit(comment_id):
    """Allows a user to edit their comment (or any they have PUT access to).

    :param comment_id: 24-character node id of the comment to edit.
    :returns: empty body with status 204 on success.
    """
    api = system_util.pillar_api()

    # Fetch the old comment.
    comment_node = Node.find(comment_id, api=api)
    if comment_node.node_type != 'comment':
        log.info('POST to %s node %s done as if it were a comment edit; rejected.',
                 comment_node.node_type, comment_id)
        raise wz_exceptions.BadRequest('Node ID is not a comment.')

    # Update the node.
    comment_node.properties.content = request.form['content']
    update_ok = comment_node.update(api=api)
    if not update_ok:
        log.warning('Unable to update comment node %s: %s',
                    comment_id, comment_node.error)
        raise wz_exceptions.InternalServerError('Unable to update comment node, unknown why.')

    return '', 204
def format_comment(comment, is_reply=False, is_team=False, replies=None):
    """Format a comment node into a simpler dictionary.

    :param comment: the comment object
    :param is_reply: True if the comment is a reply to another comment
    :param is_team: True if the author belongs to the group that owns the node
    :param replies: list of replies (formatted with this function)
    :returns: the formatted dict, or None when the embedded user is missing
        (callers are expected to filter out None entries).
    """
    try:
        is_own = (current_user.objectid == comment.user._id) \
            if current_user.is_authenticated else False
    except AttributeError:
        # Embedded user document is missing; report and skip this comment.
        current_app.bugsnag.notify(Exception(
            'Missing user for embedded user ObjectId'),
            meta_data={'nodes_info': {'node_id': comment['_id']}})
        return

    # Determine whether the current user already rated this comment.
    is_rated = False
    is_rated_positive = None
    if comment.properties.ratings:
        for rating in comment.properties.ratings:
            if current_user.is_authenticated and rating.user == current_user.objectid:
                is_rated = True
                is_rated_positive = rating.is_positive
                break

    return dict(_id=comment._id,
                gravatar=gravatar(comment.user.email, size=32),
                time_published=pretty_date(comment._created, detail=True),
                rating=comment.properties.rating_positive - comment.properties.rating_negative,
                author=comment.user.full_name,
                author_username=comment.user.username,
                content=comment.properties.content,
                is_reply=is_reply,
                is_own=is_own,
                is_rated=is_rated,
                is_rated_positive=is_rated_positive,
                is_team=is_team,
                replies=replies)
@blueprint.route("/comments/")
def comments_index():
parent_id = request.args.get('parent_id')
# Get data only if we format it
api = system_util.pillar_api()
if request.args.get('format') == 'json':
nodes = Node.all({
'where': '{"node_type" : "comment", "parent": "%s"}' % (parent_id),
'embedded': '{"user":1}'}, api=api)
comments = []
for comment in nodes._items:
# Query for first level children (comment replies)
replies = Node.all({
'where': '{"node_type" : "comment", "parent": "%s"}' % (comment._id),
'embedded': '{"user":1}'}, api=api)
replies = replies._items if replies._items else None
if replies:
replies = [format_comment(reply, is_reply=True) for reply in replies]
comments.append(
format_comment(comment, is_reply=False, replies=replies))
return_content = jsonify(items=[c for c in comments if c is not None])
else:
parent_node = Node.find(parent_id, api=api)
project = Project({'_id': parent_node.project})
has_method_POST = project.node_type_has_method('comment', 'POST', api=api)
# Data will be requested via javascript
return_content = render_template('nodes/custom/_comments.html',
parent_id=parent_id,
has_method_POST=has_method_POST)
return return_content
@blueprint.route("/comments/<comment_id>/rate/<operation>", methods=['POST'])
@login_required
def comments_rate(comment_id, operation):
"""Comment rating function
:param comment_id: the comment id
:type comment_id: str
:param rating: the rating (is cast from 0 to False and from 1 to True)
:type rating: int
"""
if operation not in {u'revoke', u'upvote', u'downvote'}:
raise wz_exceptions.BadRequest('Invalid operation')
api = system_util.pillar_api()
comment = Node.find(comment_id, {'projection': {'_id': 1}}, api=api)
if not comment:
log.info('Node %i not found; how could someone click on the upvote/downvote button?',
comment_id)
raise wz_exceptions.NotFound()
# PATCH the node and return the result.
result = comment.patch({'op': operation}, api=api)
assert result['_status'] == 'OK'
return jsonify({
'status': 'success',
'data': {
'op': operation,
'rating_positive': result.properties.rating_positive,
'rating_negative': result.properties.rating_negative,
}})

View File

@ -0,0 +1,36 @@
from flask import request
from flask import jsonify
from flask.ext.login import login_required
from flask.ext.login import current_user
from pillarsdk import Node
from pillar.web.utils import system_util
from ..routes import blueprint
@blueprint.route('/groups/create', methods=['POST'])
@login_required
def groups_create():
    """Creates a 'group' node (folder) inside a project.

    :returns: JSON with the new group's name and node id.
    """
    # Use current_project_id from the session instead of the cookie
    name = request.form['name']
    project_id = request.form['project_id']
    parent_id = request.form.get('parent_id')

    api = system_util.pillar_api()
    # We will create the Node object later on, after creating the file object
    node_asset_props = dict(
        name=name,
        user=current_user.objectid,
        node_type='group',
        project=project_id,
        properties=dict(
            status='published'))
    # Add parent_id only if provided (we do not provide it when creating groups
    # at the Project root)
    if parent_id:
        node_asset_props['parent'] = parent_id

    node_asset = Node(node_asset_props)
    node_asset.create(api=api)

    return jsonify(
        status='success',
        data=dict(name=name, asset_id=node_asset._id))

View File

@ -0,0 +1,168 @@
from pillarsdk import Node
from pillarsdk import Project
from pillarsdk.exceptions import ResourceNotFound
from flask import abort
from flask import render_template
from flask import redirect
from flask.ext.login import login_required
from flask.ext.login import current_user
from pillar.web.utils import system_util
from pillar.web.utils import attach_project_pictures
from pillar.web.utils import get_file
from pillar.web.nodes.routes import blueprint
from pillar.web.nodes.routes import url_for_node
from pillar.web.nodes.forms import get_node_form
from pillar.web.nodes.forms import process_node_form
from pillar.web.projects.routes import project_update_nodes_list
def posts_view(project_id, url=None):
    """View individual blogpost.

    :param project_id: id of the project whose blog to show.
    :param url: slug of an individual post; None renders the post index.
    """
    api = system_util.pillar_api()

    # Fetch project (for background images and links generation)
    project = Project.find(project_id, api=api)
    attach_project_pictures(project, api)

    try:
        blog = Node.find_one({
            'where': {'node_type': 'blog', 'project': project_id},
        }, api=api)
    except ResourceNotFound:
        abort(404)

    if url:
        try:
            post = Node.find_one({
                'where': '{"parent": "%s", "properties.url": "%s"}' % (blog._id, url),
                'embedded': '{"node_type": 1, "user": 1}',
            }, api=api)
            if post.picture:
                post.picture = get_file(post.picture, api=api)
        except ResourceNotFound:
            return abort(404)

        # If post is not published, check that the user is also the author of
        # the post. If not, return 404.
        if post.properties.status != "published":
            if current_user.is_authenticated:
                if not post.has_method('PUT'):
                    abort(403)
            else:
                abort(403)

        return render_template(
            'nodes/custom/post/view.html',
            blog=blog,
            node=post,
            project=project,
            title='blog',
            api=api)
    else:
        node_type_post = project.get_node_type('post')
        # Editors (users with PUT access on the blog) also see unpublished posts.
        status_query = "" if blog.has_method('PUT') else ', "properties.status": "published"'
        posts = Node.all({
            'where': '{"parent": "%s" %s}' % (blog._id, status_query),
            'embedded': '{"user": 1}',
            'sort': '-_created'
        }, api=api)

        for post in posts._items:
            post.picture = get_file(post.picture, api=api)

        return render_template(
            'nodes/custom/blog/index.html',
            node_type_post=node_type_post,
            posts=posts._items,
            project=project,
            title='blog',
            api=api)
@blueprint.route("/posts/<project_id>/create", methods=['GET', 'POST'])
@login_required
def posts_create(project_id):
api = system_util.pillar_api()
try:
project = Project.find(project_id, api=api)
except ResourceNotFound:
return abort(404)
attach_project_pictures(project, api)
blog = Node.find_one({
'where': {'node_type': 'blog', 'project': project_id}}, api=api)
node_type = project.get_node_type('post')
# Check if user is allowed to create a post in the blog
if not project.node_type_has_method('post', 'POST', api=api):
return abort(403)
form = get_node_form(node_type)
if form.validate_on_submit():
# Create new post object from scratch
post_props = dict(
node_type='post',
name=form.name.data,
picture=form.picture.data,
user=current_user.objectid,
parent=blog._id,
project=project._id,
properties=dict(
content=form.content.data,
status=form.status.data,
url=form.url.data))
if form.picture.data == '':
post_props['picture'] = None
post = Node(post_props)
post.create(api=api)
# Only if the node is set as published, push it to the list
if post.properties.status == 'published':
project_update_nodes_list(post, project_id=project._id, list_name='blog')
return redirect(url_for_node(node=post))
form.parent.data = blog._id
return render_template('nodes/custom/post/create.html',
node_type=node_type,
form=form,
project=project,
api=api)
@blueprint.route("/posts/<post_id>/edit", methods=['GET', 'POST'])
@login_required
def posts_edit(post_id):
api = system_util.pillar_api()
try:
post = Node.find(post_id, {
'embedded': '{"user": 1}'}, api=api)
except ResourceNotFound:
return abort(404)
# Check if user is allowed to edit the post
if not post.has_method('PUT'):
return abort(403)
project = Project.find(post.project, api=api)
attach_project_pictures(project, api)
node_type = project.get_node_type(post.node_type)
form = get_node_form(node_type)
if form.validate_on_submit():
if process_node_form(form, node_id=post_id, node_type=node_type,
user=current_user.objectid):
# The the post is published, add it to the list
if form.status.data == 'published':
project_update_nodes_list(post, project_id=project._id, list_name='blog')
return redirect(url_for_node(node=post))
form.parent.data = post.parent
form.name.data = post.name
form.content.data = post.properties.content
form.status.data = post.properties.status
form.url.data = post.properties.url
if post.picture:
form.picture.data = post.picture
# Embed picture file
post.picture = get_file(post.picture, api=api)
if post.properties.picture_square:
form.picture_square.data = post.properties.picture_square
return render_template('nodes/custom/post/edit.html',
node_type=node_type,
post=post,
form=form,
project=project,
api=api)

View File

@ -0,0 +1,31 @@
import requests
import os
from pillar.web.utils import system_util
class StorageNode(object):
    """Wraps a storage node document and browses its backend over HTTP."""

    # URL path component under which the storage API is served.
    path = "storage"

    def __init__(self, storage_node):
        # The pillarsdk storage node document this object wraps.
        self.storage_node = storage_node

    @property
    def entrypoint(self):
        """Base URL of this storage node on the Pillar server.

        Built from the server endpoint plus the node's backend, project and
        subdir properties.
        """
        return os.path.join(
            system_util.pillar_server_endpoint(),
            self.path,
            self.storage_node.properties.backend,
            self.storage_node.properties.project,
            self.storage_node.properties.subdir)

    # @current_app.cache.memoize(timeout=3600)
    def browse(self, path=None):
        """Search a storage node for a path, which can point both to a directory
        or to a file.

        :param path: path relative to the entrypoint, or None for the root.
        :returns: the parsed JSON response of the storage backend.
        """
        if path is None:
            url = self.entrypoint
        else:
            url = os.path.join(self.entrypoint, path)
        r = requests.get(url)
        return r.json()

289
pillar/web/nodes/forms.py Normal file
View File

@ -0,0 +1,289 @@
import logging
from datetime import datetime
from datetime import date
import pillarsdk
from flask import current_app
from flask_wtf import Form
from wtforms import StringField
from wtforms import DateField
from wtforms import SelectField
from wtforms import HiddenField
from wtforms import BooleanField
from wtforms import IntegerField
from wtforms import FloatField
from wtforms import TextAreaField
from wtforms import DateTimeField
from wtforms import SelectMultipleField
from wtforms import FieldList
from wtforms.validators import DataRequired
from pillar.web.utils import system_util
from pillar.web.utils.forms import FileSelectField
from pillar.web.utils.forms import ProceduralFileSelectForm
from pillar.web.utils.forms import CustomFormField
from pillar.web.utils.forms import build_file_select_form
log = logging.getLogger(__name__)
def add_form_properties(form_class, node_schema, form_schema, prefix=''):
    """Add fields to a form based on the node and form schema provided.

    :type node_schema: dict
    :param node_schema: the validation schema used by Cerberus
    :type form_class: class
    :param form_class: The form class to which we append fields
    :type form_schema: dict
    :param form_schema: description of how to build the form (which fields to
        show and hide)
    :type prefix: str
    :param prefix: name prefix for fields of nested (dict) schemas; nesting
        levels are separated by '__'.
    """
    for prop, schema_prop in node_schema.iteritems():
        form_prop = form_schema.get(prop, {})
        if prop == 'items':
            continue
        if not form_prop.get('visible', True):
            continue
        prop_name = "{0}{1}".format(prefix, prop)

        # Recursive call if detects a dict
        field_type = schema_prop['type']

        if field_type == 'dict':
            # This works if the dictionary schema is hardcoded.
            # If we define it using propertyschema and valueschema, this
            # validation pattern does not work and crashes.
            add_form_properties(form_class, schema_prop['schema'],
                                form_prop['schema'], "{0}__".format(prop_name))
            continue

        if field_type == 'list':
            if prop == 'attachments':
                # class AttachmentForm(Form):
                #     pass
                # AttachmentForm.file = FileSelectField('file')
                # AttachmentForm.size = StringField()
                # AttachmentForm.slug = StringField()
                field = FieldList(CustomFormField(ProceduralFileSelectForm))
            elif prop == 'files':
                schema = schema_prop['schema']['schema']
                file_select_form = build_file_select_form(schema)
                field = FieldList(CustomFormField(file_select_form),
                                  min_entries=1)
            elif 'allowed' in schema_prop['schema']:
                choices = [(c, c) for c in schema_prop['schema']['allowed']]
                field = SelectMultipleField(choices=choices)
            else:
                field = SelectMultipleField(choices=[])
        elif 'allowed' in schema_prop:
            select = []
            for option in schema_prop['allowed']:
                select.append((str(option), str(option)))
            field = SelectField(choices=select)
        elif field_type == 'datetime':
            if form_prop.get('dateonly'):
                field = DateField(prop_name, default=date.today())
            else:
                field = DateTimeField(prop_name, default=datetime.now())
        elif field_type == 'integer':
            field = IntegerField(prop_name, default=0)
        elif field_type == 'float':
            field = FloatField(prop_name, default=0)
        elif field_type == 'boolean':
            field = BooleanField(prop_name)
        elif field_type == 'objectid' and 'data_relation' in schema_prop:
            if schema_prop['data_relation']['resource'] == 'files':
                # Will be the ID of a file document.
                field = FileSelectField(prop_name)
            else:
                field = StringField(prop_name)
        elif schema_prop.get('maxlength', 0) > 64:
            # Longer text fields get a textarea instead of a one-line input.
            field = TextAreaField(prop_name)
        else:
            field = StringField(prop_name)

        setattr(form_class, prop_name, field)
def get_node_form(node_type):
    """Get a procedurally generated WTForm, based on the dyn_schema and
    node_schema of a specific node_type.

    :type node_type: dict
    :param node_type: Describes the node type via dyn_schema, form_schema and
        parent
    :returns: an instance of the generated form class.
    """
    class ProceduralForm(Form):
        pass

    node_schema = node_type['dyn_schema'].to_dict()
    form_prop = node_type['form_schema'].to_dict()
    parent_prop = node_type['parent']

    ProceduralForm.name = StringField('Name', validators=[DataRequired()])
    # Parenting
    if parent_prop:
        parent_names = ", ".join(parent_prop)
        ProceduralForm.parent = HiddenField('Parent ({0})'.format(parent_names))

    ProceduralForm.description = TextAreaField('Description')
    ProceduralForm.picture = FileSelectField('Picture', file_format='image')
    ProceduralForm.node_type = HiddenField(default=node_type['name'])

    # Append the node-type-specific fields from the dynamic schema.
    add_form_properties(ProceduralForm, node_schema, form_prop)

    return ProceduralForm()
def recursive(path, rdict, data):
    """Assigns `data` into nested dict `rdict` along the keys in `path`.

    Intermediate dicts are created as needed. NOTE: `path` is consumed
    (mutated) by this function.

    :param path: list of keys leading to the target slot.
    :param rdict: dict to update in place.
    :param data: value to store at the final key.
    :returns: the updated `rdict`.
    """
    item = path.pop(0)
    if item not in rdict:
        rdict[item] = {}
    if path:
        rdict[item] = recursive(path, rdict[item], data)
    else:
        rdict[item] = data
    return rdict
def process_node_form(form, node_id=None, node_type=None, user=None):
    """Generic function used to process new nodes, as well as edits.

    Maps submitted WTForms data back onto a pillarsdk.Node, using the node
    type's dyn_schema to decide how each form value is converted. Nested
    dict properties use '__' in the field name as the level separator.

    :param form: validated WTForms form, as built by get_node_form().
    :param node_id: if given, update that existing node; otherwise create one.
    :param node_type: node type dict providing 'dyn_schema' and 'form_schema'.
    :param user: user ObjectID string; processing is refused when missing.
    :returns: the update status (truthy/falsy) on edit, the POST response on
        creation, or False when no user is given.
    """
    if not user:
        log.warning('process_node_form(node_id=%s) called while user not logged in', node_id)
        return False

    api = system_util.pillar_api()
    node_schema = node_type['dyn_schema'].to_dict()
    form_schema = node_type['form_schema'].to_dict()

    if node_id:
        # Update existing node
        node = pillarsdk.Node.find(node_id, api=api)
        node.name = form.name.data
        node.description = form.description.data
        if 'picture' in form:
            node.picture = form.picture.data
            # The form serialises "no picture" as the string 'None' or ''.
            if node.picture == 'None' or node.picture == '':
                node.picture = None
        if 'parent' in form:
            if form.parent.data != "":
                node.parent = form.parent.data

        def update_data(node_schema, form_schema, prefix=""):
            # Walk the dyn_schema and copy each form value onto the node,
            # converting types as dictated by the schema.
            for pr in node_schema:
                schema_prop = node_schema[pr]
                form_prop = form_schema.get(pr, {})
                if pr == 'items':
                    continue
                # Skip properties explicitly hidden from the form.
                if 'visible' in form_prop and not form_prop['visible']:
                    continue
                prop_name = "{0}{1}".format(prefix, pr)
                if schema_prop['type'] == 'dict':
                    # Recurse into nested schemas; '__' separates levels in
                    # the flattened form field names.
                    update_data(
                        schema_prop['schema'],
                        form_prop['schema'],
                        "{0}__".format(prop_name))
                    continue
                data = form[prop_name].data
                # NOTE(review): this 'dict' branch is unreachable -- dict-typed
                # properties already hit the 'continue' above.
                if schema_prop['type'] == 'dict':
                    if data == 'None':
                        continue
                elif schema_prop['type'] == 'integer':
                    if data == '':
                        data = 0
                    else:
                        data = int(form[prop_name].data)
                elif schema_prop['type'] == 'datetime':
                    # Store datetimes as RFC 1123 strings, as Eve expects.
                    data = datetime.strftime(data,
                        app.config['RFC1123_DATE_FORMAT'])
                elif schema_prop['type'] == 'list':
                    if pr == 'attachments':
                        # data = json.loads(data)
                        data = [dict(field='description', files=data)]
                    elif pr == 'files':
                        # Only keep those items that actually refer to a file.
                        data = [file_item for file_item in data
                                if file_item.get('file')]
                    # elif pr == 'tags':
                    #     data = [tag.strip() for tag in data.split(',')]
                elif schema_prop['type'] == 'objectid':
                    if data == '':
                        # Set empty object to None so it gets removed by the
                        # SDK before node.update()
                        data = None
                else:
                    if pr in form:
                        data = form[prop_name].data
                path = prop_name.split('__')
                if len(path) > 1:
                    # Nested property: rebuild the nested dict via recursive().
                    recursive_prop = recursive(
                        path, node.properties.to_dict(), data)
                    node.properties = recursive_prop
                else:
                    node.properties[prop_name] = data

        update_data(node_schema, form_schema)
        ok = node.update(api=api)
        if not ok:
            log.warning('Unable to update node: %s', node.error)
        # if form.picture.data:
        #     image_data = request.files[form.picture.name].read()
        #     post = node.replace_picture(image_data, api=api)
        return ok
    else:
        # Create a new node
        node = pillarsdk.Node()
        prop = {}
        files = {}
        prop['name'] = form.name.data
        prop['description'] = form.description.data
        prop['user'] = user
        if 'picture' in form:
            prop['picture'] = form.picture.data
            if prop['picture'] == 'None' or prop['picture'] == '':
                prop['picture'] = None
        if 'parent' in form:
            prop['parent'] = form.parent.data
        prop['properties'] = {}

        def get_data(node_schema, form_schema, prefix=""):
            # Same walk as update_data() above, but collecting values into
            # the 'prop' dict for a POST instead of mutating a node.
            for pr in node_schema:
                schema_prop = node_schema[pr]
                form_prop = form_schema.get(pr, {})
                if pr == 'items':
                    continue
                if 'visible' in form_prop and not form_prop['visible']:
                    continue
                prop_name = "{0}{1}".format(prefix, pr)
                if schema_prop['type'] == 'dict':
                    get_data(
                        schema_prop['schema'],
                        form_prop['schema'],
                        "{0}__".format(prop_name))
                    continue
                data = form[prop_name].data
                if schema_prop['type'] == 'media':
                    # NOTE(review): fixed, predictable temp path shared by all
                    # requests (race-prone), and 'binfile' is never closed --
                    # consider tempfile.NamedTemporaryFile.
                    tmpfile = '/tmp/binary_data'
                    data.save(tmpfile)
                    binfile = open(tmpfile, 'rb')
                    files[pr] = binfile
                    continue
                if schema_prop['type'] == 'integer':
                    if data == '':
                        data = 0
                if schema_prop['type'] == 'list':
                    if data == '':
                        data = []
                if schema_prop['type'] == 'datetime':
                    data = datetime.strftime(data, app.config['RFC1123_DATE_FORMAT'])
                if schema_prop['type'] == 'objectid':
                    if data == '':
                        data = None
                path = prop_name.split('__')
                if len(path) > 1:
                    prop['properties'] = recursive(path, prop['properties'], data)
                else:
                    prop['properties'][prop_name] = data

        get_data(node_schema, form_schema)

        # NOTE(review): get_node_form() defines a 'node_type' field, not
        # 'node_type_id' -- confirm this attribute exists on the given form.
        prop['node_type'] = form.node_type_id.data
        post = node.post(prop, api=api)
        return post

688
pillar/web/nodes/routes.py Normal file
View File

@ -0,0 +1,688 @@
import os
import json
import logging
from datetime import datetime
import pillarsdk
from pillarsdk import Node
from pillarsdk import Project
from pillarsdk.exceptions import ResourceNotFound
from pillarsdk.exceptions import ForbiddenAccess
from flask import Blueprint, current_app
from flask import redirect
from flask import render_template
from flask import url_for
from flask import request
from flask import jsonify
from flask import abort
from flask_login import current_user
from werkzeug.exceptions import NotFound
from wtforms import SelectMultipleField
from flask.ext.login import login_required
from jinja2.exceptions import TemplateNotFound
from pillar.web.utils import caching
from pillar.web.nodes.forms import get_node_form
from pillar.web.nodes.forms import process_node_form
from pillar.web.nodes.custom.storage import StorageNode
from pillar.web.projects.routes import project_update_nodes_list
from pillar.web.utils import get_file
from pillar.web.utils.jstree import jstree_build_children
from pillar.web.utils.jstree import jstree_build_from_node
from pillar.web.utils.forms import ProceduralFileSelectForm
from pillar.web.utils.forms import build_file_select_form
from pillar.web import system_util
blueprint = Blueprint('nodes', __name__)
log = logging.getLogger(__name__)
def get_node(node_id, user_id):
    """Fetch one node, with its node_type embedded, as a plain dict.

    ``user_id`` is not used in the lookup; presumably it is part of a
    caching key for callers -- TODO confirm.
    """
    api = system_util.pillar_api()
    found = Node.find(node_id + '/?embedded={"node_type":1}', api=api)
    return found.to_dict()
def get_node_children(node_id, node_type_name, user_id):
    """This function is currently unused since it does not give significant
    performance improvements.

    Returns the node's children as a plain dict; group nodes only list
    their published children.
    """
    api = system_util.pillar_api()

    # Groups are filtered on published status; other types are not.
    if node_type_name == 'group':
        status_filter = ',"properties.status": "published"'
    else:
        status_filter = ''

    children = Node.all({
        'where': '{"parent": "%s" %s}' % (node_id, status_filter),
        'embedded': '{"node_type": 1}'}, api=api)
    return children.to_dict()
@blueprint.route("/<node_id>/jstree")
def jstree(node_id):
    """JsTree view.

    Returns a lightweight version of the node, to be used by JsTree in the
    frontend. Two possible cases:
    - https://pillar/<node_id>/jstree (construct the whole
      expanded tree starting from the node_id. Use only once)
    - https://pillar/<node_id>/jstree&children=1 (deliver the
      children of a node - use in the navigation of the tree)
    """
    api = system_util.pillar_api()

    # Only fetch the fields JsTree actually needs.
    node = Node.find(node_id, {
        'projection': {
            'name': 1,
            'node_type': 1,
            'parent': 1,
            'project': 1,
            'properties.content_type': 1,
        }
    }, api=api)

    if request.args.get('children') != '1':
        # Initial request: build the whole expanded tree from this node.
        return jsonify(items=jstree_build_from_node(node))

    if node.node_type == 'storage':
        storage_node = StorageNode(node)
        # A path within the storage may be given via ?path=...
        listing = storage_node.browse(request.args.get('path'))
        # Inject the current node id in the response, so that JsTree can
        # expose the storage_node property and use it for further queries.
        listing['storage_node'] = node._id
        if 'children' in listing:
            for child in listing['children']:
                child['storage_node'] = node._id
        return jsonify(listing)

    return jsonify(jstree_build_children(node))
@blueprint.route("/<node_id>/view")
def view(node_id):
    """Render a node for embedding, dispatching on its node type.

    Supports ?format=json (raw data for the frontend) and ?t (theatre view).
    """
    api = system_util.pillar_api()

    # Get node, we'll embed linked objects later.
    try:
        node = Node.find(node_id, api=api)
    except ResourceNotFound:
        return render_template('errors/404_embed.html')
    except ForbiddenAccess:
        return render_template('errors/403_embed.html')

    node_type_name = node.node_type

    if node_type_name == 'post':
        # Posts shouldn't be shown at this route, redirect to the correct one.
        return redirect(url_for_node(node=node))

    # Set the default name of the template path based on the node name
    template_path = os.path.join('nodes', 'custom', node_type_name)
    # Set the default action for a template. By default is view and we override
    # it only if we are working storage nodes, where an 'index' is also possible
    template_action = 'view'

    def allow_link():
        """Helper function to cross check if the user is authenticated, and it
        is has the 'subscriber' role. Also, we check if the node has world GET
        permissions, which means it's free.
        """
        # Check if node permissions for the world exist (if node is free)
        if node.permissions and node.permissions.world:
            return 'GET' in node.permissions.world

        if current_user.is_authenticated:
            allowed_roles = {u'subscriber', u'demo', u'admin'}
            return bool(allowed_roles.intersection(current_user.roles or ()))
        return False

    link_allowed = allow_link()

    # Per-type preprocessing; each handler may change the template path/action.
    node_type_handlers = {
        'asset': _view_handler_asset,
        'storage': _view_handler_storage,
        'texture': _view_handler_texture,
        'hdri': _view_handler_hdri,
    }
    if node_type_name in node_type_handlers:
        handler = node_type_handlers[node_type_name]
        template_path, template_action = handler(node, template_path, template_action, link_allowed)

    # Fetch linked resources.
    node.picture = get_file(node.picture, api=api)
    node.user = node.user and pillarsdk.User.find(node.user, api=api)
    try:
        node.parent = node.parent and pillarsdk.Node.find(node.parent, api=api)
    except ForbiddenAccess:
        # This can happen when a node has world-GET, but the parent doesn't.
        node.parent = None

    # Get children
    children_projection = {'project': 1, 'name': 1, 'picture': 1, 'parent': 1,
                           'node_type': 1, 'properties.order': 1, 'properties.status': 1,
                           'user': 1, 'properties.content_type': 1}
    children_where = {'parent': node._id}

    if node_type_name == 'group':
        children_where['properties.status'] = 'published'
        children_projection['permissions.world'] = 1
    else:
        children_projection['properties.files'] = 1
        children_projection['properties.is_tileable'] = 1

    try:
        children = Node.all({
            'projection': children_projection,
            'where': children_where,
            'sort': [('properties.order', 1), ('name', 1)]}, api=api)
    except ForbiddenAccess:
        return render_template('errors/403_embed.html')
    children = children._items

    for child in children:
        child.picture = get_file(child.picture, api=api)

    # JSON response for the frontend instead of a rendered template.
    if request.args.get('format') == 'json':
        node = node.to_dict()
        node['url_edit'] = url_for('nodes.edit', node_id=node['_id'])
        return jsonify({
            'node': node,
            'children': children.to_dict() if children else {},
            'parent': node['parent'] if 'parent' in node else {}
        })

    # Theatre mode always uses the asset template.
    if 't' in request.args:
        template_path = os.path.join('nodes', 'custom', 'asset')
        template_action = 'view_theatre'

    template_path = '{0}/{1}_embed.html'.format(template_path, template_action)
    # template_path_full = os.path.join(current_app.config['TEMPLATES_PATH'], template_path)
    #
    # # Check if template exists on the filesystem
    # if not os.path.exists(template_path_full):
    #     log.warning('Template %s does not exist for node type %s',
    #                 template_path, node_type_name)
    #     raise NotFound("Missing template '{0}'".format(template_path))

    return render_template(template_path,
                           node_id=node._id,
                           node=node,
                           parent=node.parent,
                           children=children,
                           config=current_app.config,
                           api=api)
def _view_handler_asset(node, template_path, template_action, link_allowed):
    """Prepare an 'asset' node for viewing.

    :returns: (template_path, template_action) tuple for view().
    """
    # Attach the file document to the asset node
    node_file = get_file(node.properties.file)
    node.file = node_file

    # Remove the link to the file if it's not allowed.
    if node_file and not link_allowed:
        node.file.link = None

    # Derive the asset type from the MIME type prefix ('video', 'image', ...).
    if node_file and node_file.content_type is not None:
        asset_type = node_file.content_type.split('/')[0]
    else:
        asset_type = None

    if asset_type == 'video':
        # Process video type and select video template
        if link_allowed:
            sources = []
            if node_file and node_file.variations:
                for f in node_file.variations:
                    # NOTE(review): the sources entry captures f.link BEFORE
                    # the cdnsun rewrite below; only node.file_variations sees
                    # the download-filename suffix -- presumably intentional,
                    # confirm.
                    sources.append({'type': f.content_type, 'src': f.link})
                    # Build a link that triggers download with proper filename
                    # TODO: move this to Pillar
                    if f.backend == 'cdnsun':
                        f.link = "{0}&name={1}.{2}".format(f.link, node.name, f.format)
            node.video_sources = json.dumps(sources)
            node.file_variations = node_file.variations
        else:
            node.video_sources = None
            node.file_variations = None
    elif asset_type != 'image':
        # Treat it as normal file (zip, blend, application, etc)
        asset_type = 'file'

    template_path = os.path.join(template_path, asset_type)
    return template_path, template_action
def _view_handler_storage(node, template_path, template_action, link_allowed):
    """Prepare a 'storage' node for viewing.

    :returns: (template_path, template_action) tuple for view().
    """
    listing = StorageNode(node).browse(request.args.get('path'))
    node.name = listing['name']
    listing['storage_node'] = node._id

    if 'children' in listing:
        # The item has children, so we are working with a directory group:
        # expose the children and render the index.
        for child in listing['children']:
            child['storage_node'] = node._id
            child['name'] = child['text']
            child['content_type'] = os.path.dirname(child['type'])
        node.children = listing['children']
        template_action = 'index'
    else:
        # Single file: expose its size and a signed download link.
        node.status = 'published'
        node.length = listing['size']
        node.download_link = listing['signed_url']
    return template_path, template_action
def _view_handler_texture(node, template_path, template_action, link_allowed):
    """Prepare a 'texture' node for viewing.

    :returns: (template_path, template_action) tuple for view().
    """
    # Attach the file document to each texture file entry.
    for tex_file in node.properties.files:
        tex_file.file = get_file(tex_file.file)
        # Remove the link to the file if it's not allowed.
        if tex_file.file and not link_allowed:
            tex_file.file.link = None

    return template_path, template_action
def _view_handler_hdri(node, template_path, template_action, link_allowed):
    """Prepare an 'hdri' node for viewing.

    :returns: (template_path, template_action) tuple for view().
    """
    if link_allowed:
        # Attach the file document to each HDRi file entry.
        for hdri_file in node.properties.files:
            hdri_file.file = get_file(hdri_file.file)
    else:
        # Downloads not allowed: hide the files entirely.
        node.properties.files = None

    return template_path, template_action
@blueprint.route("/<node_id>/edit", methods=['GET', 'POST'])
@login_required
def edit(node_id):
    """Generic node editing form.

    On GET, builds the procedural form pre-filled with the node's data;
    on POST, validates and hands the form to process_node_form().
    """

    def set_properties(dyn_schema, form_schema, node_properties, form,
                       prefix="",
                       set_data=True):
        """Initialize custom properties for the form. We run this function once
        before validating the function with set_data=False, so that we can set
        any multiselect field that was originally specified empty and fill it
        with the current choices.
        """
        for prop in dyn_schema:
            schema_prop = dyn_schema[prop]
            form_prop = form_schema.get(prop, {})
            prop_name = "{0}{1}".format(prefix, prop)

            if schema_prop['type'] == 'dict':
                # Recurse into nested schemas; '__' separates nesting levels
                # in the flattened form field names.
                set_properties(
                    schema_prop['schema'],
                    form_prop['schema'],
                    node_properties[prop_name],
                    form,
                    "{0}__".format(prop_name))
                continue

            if prop_name not in form:
                continue

            try:
                db_prop_value = node_properties[prop]
            except KeyError:
                log.debug('%s not found in form for node %s', prop_name, node_id)
                continue

            if schema_prop['type'] == 'datetime':
                # Stored as RFC 1123 string; the form needs a datetime object.
                db_prop_value = datetime.strptime(db_prop_value,
                                                  current_app.config['RFC1123_DATE_FORMAT'])

            if isinstance(form[prop_name], SelectMultipleField):
                # If we are dealing with a multiselect field, check if
                # it's empty (usually because we can't query the whole
                # database to pick all the choices). If it's empty we
                # populate the choices with the actual data.
                if not form[prop_name].choices:
                    form[prop_name].choices = [(d, d) for d in db_prop_value]
                    # Choices should be a tuple with value and name

            # Assign data to the field
            if set_data:
                if prop_name == 'attachments':
                    # Rebuild one file-select subform per attached file.
                    for attachment_collection in db_prop_value:
                        for a in attachment_collection['files']:
                            attachment_form = ProceduralFileSelectForm()
                            attachment_form.file = a['file']
                            attachment_form.slug = a['slug']
                            attachment_form.size = 'm'
                            form[prop_name].append_entry(attachment_form)
                elif prop_name == 'files':
                    schema = schema_prop['schema']['schema']
                    # Extra entries are caused by min_entries=1 in the form
                    # creation.
                    field_list = form[prop_name]
                    if len(db_prop_value) > 0:
                        while len(field_list):
                            field_list.pop_entry()
                    for file_data in db_prop_value:
                        file_form_class = build_file_select_form(schema)
                        subform = file_form_class()
                        # NOTE(review): dict.iteritems() is Python 2 only.
                        for key, value in file_data.iteritems():
                            setattr(subform, key, value)
                        field_list.append_entry(subform)
                # elif prop_name == 'tags':
                #     form[prop_name].data = ', '.join(data)
                else:
                    form[prop_name].data = db_prop_value
            else:
                # Default population of multiple file form list (only if
                # we are getting the form)
                if request.method == 'POST':
                    continue
                if prop_name == 'attachments':
                    if not db_prop_value:
                        attachment_form = ProceduralFileSelectForm()
                        attachment_form.file = 'file'
                        attachment_form.slug = ''
                        attachment_form.size = ''
                        form[prop_name].append_entry(attachment_form)

    api = system_util.pillar_api()
    node = Node.find(node_id, api=api)
    project = Project.find(node.project, api=api)
    node_type = project.get_node_type(node.node_type)
    form = get_node_form(node_type)
    user_id = current_user.objectid
    dyn_schema = node_type['dyn_schema'].to_dict()
    form_schema = node_type['form_schema'].to_dict()
    error = ""

    node_properties = node.properties.to_dict()

    ensure_lists_exist_as_empty(node.to_dict(), node_type)

    # First pass (set_data=False): only prime empty multiselect choices so
    # validation of the POSTed data can succeed.
    set_properties(dyn_schema, form_schema, node_properties, form,
                   set_data=False)

    if form.validate_on_submit():
        if process_node_form(form, node_id=node_id, node_type=node_type, user=user_id):
            # Handle the specific case of a blog post
            if node_type.name == 'post':
                project_update_nodes_list(node, list_name='blog')
            else:
                project_update_nodes_list(node)
            # Emergency hardcore cache flush
            # cache.clear()
            return redirect(url_for('nodes.view', node_id=node_id, embed=1,
                                    _external=True,
                                    _scheme=current_app.config['SCHEME']))
        else:
            log.debug('Error sending data to Pillar, see Pillar logs.')
            error = 'Server error'
    else:
        if form.errors:
            log.debug('Form errors: %s', form.errors)

    # Populate Form
    form.name.data = node.name
    form.description.data = node.description
    if 'picture' in form:
        form.picture.data = node.picture
    if node.parent:
        form.parent.data = node.parent

    # Second pass: fill the custom property fields with the node's data.
    set_properties(dyn_schema, form_schema, node_properties, form)

    # Get previews
    node.picture = get_file(node.picture, api=api) if node.picture else None

    # Get Parent
    try:
        parent = Node.find(node['parent'], api=api)
    except KeyError:
        parent = None
    except ResourceNotFound:
        parent = None

    embed_string = ''
    # Check if we want to embed the content via an AJAX call
    if request.args.get('embed'):
        if request.args.get('embed') == '1':
            # Define the prefix for the embedded template
            embed_string = '_embed'

    template = '{0}/edit{1}.html'.format(node_type['name'], embed_string)

    # We should more simply check if the template file actually exsists on
    # the filesystem level
    try:
        return render_template(
            template,
            node=node,
            parent=parent,
            form=form,
            errors=form.errors,
            error=error,
            api=api)
    except TemplateNotFound:
        # NOTE(review): argument 0 (node_type['name']) is unused here; the
        # format index {1} picks embed_string, so this falls back to the
        # generic 'nodes/edit*.html' template.
        template = 'nodes/edit{1}.html'.format(node_type['name'], embed_string)
        return render_template(
            template,
            node=node,
            parent=parent,
            form=form,
            errors=form.errors,
            error=error,
            api=api)
def ensure_lists_exist_as_empty(node_doc, node_type):
    """Ensures that any properties of type 'list' exist as empty lists.

    This allows us to iterate over lists without worrying that they
    are set to None. Only works for top-level list properties.

    :param node_doc: node document (dict); its 'properties' sub-dict is
        created and/or updated in place.
    :param node_type: node type object with a ``dyn_schema`` attribute.
    """
    node_properties = node_doc.setdefault('properties', {})

    # .items() instead of .iteritems() so this also runs on Python 3
    # (identical behaviour on Python 2).
    for prop, schema in node_type.dyn_schema.to_dict().items():
        if schema['type'] != 'list':
            continue
        if node_properties.get(prop) is None:
            node_properties[prop] = []
@blueprint.route('/create', methods=['POST'])
@login_required
def create():
    """Create a node. Requires a number of params:

    - project id
    - node_type
    - parent node (optional)
    """
    # Defensive: the route is already POST-only via methods=['POST'].
    if request.method != 'POST':
        return abort(403)

    project_id = request.form['project_id']
    parent_id = request.form.get('parent_id')
    node_type_name = request.form['node_type_name']
    api = system_util.pillar_api()

    # Fetch the Project or 404
    try:
        project = Project.find(project_id, api=api)
    except ResourceNotFound:
        return abort(404)

    node_type = project.get_node_type(node_type_name)
    # 'group' node types are presented to users as folders.
    node_type_name = 'folder' if node_type['name'] == 'group' else \
        node_type['name']

    node_props = dict(
        name='New {}'.format(node_type_name),
        project=project['_id'],
        user=current_user.objectid,
        node_type=node_type['name'],
        properties={}
    )
    if parent_id:
        node_props['parent'] = parent_id

    # Initialise list-typed properties so templates can iterate them.
    ensure_lists_exist_as_empty(node_props, node_type)

    node = Node(node_props)
    node.create(api=api)

    return jsonify(status='success', data=dict(asset_id=node['_id']))
@blueprint.route("/<node_id>/redir")
def redirect_to_context(node_id):
    """Redirects to the context URL of the node.

    Comment: redirects to whatever the comment is attached to + #node_id
        (unless 'whatever the comment is attached to' already contains '#', then
        '#node_id' isn't appended)
    Post: redirects to main or project-specific blog post
    Other: redirects to project.url + #node_id
    """
    # Client-side JavaScript replaces the '{{objectid}}' placeholder before
    # following the link; seeing the raw placeholder means that never happened.
    if node_id.lower() == '{{objectid}}':
        log.warning("JavaScript should have filled in the ObjectID placeholder, but didn't. "
                    "URL=%s and referrer=%s",
                    request.url, request.referrer)
        raise NotFound('Invalid ObjectID')

    try:
        url = url_for_node(node_id)
    except ValueError as ex:
        # url_for_node() raises ValueError for unknown/unresolvable nodes.
        log.warning("%s: URL=%s and referrer=%s",
                    str(ex), request.url, request.referrer)
        raise NotFound('Invalid ObjectID')

    return redirect(url)
def url_for_node(node_id=None, node=None):
    """Return the context URL for a node.

    Exactly one of ``node_id`` (string) or ``node`` must be given; the
    missing one is derived from the other.

    :raises ValueError: when the node can't be found, or when neither
        argument is given.
    """
    # NOTE: 'basestring' makes this Python 2 only.
    assert isinstance(node_id, (basestring, type(None)))

    api = system_util.pillar_api()

    # Find node by its ID, or the ID by the node, depending on what was passed
    # as parameters.
    if node is None:
        try:
            node = Node.find(node_id, api=api)
        except ResourceNotFound:
            log.warning(
                'url_for_node(node_id=%r, node=None): Unable to find node.',
                node_id)
            raise ValueError('Unable to find node %r' % node_id)
    elif node_id is None:
        node_id = node['_id']
    else:
        raise ValueError('Either node or node_id must be given')

    return _find_url_for_node(node_id, node=node)
@caching.cache_for_request()
def project_url(project_id, project):
    """Returns the project, raising a ValueError if it can't be found.

    Uses the "urler" service endpoint.

    NOTE(review): no ValueError is raised in this body -- presumably
    find_from_endpoint() raises on failure; confirm and update docstring.
    """
    if project is not None:
        # Caller already has the (embedded) project; no lookup needed.
        return project

    # The urler service account may access projects the current user can't.
    urler_api = system_util.pillar_api(
        token=current_app.config['URLER_SERVICE_AUTH_TOKEN'])
    return Project.find_from_endpoint(
        '/service/urler/%s' % project_id, api=urler_api)
# Cache the actual URL based on the node ID, for the duration of the request.
@caching.cache_for_request()
def _find_url_for_node(node_id, node):
    """Resolve the URL for a node, dispatching on its node type."""
    api = system_util.pillar_api()

    # Find the node's project, or its ID, depending on whether a project
    # was embedded. This is needed in two of the three finder functions.
    project_id = node.project
    if isinstance(project_id, pillarsdk.Resource):
        # Embedded project
        project = project_id
        project_id = project['_id']
    else:
        project = None

    def find_for_comment():
        """Returns the URL for a comment."""
        # Walk up the parent chain until we leave the comment thread.
        parent = node
        while parent.node_type == 'comment':
            if isinstance(parent.parent, pillarsdk.Resource):
                parent = parent.parent
                continue
            try:
                parent = Node.find(parent.parent, api=api)
            except ResourceNotFound:
                log.warning(
                    'url_for_node(node_id=%r): Unable to find parent node %r',
                    node_id, parent.parent)
                raise ValueError('Unable to find parent node %r' % parent.parent)

        # Find the redirection URL for the parent node.
        parent_url = url_for_node(node=parent)
        if '#' in parent_url:
            # We can't attach yet another fragment, so just don't link to
            # the comment for now.
            return parent_url
        return parent_url + '#{}'.format(node_id)

    def find_for_post():
        """Returns the URL for a blog post."""
        # The main project's blog lives at a different endpoint.
        if str(project_id) == current_app.config['MAIN_PROJECT_ID']:
            return url_for('main.main_blog',
                           url=node.properties.url)

        the_project = project_url(project_id, project=project)
        return url_for('main.project_blog',
                       project_url=the_project.url,
                       url=node.properties.url)

    # Fallback: Assets, textures, and other node types.
    def find_for_other():
        the_project = project_url(project_id, project=project)
        return url_for('projects.view_node',
                       project_url=the_project.url,
                       node_id=node_id)

    # Determine which function to use to find the correct URL.
    url_finders = {
        'comment': find_for_comment,
        'post': find_for_post,
    }
    finder = url_finders.get(node.node_type, find_for_other)
    return finder()
# Import of custom modules (using the same nodes decorator)
import custom.comments
import custom.groups
import custom.storage
import custom.posts

View File

@ -0,0 +1,126 @@
import logging
from flask import jsonify
from flask import Blueprint
from flask import request
from flask import url_for
from flask import abort
from flask.ext.login import login_required
from flask.ext.login import current_user
from pillarsdk.activities import Notification
from pillarsdk.activities import ActivitySubscription
from pillar.web.utils import system_util
from pillar.web.utils import pretty_date
log = logging.getLogger(__name__)
blueprint = Blueprint('notifications', __name__)
def notification_parse(notification):
    """Flatten a Notification resource into the dict the web frontend expects.

    Returns None when the notification has no actor (e.g. the actor was
    removed); callers are expected to filter those out.
    """
    actor = notification.actor
    if not actor:
        return None

    return dict(
        _id=notification['_id'],
        username=actor['username'],
        username_avatar=actor['avatar'],
        action=notification.action,
        object_type=notification.object_type,
        object_name=notification.object_name,
        object_url=url_for(
            'nodes.redirect_to_context', node_id=notification.object_id),
        context_object_type=notification.context_object_type,
        context_object_name=notification.context_object_name,
        context_object_url=url_for(
            'nodes.redirect_to_context', node_id=notification.context_object_id),
        date=pretty_date(notification['_created'], detail=True),
        is_read=notification.is_read,
        is_subscribed=notification.is_subscribed,
        subscription=notification.subscription
    )
@blueprint.route('/')
@login_required
def index():
    """Get notifications for the current user.

    Optional url args:
    - limit: limits the number of notifications
    """
    limit = request.args.get('limit', 25)
    api = system_util.pillar_api()
    user_notifications = Notification.all({
        'where': {'user': str(current_user.objectid)},
        'sort': '-_created',
        'max_results': str(limit),
        'parse': '1'}, api=api)

    # TODO: properly investigate and handle missing actors
    # Parse each notification exactly once (previously notification_parse()
    # was called twice per item: once as the filter and once for the value).
    items = []
    for notification in user_notifications['_items']:
        parsed = notification_parse(notification)
        if parsed:
            items.append(parsed)

    return jsonify(items=items)
@blueprint.route('/<notification_id>/read-toggle')
@login_required
def action_read_toggle(notification_id):
    """Toggle the is_read flag on one of the current user's notifications."""
    api = system_util.pillar_api()
    notification = Notification.find(notification_id, api=api)

    # Users may only toggle their own notifications.
    if notification.user != current_user.objectid:
        return abort(403)

    notification.is_read = not notification.is_read
    notification.update(api=api)
    return jsonify(
        status='success',
        data=dict(
            message="Notification {0} is_read {1}".format(
                notification_id,
                notification.is_read),
            is_read=notification.is_read))
@blueprint.route('/read-all')
@login_required
def action_read_all():
    """Mark all notifications as read"""
    api = system_util.pillar_api()
    notifications = Notification.all({
        'where': '{"user": "%s"}' % current_user.objectid,
        'sort': '-_created'}, api=api)

    # NOTE(review): each notification is re-fetched before updating, costing
    # one extra API call per item -- presumably to obtain a full document
    # that can be PUT back; confirm before optimising away.
    for notification in notifications._items:
        notification = Notification.find(notification._id, api=api)
        notification.is_read = True
        notification.update(api=api)

    return jsonify(
        status='success',
        data=dict(message="All notifications mark as read"))
@blueprint.route('/<notification_id>/subscription-toggle')
@login_required
def action_subscription_toggle(notification_id):
    """Given a notification id, get the ActivitySubscription and update it by
    toggling the notifications status for the web key.
    """
    api = system_util.pillar_api()

    # Resolve the notification into its activity subscription.
    parsed = notification_parse(
        Notification.find(notification_id, {'parse':'1'}, api=api))
    subscription = ActivitySubscription.find(
        parsed['subscription'], api=api)

    # Flip the 'web' delivery channel and persist the change.
    subscription.notifications['web'] = not subscription.notifications['web']
    subscription.update(api=api)

    return jsonify(
        status='success',
        data=dict(message="You have been {}subscribed".format(
            '' if subscription.notifications['web'] else 'un')))
def setup_app(app, url_prefix=None):
    """Register the notifications blueprint on the Flask application."""
    app.register_blueprint(blueprint, url_prefix=url_prefix)

View File

@ -0,0 +1,5 @@
from .routes import blueprint
def setup_app(app, url_prefix=None):
    """Register this package's blueprint on the Flask application."""
    app.register_blueprint(blueprint, url_prefix=url_prefix)

View File

@ -0,0 +1,63 @@
from flask_wtf import Form
from wtforms import StringField
from wtforms import BooleanField
from wtforms import HiddenField
from wtforms import TextAreaField
from wtforms import SelectField
from wtforms.validators import DataRequired
from wtforms.validators import Length
from pillarsdk.projects import Project
from pillarsdk import exceptions as sdk_exceptions
from pillar.web import system_util
from pillar.web.utils.forms import FileSelectField, JSONRequired
class ProjectForm(Form):
    """Form for editing a project's basic settings."""

    project_id = HiddenField('project_id', validators=[DataRequired()])
    name = StringField('Name', validators=[DataRequired()])
    url = StringField('Url', validators=[DataRequired()])
    summary = StringField('Summary', validators=[Length(min=1, max=128)])
    description = TextAreaField('Description', validators=[DataRequired()])
    is_private = BooleanField('Private')
    category = SelectField('Category', choices=[
        ('film', 'Film'),
        ('training', 'Training'),
        ('assets', 'Assets')])
    status = SelectField('Status', choices=[
        ('published', 'Published'),
        ('pending', 'Pending'),
        ('deleted', 'Deleted')])
    picture_header = FileSelectField('Picture header', file_format='image')
    picture_square = FileSelectField('Picture square', file_format='image')

    def validate(self):
        """Extends standard validation with a project-URL uniqueness check."""
        rv = Form.validate(self)
        if not rv:
            return False

        api = system_util.pillar_api()
        project = Project.find(self.project_id.data, api=api)
        if project.url == self.url.data:
            # Same URL as before, so that's fine.
            return True

        try:
            project_url = Project.find_one({'where': {'url': self.url.data}}, api=api)
        except sdk_exceptions.ResourceNotFound:
            # Not found, so the URL is fine.
            return True

        # Another project already uses this URL.
        if project_url:
            self.url.errors.append('Sorry, project url already exists!')
            return False
        return True
class NodeTypeForm(Form):
    """Form for editing a project's node type definition.

    The dyn_schema/form_schema/permissions fields hold raw JSON text and
    are validated as such by JSONRequired.
    """

    project_id = HiddenField('project_id', validators=[DataRequired()])
    name = StringField('Name', validators=[DataRequired()])
    parent = StringField('Parent')
    description = TextAreaField('Description')
    dyn_schema = TextAreaField('Schema', validators=[JSONRequired()])
    form_schema = TextAreaField('Form Schema', validators=[JSONRequired()])
    permissions = TextAreaField('Permissions', validators=[JSONRequired()])

View File

@ -0,0 +1,771 @@
import json
import logging
from pillarsdk import Node
from pillarsdk import Project
from pillarsdk.exceptions import ResourceNotFound
from pillarsdk.exceptions import ForbiddenAccess
from flask import Blueprint, current_app
from flask import render_template
from flask import request
from flask import jsonify
from flask import session
from flask import abort
from flask import redirect
from flask import url_for
from flask.ext.login import login_required
from flask.ext.login import current_user
import werkzeug.exceptions as wz_exceptions
from pillar.web import system_util
from pillar.web.utils import get_file
from pillar.web.utils import attach_project_pictures
from pillar.web.utils.jstree import jstree_get_children
from pillar.web.utils import gravatar
from .forms import ProjectForm
from .forms import NodeTypeForm
blueprint = Blueprint('projects', __name__)
log = logging.getLogger(__name__)

# Names of the well-known group nodes inside a user's home project.
SYNC_GROUP_NODE_NAME = 'Blender Sync'
IMAGE_SHARING_GROUP_NODE_NAME = 'Image sharing'
@blueprint.route('/')
@login_required
def index():
    """Render the dashboard with the user's own and shared projects."""
    api = system_util.pillar_api()

    # Get all projects, except the home project.
    projects_user = Project.all({
        'where': {'user': current_user.objectid,
                  'category': {'$ne': 'home'}},
        'sort': '-_created'
    }, api=api)

    # Private projects owned by someone else but shared with one of the
    # user's groups.
    projects_shared = Project.all({
        'where': {'user': {'$ne': current_user.objectid},
                  'permissions.groups.group': {'$in': current_user.groups},
                  'is_private': True},
        'sort': '-_created',
        'embedded': {'user': 1},
    }, api=api)

    # Attach header/square pictures to every listed project.
    for proj in projects_user['_items']:
        attach_project_pictures(proj, api)
    for proj in projects_shared['_items']:
        attach_project_pictures(proj, api)

    return render_template(
        'projects/index_dashboard.html',
        gravatar=gravatar(current_user.email, size=128),
        projects_user=projects_user['_items'],
        projects_shared=projects_shared['_items'],
        api=api)
@blueprint.route('/<project_url>/jstree')
def jstree(project_url):
    """Entry point to view a project as JSTree"""
    api = system_util.pillar_api()
    try:
        # Resolve the project URL to its ID; nothing more is needed here.
        proj = Project.find_one({
            'projection': {'_id': 1},
            'where': {'url': project_url}
        }, api=api)
    except ResourceNotFound:
        raise wz_exceptions.NotFound('No such project')

    return jsonify(items=jstree_get_children(None, proj._id))
@blueprint.route('/home/')
@login_required
def home_project():
    """Render the current user's home project with Blender sync info."""
    api = system_util.pillar_api()
    project = _home_project(api)

    # Include which Blender versions have synchronised settings.
    extra_context = {
        'synced_versions': synced_blender_versions(project['_id'], api),
        'show_addon_download_buttons': True,
    }

    return render_project(project, api, extra_context)
@blueprint.route('/home/images')
@login_required
def home_project_shared_images():
    """Shows the images shared from the current user's home project."""

    api = system_util.pillar_api()
    project = _home_project(api)

    # Add-on download buttons are only shown to subscriber/demo users.
    context = {
        'shared_images': shared_image_nodes(project['_id'], api),
        'show_addon_download_buttons': current_user.has_role('subscriber', 'demo'),
    }
    return render_project(project, api, context,
                          template_name='projects/home_images.html')
def _home_project(api):
    """Fetches the current user's home project, raising NotFound if absent."""

    try:
        return Project.find_from_endpoint('/bcloud/home-project', api=api)
    except ResourceNotFound:
        log.warning('Home project for user %s not found', current_user.objectid)
        raise wz_exceptions.NotFound('No such project')
def synced_blender_versions(home_project_id, api):
    """Returns a list of Blender versions with synced settings.

    Returns a list of {'version': '2.77', 'date': datetime.datetime()} dicts.
    Returns an empty list if no Blender versions were synced.
    """

    # Sync data lives in group nodes under one top-level group node named
    # SYNC_GROUP_NODE_NAME; locate that parent first.
    parent = Node.find_first({
        'where': {'project': home_project_id,
                  'node_type': 'group',
                  'parent': None,
                  'name': SYNC_GROUP_NODE_NAME},
        'projection': {'_id': 1}},
        api=api)
    if not parent:
        return []

    # Each child group node represents one synced Blender version.
    version_nodes = Node.all({
        'where': {'project': home_project_id,
                  'node_type': 'group',
                  'parent': parent['_id']},
        'projection': {
            'name': 1,
            '_updated': 1,
        }},
        api=api)

    found = version_nodes._items
    if not found:
        return []

    return [{'version': vnode.name, 'date': vnode._updated}
            for vnode in found]
def shared_image_nodes(home_project_id, api):
    """Returns a list of pillarsdk.Node objects."""

    # Shared images are asset nodes parented under one top-level group node
    # named IMAGE_SHARING_GROUP_NODE_NAME; locate that parent first.
    parent = Node.find_first({
        'where': {'project': home_project_id,
                  'node_type': 'group',
                  'parent': None,
                  'name': IMAGE_SHARING_GROUP_NODE_NAME},
        'projection': {'_id': 1}},
        api=api)
    if not parent:
        log.debug('No image sharing parent node found.')
        return []

    image_nodes = Node.all({
        'where': {'project': home_project_id,
                  'node_type': 'asset',
                  'properties.content_type': 'image',
                  'parent': parent['_id']},
        'sort': '-_created',
        'projection': {
            '_created': 1,
            'name': 1,
            'picture': 1,
            'short_code': 1,
        }},
        api=api)

    found = image_nodes._items or []
    # Resolve each picture link so templates can render thumbnails.
    for img_node in found:
        img_node.picture = get_file(img_node.picture)
    return found
@blueprint.route('/home/jstree')
def home_jstree():
    """Entry point to view the home project as JSTree"""

    api = system_util.pillar_api()

    # Only a handful of fields are needed to build the tree.
    projection = {'_id': 1,
                  'permissions': 1,
                  'category': 1,
                  'user': 1}
    try:
        project = Project.find_from_endpoint('/bcloud/home-project',
                                             params={'projection': projection},
                                             api=api)
    except ResourceNotFound:
        raise wz_exceptions.NotFound('No such project')

    return jsonify(items=jstree_get_children(None, project._id))
@blueprint.route('/<project_url>/')
def view(project_url):
    """Entry point to view a project"""

    # Some clients request the tree data through this endpoint using a
    # ?format=jstree query argument; send them to the dedicated endpoint.
    if request.args.get('format') == 'jstree':
        log.warning('projects.view(%r) endpoint called with format=jstree, '
                    'redirecting to proper endpoint. URL is %s; referrer is %s',
                    project_url, request.url, request.referrer)
        return redirect(url_for('projects.jstree', project_url=project_url))

    api = system_util.pillar_api()
    # Embed the header node so the header video check below needs no extra fetch.
    project = find_project_or_404(project_url,
                                  embedded={'header_node': 1},
                                  api=api)

    # Load the header video file, if there is any.
    header_video_file = None
    header_video_node = None
    if project.header_node and project.header_node.node_type == 'asset' and \
            project.header_node.properties.content_type == 'video':
        header_video_node = project.header_node
        # NOTE(review): unlike other get_file() calls here, no api= is passed —
        # presumably get_file defaults to the current API; confirm.
        header_video_file = get_file(project.header_node.properties.file)
        header_video_node.picture = get_file(header_video_node.picture)

    return render_project(project, api,
                          extra_context={'header_video_file': header_video_file,
                                         'header_video_node': header_video_node})
def render_project(project, api, extra_context=None, template_name=None):
    """Renders a project's overview page.

    :param project: pillarsdk Project; its picture and node-list properties
        are resolved in place before rendering.
    :param extra_context: optional dict merged into the template context.
    :param template_name: optional template override; when None the template
        is chosen from the project category and the 'embed' query argument.
    """
    project.picture_square = get_file(project.picture_square, api=api)
    project.picture_header = get_file(project.picture_header, api=api)

    def load_latest(list_of_ids, get_picture=False):
        """Loads a list of IDs in reversed order."""

        if not list_of_ids:
            return []

        # Construct query parameters outside the loop.
        projection = {'name': 1, 'user': 1, 'node_type': 1, 'project': 1,
                      'properties.url': 1}
        params = {'projection': projection, 'embedded': {'user': 1}}

        if get_picture:
            projection['picture'] = 1

        list_latest = []
        for node_id in reversed(list_of_ids or ()):
            try:
                node_item = Node.find(node_id, params, api=api)
                node_item.picture = get_file(node_item.picture, api=api)
                list_latest.append(node_item)
            except ForbiddenAccess:
                # The user has no access to this node; silently skip it.
                pass
            except ResourceNotFound:
                # The project's node list is stale; log but keep rendering.
                log.warning('Project %s refers to removed node %s!',
                            project._id, node_id)

        return list_latest

    project.nodes_latest = load_latest(project.nodes_latest)
    project.nodes_featured = load_latest(project.nodes_featured, get_picture=True)
    project.nodes_blog = load_latest(project.nodes_blog)

    if extra_context is None:
        extra_context = {}

    # The home project has its own template, unless configured to render
    # like a regular project.
    if project.category == 'home' and not current_app.config['RENDER_HOME_AS_REGULAR_PROJECT']:
        template_name = template_name or 'projects/home_index.html'
        return render_template(
            template_name,
            gravatar=gravatar(current_user.email, size=128),
            project=project,
            api=system_util.pillar_api(),
            **extra_context)

    if template_name is None:
        # 'embed' query arg selects the embeddable variant of the template.
        if request.args.get('embed'):
            embed_string = '_embed'
        else:
            embed_string = ''
        template_name = "projects/view{0}.html".format(embed_string)

    return render_template(template_name,
                           api=api,
                           project=project,
                           node=None,
                           show_node=False,
                           show_project=True,
                           og_picture=project.picture_header,
                           **extra_context)
@blueprint.route('/<project_url>/<node_id>')
def view_node(project_url, node_id):
    """Entry point to view a node in the context of a project"""

    # Some browsers mangle URLs and URL-encode /p/{p-url}/#node-id
    if node_id.startswith('#'):
        return redirect(url_for('projects.view_node',
                                project_url=project_url,
                                node_id=node_id[1:]),
                        code=301)  # permanent redirect

    api = system_util.pillar_api()
    # 't' query argument switches to theatre (full-screen viewer) mode.
    theatre_mode = 't' in request.args

    # Fetch the node before the project. If this user has access to the
    # node, we should be able to get the project URL too.
    try:
        node = Node.find(node_id, api=api)
    except ForbiddenAccess:
        return render_template('errors/403.html'), 403
    except ResourceNotFound:
        raise wz_exceptions.NotFound('No such node')

    # Match on both URL and node's project ID so a node can't be viewed
    # under another project's URL.
    try:
        project = Project.find_one({'where': {"url": project_url, '_id': node.project}}, api=api)
    except ResourceNotFound:
        # In theatre mode, we don't need access to the project at all.
        if theatre_mode:
            project = None
        else:
            raise wz_exceptions.NotFound('No such project')

    # The node picture doubles as the OpenGraph image; fall back to the
    # project's header picture when the node has none of its own.
    og_picture = node.picture = get_file(node.picture, api=api)
    if project:
        if not node.picture:
            og_picture = get_file(project.picture_header, api=api)
        project.picture_square = get_file(project.picture_square, api=api)

    # Append _theatre to load the proper template
    theatre = '_theatre' if theatre_mode else ''

    return render_template('projects/view{}.html'.format(theatre),
                           api=api,
                           project=project,
                           node=node,
                           show_node=True,
                           show_project=False,
                           og_picture=og_picture)
def find_project_or_404(project_url, embedded=None, api=None):
    """Aborts with a NotFound exception when the project cannot be found."""

    lookup = {'where': {"url": project_url}}
    if embedded:
        lookup['embedded'] = embedded

    try:
        return Project.find_one(lookup, api=api)
    except ResourceNotFound:
        raise wz_exceptions.NotFound('No such project')
@blueprint.route('/<project_url>/search')
def search(project_url):
    """Search into a project"""

    api = system_util.pillar_api()
    project = find_project_or_404(project_url, api=api)

    # Resolve pictures; the header also serves as the OpenGraph image.
    project.picture_square = get_file(project.picture_square, api=api)
    project.picture_header = get_file(project.picture_header, api=api)

    return render_template('nodes/search.html',
                           project=project,
                           og_picture=project.picture_header)
@blueprint.route('/<project_url>/about')
def about(project_url):
    """About page of a project"""

    # TODO: Duplicated code from view function, we could re-use view instead
    api = system_util.pillar_api()
    project = find_project_or_404(project_url,
                                  embedded={'header_node': 1},
                                  api=api)

    # Load the header video file, if there is any.
    header_video_file = None
    header_video_node = None
    if project.header_node and project.header_node.node_type == 'asset' and \
            project.header_node.properties.content_type == 'video':
        header_video_node = project.header_node
        header_video_file = get_file(project.header_node.properties.file)
        header_video_node.picture = get_file(header_video_node.picture)

    return render_project(project, api,
                          extra_context={'title': 'about',
                                         'header_video_file': header_video_file,
                                         'header_video_node': header_video_node})
@blueprint.route('/<project_url>/edit', methods=['GET', 'POST'])
@login_required
def edit(project_url):
    """Shows and processes the project edit form.

    GET pre-fills the form from the project; a valid POST re-fetches the
    project and writes the submitted fields back to it.
    """
    api = system_util.pillar_api()
    # Fetch the Node or 404
    try:
        project = Project.find_one({'where': {'url': project_url}}, api=api)
        # project = Project.find(project_url, api=api)
    except ResourceNotFound:
        abort(404)
    attach_project_pictures(project, api)
    form = ProjectForm(
        project_id=project._id,
        name=project.name,
        url=project.url,
        summary=project.summary,
        description=project.description,
        # A project is private when the world has no GET access.
        is_private=u'GET' not in project.permissions.world,
        category=project.category,
        status=project.status,
    )

    if form.validate_on_submit():
        # Re-fetch a fresh copy before applying the submitted values.
        project = Project.find(project._id, api=api)
        project.name = form.name.data
        project.url = form.url.data
        project.summary = form.summary.data
        project.description = form.description.data
        project.category = form.category.data
        project.status = form.status.data
        # Pictures are only replaced when the form provides a new one.
        if form.picture_square.data:
            project.picture_square = form.picture_square.data
        if form.picture_header.data:
            project.picture_header = form.picture_header.data

        # Update world permissions from is_private checkbox
        if form.is_private.data:
            project.permissions.world = []
        else:
            project.permissions.world = [u'GET']

        project.update(api=api)
        # Reattach the pictures
        attach_project_pictures(project, api)
    else:
        # GET (or invalid POST): show the current picture IDs in the form.
        if project.picture_square:
            form.picture_square.data = project.picture_square._id
        if project.picture_header:
            form.picture_header.data = project.picture_header._id

    # List of fields from the form that should be hidden to regular users
    if current_user.has_role('admin'):
        hidden_fields = []
    else:
        hidden_fields = ['url', 'status', 'is_private', 'category']

    return render_template('projects/edit.html',
                           form=form,
                           hidden_fields=hidden_fields,
                           project=project,
                           api=api)
@blueprint.route('/<project_url>/edit/node-type')
@login_required
def edit_node_types(project_url):
    """Lists the project's node types so an editor can pick one to edit.

    Returns a 404 page when no project with the given URL exists.
    """
    api = system_util.pillar_api()

    # Fetch the project or 404. Use a dict query (as the other views do)
    # instead of interpolating project_url into a JSON string, which would
    # produce invalid JSON for URLs containing quotes.
    try:
        project = Project.find_one({'where': {'url': project_url}}, api=api)
    except ResourceNotFound:
        return abort(404)
    attach_project_pictures(project, api)

    return render_template('projects/edit_node_types.html',
                           api=api,
                           project=project)
@blueprint.route('/<project_url>/e/node-type/<node_type_name>', methods=['GET', 'POST'])
@login_required
def edit_node_type(project_url, node_type_name):
    """Shows and processes the edit form for one of the project's node types.

    POST replaces the node type's dynamic schema, form schema and
    permissions with the submitted JSON; GET pre-fills the form.
    """
    api = system_util.pillar_api()

    # Fetch the project or 404. Use a dict query (consistent with the other
    # views) instead of interpolating project_url into a JSON string, which
    # would produce invalid JSON for URLs containing quotes.
    try:
        project = Project.find_one({'where': {'url': project_url}}, api=api)
    except ResourceNotFound:
        return abort(404)
    attach_project_pictures(project, api)
    node_type = project.get_node_type(node_type_name)
    form = NodeTypeForm()
    if form.validate_on_submit():
        # Update dynamic & form schemas
        dyn_schema = json.loads(form.dyn_schema.data)
        node_type.dyn_schema = dyn_schema
        form_schema = json.loads(form.form_schema.data)
        node_type.form_schema = form_schema
        # Update permissions
        permissions = json.loads(form.permissions.data)
        node_type.permissions = permissions
        project.update(api=api)
    elif request.method == 'GET':
        # Pre-fill the form with the node type's current contents.
        form.project_id.data = project._id
        if node_type:
            form.name.data = node_type.name
            form.description.data = node_type.description
            form.parent.data = node_type.parent
            dyn_schema = node_type.dyn_schema.to_dict()
            form_schema = node_type.form_schema.to_dict()
            if 'permissions' in node_type:
                permissions = node_type.permissions.to_dict()
            else:
                permissions = {}

            form.form_schema.data = json.dumps(form_schema, indent=4)
            form.dyn_schema.data = json.dumps(dyn_schema, indent=4)
            form.permissions.data = json.dumps(permissions, indent=4)
    return render_template('projects/edit_node_type.html',
                           form=form,
                           project=project,
                           api=api,
                           node_type=node_type)
@blueprint.route('/<project_url>/edit/sharing', methods=['GET', 'POST'])
@login_required
def sharing(project_url):
    """Manages the users a project is shared with.

    GET renders the sharing page; POST adds or removes a user (form fields
    'user_id' and 'action' = 'add'|'remove') and returns the user as JSON.
    """
    api = system_util.pillar_api()

    # Fetch the project or 404. Use a dict query (consistent with the other
    # views) instead of interpolating project_url into a JSON string, which
    # would produce invalid JSON for URLs containing quotes.
    try:
        project = Project.find_one({'where': {'url': project_url}}, api=api)
    except ResourceNotFound:
        return abort(404)

    # Fetch users that are part of the admin group
    users = project.get_users(api=api)
    for user in users['_items']:
        user['avatar'] = gravatar(user['email'])

    if request.method == 'POST':
        user_id = request.form['user_id']
        action = request.form['action']

        try:
            if action == 'add':
                user = project.add_user(user_id, api=api)
            elif action == 'remove':
                user = project.remove_user(user_id, api=api)
        except ResourceNotFound:
            log.info('/p/%s/edit/sharing: User %s not found', project_url, user_id)
            return jsonify({'_status': 'ERROR',
                            'message': 'User %s not found' % user_id}), 404

        # Add gravatar to user
        user['avatar'] = gravatar(user['email'])
        return jsonify(user)

    attach_project_pictures(project, api)

    return render_template('projects/sharing.html',
                           api=api,
                           project=project,
                           users=users['_items'])
@blueprint.route('/e/add-featured-node', methods=['POST'])
@login_required
def add_featured_node():
    """Feature a node in a project. This method belongs here, because it affects
    the project node itself, not the asset.
    """
    api = system_util.pillar_api()
    node = Node.find(request.form['node_id'], api=api)

    # project_update_nodes_list() toggles membership and reports whether the
    # node was added to or removed from the featured list.
    action = project_update_nodes_list(node, project_id=node.project,
                                       list_name='featured')
    return jsonify(status='success', data=dict(action=action))
@blueprint.route('/e/move-node', methods=['POST'])
@login_required
def move_node():
    """Move a node within a project. While this affects the node.parent prop, we
    keep it in the scope of the project.

    Form fields: 'node_id' (required) and 'dest_parent_node_id' (optional;
    when absent the node becomes a top-level node).
    """
    node_id = request.form['node_id']
    dest_parent_node_id = request.form.get('dest_parent_node_id')

    api = system_util.pillar_api()
    node = Node.find(node_id, api=api)

    # Re-parent the node; without a destination it is moved to the top level.
    # (Removed an unused 'previous_parent_id' local that was never read.)
    if dest_parent_node_id:
        node.parent = dest_parent_node_id
    elif node.parent:
        node.parent = None

    node.update(api=api)
    return jsonify(status='success', data=dict(message='node moved'))
@blueprint.route('/e/delete-node', methods=['POST'])
@login_required
def delete_node():
    """Delete a node"""

    api = system_util.pillar_api()
    node = Node.find(request.form['node_id'], api=api)

    # Deletion is only allowed when the API grants this user DELETE access.
    if not node.has_method('DELETE'):
        return abort(403)

    node.delete(api=api)
    return jsonify(status='success', data=dict(message='Node deleted'))
@blueprint.route('/e/toggle-node-public', methods=['POST'])
@login_required
def toggle_node_public():
    """Give a node GET permissions for the world. Later on this can turn into
    a more powerful permission management function.
    """
    api = system_util.pillar_api()
    node = Node.find(request.form['node_id'], api=api)

    # Toggling requires write access to the node.
    if not node.has_method('PUT'):
        return abort(403)

    if node.permissions and 'world' in node.permissions.to_dict():
        # Currently public: revoke world access.
        node.permissions = {}
        message = "Node is not public anymore."
    else:
        # Currently private: grant the world read-only access.
        node.permissions = dict(world=['GET'])
        message = "Node is now public!"

    node.update(api=api)
    return jsonify(status='success', data=dict(message=message))
@blueprint.route('/e/toggle-node-project-header', methods=['POST'])
@login_required
def toggle_node_project_header():
    """Sets this node as the project header, or removes it if already there.
    """
    api = system_util.pillar_api()
    node_id = request.form['node_id']

    # Only the node's project reference is needed here.
    try:
        node = Node.find(node_id, {'projection': {'project': 1}}, api=api)
    except ResourceNotFound:
        log.info('User %s trying to toggle non-existing node %s as project header',
                 current_user.objectid, node_id)
        return jsonify(_status='ERROR', message='Node not found'), 404

    try:
        project = Project.find(node.project, api=api)
    except ResourceNotFound:
        log.info('User %s trying to toggle node %s as project header, but project %s not found',
                 current_user.objectid, node_id, node.project)
        return jsonify(_status='ERROR', message='Project not found'), 404

    # Toggle header node
    if project.header_node == node_id:
        log.debug('Un-setting header node of project %s', node.project)
        project.header_node = None
        action = 'unset'
    else:
        log.debug('Setting node %s as header of project %s', node_id, node.project)
        project.header_node = node_id
        action = 'set'

    # Save the project
    project.update(api=api)

    return jsonify({'_status': 'OK',
                    'action': action})
def project_update_nodes_list(node, project_id=None, list_name='latest'):
    """Update the project node with the latest edited or favorited node.
    The list value can be 'latest' or 'featured' and it will determined where
    the node reference will be placed in.

    Returns 'added' or 'removed' depending on the action taken, or None when
    the node is not published.
    """
    if node.properties.status and node.properties.status == 'published':
        if not project_id and 'current_project_id' in session:
            project_id = session['current_project_id']
        elif not project_id:
            return None
        # NOTE(review): the assignment below unconditionally overwrites
        # project_id, making the session fallback above dead code — confirm
        # whether node.project is really always the intended source.
        project_id = node.project
        if type(project_id) is not unicode:
            # node.project can be an embedded project document rather than
            # a plain ID string; take its _id in that case.
            project_id = node.project._id
        api = system_util.pillar_api()
        project = Project.find(project_id, api=api)
        if list_name == 'latest':
            nodes_list = project.nodes_latest
        elif list_name == 'blog':
            nodes_list = project.nodes_blog
        else:
            nodes_list = project.nodes_featured

        if not nodes_list:
            node_list_name = 'nodes_' + list_name
            project[node_list_name] = []
            nodes_list = project[node_list_name]
        elif len(nodes_list) > 5:
            # Keep the list short by dropping the oldest entry.
            nodes_list.pop(0)

        if node._id in nodes_list:
            # Pop to put this back on top of the list
            nodes_list.remove(node._id)
            if list_name == 'featured':
                # We treat the action as a toggle and do not add the item back
                project.update(api=api)
                return "removed"

        nodes_list.append(node._id)
        project.update(api=api)
        return "added"
@blueprint.route('/create')
@login_required
def create():
    """Create a new project. This is a multi step operation that involves:
    - initialize basic node types
    - initialize basic permissions
    - create and connect storage space
    """
    api = system_util.pillar_api()

    # A new project starts as 'pending' with a placeholder name; the user
    # is then sent to the edit page to fill in the real details.
    new_project = Project(dict(
        name='My project',
        user=current_user.objectid,
        category='assets',
        status='pending',
    ))
    new_project.create(api=api)

    return redirect(url_for('projects.edit',
                            project_url="p-{}".format(new_project['_id'])))
@blueprint.route('/delete', methods=['POST'])
@login_required
def delete():
    """Unapologetically deletes a project.

    Expects the project's ID in the 'project_id' form field and returns a
    JSON confirmation.
    """
    api = system_util.pillar_api()
    project_id = request.form['project_id']
    project = Project.find(project_id, api=api)
    project.delete(api=api)

    # Fixed response-key typo: was 'staus', so clients checking 'status'
    # never saw the success flag.
    return jsonify(dict(status='success', data=dict(
        message='Project deleted {}'.format(project['_id']))))

View File

@ -0,0 +1,64 @@
import logging
import string
import urlparse
from flask import Blueprint, redirect, current_app
from werkzeug.exceptions import NotFound
import pillarsdk
from pillar.web import system_util
from pillar.web.nodes.routes import url_for_node
# Blueprint that serves short-URL redirects (see setup_app for registration).
blueprint = Blueprint('redirects', __name__)
log = logging.getLogger(__name__)
# Characters allowed in a node short code; anything else cannot be one.
short_code_chars = string.ascii_letters + string.digits
@blueprint.route('/<path:path>')
def redirect_to_path(path):
    """Resolves a path via configured redirects, then as a node short code."""

    configured = current_app.config.get('REDIRECTS', {})

    # Explicitly configured redirects take precedence.
    if path in configured:
        return redirect(configured[path], code=307)

    # The path may be a node short-code.
    resp = redirect_with_short_code(path)
    if resp is not None:
        return resp

    log.warning('Non-existing redirect %r requested', path)
    raise NotFound()
def redirect_with_short_code(short_code):
    """Returns a redirect to the node with this short code, or None."""

    if any(c not in short_code_chars for c in short_code):
        # Can't be a short code
        return

    log.debug('Path %s may be a short-code', short_code)
    api = system_util.pillar_api()

    lookup = {'where': {'short_code': short_code},
              'projection': {'_id': 1}}
    try:
        node = pillarsdk.Node.find_one(lookup, api=api)
    except pillarsdk.ResourceNotFound:
        log.debug("Nope, it isn't.")
        return

    # Redirect to 'theatre' view for the node.
    target = urlparse.urljoin(url_for_node(node=node), '?t')
    log.debug('Found short code %s, redirecting to %s', short_code, target)
    return redirect(target, code=307)
def setup_app(app, url_prefix):
    # Registers the redirects blueprint on the app under the given URL prefix.
    app.register_blueprint(blueprint, url_prefix=url_prefix)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1 @@
!function(a){"use strict";"function"==typeof define&&define.amd?define(["jquery"],a):a("object"==typeof exports?require("jquery"):window.jQuery)}(function(a){"use strict";var b=0;a.ajaxTransport("iframe",function(c){if(c.async){var e,f,g,d=c.initialIframeSrc||"javascript:false;";return{send:function(h,i){e=a('<form style="display:none;"></form>'),e.attr("accept-charset",c.formAcceptCharset),g=/\?/.test(c.url)?"&":"?","DELETE"===c.type?(c.url=c.url+g+"_method=DELETE",c.type="POST"):"PUT"===c.type?(c.url=c.url+g+"_method=PUT",c.type="POST"):"PATCH"===c.type&&(c.url=c.url+g+"_method=PATCH",c.type="POST"),b+=1,f=a('<iframe src="'+d+'" name="iframe-transport-'+b+'"></iframe>').bind("load",function(){var b,g=a.isArray(c.paramName)?c.paramName:[c.paramName];f.unbind("load").bind("load",function(){var b;try{if(b=f.contents(),!b.length||!b[0].firstChild)throw new Error}catch(a){b=void 0}i(200,"success",{iframe:b}),a('<iframe src="'+d+'"></iframe>').appendTo(e),window.setTimeout(function(){e.remove()},0)}),e.prop("target",f.prop("name")).prop("action",c.url).prop("method",c.type),c.formData&&a.each(c.formData,function(b,c){a('<input type="hidden"/>').prop("name",c.name).val(c.value).appendTo(e)}),c.fileInput&&c.fileInput.length&&"POST"===c.type&&(b=c.fileInput.clone(),c.fileInput.after(function(a){return b[a]}),c.paramName&&c.fileInput.each(function(b){a(this).prop("name",g[b]||c.paramName)}),e.append(c.fileInput).prop("enctype","multipart/form-data").prop("encoding","multipart/form-data"),c.fileInput.removeAttr("form")),e.submit(),b&&b.length&&c.fileInput.each(function(c,d){var e=a(b[c]);a(d).prop("name",e.prop("name")).attr("form",e.attr("form")),e.replaceWith(d)})}),e.append(f).appendTo(document.body)},abort:function(){f&&f.unbind("load").prop("src",d),e&&e.remove()}}}}),a.ajaxSetup({converters:{"iframe text":function(b){return b&&a(b[0].body).text()},"iframe json":function(b){return b&&a.parseJSON(a(b[0].body).text())},"iframe html":function(b){return 
b&&a(b[0].body).html()},"iframe xml":function(b){var c=b&&b[0];return c&&a.isXMLDoc(c)?c:a.parseXML(c.XMLDocument&&c.XMLDocument.xml||a(c.body).html())},"iframe script":function(b){return b&&a.globalEval(a(b[0].body).text())}}})});

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Some files were not shown because too many files have changed in this diff Show More