#!/usr/bin/env python
from __future__ import print_function
from __future__ import division

import os
import logging

from bson.objectid import ObjectId
from eve.methods.put import put_internal
from eve.methods.post import post_internal
from flask.ext.script import Manager

# Use a sensible default when running manage.py commands.
if not os.environ.get('EVE_SETTINGS'):
    settings_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                 'settings.py')
    os.environ['EVE_SETTINGS'] = settings_path

# Only import the application after EVE_SETTINGS has been set, so that Eve
# picks up the intended configuration.
from application import app
from application.utils.gcs import GoogleCloudStorageBucket
from manage_extra.node_types.asset import node_type_asset
from manage_extra.node_types.blog import node_type_blog
from manage_extra.node_types.comment import node_type_comment
from manage_extra.node_types.group import node_type_group
from manage_extra.node_types.post import node_type_post
from manage_extra.node_types.project import node_type_project
from manage_extra.node_types.storage import node_type_storage
from manage_extra.node_types.texture import node_type_texture
from manage_extra.node_types.group_texture import node_type_group_texture

manager = Manager(app)

log = logging.getLogger('manage')
log.setLevel(logging.INFO)

MONGO_HOST = os.environ.get('MONGO_HOST', 'localhost')


@manager.command
def runserver():
    # Automatically create the STORAGE_DIR path if it's missing.
    if not os.path.exists(app.config['STORAGE_DIR']):
        os.makedirs(app.config['STORAGE_DIR'])

    app.run()


def post_item(entry, data):
    return post_internal(entry, data)


def put_item(collection, item):
    item_id = item['_id']
    internal_fields = ['_id', '_etag', '_updated', '_created']
    for field in internal_fields:
        item.pop(field, None)
    p = put_internal(collection, item, **{'_id': item_id})
    if p[0]['_status'] == 'ERR':
        print(p)
        print(item)
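

# Note on the helpers above: Eve's post_internal/put_internal return a tuple
# whose first element is the response payload (see the
# "result, _, _, _ = put_internal(...)" unpacking in _update_project below),
# which is why put_item inspects p[0]['_status'].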


@manager.command
def setup_db():
    """Set up the database.

    - Create the admin, subscriber and demo groups
    - Create the admin user (must use valid Blender ID credentials)
    - Create the default project
    """

    groups_list = []
    for group in ['admin', 'subscriber', 'demo']:
        g = {'name': group}
        g = post_internal('groups', g)
        groups_list.append(g[0]['_id'])
        print("Creating group {0}".format(group))

    while True:
        admin_username = raw_input('Admin email:')
        if len(admin_username) < 1:
            print("Username is too short")
        else:
            break

    user = dict(
        username=admin_username,
        groups=groups_list,
        roles=['admin', 'subscriber', 'demo'],
        settings=dict(email_communications=1),
        auth=[],
        full_name=admin_username,
        email=admin_username,
    )
    user = post_internal('users', user)
    print("Created user {0}".format(user[0]['_id']))

    default_permissions = _default_permissions()
    node_type_blog['permissions'] = default_permissions
    node_type_post['permissions'] = default_permissions
    node_type_comment['permissions'] = default_permissions

    project = dict(
        owners=dict(users=[], groups=[]),
        description='Default Project',
        name='Default Project',
        node_types=[
            node_type_blog,
            node_type_post,
            node_type_comment
        ],
        status='published',
        user=user[0]['_id'],
        is_private=False,
        permissions=default_permissions,
        url='default-project',
        summary='Default Project summary',
        category='training'
    )
    # Manually insert into the db, since using post_internal would trigger hooks.
    # TODO: fix this by passing the context (and the user to the g object).
    projects_collection = app.data.driver.db['projects']
    project = projects_collection.insert_one(project)
    print("Created default project {0}".format(project.inserted_id))
    gcs_storage = GoogleCloudStorageBucket(str(project.inserted_id))

    if gcs_storage.bucket.exists():
        print("Created GCS instance")


def _default_permissions():
    """Returns a dict of default permissions.

    Usable for projects, node types, and others.

    :rtype: dict
    """

    groups_collection = app.data.driver.db['groups']
    admin_group = groups_collection.find_one({'name': 'admin'})

    default_permissions = {
        'world': ['GET'],
        'users': [],
        'groups': [
            {'group': admin_group['_id'],
             'methods': ['GET', 'PUT', 'POST']},
        ]
    }

    return default_permissions


@manager.command
def setup_for_attract(project_uuid, replace=False):
    """Adds Attract node types to the project.

    :param project_uuid: the UUID of the project to update
    :type project_uuid: str
    :param replace: whether to replace existing Attract node types (True),
        or to keep existing node types (False, the default).
    :type replace: bool
    """

    from manage_extra.node_types.act import node_type_act
    from manage_extra.node_types.scene import node_type_scene
    from manage_extra.node_types.shot import node_type_shot

    default_permissions = _default_permissions()
    node_type_act['permissions'] = default_permissions
    node_type_scene['permissions'] = default_permissions
    node_type_shot['permissions'] = default_permissions

    project = _get_project(project_uuid)

    # Add the missing node types.
    for node_type in (node_type_act, node_type_scene, node_type_shot):
        found = [nt for nt in project['node_types']
                 if nt['name'] == node_type['name']]
        if found:
            assert len(found) == 1, 'node type name should be unique (found %ix)' % len(found)

            # TODO: validate that the node type contains all the properties Attract needs.
            if replace:
                log.info('Replacing existing node type %s', node_type['name'])
                project['node_types'].remove(found[0])
            else:
                continue

        project['node_types'].append(node_type)

    _update_project(project_uuid, project)

    log.info('Project %s was updated for Attract.', project_uuid)
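

# Example invocation of setup_for_attract (assuming flask-script's default
# mapping of the "replace" keyword argument onto a --replace flag):
#     python manage.py setup_for_attract <project_uuid> --replace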


def _get_project(project_uuid):
    """Finds a project in the database, or raises SystemExit.

    :param project_uuid: UUID of the project
    :type: str
    :return: the project
    :rtype: dict
    """

    projects_collection = app.data.driver.db['projects']
    project_id = ObjectId(project_uuid)

    # Find the project in the database.
    project = projects_collection.find_one(project_id)
    if not project:
        log.error('Project %s does not exist.', project_uuid)
        raise SystemExit()

    return project


def _update_project(project_uuid, project):
    """Updates a project in the database, or raises SystemExit.

    :param project_uuid: UUID of the project
    :type: str
    :param project: the project data, should be the entire project document
    :type: dict
    """

    from application.utils import remove_private_keys

    project_id = ObjectId(project_uuid)
    project = remove_private_keys(project)
    result, _, _, _ = put_internal('projects', project, _id=project_id)

    if result['_status'] != 'OK':
        log.error("Can't update project %s, issues: %s", project_uuid, result['_issues'])
        raise SystemExit()


@manager.command
def clear_db():
    """Wipes the database."""
    from pymongo import MongoClient

    client = MongoClient(MONGO_HOST, 27017)
    db = client.eve
    db.drop_collection('nodes')
    db.drop_collection('node_types')
    db.drop_collection('tokens')
    db.drop_collection('users')
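

# Example: "python manage.py clear_db". Note that this drops the nodes,
# node_types, tokens and users collections outright; there is no confirmation
# prompt and no undo.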


@manager.command
def upgrade_node_types():
    """Wipes the node_types collection and populates it again."""
    node_types_collection = app.data.driver.db['node_types']
    node_types = node_types_collection.find({})
    old_ids = {}
    for node_type in node_types:
        old_ids[node_type['name']] = node_type['_id']
    populate_node_types(old_ids)


@manager.command
def manage_groups():
    """Takes a user email and a group name, and adds the user to or removes
    them from that group.
    """
    from pymongo import MongoClient
    client = MongoClient(MONGO_HOST, 27017)
    db = client.eve

    print("")
    print("Add or Remove user from group")
    print("leave empty to cancel")
    print("")

    # Select Action
    print("Do you want to Add or Remove the user from the group?")
    retry = True
    while retry:
        action = raw_input('add/remove: ')
        if action == '':
            return
        elif action.lower() in ['add', 'a', 'insert']:
            action = 'add'
            retry = False
        elif action.lower() in ['remove', 'r', 'rmv', 'rem', 'delete', 'del']:
            action = 'remove'
            retry = False
        else:
            print("Incorrect action, please type 'add' or 'remove'")

    # Select User
    retry = True
    while retry:
        user_email = raw_input('User email: ')
        if user_email == '':
            return
        user = db.users.find_one({'email': user_email})
        if user:
            retry = False
        else:
            print("Incorrect user email, try again, or leave empty to cancel")

    # Select Group
    retry = True
    while retry:
        group_name = raw_input('Group name: ')
        if group_name == '':
            return
        group = db.groups.find_one({'name': group_name})
        if group:
            retry = False
        else:
            print("Incorrect group name, try again, or leave empty to cancel")

    # Apply the action to the user's group list.
    current_groups = user.get('groups', [])
    if action == 'add':
        if group['_id'] in current_groups:
            print("User {0} is already in group {1}".format(
                user_email, group_name))
        else:
            current_groups.append(group['_id'])
            db.users.update({'_id': user['_id']},
                            {"$set": {'groups': current_groups}})
            print("User {0} added to group {1}".format(user_email, group_name))
    elif action == 'remove':
        if group['_id'] not in current_groups:
            print("User {0} is not in group {1}".format(user_email, group_name))
        else:
            current_groups.remove(group['_id'])
            db.users.update({'_id': user['_id']},
                            {"$set": {'groups': current_groups}})
            print("User {0} removed from group {1}".format(
                user_email, group_name))


def populate_node_types(old_ids=None):
    if old_ids is None:
        old_ids = {}

    node_types_collection = app.data.driver.db['node_types']

    def mix_node_type(old_id, node_type_dict):
        # Carry over the Eve parameters of the existing node type.
        node_type = node_types_collection.find_one({'_id': old_id})
        for attr in node_type:
            if attr[0] == '_':
                # Mix with the node's Eve attributes. This is really not needed
                # since the attributes are stripped before doing a put_internal.
                node_type_dict[attr] = node_type[attr]
            elif attr == 'permissions':
                node_type_dict['permissions'] = node_type['permissions']
        return node_type_dict

    def upgrade(node_type, old_ids):
        print("Node {0}".format(node_type['name']))
        node_name = node_type['name']
        if node_name in old_ids:
            node_id = old_ids[node_name]
            node_type = mix_node_type(node_id, node_type)

            # Remove internal fields that would cause a validation error.
            internal_fields = ['_id', '_etag', '_updated', '_created']
            for field in internal_fields:
                node_type.pop(field, None)
            p = put_internal('node_types', node_type, **{'_id': node_id})
        else:
            print("Making the node")
            print(node_type)
            post_item('node_types', node_type)

    upgrade(node_type_project, old_ids)
    upgrade(node_type_group, old_ids)
    upgrade(node_type_asset, old_ids)
    upgrade(node_type_storage, old_ids)
    upgrade(node_type_comment, old_ids)
    upgrade(node_type_blog, old_ids)
    upgrade(node_type_post, old_ids)
    upgrade(node_type_texture, old_ids)
    upgrade(node_type_group_texture, old_ids)


@manager.command
def add_parent_to_nodes():
    """Finds the parent project of every node, stores it in the node's
    'project' field, and removes nodes for which no project can be found.
    """
    import codecs
    import sys

    UTF8Writer = codecs.getwriter('utf8')
    sys.stdout = UTF8Writer(sys.stdout)

    nodes_collection = app.data.driver.db['nodes']

    def find_parent_project(node):
        if node and 'parent' in node:
            parent = nodes_collection.find_one({'_id': node['parent']})
            return find_parent_project(parent)
        if node:
            return node
        else:
            return None

    nodes = nodes_collection.find()
    nodes_index = 0
    nodes_orphan = 0
    for node in nodes:
        nodes_index += 1
        # Project nodes themselves (identified by the hard-coded node type id)
        # don't get a parent project.
        if node['node_type'] == ObjectId("55a615cfea893bd7d0489f2d"):
            print(u"Skipping project node - {0}".format(node['name']))
        else:
            project = find_parent_project(node)
            if project:
                nodes_collection.update({'_id': node['_id']},
                                        {"$set": {'project': project['_id']}})
                print(u"{0} {1}".format(node['_id'], node['name']))
            else:
                nodes_orphan += 1
                nodes_collection.remove({'_id': node['_id']})
                print("Removed {0} {1}".format(node['_id'], node['name']))

    print("Edited {0} nodes".format(nodes_index))
    print("Orphan {0} nodes".format(nodes_orphan))


@manager.command
def remove_children_files():
    """Remove any file object with a parent field."""
    files_collection = app.data.driver.db['files']
    for f in files_collection.find():
        if 'parent' in f:
            file_id = f['_id']
            # Delete the child object.
            files_collection.remove({'_id': file_id})
            print("deleted {0}".format(file_id))


@manager.command
def make_project_public(project_id):
    """Convert every node of a project from pending to public."""

    DRY_RUN = False
    nodes_collection = app.data.driver.db['nodes']
    for n in nodes_collection.find({'project': ObjectId(project_id)}):
        n['properties']['status'] = 'published'
        print(u"Publishing {0} {1}".format(n['_id'], n['name'].encode('ascii', 'ignore')))
        if not DRY_RUN:
            put_item('nodes', n)


@manager.command
def convert_assets_to_textures(project_id):
    """Get any node of type asset in a certain project and convert it to a
    node_type texture.
    """

    DRY_RUN = False

    node_types_collection = app.data.driver.db['node_types']
    files_collection = app.data.driver.db['files']
    nodes_collection = app.data.driver.db['nodes']

    def parse_name(name):
        """Parse a texture name to infer its map type and tileability."""
        variation = 'col'
        is_tileable = False
        variations = ['_bump', '_spec', '_nor', '_col', '_translucency']
        for v in variations:
            if v in name:
                variation = v[1:]
                break
        if '_tileable' in name:
            is_tileable = True
        return dict(variation=variation, is_tileable=is_tileable)

    def make_texture_node(base_node, files, parent_id=None):
        texture_node_type = node_types_collection.find_one({'name': 'texture'})
        files_list = []
        is_tileable = False

        if parent_id is None:
            parent_id = base_node['parent']
        else:
            print("Using provided parent {0}".format(parent_id))

        # Create a list with all the file variations for the texture.
        for f in files:
            print("Processing {1} {0}".format(f['name'], f['_id']))
            attributes = parse_name(f['name'])
            if attributes['is_tileable']:
                is_tileable = True
            file_entry = dict(
                file=f['properties']['file'],
                is_tileable=attributes['is_tileable'],
                map_type=attributes['variation'])
            files_list.append(file_entry)
        # Get the first file from the files list and use it as the base for
        # some node properties.
        first_file = files_collection.find_one({'_id': files[0]['properties']['file']})
        if 'picture' in base_node and base_node['picture'] is not None:
            picture = base_node['picture']
        else:
            picture = first_file['_id']
        if 'height' in first_file:
            node = dict(
                name=base_node['name'],
                picture=picture,
                parent=parent_id,
                project=base_node['project'],
                user=base_node['user'],
                node_type=texture_node_type['_id'],
                properties=dict(
                    status=base_node['properties']['status'],
                    files=files_list,
                    resolution="{0}x{1}".format(first_file['height'], first_file['width']),
                    is_tileable=is_tileable,
                    is_landscape=(first_file['height'] < first_file['width']),
                    aspect_ratio=round(
                        (first_file['width'] / first_file['height']), 2)
                )
            )
            print("Making {0}".format(node['name']))
            if not DRY_RUN:
                p = post_internal('nodes', node)
                if p[0]['_status'] == 'ERR':
                    import pprint
                    pprint.pprint(node)

    for n in nodes_collection.find({'project': ObjectId(project_id)}):
        n_type = node_types_collection.find_one({'_id': n['node_type']})
        processed_nodes = []
        if n_type['name'] == 'group' and n['name'].startswith('_'):
            print("Processing {0}".format(n['name']))
            # Get the content of the group.
            children = [c for c in nodes_collection.find({'parent': n['_id']})]
            make_texture_node(children[0], children, parent_id=n['parent'])
            processed_nodes += children
            processed_nodes.append(n)
        elif n_type['name'] == 'group':
            # Change the group type to texture group.
            node_type_texture = node_types_collection.find_one(
                {'name': 'group_texture'})
            n['node_type'] = node_type_texture['_id']
            n['properties'].pop('notes', None)
            print("Updating {0}".format(n['name']))
            if not DRY_RUN:
                put_item('nodes', n)
        # Delete the processed nodes.
        for node in processed_nodes:
            print("Removing {0} {1}".format(node['_id'], node['name']))
            if not DRY_RUN:
                nodes_collection.remove({'_id': node['_id']})
    # Make a texture out of every single image.
    for n in nodes_collection.find({'project': ObjectId(project_id)}):
        n_type = node_types_collection.find_one({'_id': n['node_type']})
        if n_type['name'] == 'asset':
            make_texture_node(n, [n])
            # Delete the processed node.
            print("Removing {0} {1}".format(n['_id'], n['name']))
            if not DRY_RUN:
                nodes_collection.remove({'_id': n['_id']})


@manager.command
def set_attachment_names():
    """Loop through all existing nodes and assign proper ContentDisposition
    metadata to referenced files that are using GCS.
    """
    from application import update_file_name
    nodes_collection = app.data.driver.db['nodes']
    for n in nodes_collection.find():
        print("Updating node {0}".format(n['_id']))
        update_file_name(n)


@manager.command
def files_verify_project():
    """Verify nodes for missing or conflicting file references."""
    nodes_collection = app.data.driver.db['nodes']
    files_collection = app.data.driver.db['files']
    issues = dict(missing=[], conflicting=[], processing=[])

    def _parse_file(item, file_id):
        f = files_collection.find_one({'_id': file_id})
        if f:
            if 'project' in item and 'project' in f:
                if item['project'] != f['project']:
                    issues['conflicting'].append(item['_id'])
            if 'status' in item['properties'] \
                    and item['properties']['status'] == 'processing':
                issues['processing'].append(item['_id'])
        else:
            issues['missing'].append(
                "{0} missing {1}".format(item['_id'], file_id))

    for item in nodes_collection.find():
        print("Verifying node {0}".format(item['_id']))
        if 'file' in item['properties']:
            _parse_file(item, item['properties']['file'])
        elif 'files' in item['properties']:
            for f in item['properties']['files']:
                _parse_file(item, f['file'])

    print("===")
    print("Issues detected:")
    for k, v in issues.iteritems():
        print("{0}:".format(k))
        for i in v:
            print(i)
        print("===")


def replace_node_type(project, node_type_name, new_node_type):
    """Update or create the specified node type. We rely on the fact that
    node_types have a unique name in a project.
    """

    old_node_type = next(
        (item for item in project['node_types']
         if item.get('name') and item['name'] == node_type_name),
        None)
    if old_node_type:
        for i, v in enumerate(project['node_types']):
            if v['name'] == node_type_name:
                project['node_types'][i] = new_node_type
    else:
        project['node_types'].append(new_node_type)
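

# Note: replace_node_type mutates the passed-in project dict in place and
# returns nothing; the caller is responsible for persisting the change, as
# project_upgrade_node_types below does with put_item.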


@manager.command
def project_upgrade_node_types(project_id):
    projects_collection = app.data.driver.db['projects']
    project = projects_collection.find_one({'_id': ObjectId(project_id)})
    replace_node_type(project, 'group', node_type_group)
    replace_node_type(project, 'asset', node_type_asset)
    replace_node_type(project, 'storage', node_type_storage)
    replace_node_type(project, 'comment', node_type_comment)
    replace_node_type(project, 'blog', node_type_blog)
    replace_node_type(project, 'post', node_type_post)
    replace_node_type(project, 'texture', node_type_texture)
    put_item('projects', project)


@manager.command
def test_put_item(node_id):
    import pprint
    nodes_collection = app.data.driver.db['nodes']
    node = nodes_collection.find_one(ObjectId(node_id))
    pprint.pprint(node)
    put_item('nodes', node)


@manager.command
def test_post_internal(node_id):
    import pprint
    nodes_collection = app.data.driver.db['nodes']
    node = nodes_collection.find_one(ObjectId(node_id))
    internal_fields = ['_id', '_etag', '_updated', '_created']
    for field in internal_fields:
        node.pop(field, None)
    pprint.pprint(node)
    print(post_internal('nodes', node))


@manager.command
def algolia_push_users():
    """Loop through all users and push them to Algolia."""
    from application.utils.algolia import algolia_index_user_save
    users_collection = app.data.driver.db['users']
    for user in users_collection.find():
        print("Pushing {0}".format(user['username']))
        algolia_index_user_save(user)


@manager.command
def algolia_push_nodes():
    """Loop through all nodes and push them to Algolia."""
    from application.utils.algolia import algolia_index_node_save
    nodes_collection = app.data.driver.db['nodes']
    for node in nodes_collection.find():
        print(u"Pushing {0}: {1}".format(node['_id'], node['name'].encode(
            'ascii', 'ignore')))
        algolia_index_node_save(node)


@manager.command
def files_make_public_t():
    """Loop through all files and, if they are images on GCS, make the
    size-t variation public.
    """
    from gcloud.exceptions import InternalServerError
    from application.utils.gcs import GoogleCloudStorageBucket
    files_collection = app.data.driver.db['files']
    for f in files_collection.find({'backend': 'gcs'}):
        if 'variations' in f:
            variation_t = next((item for item in f['variations']
                                if item['size'] == 't'), None)
            if variation_t:
                try:
                    storage = GoogleCloudStorageBucket(str(f['project']))
                    blob = storage.Get(variation_t['file_path'], to_dict=False)
                    if blob:
                        try:
                            print("Making blob public: {0}".format(blob.path))
                            blob.make_public()
                        except InternalServerError:
                            print("Internal Server Error")
                        except Exception:
                            pass
                except InternalServerError:
                    print("Internal Server Error")
                except Exception:
                    pass


@manager.command
def subscribe_node_owners():
    """Automatically subscribe node owners to notifications for items created
    in the past.
    """
    from application import after_inserting_nodes
    nodes_collection = app.data.driver.db['nodes']
    for n in nodes_collection.find():
        if 'parent' in n:
            after_inserting_nodes([n])


@manager.command
def refresh_project_links(project, chunk_size=50, quiet=False):
    """Regenerates almost-expired file links for a certain project."""

    if quiet:
        from application import log

        logging.getLogger().setLevel(logging.WARNING)
        log.setLevel(logging.WARNING)

    chunk_size = int(chunk_size)  # CLI parameters are passed as strings

    from application.modules import file_storage
    file_storage.refresh_links_for_project(project, chunk_size, 2 * 3600)
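

# Example invocation of refresh_project_links (assuming flask-script's default
# mapping of keyword arguments onto options):
#     python manage.py refresh_project_links <project> --chunk_size 100 --quiet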


@manager.command
def expire_all_project_links(project_uuid):
    """Expires all file links for a certain project without refreshing.

    This is just for testing.
    """

    import datetime
    import bson.tz_util

    files_collection = app.data.driver.db['files']

    now = datetime.datetime.now(tz=bson.tz_util.utc)
    expires = now - datetime.timedelta(days=1)

    result = files_collection.update_many(
        {'project': ObjectId(project_uuid)},
        {'$set': {'link_expires': expires}}
    )

    print('Expired %i links' % result.matched_count)
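

# Running "python manage.py" without arguments (or with --help) should make
# flask-script print the list of commands registered above.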


if __name__ == '__main__':
    manager.run()