Introducing indexing for nodes

This commit is contained in:
2016-02-26 16:17:38 +01:00
parent ea7e5a0955
commit eb92d179e8
5 changed files with 76 additions and 25 deletions

View File

@@ -104,8 +104,10 @@ if 'ALGOLIA_USER' in app.config:
app.config['ALGOLIA_USER'], app.config['ALGOLIA_USER'],
app.config['ALGOLIA_API_KEY']) app.config['ALGOLIA_API_KEY'])
algolia_index_users = client.init_index(app.config['ALGOLIA_INDEX_USERS']) algolia_index_users = client.init_index(app.config['ALGOLIA_INDEX_USERS'])
algolia_index_nodes = client.init_index(app.config['ALGOLIA_INDEX_NODES'])
else: else:
algolia_index_users = None algolia_index_users = None
algolia_index_nodes = None
# Encoding backend # Encoding backend
if app.config['ENCODING_BACKEND'] == 'zencoder': if app.config['ENCODING_BACKEND'] == 'zencoder':
@@ -115,10 +117,12 @@ else:
from application.utils.authentication import validate_token from application.utils.authentication import validate_token
from application.utils.authorization import check_permissions from application.utils.authorization import check_permissions
from application.utils.cdn import hash_file_path
from application.utils.gcs import GoogleCloudStorageBucket
from application.utils.gcs import update_file_name from application.utils.gcs import update_file_name
from application.utils.algolia import algolia_index_user_save from application.utils.algolia import algolia_index_user_save
from application.utils.algolia import algolia_index_node_save
from modules.file_storage import process_file
from modules.file_storage import delete_file
from modules.file_storage import generate_link
def before_returning_item_permissions(response): def before_returning_item_permissions(response):
@@ -136,6 +140,10 @@ def before_replacing_node(item, original):
check_permissions(original, 'PUT') check_permissions(original, 'PUT')
update_file_name(item) update_file_name(item)
def after_replacing_node(item, original):
    """Push the updated node document to the Algolia search index."""
    algolia_index_node_save(item)
def before_inserting_nodes(items): def before_inserting_nodes(items):
"""Before inserting a node in the collection we check if the user is allowed """Before inserting a node in the collection we check if the user is allowed
and we append the project id to it. and we append the project id to it.
@@ -231,6 +239,7 @@ app.on_fetched_resource_nodes += resource_parse_attachments
app.on_fetched_item_node_types += before_returning_item_permissions app.on_fetched_item_node_types += before_returning_item_permissions
app.on_fetched_resource_node_types += before_returning_resource_permissions app.on_fetched_resource_node_types += before_returning_resource_permissions
app.on_replace_nodes += before_replacing_node app.on_replace_nodes += before_replacing_node
app.on_replaced_nodes += after_replacing_node
app.on_insert_nodes += before_inserting_nodes app.on_insert_nodes += before_inserting_nodes
app.on_fetched_item_projects += before_returning_item_permissions app.on_fetched_item_projects += before_returning_item_permissions
app.on_fetched_item_projects += project_node_type_has_method app.on_fetched_item_projects += project_node_type_has_method
@@ -252,9 +261,6 @@ def after_replacing_user(item, original):
app.on_post_GET_users += post_GET_user app.on_post_GET_users += post_GET_user
app.on_replace_users += after_replacing_user app.on_replace_users += after_replacing_user
from modules.file_storage import process_file
from modules.file_storage import delete_file
def post_POST_files(request, payload): def post_POST_files(request, payload):
"""After an file object has been created, we do the necessary processing """After an file object has been created, we do the necessary processing
and further update it. and further update it.
@@ -264,22 +270,6 @@ def post_POST_files(request, payload):
app.on_post_POST_files += post_POST_files app.on_post_POST_files += post_POST_files
# Hook to check the backend of a file resource, to build an appropriate link
# that can be used by the client to retrieve the actual file.
def generate_link(backend, file_path, project_id=None):
    """Build a link the client can use to retrieve the actual file.

    The shape of the link depends on the storage backend of the file;
    unknown backends yield ``None``.
    """
    if backend == 'gcs':
        # Signed URL from the project's Google Cloud Storage bucket.
        blob = GoogleCloudStorageBucket(project_id).Get(file_path)
        return blob['signed_url'] if blob else None
    if backend == 'pillar':
        # Serve the file straight from this application.
        return url_for('file_storage.index', file_name=file_path,
                       _external=True, _scheme=app.config['SCHEME'])
    if backend == 'cdnsun':
        return hash_file_path(file_path, None)
    return None
def before_returning_file(response): def before_returning_file(response):
# TODO: add project id to all files # TODO: add project id to all files
project_id = None if 'project' not in response else str(response['project']) project_id = None if 'project' not in response else str(response['project'])

View File

@@ -7,6 +7,7 @@ from flask import Blueprint
from flask import abort from flask import abort
from flask import jsonify from flask import jsonify
from flask import send_from_directory from flask import send_from_directory
from flask import url_for
from eve.methods.put import put_internal from eve.methods.put import put_internal
from application import app from application import app
from application.utils.imaging import generate_local_thumbnails from application.utils.imaging import generate_local_thumbnails
@@ -14,6 +15,7 @@ from application.utils.imaging import get_video_data
from application.utils.imaging import ffmpeg_encode from application.utils.imaging import ffmpeg_encode
from application.utils.storage import remote_storage_sync from application.utils.storage import remote_storage_sync
from application.utils.storage import push_to_storage from application.utils.storage import push_to_storage
from application.utils.cdn import hash_file_path
from application.utils.gcs import GoogleCloudStorageBucket from application.utils.gcs import GoogleCloudStorageBucket
from application.utils.encoding import Encoder from application.utils.encoding import Encoder
@@ -222,8 +224,6 @@ def process_file(src_file):
p = Process(target=push_to_storage, args=( p = Process(target=push_to_storage, args=(
str(src_file['project']), sync_path)) str(src_file['project']), sync_path))
p.start() p.start()
else:
sync_path = file_abs_path
# Update the original file with additional info, e.g. image resolution # Update the original file with additional info, e.g. image resolution
r = put_internal('files', src_file, **{'_id': ObjectId(file_id)}) r = put_internal('files', src_file, **{'_id': ObjectId(file_id)})
@@ -251,3 +251,25 @@ def delete_file(file_item):
# Finally remove the original file # Finally remove the original file
process_file_delete(file_item) process_file_delete(file_item)
def generate_link(backend, file_path, project_id=None, is_public=False):
    """Build a link the client can use to retrieve the actual file.

    The shape of the link depends on the storage backend of the file.
    For GCS files, *is_public* selects the blob's public URL instead of a
    signed one. Unknown backends yield ``None``.
    """
    if backend == 'gcs':
        blob = GoogleCloudStorageBucket(project_id).Get(file_path)
        if not blob:
            return None
        # Public blobs need no signing; private ones get a signed URL.
        return blob['public_url'] if is_public else blob['signed_url']
    if backend == 'pillar':
        # Serve the file straight from this application.
        return url_for('file_storage.index', file_name=file_path,
                       _external=True, _scheme=app.config['SCHEME'])
    if backend == 'cdnsun':
        return hash_file_path(file_path, None)
    return None

View File

@@ -1,4 +1,8 @@
from bson import ObjectId
from application import app
from application import algolia_index_users from application import algolia_index_users
from application import algolia_index_nodes
from application.modules.file_storage import generate_link
def algolia_index_user_save(user): def algolia_index_user_save(user):
# Define accepted roles # Define accepted roles
@@ -18,3 +22,34 @@ def algolia_index_user_save(user):
'groups': user['groups'], 'groups': user['groups'],
'email': user['email'] 'email': user['email']
}) })
def algolia_index_node_save(node):
    """Push a node to the Algolia search index, if indexing is enabled.

    Only a few searchable node types are indexed. The indexed payload
    embeds the parent project's id and name and, when available, the
    node's description and a public link to the 't' (thumbnail) size
    variation of its picture file.
    """
    accepted_node_types = ('asset', 'texture', 'group')
    # Check index availability first (cheap), and use .get() so nodes
    # without a 'node_type' key are skipped instead of raising KeyError.
    if not algolia_index_nodes or node.get('node_type') not in accepted_node_types:
        return
    projects_collection = app.data.driver.db['projects']
    project = projects_collection.find_one({'_id': ObjectId(node['project'])})
    node_ob = {
        'objectID': node['_id'],
        'name': node['name'],
        'project': {
            '_id': project['_id'],
            'name': project['name'],
        },
    }
    if node.get('description'):
        node_ob['description'] = node['description']
    if node.get('picture'):
        files_collection = app.data.driver.db['files']
        picture = files_collection.find_one({'_id': ObjectId(node['picture'])})
        # Search results show the 't' (thumbnail) variation, if one exists.
        variation_t = next((item for item in picture['variations']
                            if item['size'] == 't'), None)
        if variation_t:
            node_ob['picture'] = generate_link(
                picture['backend'], variation_t['file_path'],
                project_id=str(picture['project']), is_public=True)
    algolia_index_nodes.save_object(node_ob)

View File

@@ -101,7 +101,9 @@ class GoogleCloudStorageBucket(object):
name=os.path.basename(blob.name), name=os.path.basename(blob.name),
size=blob.size, size=blob.size,
content_type=blob.content_type, content_type=blob.content_type,
signed_url=blob.generate_signed_url(expiration, credentials=self.credentials_p12)) signed_url=blob.generate_signed_url(
expiration, credentials=self.credentials_p12),
public_url=blob.public_url)
def Get(self, path, to_dict=True): def Get(self, path, to_dict=True):

View File

@@ -20,7 +20,6 @@ _file_embedded_schema = {
} }
} }
users_schema = { users_schema = {
'full_name': { 'full_name': {
'type': 'string', 'type': 'string',
@@ -412,6 +411,9 @@ files_schema = {
'schema': { 'schema': {
'type': 'dict', 'type': 'dict',
'schema': { 'schema': {
'is_public': { # If True, the link will not be hashed or signed
'type': 'boolean'
},
'content_type': { # MIME type image/png video/mp4 'content_type': { # MIME type image/png video/mp4
'type': 'string', 'type': 'string',
'required': True, 'required': True,