Merge branch 'master' into wip-asset-obscure

Pablo Vazquez 2018-03-28 12:42:42 +02:00
commit dcde2a4551
55 changed files with 945 additions and 222 deletions

View File

@ -101,6 +101,7 @@ class PillarServer(BlinkerCompatibleEve):
self.log = logging.getLogger('%s.%s' % (__name__, self.__class__.__name__))
self.log.info('Creating new instance from %r', self.app_root)
self._config_url_map()
self._config_auth_token_hmac_key()
self._config_tempdirs()
self._config_git()
@ -171,6 +172,19 @@ class PillarServer(BlinkerCompatibleEve):
if self.config['DEBUG']:
log.info('Pillar starting, debug=%s', self.config['DEBUG'])
def _config_url_map(self):
"""Extend Flask url_map with our own converters."""
import secrets, re
from . import flask_extra
if not self.config.get('STATIC_FILE_HASH'):
self.log.warning('STATIC_FILE_HASH is empty, generating random one')
h = re.sub(r'[_.~-]', '', secrets.token_urlsafe())[:8]
self.config['STATIC_FILE_HASH'] = h
self.url_map.converters['hashed_path'] = flask_extra.HashedPathConverter
def _config_auth_token_hmac_key(self):
"""Load AUTH_TOKEN_HMAC_KEY, falling back to SECRET_KEY."""
@ -209,6 +223,7 @@ class PillarServer(BlinkerCompatibleEve):
self.log.info('Git revision %r', self.config['GIT_REVISION'])
def _config_sentry(self):
# TODO(Sybren): keep Sentry unconfigured when running CLI commands.
sentry_dsn = self.config.get('SENTRY_CONFIG', {}).get('dsn')
if self.config.get('TESTING') or sentry_dsn in {'', '-set-in-config-local-'}:
self.log.warning('Sentry NOT configured.')
@ -529,7 +544,7 @@ class PillarServer(BlinkerCompatibleEve):
from pillar.web.staticfile import PillarStaticFile
view_func = PillarStaticFile.as_view(endpoint_name, static_folder=static_folder)
self.add_url_rule('%s/<path:filename>' % url_prefix, view_func=view_func)
self.add_url_rule(f'{url_prefix}/<hashed_path:filename>', view_func=view_func)
def process_extensions(self):
"""This is about Eve extensions, not Pillar extensions."""
@ -793,6 +808,19 @@ class PillarServer(BlinkerCompatibleEve):
return patch_internal(resource, payload=payload, concurrency_check=concurrency_check,
skip_validation=skip_validation, **lookup)[:4]
def delete_internal(self, resource: str, concurrency_check=False,
suppress_callbacks=False, **lookup):
"""Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
from eve.methods.delete import deleteitem_internal
url = self.config['URLS'][resource]
path = '%s/%s/%s' % (self.api_prefix, url, lookup['_id'])
with self.__fake_request_url_rule('DELETE', path):
return deleteitem_internal(resource,
concurrency_check=concurrency_check,
suppress_callbacks=suppress_callbacks,
**lookup)[:4]
def _list_routes(self):
from pprint import pprint
from flask import url_for
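
For context, a minimal usage sketch of the new delete_internal() helper above, mirroring how the maintenance CLI further down calls it. The project ID is a made-up placeholder, and the call is assumed to run inside an application context.

from bson import ObjectId
from flask import current_app  # within Pillar this proxy resolves to the PillarServer instance

project_id = ObjectId(24 * 'a')  # placeholder ID, not a real project
# Same (response, _, _, status) return convention as put_internal()/patch_internal().
r, _, _, status = current_app.delete_internal('projects', _id=project_id)
if status != 204:
    raise ValueError(f'Error {status} deleting {project_id}: {r}')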

View File

@ -1,10 +1,13 @@
import logging
from bson import ObjectId, tz_util
from datetime import datetime, tzinfo
from datetime import datetime
import cerberus.errors
from eve.io.mongo import Validator
from flask import current_app
import pillar.markdown
log = logging.getLogger(__name__)
@ -102,6 +105,9 @@ class ValidateCustomFields(Validator):
val = v.validate(value)
if val:
# This ensures the modifications made by v's coercion rules are
# visible to this validator's output.
self.current[field] = v.current
return True
log.warning('Error validating properties for node %s: %s', self.document, v.errors)
@ -152,3 +158,52 @@ class ValidateCustomFields(Validator):
if not isinstance(value, (bytes, bytearray)):
self._error(field_name, f'wrong value type {type(value)}, expected bytes or bytearray')
def _validate_coerce(self, coerce, field: str, value):
"""Override Cerberus' _validate_coerce method for richer features.
This now supports named coercion functions (available in Cerberus 1.0+)
and passes the field name to coercion functions as well.
"""
if isinstance(coerce, str):
coerce = getattr(self, f'_normalize_coerce_{coerce}')
try:
return coerce(field, value)
except (TypeError, ValueError):
self._error(field, cerberus.errors.ERROR_COERCION_FAILED.format(field))
def _normalize_coerce_markdown(self, field: str, value):
"""Render Markdown from this field into {field}_html.
The field name MUST NOT end in `_html`. The Markdown is read from this
field and the rendered HTML is written to the field `{field}_html`.
"""
html = pillar.markdown.markdown(value)
field_name = pillar.markdown.cache_field_name(field)
self.current[field_name] = html
return value
if __name__ == '__main__':
from pprint import pprint
v = ValidateCustomFields()
v.schema = {
'foo': {'type': 'string', 'coerce': 'markdown'},
'foo_html': {'type': 'string'},
'nested': {
'type': 'dict',
'schema': {
'bar': {'type': 'string', 'coerce': 'markdown'},
'bar_html': {'type': 'string'},
}
}
}
print('Valid :', v.validate({
'foo': '# Title\n\nHeyyyy',
'nested': {'bar': 'bhahaha'},
}))
print('Document:')
pprint(v.document)
print('Errors :', v.errors)

View File

@ -155,7 +155,9 @@ organizations_schema = {
'description': {
'type': 'string',
'maxlength': 256,
'coerce': 'markdown',
},
'_description_html': {'type': 'string'},
'website': {
'type': 'string',
'maxlength': 256,
@ -290,7 +292,9 @@ nodes_schema = {
},
'description': {
'type': 'string',
'coerce': 'markdown',
},
'_description_html': {'type': 'string'},
'picture': _file_embedded_schema,
'order': {
'type': 'integer',
@ -535,7 +539,9 @@ projects_schema = {
},
'description': {
'type': 'string',
'coerce': 'markdown',
},
'_description_html': {'type': 'string'},
# Short summary for the project
'summary': {
'type': 'string',

View File

@ -25,12 +25,11 @@ from flask import url_for, helpers
from pillar.api import utils
from pillar.api.file_storage_backends.gcs import GoogleCloudStorageBucket, \
GoogleCloudStorageBlob
from pillar.api.utils import remove_private_keys
from pillar.api.utils import remove_private_keys, imaging
from pillar.api.utils.authorization import require_login, \
user_matches_roles
from pillar.api.utils.cdn import hash_file_path
from pillar.api.utils.encoding import Encoder
from pillar.api.utils.imaging import generate_local_thumbnails
from pillar.api.file_storage_backends import default_storage_backend, Bucket
from pillar.auth import current_user
@ -97,8 +96,9 @@ def _process_image(bucket: Bucket,
# Generate previews
log.info('Generating thumbnails for file %s', file_id)
src_file['variations'] = generate_local_thumbnails(src_file['name'],
local_file.name)
local_path = pathlib.Path(local_file.name)
name_base = pathlib.Path(src_file['name']).stem
src_file['variations'] = imaging.generate_local_thumbnails(name_base, local_path)
# Send those previews to Google Cloud Storage.
log.info('Uploading %i thumbnails for file %s to Google Cloud Storage '
@ -386,13 +386,8 @@ def before_returning_file(response):
def strip_link_and_variations(response):
# Check the access level of the user.
if current_user.is_anonymous:
has_full_access = False
else:
user_roles = current_user.roles
# TODO: convert to a capability and check for that.
access_roles = current_app.config['FULL_FILE_ACCESS_ROLES']
has_full_access = bool(user_roles.intersection(access_roles))
capability = current_app.config['FULL_FILE_ACCESS_CAP']
has_full_access = current_user.has_cap(capability)
# Strip all file variations (unless image) and link to the actual file.
if not has_full_access:

View File

@ -2,10 +2,6 @@ node_type_blog = {
'name': 'blog',
'description': 'Container for node_type post.',
'dyn_schema': {
# Path for a custom template to be used for rendering the posts
'template': {
'type': 'string',
},
'categories': {
'type': 'list',
'schema': {
@ -17,5 +13,5 @@ node_type_blog = {
'categories': {},
'template': {},
},
'parent': ['project',],
'parent': ['project', ],
}

View File

@ -2,16 +2,14 @@ node_type_comment = {
'name': 'comment',
'description': 'Comments for asset nodes, pages, etc.',
'dyn_schema': {
# The actual comment content (initially Markdown format)
# The actual comment content
'content': {
'type': 'string',
'minlength': 5,
'required': True,
'coerce': 'markdown',
},
# The converted-to-HTML content.
'content_html': {
'type': 'string',
},
'_content_html': {'type': 'string'},
'status': {
'type': 'string',
'allowed': [

View File

@ -4,13 +4,14 @@ node_type_post = {
'name': 'post',
'description': 'A blog post, for any project',
'dyn_schema': {
# The blogpost content (Markdown format)
'content': {
'type': 'string',
'minlength': 5,
'maxlength': 90000,
'required': True
'required': True,
'coerce': 'markdown',
},
'_content_html': {'type': 'string'},
'status': {
'type': 'string',
'allowed': [

View File

@ -378,30 +378,6 @@ def after_deleting_node(item):
index.node_delete.delay(str(item['_id']))
only_for_comments = only_for_node_type_decorator('comment')
@only_for_comments
def convert_markdown(node, original=None):
"""Converts comments from Markdown to HTML.
Always does this on save, even when the original Markdown hasn't changed,
because our Markdown -> HTML conversion rules might have.
"""
try:
content = node['properties']['content']
except KeyError:
node['properties']['content_html'] = ''
else:
node['properties']['content_html'] = pillar.markdown.markdown(content)
def nodes_convert_markdown(nodes):
for node in nodes:
convert_markdown(node)
only_for_textures = only_for_node_type_decorator('texture')
@ -433,7 +409,6 @@ def setup_app(app, url_prefix):
app.on_fetched_resource_nodes += before_returning_nodes
app.on_replace_nodes += before_replacing_node
app.on_replace_nodes += convert_markdown
app.on_replace_nodes += texture_sort_files
app.on_replace_nodes += deduct_content_type
app.on_replace_nodes += node_set_default_picture
@ -442,11 +417,9 @@ def setup_app(app, url_prefix):
app.on_insert_nodes += before_inserting_nodes
app.on_insert_nodes += nodes_deduct_content_type
app.on_insert_nodes += nodes_set_default_picture
app.on_insert_nodes += nodes_convert_markdown
app.on_insert_nodes += textures_sort_files
app.on_inserted_nodes += after_inserting_nodes
app.on_update_nodes += convert_markdown
app.on_update_nodes += texture_sort_files
app.on_delete_item_nodes += before_deleting_node

View File

@ -162,7 +162,7 @@ def edit_comment(user_id, node_id, patch):
log.info('User %s edited comment %s', user_id, node_id)
# Fetch the new content, so the client can show these without querying again.
node = nodes_coll.find_one(node_id, projection={'properties.content_html': 1})
node = nodes_coll.find_one(node_id, projection={'properties._content_html': 1})
return status, node

View File

@ -158,16 +158,21 @@ class MetaFalsey(type):
return False
class DoesNotExist(object, metaclass=MetaFalsey):
class DoesNotExistMeta(MetaFalsey):
def __repr__(cls) -> str:
return 'DoesNotExist'
class DoesNotExist(object, metaclass=DoesNotExistMeta):
"""Returned as value by doc_diff if a value does not exist."""
def doc_diff(doc1, doc2, falsey_is_equal=True):
def doc_diff(doc1, doc2, *, falsey_is_equal=True, superkey: str = None):
"""Generator, yields differences between documents.
Yields changes as (key, value in doc1, value in doc2) tuples, where
the value can also be the DoesNotExist class. Does not report changed
private keys (i.e. starting with underscores).
private keys (i.e. the standard Eve keys starting with underscores).
Sub-documents (i.e. dicts) are recursed, and dot notation is used
for the keys if changes are found.
@ -176,25 +181,60 @@ def doc_diff(doc1, doc2, falsey_is_equal=True):
function won't report differences between DoesNotExist, False, '', and 0.
"""
for key in set(doc1.keys()).union(set(doc2.keys())):
if isinstance(key, str) and key[0] == '_':
continue
private_keys = {'_id', '_etag', '_deleted', '_updated', '_created'}
val1 = doc1.get(key, DoesNotExist)
val2 = doc2.get(key, DoesNotExist)
def combine_key(some_key):
"""Combine this key with the superkey.
# Only recurse if both values are dicts
if isinstance(val1, dict) and isinstance(val2, dict):
for subkey, subval1, subval2 in doc_diff(val1, val2):
yield '%s.%s' % (key, subkey), subval1, subval2
continue
Keep the key type the same, unless we have to combine with a superkey.
"""
if not superkey:
return some_key
if isinstance(some_key, str) and some_key[0] == '[':
return f'{superkey}{some_key}'
return f'{superkey}.{some_key}'
if val1 == val2:
continue
if falsey_is_equal and bool(val1) == bool(val2) == False:
continue
if doc1 is doc2:
return
yield key, val1, val2
if falsey_is_equal and not bool(doc1) and not bool(doc2):
return
if isinstance(doc1, dict) and isinstance(doc2, dict):
for key in set(doc1.keys()).union(set(doc2.keys())):
if key in private_keys:
continue
val1 = doc1.get(key, DoesNotExist)
val2 = doc2.get(key, DoesNotExist)
yield from doc_diff(val1, val2,
falsey_is_equal=falsey_is_equal,
superkey=combine_key(key))
return
if isinstance(doc1, list) and isinstance(doc2, list):
for idx in range(max(len(doc1), len(doc2))):
try:
item1 = doc1[idx]
except IndexError:
item1 = DoesNotExist
try:
item2 = doc2[idx]
except IndexError:
item2 = DoesNotExist
subkey = f'[{idx}]'
if item1 is DoesNotExist or item2 is DoesNotExist:
yield combine_key(subkey), item1, item2
else:
yield from doc_diff(item1, item2,
falsey_is_equal=falsey_is_equal,
superkey=combine_key(subkey))
return
if doc1 != doc2:
yield superkey, doc1, doc2
def random_etag() -> str:
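
A minimal usage sketch of the reworked doc_diff(), based on the behaviour described in its docstring: nested dicts produce dotted keys, lists produce [idx] keys, and missing values are reported as DoesNotExist. The example documents are made up.

from pillar.api.utils import doc_diff

doc_old = {'name': 'Suzanne', 'props': {'status': 'todo'}, 'tags': ['a', 'b']}
doc_new = {'name': 'Suzanne', 'props': {'status': 'done'}, 'tags': ['a']}

for key, val_old, val_new in doc_diff(doc_old, doc_new, falsey_is_equal=False):
    # val_new is the DoesNotExist class for the list item that was removed.
    print(key, val_old, val_new)

# Expected output, in some order:
#   props.status todo done
#   tags[1] b DoesNotExist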

View File

@ -46,10 +46,10 @@ def force_cli_user():
'email': 'local@nowhere',
'username': 'CLI',
})
log.warning('CONSTRUCTED CLI USER %s of type %s', id(CLI_USER), id(type(CLI_USER)))
log.info('CONSTRUCTED CLI USER %s of type %s', id(CLI_USER), id(type(CLI_USER)))
log.warning('Logging in as CLI_USER (%s) of type %s, circumventing authentication.',
id(CLI_USER), id(type(CLI_USER)))
log.info('Logging in as CLI_USER (%s) of type %s, circumventing authentication.',
id(CLI_USER), id(type(CLI_USER)))
g.current_user = CLI_USER
@ -230,7 +230,7 @@ def hash_auth_token(token: str) -> str:
def store_token(user_id, token: str, token_expiry, oauth_subclient_id=False,
org_roles: typing.Set[str]=frozenset()):
org_roles: typing.Set[str] = frozenset()):
"""Stores an authentication token.
:returns: the token document from MongoDB

View File

@ -27,6 +27,12 @@ def check_permissions(collection_name, resource, method, append_allowed_methods=
:param check_node_type: node type to check. Only valid when collection_name='projects'.
:type check_node_type: str
"""
from pillar.auth import get_current_user
from .authentication import CLI_USER
if get_current_user() is CLI_USER:
log.debug('Short-circuiting check_permissions() for CLI user')
return
if not has_permissions(collection_name, resource, method, append_allowed_methods,
check_node_type):

View File

@ -1,54 +1,61 @@
import os
import json
import typing
import os
import pathlib
import subprocess
from PIL import Image
from flask import current_app
# Images with these modes will be thumbed to PNG, others to JPEG.
MODES_FOR_PNG = {'RGBA', 'LA'}
# TODO: refactor to use pathlib.Path and f-strings.
def generate_local_thumbnails(name_base, src):
def generate_local_thumbnails(fp_base: str, src: pathlib.Path):
"""Given a source image, use Pillow to generate thumbnails according to the
application settings.
:param name_base: the thumbnail will get a field 'name': '{basename}-{thumbsize}.jpg'
:type name_base: str
:param fp_base: the thumbnail will get a field
'file_path': '{fp_base}-{thumbsize}.{ext}'
:param src: the path of the image to be thumbnailed
:type src: str
"""
thumbnail_settings = current_app.config['UPLOADS_LOCAL_STORAGE_THUMBNAILS']
thumbnails = []
save_to_base, _ = os.path.splitext(src)
name_base, _ = os.path.splitext(name_base)
for size, settings in thumbnail_settings.items():
dst = '{0}-{1}{2}'.format(save_to_base, size, '.jpg')
name = '{0}-{1}{2}'.format(name_base, size, '.jpg')
im = Image.open(src)
extra_args = {}
# If the source image has transparency, save as PNG
if im.mode in MODES_FOR_PNG:
suffix = '.png'
imformat = 'PNG'
else:
suffix = '.jpg'
imformat = 'JPEG'
extra_args = {'quality': 95}
dst = src.with_name(f'{src.stem}-{size}{suffix}')
if settings['crop']:
resize_and_crop(src, dst, settings['size'])
width, height = settings['size']
im = resize_and_crop(im, settings['size'])
else:
im = Image.open(src)
im.thumbnail(settings['size'], resample=Image.LANCZOS)
width, height = im.size
# If the source image has transparency, save as PNG
if im.mode == 'RGBA':
im.save(dst, format='PNG', optimize=True)
else:
im.save(dst, format='JPEG', optimize=True, quality=95)
width, height = im.size
if imformat == 'JPEG':
im = im.convert('RGB')
im.save(dst, format=imformat, optimize=True, **extra_args)
thumb_info = {'size': size,
'file_path': name,
'local_path': dst,
'length': os.stat(dst).st_size,
'file_path': f'{fp_base}-{size}{suffix}',
'local_path': str(dst),
'length': dst.stat().st_size,
'width': width,
'height': height,
'md5': '',
'content_type': 'image/jpeg'}
'content_type': f'image/{imformat.lower()}'}
if size == 't':
thumb_info['is_public'] = True
@ -58,63 +65,40 @@ def generate_local_thumbnails(name_base, src):
return thumbnails
def resize_and_crop(img_path, modified_path, size, crop_type='middle'):
"""
Resize and crop an image to fit the specified size. Thanks to:
https://gist.github.com/sigilioso/2957026
def resize_and_crop(img: Image, size: typing.Tuple[int, int]) -> Image:
"""Resize and crop an image to fit the specified size.
args:
img_path: path for the image to resize.
modified_path: path to store the modified image.
size: `(width, height)` tuple.
crop_type: can be 'top', 'middle' or 'bottom', depending on this
value, the image will cropped getting the 'top/left', 'middle' or
'bottom/right' of the image to fit the size.
raises:
Exception: if can not open the file in img_path of there is problems
to save the image.
ValueError: if an invalid `crop_type` is provided.
Thanks to: https://gist.github.com/sigilioso/2957026
:param img: opened PIL.Image to work on
:param size: `(width, height)` tuple.
"""
# If height is higher we resize vertically, if not we resize horizontally
img = Image.open(img_path).convert('RGB')
# Get current and desired ratio for the images
img_ratio = img.size[0] / float(img.size[1])
ratio = size[0] / float(size[1])
cur_w, cur_h = img.size # current
img_ratio = cur_w / cur_h
w, h = size # desired
ratio = w / h
# The image is scaled/cropped vertically or horizontally depending on the ratio
if ratio > img_ratio:
img = img.resize((size[0], int(round(size[0] * img.size[1] / img.size[0]))),
Image.ANTIALIAS)
# Crop in the top, middle or bottom
if crop_type == 'top':
box = (0, 0, img.size[0], size[1])
elif crop_type == 'middle':
box = (0, int(round((img.size[1] - size[1]) / 2)), img.size[0],
int(round((img.size[1] + size[1]) / 2)))
elif crop_type == 'bottom':
box = (0, img.size[1] - size[1], img.size[0], img.size[1])
else:
raise ValueError('ERROR: invalid value for crop_type')
uncropped_h = (w * cur_h) // cur_w
img = img.resize((w, uncropped_h), Image.ANTIALIAS)
box = (0, (uncropped_h - h) // 2,
w, (uncropped_h + h) // 2)
img = img.crop(box)
elif ratio < img_ratio:
img = img.resize((int(round(size[1] * img.size[0] / img.size[1])), size[1]),
Image.ANTIALIAS)
# Crop in the top, middle or bottom
if crop_type == 'top':
box = (0, 0, size[0], img.size[1])
elif crop_type == 'middle':
box = (int(round((img.size[0] - size[0]) / 2)), 0,
int(round((img.size[0] + size[0]) / 2)), img.size[1])
elif crop_type == 'bottom':
box = (img.size[0] - size[0], 0, img.size[0], img.size[1])
else:
raise ValueError('ERROR: invalid value for crop_type')
uncropped_w = (h * cur_w) // cur_h
img = img.resize((uncropped_w, h), Image.ANTIALIAS)
box = ((uncropped_w - w) // 2, 0,
(uncropped_w + w) // 2, h)
img = img.crop(box)
else:
img = img.resize((size[0], size[1]),
Image.ANTIALIAS)
img = img.resize((w, h), Image.ANTIALIAS)
# If the scale is the same, we do not need to crop
img.save(modified_path, "JPEG")
return img
def get_video_data(filepath):
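
A worked example of the centre-crop arithmetic in the new resize_and_crop(), using a hypothetical 300x512 portrait source fitted into a 160x160 thumbnail (the same shape as the 'vertical' test case added further down).

w, h = 160, 160          # desired thumbnail size
cur_w, cur_h = 300, 512  # hypothetical portrait source, like the 'vertical' test image
# ratio (160/160 = 1.0) > img_ratio (300/512, about 0.586), so resize_and_crop()
# first scales to the target width and then crops vertically around the centre:
uncropped_h = (w * cur_h) // cur_w   # (160 * 512) // 300 == 273
box = (0, (uncropped_h - h) // 2,    # top    = (273 - 160) // 2 == 56
       w, (uncropped_h + h) // 2)    # bottom = (273 + 160) // 2 == 216
assert box == (0, 56, 160, 216)      # cropped height: 216 - 56 == 160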

View File

@ -263,6 +263,53 @@ def check_home_project_groups():
return bad
@manager_maintenance.option('-g', '--go', dest='go',
action='store_true', default=False,
help='Actually go and perform the changes, without this just '
'shows differences.')
def purge_home_projects(go=False):
"""Deletes all home projects that have no owner."""
from pillar.api.utils.authentication import force_cli_user
force_cli_user()
users_coll = current_app.data.driver.db['users']
proj_coll = current_app.data.driver.db['projects']
good = bad = 0
def bad_projects():
nonlocal good, bad
for proj in proj_coll.find({'category': 'home', '_deleted': {'$ne': True}}):
pid = proj['_id']
uid = proj.get('user')
if not uid:
log.info('Project %s has no user assigned', pid)
bad += 1
yield pid
continue
if users_coll.find({'_id': uid, '_deleted': {'$ne': True}}).count() == 0:
log.info('Project %s has non-existing owner %s', pid, uid)
bad += 1
yield pid
continue
good += 1
if not go:
log.info('Dry run, use --go to actually perform the changes.')
for project_id in bad_projects():
log.info('Soft-deleting project %s', project_id)
if go:
r, _, _, status = current_app.delete_internal('projects', _id=project_id)
if status != 204:
raise ValueError(f'Error {status} deleting {project_id}: {r}')
log.info('%i projects OK, %i projects deleted', good, bad)
return bad
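
A short sketch of driving the new command from Python, mirroring the PurgeHomeProjectsTest added at the bottom of this commit; `app` is assumed to be an already-configured PillarServer instance.

from pillar.cli.maintenance import purge_home_projects

with app.app_context():
    purge_home_projects()         # dry run: only logs what would be deleted
    purge_home_projects(go=True)  # soft-deletes home projects without a valid owner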
@manager_maintenance.command
@manager_maintenance.option('-c', '--chunk', dest='chunk_size', default=50,
help='Number of links to update, use 0 to update all.')
@ -404,79 +451,121 @@ def expire_all_project_links(project_uuid):
print('Expired %i links' % result.matched_count)
@manager_maintenance.command
@manager_maintenance.option('-p', '--project', dest='proj_url', nargs='?',
@manager_maintenance.option('-u', '--url', dest='project_url', nargs='?',
help='Project URL')
@manager_maintenance.option('-a', '--all', dest='all_projects', action='store_true', default=False,
help='Replace on all projects.')
@manager_maintenance.option('-m', '--missing', dest='missing',
action='store_true', default=False,
help='Add missing node types. Note that this may add unwanted ones.')
def replace_pillar_node_type_schemas(proj_url=None, all_projects=False, missing=False):
@manager_maintenance.option('-g', '--go', dest='go',
action='store_true', default=False,
help='Actually go and perform the changes, without this just '
'shows differences.')
@manager_maintenance.option('-i', '--id', dest='project_id', nargs='?',
help='Project ID')
def replace_pillar_node_type_schemas(project_url=None, all_projects=False, missing=False, go=False,
project_id=None):
"""Replaces the project's node type schemas with the standard Pillar ones.
Non-standard node types are left alone.
"""
if bool(proj_url) == all_projects:
log.error('Use either --project or --all.')
if sum([bool(project_url), all_projects, bool(project_id)]) != 1:
log.error('Use either --project, --id, or --all.')
return 1
from pillar.api.utils.authentication import force_cli_user
force_cli_user()
from pillar.api.node_types import PILLAR_NAMED_NODE_TYPES
from pillar.api.utils import remove_private_keys
from pillar.api.utils import remove_private_keys, doc_diff
projects_collection = current_app.db()['projects']
will_would = 'Will' if go else 'Would'
def handle_project(project):
log.info('Handling project %s', project['url'])
is_public_proj = not project.get('is_private', True)
projects_changed = projects_seen = 0
for proj_nt in project['node_types']:
def handle_project(proj):
nonlocal projects_changed, projects_seen
projects_seen += 1
orig_proj = copy.deepcopy(proj)
proj_id = proj['_id']
if 'url' not in proj:
log.warning('Project %s has no URL!', proj_id)
proj_url = proj.get('url', f'-no URL id {proj_id}')
log.debug('Handling project %s', proj_url)
for proj_nt in proj['node_types']:
nt_name = proj_nt['name']
try:
pillar_nt = PILLAR_NAMED_NODE_TYPES[nt_name]
except KeyError:
log.info(' - skipping non-standard node type "%s"', nt_name)
log.debug(' - skipping non-standard node type "%s"', nt_name)
continue
log.info(' - replacing schema on node type "%s"', nt_name)
log.debug(' - replacing schema on node type "%s"', nt_name)
# This leaves node type keys intact that aren't in Pillar's node_type_xxx definitions,
# such as permissions.
# such as permissions. It also keeps form schemas as-is.
pillar_nt.pop('form_schema', None)
proj_nt.update(copy.deepcopy(pillar_nt))
# On our own public projects we want to be able to set license stuff.
if is_public_proj:
proj_nt['form_schema'].pop('license_type', None)
proj_nt['form_schema'].pop('license_notes', None)
# Find new node types that aren't in the project yet.
if missing:
project_ntnames = set(nt['name'] for nt in project['node_types'])
project_ntnames = set(nt['name'] for nt in proj['node_types'])
for nt_name in set(PILLAR_NAMED_NODE_TYPES.keys()) - project_ntnames:
log.info(' - Adding node type "%s"', nt_name)
pillar_nt = PILLAR_NAMED_NODE_TYPES[nt_name]
project['node_types'].append(copy.deepcopy(pillar_nt))
proj['node_types'].append(copy.deepcopy(pillar_nt))
# Use Eve to PUT, so we have schema checking.
db_proj = remove_private_keys(project)
r, _, _, status = current_app.put_internal('projects', db_proj, _id=project['_id'])
if status != 200:
log.error('Error %i storing altered project %s %s', status, project['_id'], r)
raise SystemExit('Error storing project, see log.')
log.info('Project saved successfully.')
proj_has_difference = False
for key, val1, val2 in doc_diff(orig_proj, proj, falsey_is_equal=False):
if not proj_has_difference:
if proj.get('_deleted', False):
deleted = ' (deleted)'
else:
deleted = ''
log.info('%s change project %s%s', will_would, proj_url, deleted)
proj_has_difference = True
log.info(' %30r: %r → %r', key, val1, val2)
projects_changed += proj_has_difference
if go and proj_has_difference:
# Use Eve to PUT, so we have schema checking.
db_proj = remove_private_keys(proj)
try:
r, _, _, status = current_app.put_internal('projects', db_proj, _id=proj_id)
except Exception:
log.exception('Error saving project %s (url=%s)', proj_id, proj_url)
raise SystemExit(5)
if status != 200:
log.error('Error %i storing altered project %s %s', status, proj['_id'], r)
raise SystemExit('Error storing project, see log.')
log.debug('Project saved successfully.')
if not go:
log.info('Not changing anything, use --go to actually go and change things.')
if all_projects:
for project in projects_collection.find():
for project in projects_collection.find({'_deleted': {'$ne': True}}):
handle_project(project)
log.info('%s %d of %d projects',
'Changed' if go else 'Would change',
projects_changed, projects_seen)
return
project = projects_collection.find_one({'url': proj_url})
if project_url:
project = projects_collection.find_one({'url': project_url})
else:
project = projects_collection.find_one({'_id': bson.ObjectId(project_id)})
if not project:
log.error('Project url=%s not found', proj_url)
log.error('Project url=%s id=%s not found', project_url, project_id)
return 3
handle_project(project)

View File

@ -64,9 +64,8 @@ def mass_copy_between_backends(src_backend='cdnsun', dest_backend='gcs'):
log.info('%i files we did not copy', copy_errs)
@manager_operations.command
@manager_operations.option('-p', '--project', dest='dest_proj_url',
help='Destination project URL')
@manager_operations.option('dest_proj_url', help='Destination project URL')
@manager_operations.option('node_uuid', help='ID of the node to move')
@manager_operations.option('-f', '--force', dest='force', action='store_true', default=False,
help='Move even when already at the given project.')
@manager_operations.option('-s', '--skip-gcs', dest='skip_gcs', action='store_true', default=False,

View File

@ -109,8 +109,8 @@ FILE_LINK_VALIDITY = defaultdict(
gcs=3600 * 23, # 23 hours for Google Cloud Storage.
)
# Roles with full GET-access to all variations of files.
FULL_FILE_ACCESS_ROLES = {'admin', 'subscriber', 'demo'}
# Capability with GET-access to all variations of files.
FULL_FILE_ACCESS_CAP = 'subscriber'
# Client and Subclient IDs for Blender ID
BLENDER_ID_CLIENT_ID = 'SPECIAL-SNOWFLAKE-57'
@ -247,3 +247,11 @@ SMTP_TIMEOUT = 30 # timeout in seconds, https://docs.python.org/3/library/smtpl
MAIL_RETRY = 180 # in seconds, delay until trying to send an email again.
MAIL_DEFAULT_FROM_NAME = 'Blender Cloud'
MAIL_DEFAULT_FROM_ADDR = 'cloudsupport@localhost'
SEND_FILE_MAX_AGE_DEFAULT = 3600 * 24 * 365 # seconds
# MUST be 8 characters long, see pillar.flask_extra.HashedPathConverter
# Intended to be changed for every deploy. If it is empty, a random hash will
# be used. Note that this causes extra traffic, since every time the process
# restarts the URLs will be different.
STATIC_FILE_HASH = ''
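
A hypothetical config_local.py override illustrating the comment above: pin the hash per deployment so the generated static URLs stay stable across process restarts. The value shown is just an example.

# config_local.py (deployment-specific)
# MUST be 8 characters long, see pillar.flask_extra.HashedPathConverter
STATIC_FILE_HASH = 'deadbeef'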

View File

@ -1,5 +1,34 @@
import re
import functools
import flask
import werkzeug.routing
class HashedPathConverter(werkzeug.routing.PathConverter):
"""Allows for files `xxx.yyy.js` to be served as `xxx.yyy.abc123.js`.
The hash code is placed before the last extension.
"""
weight = 300
# Hash length is hard-coded to 8 characters for now.
hash_re = re.compile(r'\.([a-zA-Z0-9]{8})(?=\.[^.]+$)')
@functools.lru_cache(maxsize=1024)
def to_python(self, from_url: str) -> str:
return self.hash_re.sub('', from_url)
@functools.lru_cache(maxsize=1024)
def to_url(self, filepath: str) -> str:
try:
dotidx = filepath.rindex('.')
except ValueError:
# Happens when there is no dot. Very unlikely.
return filepath
current_hash = flask.current_app.config['STATIC_FILE_HASH']
before, after = filepath[:dotidx], filepath[dotidx:]
return f'{before}.{current_hash}{after}'
def add_response_headers(headers: dict):
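
A minimal round-trip sketch of the converter, assuming a Flask app configured with STATIC_FILE_HASH = 'abcd1234' as in the test settings further down; the filename is an arbitrary example.

import flask
from pillar.flask_extra import HashedPathConverter

app = flask.Flask(__name__)
app.config['STATIC_FILE_HASH'] = 'abcd1234'

hpc = HashedPathConverter({})  # the URL-map argument is not used by these methods
with app.app_context():
    hashed = hpc.to_url('assets/js/tutti.min.js')
    assert hashed == 'assets/js/tutti.min.abcd1234.js'       # hash before the last extension
    assert hpc.to_python(hashed) == 'assets/js/tutti.min.js'  # stripped again on request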

View File

@ -47,3 +47,11 @@ def markdown(s):
attributes=ALLOWED_ATTRIBUTES,
styles=ALLOWED_STYLES)
return safe_html
def cache_field_name(field_name: str) -> str:
"""Return the field name containing the cached HTML.
See ValidateCustomFields._normalize_coerce_markdown().
"""
return f'_{field_name}_html'

View File

@ -42,3 +42,6 @@ ELASTIC_INDICES = {
'NODE': 'test_nodes',
'USER': 'test_users',
}
# MUST be 8 characters long, see pillar.flask_extra.HashedPathConverter
STATIC_FILE_HASH = 'abcd1234'

View File

@ -10,6 +10,7 @@ import flask_login
import jinja2.filters
import jinja2.utils
import werkzeug.exceptions as wz_exceptions
import pillarsdk
import pillar.api.utils
from pillar.web.utils import pretty_date
@ -95,6 +96,12 @@ def do_pluralize(value, arg='s'):
def do_markdown(s: typing.Optional[str]):
"""Convert Markdown.
This filter is not preferred. Use {'coerce': 'markdown'} in the Eve schema
instead, to cache the HTML in the database, and use do_markdowned() to
fetch it.
"""
if s is None:
return None
@ -106,6 +113,35 @@ def do_markdown(s: typing.Optional[str]):
return jinja2.utils.Markup(safe_html)
def do_markdowned(document: typing.Union[dict, pillarsdk.Resource], field_name: str) -> str:
"""Fetch pre-converted Markdown or render on the fly.
Use {'coerce': 'markdown'} in the Eve schema to cache the HTML in the
database and use do_markdowned() to fetch it in a safe way.
Jinja example: {{ node.properties | markdowned('content') }}
"""
if isinstance(document, pillarsdk.Resource):
document = document.to_dict()
if not document:
return ''
my_log = log.getChild('do_markdowned')
cache_field_name = pillar.markdown.cache_field_name(field_name)
my_log.debug('Getting %r', cache_field_name)
cached_html = document.get(cache_field_name)
if cached_html is not None:
my_log.debug('Cached HTML is %r', cached_html[:40])
return jinja2.utils.Markup(cached_html)
markdown_src = document.get(field_name)
my_log.debug('No cached HTML, rendering doc[%r]', field_name)
return do_markdown(markdown_src)
def do_url_for_node(node_id=None, node=None):
try:
return url_for_node(node_id=node_id, node=node)
@ -156,6 +192,7 @@ def setup_jinja_env(jinja_env, app_config: dict):
jinja_env.filters['pluralize'] = do_pluralize
jinja_env.filters['gravatar'] = pillar.api.utils.gravatar
jinja_env.filters['markdown'] = do_markdown
jinja_env.filters['markdowned'] = do_markdowned
jinja_env.filters['yesno'] = do_yesno
jinja_env.filters['repr'] = repr
jinja_env.filters['urljoin'] = functools.partial(urllib.parse.urljoin, allow_fragments=True)
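
A quick sketch of the filter's two code paths, matching the new unit test at the end of this commit: render on the fly when no cached HTML is present, otherwise return the cached value untouched.

from pillar.web import jinja

# No cache field: the Markdown source is rendered on the fly.
print(jinja.do_markdowned({'eek': 'ook'}, 'eek'))                              # '<p>ook</p>\n'
# Cache field present: the pre-rendered HTML wins, the source is ignored.
print(jinja.do_markdowned({'eek': 'ook', '_eek_html': 'prerendered'}, 'eek'))  # 'prerendered'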

View File

@ -80,7 +80,7 @@ def comment_edit(comment_id):
return jsonify({
'status': 'success',
'data': {
'content_html': result.properties.content_html,
'content_html': result.properties['_content_html'],
}})

View File

@ -17,6 +17,8 @@ from flask import request
from flask import jsonify
from flask import abort
from flask_login import current_user
from flask_wtf.csrf import validate_csrf
import werkzeug.exceptions as wz_exceptions
from wtforms import SelectMultipleField
from flask_login import login_required
@ -24,6 +26,7 @@ from jinja2.exceptions import TemplateNotFound
from pillar.api.utils.authorization import check_permissions
from pillar.web.utils import caching
from pillar.markdown import markdown
from pillar.web.nodes.forms import get_node_form
from pillar.web.nodes.forms import process_node_form
from pillar.web.nodes.custom.storage import StorageNode
@ -475,6 +478,26 @@ def edit(node_id):
)
@blueprint.route('/preview-markdown', methods=['POST'])
@login_required
def preview_markdown():
"""Return the 'content' field of POST request as HTML.
This endpoint can be called via AJAX in order to preview the
content of a node.
"""
if not validate_csrf(request.headers.get('X-CSRFToken')):
return jsonify({'_status': 'ERR',
'message': 'CSRF validation failed.'}), 403
try:
content = request.form['content']
except KeyError:
return jsonify({'_status': 'ERR',
'message': 'The field "content" was not specified.'}), 400
return jsonify(content=markdown(content))
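
A hypothetical client-side call to the new endpoint, using the requests library for illustration. The host and the /nodes URL prefix are assumptions (they depend on where this blueprint is mounted), and the CSRF token would come from the page the editor is embedded in.

import requests

def preview_markdown_remote(session: requests.Session, csrf_token: str, content: str) -> str:
    """POST Markdown source to the preview endpoint and return the rendered HTML."""
    resp = session.post('https://cloud.example.com/nodes/preview-markdown',
                        data={'content': content},
                        headers={'X-CSRFToken': csrf_token})
    resp.raise_for_status()
    return resp.json()['content']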
def ensure_lists_exist_as_empty(node_doc, node_type):
"""Ensures that any properties of type 'list' exist as empty lists.

View File

@ -1,12 +1,21 @@
"""Static file handling"""
import logging
import flask
import flask.views
log = logging.getLogger(__name__)
class PillarStaticFile(flask.views.MethodView):
def __init__(self, static_folder):
self.static_folder = static_folder
def get(self, filename):
log.debug('Request file %s/%s', self.static_folder, filename)
return flask.send_from_directory(self.static_folder, filename)
return flask.send_from_directory(
self.static_folder, filename,
conditional=True,
add_etags=True,
)

View File

@ -36,7 +36,7 @@ zencoder==0.6.5
amqp==2.1.4
billiard==3.5.0.2
Flask-PyMongo==0.4.1
Cerberus==0.9.2
-e git+git@github.com:armadillica/cerberus.git@sybren-0.9#egg=Cerberus
Events==0.2.2
future==0.15.2
html5lib==0.9999999

View File

@ -39,8 +39,8 @@ html(lang="en")
loadCSS( "//fonts.googleapis.com/css?family=Roboto:300,400" );
script(src="{{ url_for('static_pillar', filename='assets/js/markdown.min.js', v=17320171) }}")
script(src="{{ url_for('static_pillar', filename='assets/js/tutti.min.js', v=17320171) }}")
script(src="{{ url_for('static_pillar', filename='assets/js/markdown.min.js') }}")
script(src="{{ url_for('static_pillar', filename='assets/js/tutti.min.js') }}")
link(href="{{ url_for('static', filename='assets/img/favicon.png') }}", rel="shortcut icon")
link(href="{{ url_for('static', filename='assets/img/apple-touch-icon-precomposed.png') }}", rel="icon apple-touch-icon-precomposed", sizes="192x192")
@ -50,12 +50,12 @@ html(lang="en")
| {% block head %}{% endblock %}
| {% block css %}
link(href="{{ url_for('static_pillar', filename='assets/css/font-pillar.css', v=17320171) }}", rel="stylesheet")
link(href="{{ url_for('static_pillar', filename='assets/css/base.css', v=17320171) }}", rel="stylesheet")
link(href="{{ url_for('static_pillar', filename='assets/css/font-pillar.css') }}", rel="stylesheet")
link(href="{{ url_for('static_pillar', filename='assets/css/base.css') }}", rel="stylesheet")
| {% if title == 'blog' %}
link(href="{{ url_for('static_pillar', filename='assets/css/blog.css', v=17320171) }}", rel="stylesheet")
link(href="{{ url_for('static_pillar', filename='assets/css/blog.css') }}", rel="stylesheet")
| {% else %}
link(href="{{ url_for('static_pillar', filename='assets/css/main.css', v=17320171) }}", rel="stylesheet")
link(href="{{ url_for('static_pillar', filename='assets/css/main.css') }}", rel="stylesheet")
| {% endif %}
| {% endblock %}
@ -81,7 +81,7 @@ html(lang="en")
| {% endblock footer %}
| {% endblock footer_container%}
script(src="{{ url_for('static_pillar', filename='assets/js/vendor/jquery.bootstrap-3.3.7.min.js', v=17320171) }}")
script(src="{{ url_for('static_pillar', filename='assets/js/vendor/jquery.bootstrap-3.3.7.min.js') }}")
| {% block footer_scripts_pre %}{% endblock %}

View File

@ -25,7 +25,7 @@ a.blog_index-header(href="{{ node.url }}")
| {{ node.name }}
.item-content
| {{ node.properties.content | markdown }}
| {{ node.properties | markdowned('content') }}
| {% endmacro %}

View File

@ -5,7 +5,7 @@
| {% block css %}
| {{ super() }}
link(href="{{ url_for('static_pillar', filename='assets/css/blog.css', v=17320171) }}", rel="stylesheet")
link(href="{{ url_for('static_pillar', filename='assets/css/blog.css') }}", rel="stylesheet")
| {% endblock %}
| {% block project_context %}

View File

@ -11,7 +11,7 @@
.comment-body
p.comment-author {{ comment._user.full_name }}
span {{comment.properties.content_html | safe }}
span {{comment.properties | markdowned('content') }}
// TODO: Markdown preview when editing

View File

@ -13,10 +13,10 @@
data-placement="top")
i.pi-list
| {% if node.description %}
.node-details-description#node-description
| {{ node.description | markdown }}
| {% endif %}
| {% if node.description %}
.node-details-description
| {{ node | markdowned('description') }}
| {% endif %}
section.node-children.group

View File

@ -7,7 +7,7 @@
| {% if node.description %}
section.node-row
.node-details-description
| {{ node.description | markdown }}
| {{ node | markdowned('description') }}
| {% endif %}
| {% if children %}

View File

@ -7,7 +7,7 @@
| {% if node.description %}
section.node-row
.node-details-description
| {{ node.description | markdown }}
| {{ node | markdowned('description') }}
| {% endif %}
| {% if children %}

View File

@ -17,7 +17,7 @@
| {% if node.description %}
.node-details-description#node-description
| {{ node.description | markdown }}
| {{ node | markdowned('description') }}
| {% endif %}
.node-details-meta.footer

View File

@ -21,7 +21,7 @@ meta(property="og:image", content="{{ node.picture.thumbnail('l', api=api) }}")
| {% block css %}
| {{ super() }}
link(href="{{ url_for('static_pillar', filename='assets/css/blog.css', v=17320171) }}", rel="stylesheet")
link(href="{{ url_for('static_pillar', filename='assets/css/blog.css') }}", rel="stylesheet")
| {% endblock %}
| {% block project_context %}

View File

@ -2,9 +2,9 @@
| {% block page_title %}Search{% if project %} {{ project.name }}{% endif %}{% endblock %}
| {% block head %}
script(src="{{ url_for('static_pillar', filename='assets/js/vendor/videojs-6.2.8.min.js', v=9112017) }}")
script(src="{{ url_for('static_pillar', filename='assets/js/vendor/videojs-ga-0.4.2.min.js', v=9112017) }}")
script(src="{{ url_for('static_pillar', filename='assets/js/vendor/videojs-hotkeys-0.2.20.min.js', v=9112017) }}")
script(src="{{ url_for('static_pillar', filename='assets/js/vendor/videojs-6.2.8.min.js') }}")
script(src="{{ url_for('static_pillar', filename='assets/js/vendor/videojs-ga-0.4.2.min.js') }}")
script(src="{{ url_for('static_pillar', filename='assets/js/vendor/videojs-hotkeys-0.2.20.min.js') }}")
| {% endblock %}
| {% block og %}

View File

@ -26,7 +26,7 @@
| {# DESCRIPTION #}
| {% if node.description %}
.node-details-description#node-description
| {{ node.description | markdown }}
| {{ node | markdowned('description') }}
| {% endif %}

View File

@ -66,20 +66,20 @@ meta(property="og:url", content="{{url_for('projects.view', project_url=project.
| {% endblock %}
| {% block head %}
link(href="{{ url_for('static_pillar', filename='assets/jstree/themes/default/style.min.css', v=9112017) }}", rel="stylesheet")
link(href="{{ url_for('static_pillar', filename='assets/jstree/themes/default/style.min.css') }}", rel="stylesheet")
| {% if node %}
link(rel="amphtml", href="{{ url_for('nodes.view', node_id=node._id, _external=True, format='amp') }}")
| {% endif %}
script(src="{{ url_for('static_pillar', filename='assets/js/vendor/videojs-6.2.8.min.js', v=9112017) }}")
script(src="{{ url_for('static_pillar', filename='assets/js/vendor/videojs-ga-0.4.2.min.js', v=9112017) }}")
script(src="{{ url_for('static_pillar', filename='assets/js/vendor/videojs-hotkeys-0.2.20.min.js', v=9112017) }}")
script(src="{{ url_for('static_pillar', filename='assets/js/vendor/videojs-6.2.8.min.js') }}")
script(src="{{ url_for('static_pillar', filename='assets/js/vendor/videojs-ga-0.4.2.min.js') }}")
script(src="{{ url_for('static_pillar', filename='assets/js/vendor/videojs-hotkeys-0.2.20.min.js') }}")
| {% endblock %}
| {% block css %}
link(href="{{ url_for('static_pillar', filename='assets/css/font-pillar.css', v=9112017) }}", rel="stylesheet")
link(href="{{ url_for('static_pillar', filename='assets/css/base.css', v=9112017) }}", rel="stylesheet")
link(href="{{ url_for('static_pillar', filename='assets/css/project-main.css', v=9112017) }}", rel="stylesheet")
link(href="{{ url_for('static_pillar', filename='assets/css/font-pillar.css') }}", rel="stylesheet")
link(href="{{ url_for('static_pillar', filename='assets/css/base.css') }}", rel="stylesheet")
link(href="{{ url_for('static_pillar', filename='assets/css/project-main.css') }}", rel="stylesheet")
| {% endblock %}
| {% block body %}
@ -280,7 +280,7 @@ link(href="{{ url_for('static_pillar', filename='assets/css/project-main.css', v
| {% if project.has_method('PUT') %}
| {# JS containing the Edit, Add, Featured, and Move functions #}
script(type="text/javascript", src="{{ url_for('static_pillar', filename='assets/js/project-edit.min.js', v=9112017) }}")
script(type="text/javascript", src="{{ url_for('static_pillar', filename='assets/js/project-edit.min.js') }}")
| {% endif %}
script.

View File

@ -28,7 +28,7 @@
| {% if project.description %}
.node-details-description
| {{ project.description | markdown }}
| {{ project | markdowned('description') }}
| {% endif %}
| {# Until we implement urls for pages
@ -73,9 +73,9 @@
a.title(href="{{ url_for_node(node=n) }}") {{ n.name }}
p.description(href="{{ url_for_node(node=n) }}")
| {% if n.node_type == 'post' %}
| {{ n.properties.content | markdown | striptags | truncate(140, end="... <small>read more</small>") | safe | hide_none }}
| {{ n.properties | markdowned('content') | striptags | truncate(140, end="... <small>read more</small>") | safe | hide_none }}
| {% else %}
| {{ n.description | markdown | striptags | truncate(140, end="... <small>read more</small>") | safe | hide_none }}
| {{ n | markdowned('description') | striptags | truncate(140, end="... <small>read more</small>") | safe | hide_none }}
| {% endif %}
span.details
span.what {% if n.properties.content_type %}{{ n.properties.content_type | undertitle }}{% else %}{{ n.node_type | undertitle }}{% endif %} ·

View File

@ -29,8 +29,8 @@ li
| {% endblock %}
| {% block css %}
link(href="{{ url_for('static_pillar', filename='assets/css/font-pillar.css', v=171020161) }}", rel="stylesheet")
link(href="{{ url_for('static_pillar', filename='assets/css/theatre.css', v=171020161) }}", rel="stylesheet")
link(href="{{ url_for('static_pillar', filename='assets/css/font-pillar.css') }}", rel="stylesheet")
link(href="{{ url_for('static_pillar', filename='assets/css/theatre.css') }}", rel="stylesheet")
| {% endblock %}
| {% block body %}

7 binary files added (not shown); sizes: 36 KiB, 243 KiB, 186 KiB, 611 KiB, 175 KiB, 184 KiB, 434 KiB.

View File

@ -0,0 +1,2 @@
Images courtesy of Blender Cloud
https://cloud.blender.org/

View File

@ -293,7 +293,7 @@ class ReplaceNodeTypesTest(AbstractNodeReplacementTest):
# Run the CLI command
with self.app.test_request_context():
replace_pillar_node_type_schemas(proj_url=self.proj['url'])
replace_pillar_node_type_schemas(project_url=self.proj['url'])
# Fetch the project again from MongoDB
dbproj = self.fetch_project_from_db()

View File

@ -0,0 +1,292 @@
import pathlib
import shutil
import tempfile
from pillar.tests import AbstractPillarTest
class ThumbnailTest(AbstractPillarTest):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.image_path = pathlib.Path(__file__).with_name('images')
def setUp(self, **kwargs):
super().setUp(**kwargs)
self._tmp = tempfile.TemporaryDirectory()
self.tmp = pathlib.Path(self._tmp.name)
def tearDown(self):
super().tearDown()
self._tmp.cleanup()
def _tmpcopy(self, image_fname: str) -> pathlib.Path:
src = self.image_path / image_fname
dst = self.tmp / image_fname
shutil.copy(str(src), str(dst))
return dst
def _thumb_test(self, source):
from PIL import Image
from pillar.api.utils import imaging
with self.app.app_context():
# Almost same as in production, but less different sizes.
self.app.config['UPLOADS_LOCAL_STORAGE_THUMBNAILS'] = {
's': {'size': (90, 90), 'crop': True},
'b': {'size': (160, 160), 'crop': True},
't': {'size': (160, 160), 'crop': False},
'm': {'size': (320, 320), 'crop': False},
}
thumbs = imaging.generate_local_thumbnails('มัสมั่น', source)
# Remove the length field, it can be hard to predict.
for t in thumbs:
t.pop('length')
# Verify that the images can be loaded and have the advertised size.
for t in thumbs:
local_path = pathlib.Path(t['local_path'])
im = Image.open(local_path)
self.assertEqual((t['width'], t['height']), im.size)
return thumbs
def test_thumbgen_jpg(self):
source = self._tmpcopy('512x512-8bit-rgb.jpg')
thumbs = self._thumb_test(source)
self.assertEqual(
[
{'size': 's',
'file_path': 'มัสมั่น-s.jpg',
'local_path': str(source.with_name('512x512-8bit-rgb-s.jpg')),
'width': 90, 'height': 90,
'md5': '',
'content_type': 'image/jpeg'},
{'size': 'b',
'file_path': 'มัสมั่น-b.jpg',
'local_path': str(source.with_name('512x512-8bit-rgb-b.jpg')),
'width': 160, 'height': 160,
'md5': '',
'content_type': 'image/jpeg'},
{'size': 't',
'file_path': 'มัสมั่น-t.jpg',
'local_path': str(source.with_name('512x512-8bit-rgb-t.jpg')),
'width': 160, 'height': 160,
'md5': '',
'content_type': 'image/jpeg',
'is_public': True},
{'size': 'm',
'file_path': 'มัสมั่น-m.jpg',
'local_path': str(source.with_name('512x512-8bit-rgb-m.jpg')),
'width': 320, 'height': 320,
'md5': '',
'content_type': 'image/jpeg'},
],
thumbs)
def test_thumbgen_vertical(self):
source = self._tmpcopy('300x512-8bit-rgb.jpg')
thumbs = self._thumb_test(source)
self.assertEqual(
[
{'size': 's',
'file_path': 'มัสมั่น-s.jpg',
'local_path': str(source.with_name('300x512-8bit-rgb-s.jpg')),
'width': 90, 'height': 90,
'md5': '',
'content_type': 'image/jpeg'},
{'size': 'b',
'file_path': 'มัสมั่น-b.jpg',
'local_path': str(source.with_name('300x512-8bit-rgb-b.jpg')),
'width': 160, 'height': 160,
'md5': '',
'content_type': 'image/jpeg'},
{'size': 't',
'file_path': 'มัสมั่น-t.jpg',
'local_path': str(source.with_name('300x512-8bit-rgb-t.jpg')),
'width': 93, 'height': 160,
'md5': '',
'content_type': 'image/jpeg',
'is_public': True},
{'size': 'm',
'file_path': 'มัสมั่น-m.jpg',
'local_path': str(source.with_name('300x512-8bit-rgb-m.jpg')),
'width': 187, 'height': 320,
'md5': '',
'content_type': 'image/jpeg'},
],
thumbs)
def test_thumbgen_png_alpha(self):
source = self._tmpcopy('512x512-8bit-rgba.png')
thumbs = self._thumb_test(source)
self.assertEqual(
[
{'size': 's',
'file_path': 'มัสมั่น-s.png',
'local_path': str(source.with_name('512x512-8bit-rgba-s.png')),
'width': 90, 'height': 90,
'md5': '',
'content_type': 'image/png'},
{'size': 'b',
'file_path': 'มัสมั่น-b.png',
'local_path': str(source.with_name('512x512-8bit-rgba-b.png')),
'width': 160, 'height': 160,
'md5': '',
'content_type': 'image/png'},
{'size': 't',
'file_path': 'มัสมั่น-t.png',
'local_path': str(source.with_name('512x512-8bit-rgba-t.png')),
'width': 160, 'height': 160,
'md5': '',
'content_type': 'image/png',
'is_public': True},
{'size': 'm',
'file_path': 'มัสมั่น-m.png',
'local_path': str(source.with_name('512x512-8bit-rgba-m.png')),
'width': 320, 'height': 320,
'md5': '',
'content_type': 'image/png'},
],
thumbs)
def test_thumbgen_png_greyscale_alpha(self):
source = self._tmpcopy('512x512-8bit-grey-alpha.png')
thumbs = self._thumb_test(source)
self.assertEqual(
[
{'size': 's',
'file_path': 'มัสมั่น-s.png',
'local_path': str(source.with_name('512x512-8bit-grey-alpha-s.png')),
'width': 90, 'height': 90,
'md5': '',
'content_type': 'image/png'},
{'size': 'b',
'file_path': 'มัสมั่น-b.png',
'local_path': str(source.with_name('512x512-8bit-grey-alpha-b.png')),
'width': 160, 'height': 160,
'md5': '',
'content_type': 'image/png'},
{'size': 't',
'file_path': 'มัสมั่น-t.png',
'local_path': str(source.with_name('512x512-8bit-grey-alpha-t.png')),
'width': 160, 'height': 160,
'md5': '',
'content_type': 'image/png',
'is_public': True},
{'size': 'm',
'file_path': 'มัสมั่น-m.png',
'local_path': str(source.with_name('512x512-8bit-grey-alpha-m.png')),
'width': 320, 'height': 320,
'md5': '',
'content_type': 'image/png'},
],
thumbs)
def test_thumbgen_png_16bit(self):
source = self._tmpcopy('512x256-16bit-rgb.png')
thumbs = self._thumb_test(source)
self.assertEqual(
[
{'size': 's',
'file_path': 'มัสมั่น-s.png',
'local_path': str(source.with_name('512x256-16bit-rgb-s.png')),
'width': 90, 'height': 90,
'md5': '',
'content_type': 'image/png'},
{'size': 'b',
'file_path': 'มัสมั่น-b.png',
'local_path': str(source.with_name('512x256-16bit-rgb-b.png')),
'width': 160, 'height': 160,
'md5': '',
'content_type': 'image/png'},
{'size': 't',
'file_path': 'มัสมั่น-t.png',
'local_path': str(source.with_name('512x256-16bit-rgb-t.png')),
'width': 160, 'height': 80,
'md5': '',
'content_type': 'image/png',
'is_public': True},
{'size': 'm',
'file_path': 'มัสมั่น-m.png',
'local_path': str(source.with_name('512x256-16bit-rgb-m.png')),
'width': 320, 'height': 160,
'md5': '',
'content_type': 'image/png'},
],
thumbs)
def test_thumbgen_png_16bit_grey(self):
source = self._tmpcopy('512x256-16bit-grey.png')
thumbs = self._thumb_test(source)
self.assertEqual(
[
{'size': 's',
'file_path': 'มัสมั่น-s.jpg',
'local_path': str(source.with_name('512x256-16bit-grey-s.jpg')),
'width': 90, 'height': 90,
'md5': '',
'content_type': 'image/jpeg'},
{'size': 'b',
'file_path': 'มัสมั่น-b.jpg',
'local_path': str(source.with_name('512x256-16bit-grey-b.jpg')),
'width': 160, 'height': 160,
'md5': '',
'content_type': 'image/jpeg'},
{'size': 't',
'file_path': 'มัสมั่น-t.jpg',
'local_path': str(source.with_name('512x256-16bit-grey-t.jpg')),
'width': 160, 'height': 80,
'md5': '',
'content_type': 'image/jpeg',
'is_public': True},
{'size': 'm',
'file_path': 'มัสมั่น-m.jpg',
'local_path': str(source.with_name('512x256-16bit-grey-m.jpg')),
'width': 320, 'height': 160,
'md5': '',
'content_type': 'image/jpeg'},
],
thumbs)
def test_thumbgen_png_16bit_greyscale_alpha(self):
source = self._tmpcopy('512x256-16bit-grey-alpha.png')
thumbs = self._thumb_test(source)
self.assertEqual(
[
{'size': 's',
'file_path': 'มัสมั่น-s.png',
'local_path': str(source.with_name('512x256-16bit-grey-alpha-s.png')),
'width': 90, 'height': 90,
'md5': '',
'content_type': 'image/png'},
{'size': 'b',
'file_path': 'มัสมั่น-b.png',
'local_path': str(source.with_name('512x256-16bit-grey-alpha-b.png')),
'width': 160, 'height': 160,
'md5': '',
'content_type': 'image/png'},
{'size': 't',
'file_path': 'มัสมั่น-t.png',
'local_path': str(source.with_name('512x256-16bit-grey-alpha-t.png')),
'width': 160, 'height': 80,
'md5': '',
'content_type': 'image/png',
'is_public': True},
{'size': 'm',
'file_path': 'มัสมั่น-m.png',
'local_path': str(source.with_name('512x256-16bit-grey-alpha-m.png')),
'width': 320, 'height': 160,
'md5': '',
'content_type': 'image/png'},
],
thumbs)

View File

@ -0,0 +1,53 @@
import copy
from pillar.tests import AbstractPillarTest
from pillar.tests import common_test_data as ctd
class CoerceMarkdownTest(AbstractPillarTest):
def test_node_description(self):
from pillar.markdown import markdown
pid, uid = self.create_project_with_admin(24 * 'a')
self.create_valid_auth_token(uid, 'token-a')
node = {
'node_type': 'group',
'name': 'Test group',
'description': '# Title\n\nThis is content.',
'properties': {},
'project': pid,
'user': uid,
}
created_data = self.post('/api/nodes', json=node, expected_status=201,
auth_token='token-a').json()
node_id = created_data['_id']
json_node = self.get(f'/api/nodes/{node_id}', auth_token='token-a').json()
self.assertEqual(markdown(node['description']), json_node['_description_html'])
def test_project_description(self):
from pillar.markdown import markdown
from pillar.api.utils import remove_private_keys
uid = self.create_user(24 * 'a', token='token-a')
# Go through Eve to create the project.
proj = {
**ctd.EXAMPLE_PROJECT,
'description': '# Title\n\nThis is content.',
'user': uid,
}
proj.pop('picture_header')
proj.pop('picture_square')
proj.pop('permissions')
r, _, _, status = self.app.post_internal('projects', remove_private_keys(proj))
self.assertEqual(201, status, f'failed because {r}')
pid = r['_id']
json_proj = self.get(f'/api/projects/{pid}', auth_token='token-a').json()
json_proj.pop('node_types', None) # just to make it easier to print
import pprint
pprint.pprint(json_proj)
self.assertEqual(markdown(proj['description']), json_proj['_description_html'])

View File

@ -187,14 +187,14 @@ class EditCommentTest(AbstractPatchCommentTest):
json={'op': 'edit', 'content': 'Je moeder is niet je vader.'},
auth_token=token).json()
self.assertEqual('<p>Je moeder is niet je vader.</p>\n',
res['properties']['content_html'])
res['properties']['_content_html'])
# Get the node again, to inspect its changed state.
patched_node = self.get(self.node_url, auth_token=token).json()
self.assertEqual('Je moeder is niet je vader.',
patched_node['properties']['content'])
self.assertEqual('<p>Je moeder is niet je vader.</p>\n',
patched_node['properties']['content_html'])
patched_node['properties']['_content_html'])
self.assertNotEqual(pre_node['_etag'], patched_node['_etag'])
def test_comment_edit_other_user_admin(self):
@ -214,7 +214,7 @@ class EditCommentTest(AbstractPatchCommentTest):
self.assertEqual('Purrrr kittycat',
patched_node['properties']['content'])
self.assertEqual('<p>Purrrr kittycat</p>\n',
patched_node['properties']['content_html'])
patched_node['properties']['_content_html'])
def test_edit_noncomment_node(self):
url = '/api/nodes/%s' % self.asset_id

View File

@ -107,6 +107,27 @@ class DocDiffTest(unittest.TestCase):
('props.status2', DoesNotExist, 'todo')},
set(diff))
def test_diff_list_values(self):
from pillar.api.utils import doc_diff
diff = doc_diff({'a': 'b', 'props': ['status', 'todo', 'notes', 'jemoeder']},
{'a': 'b', 'props': ['todo', 'others', 'notes', 'jemoeder']})
self.assertEqual({
('props[0]', 'status', 'todo'),
('props[1]', 'todo', 'others'),
}, set(diff))
def test_diff_list_unequal_lengths(self):
from pillar.api.utils import doc_diff, DoesNotExist
diff = doc_diff({'a': 'b', 'props': ['status', 'todo', 'notes']},
{'a': 'b', 'props': ['todo', 'others', 'notes', 'jemoeder']})
self.assertEqual({
('props[0]', 'status', 'todo'),
('props[1]', 'todo', 'others'),
('props[3]', DoesNotExist, 'jemoeder'),
}, set(diff))
class NodeSetattrTest(unittest.TestCase):
def test_simple(self):
@ -163,4 +184,3 @@ class NodeSetattrTest(unittest.TestCase):
node_setattr(node, 'b.complex', {None: 5})
self.assertEqual({'b': {'complex': {None: 5}}}, node)

View File

View File

@ -0,0 +1,35 @@
from bson import ObjectId
from pillar.tests import AbstractPillarTest
class PurgeHomeProjectsTest(AbstractPillarTest):
def test_purge(self):
self.create_standard_groups()
# user_a will be soft-deleted, user_b will be hard-deleted.
# We don't support soft-deleting users yet, but the code should be
# handling that properly anyway.
user_a = self.create_user(user_id=24 * 'a', roles={'subscriber'}, token='token-a')
user_b = self.create_user(user_id=24 * 'b', roles={'subscriber'}, token='token-b')
# GET the home project to create it.
home_a = self.get('/api/bcloud/home-project', auth_token='token-a').json()
home_b = self.get('/api/bcloud/home-project', auth_token='token-b').json()
with self.app.app_context():
users_coll = self.app.db('users')
res = users_coll.update_one({'_id': user_a}, {'$set': {'_deleted': True}})
self.assertEqual(1, res.modified_count)
res = users_coll.delete_one({'_id': user_b})
self.assertEqual(1, res.deleted_count)
from pillar.cli.maintenance import purge_home_projects
with self.app.app_context():
self.assertEqual(2, purge_home_projects(go=True))
proj_coll = self.app.db('projects')
self.assertEqual(True, proj_coll.find_one({'_id': ObjectId(home_a['_id'])})['_deleted'])
self.assertEqual(True, proj_coll.find_one({'_id': ObjectId(home_b['_id'])})['_deleted'])

View File

@ -2,6 +2,8 @@ import unittest
import flask
from pillar.tests import AbstractPillarTest
class FlaskExtraTest(unittest.TestCase):
def test_vary_xhr(self):
@ -84,3 +86,25 @@ class EnsureSchemaTest(unittest.TestCase):
self.assertEqual('/some/path/only', pillar.flask_extra.ensure_schema('/some/path/only'))
self.assertEqual('https://hostname/path',
pillar.flask_extra.ensure_schema('//hostname/path'))
class HashedPathConverterTest(AbstractPillarTest):
def test_to_python(self):
from pillar.flask_extra import HashedPathConverter
hpc = HashedPathConverter({})
self.assertEqual('/path/to/file.min.js', hpc.to_python('/path/to/file.min.abcd1234.js'))
self.assertEqual('/path/to/file.js', hpc.to_python('/path/to/file.abcd1234.js'))
self.assertEqual('/path/to/file', hpc.to_python('/path/to/file'))
self.assertEqual('', hpc.to_python(''))
def test_to_url(self):
from pillar.flask_extra import HashedPathConverter
hpc = HashedPathConverter({})
with self.app.app_context():
self.assertEqual('/path/to/file.min.abcd1234.js', hpc.to_url('/path/to/file.min.js'))
self.assertEqual('/path/to/file.abcd1234.js', hpc.to_url('/path/to/file.js'))
self.assertEqual('/path/to/file', hpc.to_url('/path/to/file'))
self.assertEqual('', hpc.to_url(''))

View File

@ -19,3 +19,13 @@ class MarkdownTest(unittest.TestCase):
self.assertEqual(None, jinja.do_markdown(None))
self.assertEqual('', jinja.do_markdown(''))
def test_markdowned(self):
from pillar.web import jinja
self.assertEqual(None, jinja.do_markdowned({'eek': None}, 'eek'))
self.assertEqual('<p>ook</p>\n', jinja.do_markdowned({'eek': 'ook'}, 'eek'))
self.assertEqual('<p>ook</p>\n', jinja.do_markdowned(
{'eek': 'ook', '_eek_html': None}, 'eek'))
self.assertEqual('prerendered', jinja.do_markdowned(
{'eek': 'ook', '_eek_html': 'prerendered'}, 'eek'))