Compare commits

1 commit

d0e12401c0 Introduce support for confidence calculations (2018-11-26 23:44:16 +01:00)
214 changed files with 15626 additions and 17789 deletions

.gitignore

@ -13,11 +13,10 @@ config_local.py
/build
/.cache
/.pytest_cache/
*.egg-info/
/*.egg-info/
profile.stats
/dump/
/.eggs
/devdeps/pip-wheel-metadata/
/node_modules
/.sass-cache
@ -32,4 +31,3 @@ pillar/web/static/assets/js/vendor/video.min.js
pillar/web/static/storage/
pillar/web/static/uploads/
pillar/web/templates/
/poetry.lock


@ -3,7 +3,7 @@ Pillar
This is the latest iteration on the Attract project. We are building a unified
framework called Pillar. Pillar will combine Blender Cloud and Attract. You
can see Pillar in action on the [Blender Cloud](https://cloud.blender.org).
can see Pillar in action on the [Blender Cloud](https://cloud.bender.org).
## Custom fonts
@ -25,16 +25,15 @@ Don't forget to Gulp!
## Installation
Dependencies are managed via [Poetry](https://poetry.eustace.io/).
Make sure your /data directory exists and is writable by the current user.
Alternatively, provide a `pillar/config_local.py` that changes the relevant
settings.
```
git clone git@git.blender.org:pillar-python-sdk.git ../pillar-python-sdk
pip install -U --user poetry
poetry install
pip install -e ../pillar-python-sdk
pip install -U -r requirements.txt
pip install -e .
```
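
The README mentions overriding settings via `pillar/config_local.py`. A minimal sketch of such an override; the setting name is an assumption for illustration, not taken from this diff:

```
# pillar/config_local.py -- local overrides loaded on top of the default config.
# STORAGE_DIR is a hypothetical setting name, used here only for illustration.
STORAGE_DIR = '/home/me/pillar-data/storage'
```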
## HDRi viewer
@ -70,7 +69,7 @@ Find other Celery operations with the `manage.py celery` command.
Pillar uses [Elasticsearch](https://www.elastic.co/products/elasticsearch) to power the search engine.
You will need to run the `manage.py elastic reset_index` command to initialize the indexing.
If you need to reindex your documents in elastic you run the `manage.py elastic reindex` command.
If you need to reindex your documents in elastic you run the `manage.py elastic reindex` command.
## Translations


@ -1,16 +0,0 @@
[tool.poetry]
name = "pillar-devdeps"
version = "1.0"
description = ""
authors = [
"Francesco Siddi <francesco@blender.org>",
"Pablo Vazquez <pablo@blender.studio>",
"Sybren Stüvel <sybren@blender.studio>",
]
[tool.poetry.dependencies]
python = "~3.6"
mypy = "^0.501"
pytest = "~4.4"
pytest-cov = "~2.7"
responses = "~0.10"


@ -40,8 +40,7 @@ let destination = {
let source = {
bootstrap: 'node_modules/bootstrap/',
jquery: 'node_modules/jquery/',
popper: 'node_modules/popper.js/',
vue: 'node_modules/vue/',
popper: 'node_modules/popper.js/'
}
/* Stylesheets */
@ -107,26 +106,10 @@ function browserify_base(entry) {
}));
}
/**
* Transcompile and package common modules to be included in tutti.js.
*
* Example:
* src/scripts/js/es6/common/api/init.js
* src/scripts/js/es6/common/events/init.js
* Everything exported in api/init.js will end up in module pillar.api.*, and everything exported in events/init.js
* will end up in pillar.events.*
*/
function browserify_common() {
return glob.sync('src/scripts/js/es6/common/**/init.js').map(browserify_base);
}
/**
* Transcompile and package individual modules.
*
* Example:
* src/scripts/js/es6/individual/coolstuff/init.js
* Will create a coolstuff.js and everything exported in init.js will end up in namespace pillar.coolstuff.*
*/
gulp.task('scripts_browserify', function(done) {
glob('src/scripts/js/es6/individual/**/init.js', function(err, files) {
if(err) done(err);
@ -144,7 +127,7 @@ gulp.task('scripts_browserify', function(done) {
});
/* Collection of scripts in src/scripts/tutti/ and src/scripts/js/es6/common/ to merge into tutti.min.js
/* Collection of scripts in src/scripts/tutti/ to merge into tutti.min.js
* Since it's always loaded, it's only for functions that we want site-wide.
* It also includes jQuery and Bootstrap (and its dependency popper), since
* the site doesn't work without it anyway.*/
@ -152,7 +135,6 @@ gulp.task('scripts_concat_tutti', function(done) {
let toUglify = [
source.jquery + 'dist/jquery.min.js',
source.vue + (enabled.uglify ? 'dist/vue.min.js' : 'dist/vue.js'),
source.popper + 'dist/umd/popper.min.js',
source.bootstrap + 'js/dist/index.js',
source.bootstrap + 'js/dist/util.js',

package-lock.json (generated, 23268 lines changed): file diff suppressed because it is too large.


@ -25,30 +25,22 @@
"gulp-plumber": "1.2.0",
"gulp-pug": "4.0.1",
"gulp-rename": "1.4.0",
"gulp-sass": "4.1.0",
"gulp-sass": "4.0.1",
"gulp-sourcemaps": "2.6.4",
"gulp-uglify-es": "1.0.4",
"jest": "^24.8.0",
"jest": "23.6.0",
"minimist": "1.2.0",
"vinyl-buffer": "1.0.1",
"vinyl-source-stream": "2.0.0"
},
"dependencies": {
"bootstrap": "^4.3.1",
"bootstrap": "4.1.3",
"glob": "7.1.3",
"jquery": "^3.4.1",
"natives": "^1.1.6",
"jquery": "3.3.1",
"popper.js": "1.14.4",
"video.js": "7.2.2",
"vue": "2.5.17"
"video.js": "7.2.2"
},
"scripts": {
"test": "jest"
},
"__COMMENTS__": [
"natives@1.1.6 for Gulp 3.x on Node 10.x: https://github.com/gulpjs/gulp/issues/2162#issuecomment-385197164"
],
"resolutions": {
"natives": "1.1.6"
}
}


@ -12,25 +12,10 @@ import typing
import os
import os.path
import pathlib
import warnings
# These warnings have to be suppressed before the first import.
# Eve is falling behind on Cerberus. See https://github.com/pyeve/eve/issues/1278
warnings.filterwarnings(
'ignore', category=DeprecationWarning,
message="Methods for type testing are deprecated, use TypeDefinition and the "
"'types_mapping'-property of a Validator-instance instead")
# Werkzeug deprecated Request.is_xhr, but it works fine with jQuery and we don't need a reminder
# every time a unit test is run.
warnings.filterwarnings('ignore', category=DeprecationWarning,
message="'Request.is_xhr' is deprecated as of version 0.13 and will be "
"removed in version 1.0.")
import jinja2
import flask
from eve import Eve
import flask
from flask import g, render_template, request
from flask_babel import Babel, gettext as _
from flask.templating import TemplateNotFound
@ -85,7 +70,7 @@ class BlinkerCompatibleEve(Eve):
class PillarServer(BlinkerCompatibleEve):
def __init__(self, app_root: str, **kwargs) -> None:
def __init__(self, app_root, **kwargs):
from .extension import PillarExtension
from celery import Celery
from flask_wtf.csrf import CSRFProtect
@ -492,7 +477,6 @@ class PillarServer(BlinkerCompatibleEve):
# Pillar-defined Celery task modules:
celery_task_modules = [
'pillar.celery.avatar',
'pillar.celery.badges',
'pillar.celery.email_tasks',
'pillar.celery.file_link_tasks',
@ -663,7 +647,7 @@ class PillarServer(BlinkerCompatibleEve):
return self.pillar_error_handler(error)
def handle_sdk_resource_invalid(self, error):
self.log.exception('Forwarding ResourceInvalid exception to client: %s', error, exc_info=True)
self.log.info('Forwarding ResourceInvalid exception to client: %s', error, exc_info=True)
# Raising a Werkzeug 422 exception doesn't work, as Flask turns it into a 500.
return _('The submitted data could not be validated.'), 422
@ -806,18 +790,17 @@ class PillarServer(BlinkerCompatibleEve):
return 'basic ' + base64.b64encode('%s:%s' % (username, subclient_id))
def post_internal(self, resource: str, payl=None, skip_validation=False):
"""Workaround for Eve issue https://github.com/pyeve/eve/issues/810"""
"""Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
from eve.methods.post import post_internal
url = self.config['URLS'][resource]
path = '%s/%s' % (self.api_prefix, url)
with self.__fake_request_url_rule('POST', path):
return post_internal(resource, payl=payl, skip_validation=skip_validation)[:4]
def put_internal(self, resource: str, payload=None, concurrency_check=False,
skip_validation=False, **lookup):
"""Workaround for Eve issue https://github.com/pyeve/eve/issues/810"""
"""Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
from eve.methods.put import put_internal
url = self.config['URLS'][resource]
@ -828,7 +811,7 @@ class PillarServer(BlinkerCompatibleEve):
def patch_internal(self, resource: str, payload=None, concurrency_check=False,
skip_validation=False, **lookup):
"""Workaround for Eve issue https://github.com/pyeve/eve/issues/810"""
"""Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
from eve.methods.patch import patch_internal
url = self.config['URLS'][resource]
@ -839,7 +822,7 @@ class PillarServer(BlinkerCompatibleEve):
def delete_internal(self, resource: str, concurrency_check=False,
suppress_callbacks=False, **lookup):
"""Workaround for Eve issue https://github.com/pyeve/eve/issues/810"""
"""Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
from eve.methods.delete import deleteitem_internal
url = self.config['URLS'][resource]
@ -920,8 +903,7 @@ class PillarServer(BlinkerCompatibleEve):
yield ctx
def validator_for_resource(self,
resource_name: str) -> custom_field_validation.ValidateCustomFields:
def validator_for_resource(self, resource_name: str) -> custom_field_validation.ValidateCustomFields:
schema = self.config['DOMAIN'][resource_name]['schema']
validator = self.validator(schema, resource_name)
return validator


@ -1,7 +1,7 @@
import logging
from flask import request, current_app
import pillar.api.users.avatar
from pillar.api.utils import gravatar
from pillar.auth import current_user
log = logging.getLogger(__name__)
@ -68,7 +68,7 @@ def notification_parse(notification):
if actor:
parsed_actor = {
'username': actor['username'],
'avatar': pillar.api.users.avatar.url(actor)}
'avatar': gravatar(actor['email'])}
else:
parsed_actor = None
@ -91,14 +91,14 @@ def notification_parse(notification):
def notification_get_subscriptions(context_object_type, context_object_id, actor_user_id):
subscriptions_collection = current_app.db('activities-subscriptions')
subscriptions_collection = current_app.data.driver.db['activities-subscriptions']
lookup = {
'user': {"$ne": actor_user_id},
'context_object_type': context_object_type,
'context_object': context_object_id,
'is_subscribed': True,
}
return subscriptions_collection.find(lookup), subscriptions_collection.count_documents(lookup)
return subscriptions_collection.find(lookup)
def activity_subscribe(user_id, context_object_type, context_object_id):
@ -119,8 +119,6 @@ def activity_subscribe(user_id, context_object_type, context_object_id):
# If no subscription exists, we create one
if not subscription:
# Workaround for issue: https://github.com/pyeve/eve/issues/1174
lookup['notifications'] = {}
current_app.post_internal('activities-subscriptions', lookup)
@ -140,10 +138,10 @@ def activity_object_add(actor_user_id, verb, object_type, object_id,
:param object_id: object id, to be traced with object_type_id
"""
subscriptions, subscription_count = notification_get_subscriptions(
subscriptions = notification_get_subscriptions(
context_object_type, context_object_id, actor_user_id)
if subscription_count == 0:
if subscriptions.count() == 0:
return
info, status = register_activity(actor_user_id, verb, object_type, object_id,
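
Several hunks in this compare replace PyMongo's deprecated `Cursor.count()` with `Collection.count_documents()`, which is why `notification_get_subscriptions()` now returns the count alongside the cursor. A minimal sketch of the pattern, assuming PyMongo 3.7+ and a hypothetical connection:

```
from bson import ObjectId
from pymongo import MongoClient

# Hypothetical connection; any collection works the same way.
coll = MongoClient()['pillar']['activities-subscriptions']

lookup = {'user': {'$ne': ObjectId('5c0c8f4d9cce8a3f1d0f0001')},
          'context_object_type': 'node',
          'is_subscribed': True}

# Cursor.count() is deprecated since PyMongo 3.7; count server-side with
# the same filter and fetch the cursor separately.
count = coll.count_documents(lookup)
cursor = coll.find(lookup)
```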


@ -257,10 +257,10 @@ def has_home_project(user_id):
"""Returns True iff the user has a home project."""
proj_coll = current_app.data.driver.db['projects']
return proj_coll.count_documents({'user': user_id, 'category': 'home', '_deleted': False}) > 0
return proj_coll.count({'user': user_id, 'category': 'home', '_deleted': False}) > 0
def get_home_project(user_id: ObjectId, projection=None) -> dict:
def get_home_project(user_id, projection=None):
"""Returns the home project"""
proj_coll = current_app.data.driver.db['projects']
@ -272,10 +272,10 @@ def is_home_project(project_id, user_id):
"""Returns True iff the given project exists and is the user's home project."""
proj_coll = current_app.data.driver.db['projects']
return proj_coll.count_documents({'_id': project_id,
'user': user_id,
'category': 'home',
'_deleted': False}) > 0
return proj_coll.count({'_id': project_id,
'user': user_id,
'category': 'home',
'_deleted': False}) > 0
def mark_node_updated(node_id):


@ -104,7 +104,7 @@ def has_texture_node(proj, return_hdri=True):
if return_hdri:
node_types.append('group_hdri')
count = nodes_collection.count_documents(
count = nodes_collection.count(
{'node_type': {'$in': node_types},
'project': proj['_id'],
'parent': None})


@ -13,10 +13,8 @@ from bson import tz_util
from rauth import OAuth2Session
from flask import Blueprint, request, jsonify, session
from requests.adapters import HTTPAdapter
import urllib3.util.retry
from pillar import current_app
from pillar.auth import get_blender_id_oauth_token
from pillar.api.utils import authentication, utcnow
from pillar.api.utils.authentication import find_user_in_db, upsert_user
@ -31,30 +29,6 @@ class LogoutUser(Exception):
"""
class Session(requests.Session):
"""Requests Session suitable for Blender ID communication."""
def __init__(self):
super().__init__()
retries = urllib3.util.retry.Retry(
total=10,
backoff_factor=0.05,
)
http_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
self.mount('https://', http_adapter)
self.mount('http://', http_adapter)
def authenticate(self):
"""Attach the current user's authentication token to the request."""
bid_token = get_blender_id_oauth_token()
if not bid_token:
raise TypeError('authenticate() requires current user to be logged in with Blender ID')
self.headers['Authorization'] = f'Bearer {bid_token}'
@blender_id.route('/store_scst', methods=['POST'])
def store_subclient_token():
"""Verifies & stores a user's subclient-specific token."""
@ -145,8 +119,12 @@ def validate_token(user_id, token, oauth_subclient_id):
url = urljoin(blender_id_endpoint, 'u/validate_token')
log.debug('POSTing to %r', url)
# Retry a few times when POSTing to BlenderID fails.
# Source: http://stackoverflow.com/a/15431343/875379
s = requests.Session()
s.mount(blender_id_endpoint, HTTPAdapter(max_retries=5))
# POST to Blender ID, handling errors as negative verification results.
s = Session()
try:
r = s.post(url, data=payload, timeout=5,
verify=current_app.config['TLS_CERT_FILE'])
@ -280,16 +258,6 @@ def fetch_blenderid_user() -> dict:
return payload
def avatar_url(blenderid_user_id: str) -> str:
"""Return the URL to the user's avatar on Blender ID.
This avatar should be downloaded, and not served from the Blender ID URL.
"""
bid_url = urljoin(current_app.config['BLENDER_ID_ENDPOINT'],
f'api/user/{blenderid_user_id}/avatar')
return bid_url
def setup_app(app, url_prefix):
app.register_api_blueprint(blender_id, url_prefix=url_prefix)
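
Not shown in this file: a usage sketch for the retrying `Session` defined above. It assumes a request context where `get_blender_id_oauth_token()` finds a token; the endpoint URL is illustrative:

```
# Sketch: using the Session defined above (assumes a logged-in user).
s = Session()
s.authenticate()  # raises TypeError when no Blender ID token is available
# Hypothetical endpoint; the mounted adapter retries with backoff.
r = s.get('https://id.blender.org/api/me', timeout=5)
r.raise_for_status()
```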


@ -1,3 +1,4 @@
import copy
from datetime import datetime
import logging
@ -5,12 +6,36 @@ from bson import ObjectId, tz_util
from eve.io.mongo import Validator
from flask import current_app
from pillar import markdown
import pillar.markdown
log = logging.getLogger(__name__)
class ValidateCustomFields(Validator):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Will be reference to the actual document being validated, so that we can
# modify it during validation.
self.__real_document = None
def validate(self, document, *args, **kwargs):
# Keep a reference to the actual document, because Cerberus validates copies.
self.__real_document = document
result = super().validate(document, *args, **kwargs)
# Store the in-place modified document as self.document, so that Eve's post_internal
# can actually pick it up as the validated document. We need to make a copy so that
# further modifications (like setting '_etag' etc.) aren't done in-place.
self.document = copy.deepcopy(document)
return result
def _get_child_validator(self, *args, **kwargs):
child = super()._get_child_validator(*args, **kwargs)
# Pass along our reference to the actual document.
child.__real_document = self.__real_document
return child
# TODO: split this into a convert_property(property, schema) and call that from this function.
def convert_properties(self, properties, node_schema):
@ -29,11 +54,7 @@ class ValidateCustomFields(Validator):
dict_valueschema = schema_prop['schema']
properties[prop] = self.convert_properties(properties[prop], dict_valueschema)
except KeyError:
# Cerberus 1.3 changed valueschema to valuesrules.
dict_valueschema = schema_prop.get('valuesrules') or \
schema_prop.get('valueschema')
if dict_valueschema is None:
raise KeyError(f"missing 'valuesrules' key in schema of property {prop}")
dict_valueschema = schema_prop['valueschema']
self.convert_dict_values(properties[prop], dict_valueschema)
elif prop_type == 'list':
@ -116,7 +137,8 @@ class ValidateCustomFields(Validator):
if val:
# This ensures the modifications made by v's coercion rules are
# visible to this validator's output.
self.document[field] = v.document
# TODO(fsiddi): this no longer works due to Cerberus internal changes.
# self.current[field] = v.current
return True
log.warning('Error validating properties for node %s: %s', self.document, v.errors)
@ -144,7 +166,7 @@ class ValidateCustomFields(Validator):
if not value:
self._error(field, "Value is required once the document was created")
def _check_with_iprange(self, field_name: str, value: str):
def _validator_iprange(self, field_name: str, value: str):
"""Ensure the field contains a valid IP address.
Supports both IPv6 and IPv4 ranges. Requires the IPy module.
@ -161,19 +183,36 @@ class ValidateCustomFields(Validator):
if ip.prefixlen() == 0:
self._error(field_name, 'Zero-length prefix is not allowed')
def _normalize_coerce_markdown(self, markdown_field: str) -> str:
def _validator_markdown(self, field, value):
"""Convert MarkDown.
"""
Cache markdown as html.
my_log = log.getChild('_validator_markdown')
:param markdown_field: name of the field containing Markdown
:return: html string
# Find this field inside the original document
my_subdoc = self._subdoc_in_real_document()
if my_subdoc is None:
# If self.update==True we are validating an update document, which
# may not contain all fields, so then a missing field is fine.
if not self.update:
self._error(field, f'validator_markdown: unable to find sub-document '
f'for path {self.document_path}')
return
my_log.debug('validating field %r with value %r', field, value)
save_to = pillar.markdown.cache_field_name(field)
html = pillar.markdown.markdown(value)
my_log.debug('saving result to %r in doc with id %s', save_to, id(my_subdoc))
my_subdoc[save_to] = html
def _subdoc_in_real_document(self):
"""Return a reference to the current sub-document inside the real document.
This allows modification of the document being validated.
"""
my_log = log.getChild('_normalize_coerce_markdown')
mdown = self.document.get(markdown_field, '')
html = markdown.markdown(mdown)
my_log.debug('Generated html for markdown field %s in doc with id %s',
markdown_field, id(self.document))
return html
my_subdoc = getattr(self, 'persisted_document') or self.__real_document
for item in self.document_path:
my_subdoc = my_subdoc[item]
return my_subdoc
if __name__ == '__main__':
@ -181,12 +220,12 @@ if __name__ == '__main__':
v = ValidateCustomFields()
v.schema = {
'foo': {'type': 'string', 'check_with': 'markdown'},
'foo': {'type': 'string', 'validator': 'markdown'},
'foo_html': {'type': 'string'},
'nested': {
'type': 'dict',
'schema': {
'bar': {'type': 'string', 'check_with': 'markdown'},
'bar': {'type': 'string', 'validator': 'markdown'},
'bar_html': {'type': 'string'},
}
}
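
The validator hunks above track Cerberus renames: `validator` rules became `check_with` (method prefix `_check_with_`), `keyschema`/`valueschema` became `keysrules`/`valuesrules`, and markdown caching moved from a validator to a `coerce` rule. A standalone sketch of the new-style names, assuming Cerberus 1.3:

```
import cerberus  # assumes Cerberus 1.3+

class Validator(cerberus.Validator):
    def _check_with_iprange(self, field, value):
        # Referenced from the schema as {'check_with': 'iprange'}.
        if '/' not in value:
            self._error(field, 'expected a CIDR range')

schema = {
    'human': {'type': 'string', 'check_with': 'iprange'},
    'progress': {
        'type': 'dict',
        'keysrules': {'type': 'string'},
        'valuesrules': {'type': 'dict', 'allow_unknown': True},
    },
}
v = Validator(schema)
print(v.validate({'human': '10.0.0.0/8', 'progress': {'abc123': {}}}))  # True
```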


@ -1,8 +1,5 @@
import os
from pillar.api.node_types.utils import markdown_fields
STORAGE_BACKENDS = ["local", "pillar", "cdnsun", "gcs", "unittest"]
URL_PREFIX = 'api'
# Enable reads (GET), inserts (POST) and DELETE for resources/collections
@ -125,25 +122,6 @@ users_schema = {
'type': 'dict',
'allow_unknown': True,
},
'avatar': {
'type': 'dict',
'schema': {
'file': {
'type': 'objectid',
'data_relation': {
'resource': 'files',
'field': '_id',
},
},
# For only downloading when things really changed:
'last_downloaded_url': {
'type': 'string',
},
'last_modified': {
'type': 'string',
},
},
},
# Node-specific information for this user.
'nodes': {
@ -154,8 +132,8 @@ users_schema = {
'type': 'dict',
# Keyed by Node ID of the video asset. MongoDB doesn't support using
# ObjectIds as key, so we cast them to string instead.
'keysrules': {'type': 'string'},
'valuesrules': {
'keyschema': {'type': 'string'},
'valueschema': {
'type': 'dict',
'schema': {
'progress_in_sec': {'type': 'float', 'min': 0},
@ -205,7 +183,12 @@ organizations_schema = {
'maxlength': 128,
'required': True
},
**markdown_fields('description', maxlength=256),
'description': {
'type': 'string',
'maxlength': 256,
'validator': 'markdown',
},
'_description_html': {'type': 'string'},
'website': {
'type': 'string',
'maxlength': 256,
@ -275,7 +258,7 @@ organizations_schema = {
'start': {'type': 'binary', 'required': True},
'end': {'type': 'binary', 'required': True},
'prefix': {'type': 'integer', 'required': True},
'human': {'type': 'string', 'required': True, 'check_with': 'iprange'},
'human': {'type': 'string', 'required': True, 'validator': 'iprange'},
}
},
},
@ -338,7 +321,11 @@ nodes_schema = {
'maxlength': 128,
'required': True,
},
**markdown_fields('description'),
'description': {
'type': 'string',
'validator': 'markdown',
},
'_description_html': {'type': 'string'},
'picture': _file_embedded_schema,
'order': {
'type': 'integer',
@ -476,7 +463,7 @@ files_schema = {
'backend': {
'type': 'string',
'required': True,
'allowed': STORAGE_BACKENDS,
'allowed': ["local", "pillar", "cdnsun", "gcs", "unittest"]
},
# Where the file is in the backend storage itself. In the case of GCS,
@ -588,7 +575,11 @@ projects_schema = {
'maxlength': 128,
'required': True,
},
**markdown_fields('description'),
'description': {
'type': 'string',
'validator': 'markdown',
},
'_description_html': {'type': 'string'},
# Short summary for the project
'summary': {
'type': 'string',
@ -598,8 +589,6 @@ projects_schema = {
'picture_square': _file_embedded_schema,
# Header
'picture_header': _file_embedded_schema,
# Picture with a 16:9 aspect ratio (for Open Graph)
'picture_16_9': _file_embedded_schema,
'header_node': dict(
nullable=True,
**_node_embedded_schema


@ -5,7 +5,6 @@ import mimetypes
import os
import pathlib
import tempfile
import time
import typing
import uuid
from hashlib import md5
@ -186,8 +185,8 @@ def _video_duration_seconds(filename: pathlib.Path) -> typing.Optional[int]:
str(filename),
]
duration = run(ffprobe_from_stream_args) or \
run(ffprobe_from_container_args) or \
duration = run(ffprobe_from_stream_args) or\
run(ffprobe_from_container_args) or\
None
return duration
@ -470,7 +469,7 @@ def before_returning_files(response):
ensure_valid_link(item)
def ensure_valid_link(response: dict) -> None:
def ensure_valid_link(response):
"""Ensures the file item has valid file links using generate_link(...)."""
# Log to function-specific logger, so we can easily turn it off.
@ -495,13 +494,12 @@ def ensure_valid_link(response: dict) -> None:
generate_all_links(response, now)
def generate_all_links(response: dict, now: datetime.datetime) -> None:
def generate_all_links(response, now):
"""Generate a new link for the file and all its variations.
:param response: the file document that should be updated.
:param now: datetime that reflects 'now', for consistent expiry generation.
"""
assert isinstance(response, dict), f'response must be dict, is {response!r}'
project_id = str(
response['project']) if 'project' in response else None
@ -566,10 +564,13 @@ def on_pre_get_files(_, lookup):
lookup_expired = lookup.copy()
lookup_expired['link_expires'] = {'$lte': now}
cursor, _ = current_app.data.find('files', parsed_req, lookup_expired, perform_count=False)
for idx, file_doc in enumerate(cursor):
if idx == 0:
log.debug('Updating expired links for files that matched lookup %s', lookup_expired)
cursor = current_app.data.find('files', parsed_req, lookup_expired)
if cursor.count() == 0:
return
log.debug('Updating expired links for %d files that matched lookup %s',
cursor.count(), lookup_expired)
for file_doc in cursor:
# log.debug('Updating expired links for file %r.', file_doc['_id'])
generate_all_links(file_doc, now)
@ -593,21 +594,21 @@ def refresh_links_for_project(project_uuid, chunk_size, expiry_seconds):
'link_expires': {'$lt': expire_before},
}).sort([('link_expires', pymongo.ASCENDING)]).limit(chunk_size)
refresh_count = 0
if to_refresh.count() == 0:
log.info('No links to refresh.')
return
for file_doc in to_refresh:
log.debug('Refreshing links for file %s', file_doc['_id'])
generate_all_links(file_doc, now)
refresh_count += 1
if refresh_count:
log.info('Refreshed %i links', refresh_count)
log.info('Refreshed %i links', min(chunk_size, to_refresh.count()))
def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
import gcloud.exceptions
my_log = log.getChild(f'refresh_links_for_backend.{backend_name}')
start_time = time.time()
# Retrieve expired links.
files_collection = current_app.data.driver.db['files']
@ -618,27 +619,23 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
my_log.info('Limiting to links that expire before %s', expire_before)
base_query = {'backend': backend_name, '_deleted': {'$ne': True}}
to_refresh_query = {
'$or': [{'link_expires': None, **base_query},
{'link_expires': {'$lt': expire_before}, **base_query},
{'link': None, **base_query}]
}
to_refresh = files_collection.find(
{'$or': [{'link_expires': None, **base_query},
{'link_expires': {'$lt': expire_before}, **base_query},
{'link': None, **base_query}]
}).sort([('link_expires', pymongo.ASCENDING)]).limit(
chunk_size).batch_size(5)
document_count = files_collection.count_documents(to_refresh_query)
document_count = to_refresh.count()
if document_count == 0:
my_log.info('No links to refresh.')
return
if 0 < chunk_size == document_count:
my_log.info('Found %d documents to refresh, probably limited by the chunk size %d',
document_count, chunk_size)
my_log.info('Found %d documents to refresh, probably limited by the chunk size.',
document_count)
else:
my_log.info('Found %d documents to refresh, chunk size=%d', document_count, chunk_size)
to_refresh = files_collection.find(to_refresh_query)\
.sort([('link_expires', pymongo.ASCENDING)])\
.limit(chunk_size)\
.batch_size(5)
my_log.info('Found %d documents to refresh.', document_count)
refreshed = 0
report_chunks = min(max(5, document_count // 25), 100)
@ -650,7 +647,7 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
my_log.debug('Skipping file %s, it has no project.', file_id)
continue
count = proj_coll.count_documents({'_id': project_id, '$or': [
count = proj_coll.count({'_id': project_id, '$or': [
{'_deleted': {'$exists': False}},
{'_deleted': False},
]})
@ -682,10 +679,8 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
'links', refreshed)
return
if refreshed % report_chunks != 0:
my_log.info('Refreshed %i links', refreshed)
my_log.info('Refreshed %i links', refreshed)
my_log.info('Refresh took %s', datetime.timedelta(seconds=time.time() - start_time))
@require_login()
def create_file_doc(name, filename, content_type, length, project,
@ -821,10 +816,6 @@ def stream_to_storage(project_id: str):
local_file = uploaded_file.stream
result = upload_and_process(local_file, uploaded_file, project_id)
# Local processing is done, we can close the local file so it is removed.
local_file.close()
resp = jsonify(result)
resp.status_code = result['status_code']
add_access_control_headers(resp)
@ -833,9 +824,7 @@ def stream_to_storage(project_id: str):
def upload_and_process(local_file: typing.Union[io.BytesIO, typing.BinaryIO],
uploaded_file: werkzeug.datastructures.FileStorage,
project_id: str,
*,
may_process_file=True) -> dict:
project_id: str):
# Figure out the file size, as we need to pass this in explicitly to GCloud.
# Otherwise it always uses os.fstat(file_obj.fileno()).st_size, which isn't
# supported by a BytesIO object (even though it does have a fileno
@ -862,15 +851,18 @@ def upload_and_process(local_file: typing.Union[io.BytesIO, typing.BinaryIO],
'size=%i as "queued_for_processing"',
file_id, internal_fname, file_size)
update_file_doc(file_id,
status='queued_for_processing' if may_process_file else 'complete',
status='queued_for_processing',
file_path=internal_fname,
length=blob.size,
content_type=uploaded_file.mimetype)
if may_process_file:
log.debug('Processing uploaded file id=%s, fname=%s, size=%i', file_id,
internal_fname, blob.size)
process_file(bucket, file_id, local_file)
log.debug('Processing uploaded file id=%s, fname=%s, size=%i', file_id,
internal_fname, blob.size)
process_file(bucket, file_id, local_file)
# Local processing is done, we can close the local file so it is removed.
if local_file is not None:
local_file.close()
log.debug('Handled uploaded file id=%s, fname=%s, size=%i, status=%i',
file_id, internal_fname, blob.size, status)
@ -984,50 +976,7 @@ def compute_aggregate_length_items(file_docs):
compute_aggregate_length(file_doc)
def get_file_url(file_id: ObjectId, variation='') -> str:
"""Return the URL of a file in storage.
Note that this function is cached, see setup_app().
:param file_id: the ID of the file
:param variation: if non-empty, indicates the variation of the file
to return the URL for; if empty, returns the URL of the original.
:return: the URL, or an empty string if the file/variation does not exist.
"""
file_coll = current_app.db('files')
db_file = file_coll.find_one({'_id': file_id})
if not db_file:
return ''
ensure_valid_link(db_file)
if variation:
variations = db_file.get('variations', ())
for file_var in variations:
if file_var['size'] == variation:
return file_var['link']
return ''
return db_file['link']
def update_file_doc(file_id, **updates):
files = current_app.data.driver.db['files']
res = files.update_one({'_id': ObjectId(file_id)},
{'$set': updates})
log.debug('update_file_doc(%s, %s): %i matched, %i updated.',
file_id, updates, res.matched_count, res.modified_count)
return res
def setup_app(app, url_prefix):
global get_file_url
cached = app.cache.memoize(timeout=10)
get_file_url = cached(get_file_url)
app.on_pre_GET_files += on_pre_get_files
app.on_fetched_item_files += before_returning_file
@ -1038,3 +987,12 @@ def setup_app(app, url_prefix):
app.on_insert_files += compute_aggregate_length_items
app.register_api_blueprint(file_storage, url_prefix=url_prefix)
def update_file_doc(file_id, **updates):
files = current_app.data.driver.db['files']
res = files.update_one({'_id': ObjectId(file_id)},
{'$set': updates})
log.debug('update_file_doc(%s, %s): %i matched, %i updated.',
file_id, updates, res.matched_count, res.modified_count)
return res


@ -90,11 +90,12 @@ class Blob(metaclass=abc.ABCMeta):
def __init__(self, name: str, bucket: Bucket) -> None:
self.name = name
"""Name of this blob in the bucket."""
self.bucket = bucket
self._size_in_bytes: typing.Optional[int] = None
self.filename: str = None
"""Name of the file for the Content-Disposition header when downloading it."""
self._log = logging.getLogger(f'{__name__}.Blob')
def __repr__(self):
@ -132,19 +133,12 @@ class Blob(metaclass=abc.ABCMeta):
file_size=file_size)
@abc.abstractmethod
def update_filename(self, filename: str, *, is_attachment=True):
def update_filename(self, filename: str):
"""Sets the filename which is used when downloading the file.
Not all storage backends support this, and will use the on-disk filename instead.
"""
@abc.abstractmethod
def update_content_type(self, content_type: str, content_encoding: str = ''):
"""Set the content type (and optionally content encoding).
Not all storage backends support this.
"""
@abc.abstractmethod
def get_url(self, *, is_public: bool) -> str:
"""Returns the URL to access this blob.


@ -174,7 +174,7 @@ class GoogleCloudStorageBlob(Blob):
self.gblob.reload()
self._size_in_bytes = self.gblob.size
def update_filename(self, filename: str, *, is_attachment=True):
def update_filename(self, filename: str):
"""Set the ContentDisposition metadata so that when a file is downloaded
it has a human-readable name.
"""
@ -182,17 +182,7 @@ class GoogleCloudStorageBlob(Blob):
if '"' in filename:
raise ValueError(f'Filename is not allowed to have double quote in it: {filename!r}')
if is_attachment:
self.gblob.content_disposition = f'attachment; filename="{filename}"'
else:
self.gblob.content_disposition = f'filename="{filename}"'
self.gblob.patch()
def update_content_type(self, content_type: str, content_encoding: str = ''):
"""Set the content type (and optionally content encoding)."""
self.gblob.content_type = content_type
self.gblob.content_encoding = content_encoding
self.gblob.content_disposition = f'attachment; filename="{filename}"'
self.gblob.patch()
def get_url(self, *, is_public: bool) -> str:


@ -113,13 +113,10 @@ class LocalBlob(Blob):
self._size_in_bytes = file_size
def update_filename(self, filename: str, *, is_attachment=True):
def update_filename(self, filename: str):
# TODO: implement this for local storage.
self._log.info('update_filename(%r) not supported', filename)
def update_content_type(self, content_type: str, content_encoding: str = ''):
self._log.info('update_content_type(%r, %r) not supported', content_type, content_encoding)
def make_public(self):
# No-op on this storage backend.
pass


@ -11,17 +11,26 @@ ATTACHMENT_SLUG_REGEX = r'[a-zA-Z0-9_\-]+'
attachments_embedded_schema = {
'type': 'dict',
'keysrules': {
# TODO: will be renamed to 'keyschema' in Cerberus 1.0
'keyschema': {
'type': 'string',
'regex': '^%s$' % ATTACHMENT_SLUG_REGEX,
},
'valuesrules': {
'valueschema': {
'type': 'dict',
'schema': {
'oid': {
'type': 'objectid',
'required': True,
},
'link': {
'type': 'string',
'allowed': ['self', 'none', 'custom'],
'default': 'self',
},
'link_custom': {
'type': 'string',
},
'collection': {
'type': 'string',
'allowed': ['files'],


@ -1,15 +1,15 @@
from pillar.api.node_types import attachments_embedded_schema
from pillar.api.node_types.utils import markdown_fields
node_type_comment = {
'name': 'comment',
'description': 'Comments for asset nodes, pages, etc.',
'dyn_schema': {
# The actual comment content
**markdown_fields(
'content',
minlength=5,
required=True),
'content': {
'type': 'string',
'minlength': 5,
'required': True,
'validator': 'markdown',
},
'_content_html': {'type': 'string'},
'status': {
'type': 'string',
'allowed': [
@ -51,8 +51,7 @@ node_type_comment = {
}
},
'confidence': {'type': 'float'},
'is_reply': {'type': 'boolean'},
'attachments': attachments_embedded_schema,
'is_reply': {'type': 'boolean'}
},
'form_schema': {},
'parent': ['asset', 'comment'],


@ -1,14 +1,17 @@
from pillar.api.node_types import attachments_embedded_schema
from pillar.api.node_types.utils import markdown_fields
node_type_post = {
'name': 'post',
'description': 'A blog post, for any project',
'dyn_schema': {
**markdown_fields('content',
minlength=5,
maxlength=90000,
required=True),
'content': {
'type': 'string',
'minlength': 5,
'maxlength': 90000,
'required': True,
'validator': 'markdown',
},
'_content_html': {'type': 'string'},
'status': {
'type': 'string',
'allowed': [


@ -1,34 +0,0 @@
from pillar import markdown
def markdown_fields(field: str, **kwargs) -> dict:
"""
Creates a field for the markdown, and a field for the cached html.
Example usage:
schema = {'myDoc': {
'type': 'list',
'schema': {
'type': 'dict',
'schema': {
**markdown_fields('content', required=True),
}
},
}}
:param field:
:return:
"""
cache_field = markdown.cache_field_name(field)
return {
field: {
'type': 'string',
**kwargs
},
cache_field: {
'type': 'string',
'readonly': True,
'default': field, # Name of the field containing the markdown. Will be input to the coerce function.
'coerce': 'markdown',
}
}
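
Hunks elsewhere in this compare replace explicit `'description'`/`'_description_html'` schema pairs with `**markdown_fields('description', ...)`. Given the `_<field>_html` naming convention used throughout this diff, a call expands to:

```
# markdown_fields('description', maxlength=256) produces:
schema_fragment = {
    'description': {
        'type': 'string',
        'maxlength': 256,
    },
    '_description_html': {
        'type': 'string',
        'readonly': True,
        'default': 'description',  # markdown source field, input to the coercer
        'coerce': 'markdown',
    },
}
```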


@ -6,14 +6,14 @@ import pymongo.errors
import werkzeug.exceptions as wz_exceptions
from flask import current_app, Blueprint, request
from pillar.api.nodes import eve_hooks, comments, activities
from pillar.api.nodes import eve_hooks
from pillar.api.utils import str2id, jsonify
from pillar.api.utils.authorization import check_permissions, require_login
from pillar.web.utils import pretty_date
log = logging.getLogger(__name__)
blueprint = Blueprint('nodes_api', __name__)
ROLES_FOR_SHARING = ROLES_FOR_COMMENTING = {'subscriber', 'demo'}
ROLES_FOR_SHARING = {'subscriber', 'demo'}
@blueprint.route('/<node_id>/share', methods=['GET', 'POST'])
@ -51,47 +51,6 @@ def share_node(node_id):
return jsonify(eve_hooks.short_link_info(short_code), status=status)
@blueprint.route('/<string(length=24):node_path>/comments', methods=['GET'])
def get_node_comments(node_path: str):
node_id = str2id(node_path)
return comments.get_node_comments(node_id)
@blueprint.route('/<string(length=24):node_path>/comments', methods=['POST'])
@require_login(require_roles=ROLES_FOR_COMMENTING)
def post_node_comment(node_path: str):
node_id = str2id(node_path)
msg = request.json['msg']
attachments = request.json.get('attachments', {})
return comments.post_node_comment(node_id, msg, attachments)
@blueprint.route('/<string(length=24):node_path>/comments/<string(length=24):comment_path>', methods=['PATCH'])
@require_login(require_roles=ROLES_FOR_COMMENTING)
def patch_node_comment(node_path: str, comment_path: str):
node_id = str2id(node_path)
comment_id = str2id(comment_path)
msg = request.json['msg']
attachments = request.json.get('attachments', {})
return comments.patch_node_comment(node_id, comment_id, msg, attachments)
@blueprint.route('/<string(length=24):node_path>/comments/<string(length=24):comment_path>/vote', methods=['POST'])
@require_login(require_roles=ROLES_FOR_COMMENTING)
def post_node_comment_vote(node_path: str, comment_path: str):
node_id = str2id(node_path)
comment_id = str2id(comment_path)
vote_str = request.json['vote']
vote = int(vote_str)
return comments.post_node_comment_vote(node_id, comment_id, vote)
@blueprint.route('/<string(length=24):node_path>/activities', methods=['GET'])
def activities_for_node(node_path: str):
node_id = str2id(node_path)
return jsonify(activities.for_node(node_id))
@blueprint.route('/tagged/')
@blueprint.route('/tagged/<tag>')
def tagged(tag=''):
@ -253,12 +212,14 @@ def setup_app(app, url_prefix):
app.on_fetched_resource_nodes += eve_hooks.before_returning_nodes
app.on_replace_nodes += eve_hooks.before_replacing_node
app.on_replace_nodes += eve_hooks.parse_markdown
app.on_replace_nodes += eve_hooks.texture_sort_files
app.on_replace_nodes += eve_hooks.deduct_content_type_and_duration
app.on_replace_nodes += eve_hooks.node_set_default_picture
app.on_replaced_nodes += eve_hooks.after_replacing_node
app.on_insert_nodes += eve_hooks.before_inserting_nodes
app.on_insert_nodes += eve_hooks.parse_markdowns
app.on_insert_nodes += eve_hooks.nodes_deduct_content_type_and_duration
app.on_insert_nodes += eve_hooks.nodes_set_default_picture
app.on_insert_nodes += eve_hooks.textures_sort_files
@ -270,5 +231,3 @@ def setup_app(app, url_prefix):
app.on_deleted_item_nodes += eve_hooks.after_deleting_node
app.register_api_blueprint(blueprint, url_prefix=url_prefix)
activities.setup_app(app)
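
For reference, the comment endpoints registered above are plain JSON calls. A sketch using `requests`, with an illustrative host and hypothetical IDs, and authentication omitted:

```
import requests

base = 'https://cloud.blender.org/api/nodes'  # illustrative host/prefix
node_id = '5c0c8f4d9cce8a3f1d0f0001'          # hypothetical asset node

# GET the comment tree for a node.
tree = requests.get(f'{base}/{node_id}/comments').json()

# POST a new comment (requires commenting roles).
requests.post(f'{base}/{node_id}/comments',
              json={'msg': 'Looks great!', 'attachments': {}})

# Vote on a comment: 1 = upvote, 0 = revoke, -1 = downvote.
comment_id = tree['comments'][0]['id']
requests.post(f'{base}/{node_id}/comments/{comment_id}/vote', json={'vote': 1})
```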


@ -1,43 +0,0 @@
from eve.methods import get
import pillar.api.users.avatar
def for_node(node_id):
activities, _, _, status, _ =\
get('activities',
{
'$or': [
{'object_type': 'node',
'object': node_id},
{'context_object_type': 'node',
'context_object': node_id},
],
},)
for act in activities['_items']:
act['actor_user'] = _user_info(act['actor_user'])
return activities
def _user_info(user_id):
users, _, _, status, _ = get('users', {'_id': user_id})
if len(users['_items']) > 0:
user = users['_items'][0]
user['avatar'] = pillar.api.users.avatar.url(user)
public_fields = {'full_name', 'username', 'avatar'}
for field in list(user.keys()):
if field not in public_fields:
del user[field]
return user
return {}
def setup_app(app):
global _user_info
decorator = app.cache.memoize(timeout=300, make_name='%s.public_user_info' % __name__)
_user_info = decorator(_user_info)


@ -1,302 +0,0 @@
import logging
from datetime import datetime
import pymongo
import typing
import bson
import attr
import werkzeug.exceptions as wz_exceptions
import pillar
from pillar import current_app, shortcodes
import pillar.api.users.avatar
from pillar.api.nodes.custom.comment import patch_comment
from pillar.api.utils import jsonify
from pillar.auth import current_user
import pillar.markdown
log = logging.getLogger(__name__)
@attr.s(auto_attribs=True)
class UserDO:
id: str
full_name: str
avatar_url: str
badges_html: str
@attr.s(auto_attribs=True)
class CommentPropertiesDO:
attachments: typing.Dict
rating_positive: int = 0
rating_negative: int = 0
@attr.s(auto_attribs=True)
class CommentDO:
id: bson.ObjectId
parent: bson.ObjectId
project: bson.ObjectId
user: UserDO
msg_html: str
msg_markdown: str
properties: CommentPropertiesDO
created: datetime
updated: datetime
etag: str
replies: typing.List['CommentDO'] = []
current_user_rating: typing.Optional[bool] = None
@attr.s(auto_attribs=True)
class CommentTreeDO:
node_id: bson.ObjectId
project: bson.ObjectId
nbr_of_comments: int = 0
comments: typing.List[CommentDO] = []
def _get_markdowned_html(document: dict, field_name: str) -> str:
cache_field_name = pillar.markdown.cache_field_name(field_name)
html = document.get(cache_field_name)
if html is None:
markdown_src = document.get(field_name) or ''
html = pillar.markdown.markdown(markdown_src)
return html
def jsonify_data_object(data_object: attr):
return jsonify(
attr.asdict(data_object,
recurse=True)
)
class CommentTreeBuilder:
def __init__(self, node_id: bson.ObjectId):
self.node_id = node_id
self.nbr_of_Comments: int = 0
def build(self) -> CommentTreeDO:
enriched_comments = self.child_comments(
self.node_id,
sort={'properties.rating_positive': pymongo.DESCENDING,
'_created': pymongo.DESCENDING})
project_id = self.get_project_id()
return CommentTreeDO(
node_id=self.node_id,
project=project_id,
nbr_of_comments=self.nbr_of_Comments,
comments=enriched_comments
)
def child_comments(self, node_id: bson.ObjectId, sort: dict) -> typing.List[CommentDO]:
raw_comments = self.mongodb_comments(node_id, sort)
return [self.enrich(comment) for comment in raw_comments]
def enrich(self, mongo_comment: dict) -> CommentDO:
self.nbr_of_Comments += 1
comment = to_comment_data_object(mongo_comment)
comment.replies = self.child_comments(mongo_comment['_id'],
sort={'_created': pymongo.ASCENDING})
return comment
def get_project_id(self):
nodes_coll = current_app.db('nodes')
result = nodes_coll.find_one({'_id': self.node_id})
return result['project']
@classmethod
def mongodb_comments(cls, node_id: bson.ObjectId, sort: dict) -> typing.Iterator:
nodes_coll = current_app.db('nodes')
return nodes_coll.aggregate([
{'$match': {'node_type': 'comment',
'_deleted': {'$ne': True},
'properties.status': 'published',
'parent': node_id}},
{'$lookup': {"from": "users",
"localField": "user",
"foreignField": "_id",
"as": "user"}},
{'$unwind': {'path': "$user"}},
{'$sort': sort},
])
def get_node_comments(node_id: bson.ObjectId):
comments_tree = CommentTreeBuilder(node_id).build()
return jsonify_data_object(comments_tree)
def post_node_comment(parent_id: bson.ObjectId, markdown_msg: str, attachments: dict):
parent_node = find_node_or_raise(parent_id,
'User %s tried to update comment with bad parent_id %s',
current_user.objectid,
parent_id)
is_reply = parent_node['node_type'] == 'comment'
comment = dict(
parent=parent_id,
project=parent_node['project'],
name='Comment',
user=current_user.objectid,
node_type='comment',
properties=dict(
content=markdown_msg,
status='published',
is_reply=is_reply,
confidence=0,
rating_positive=0,
rating_negative=0,
attachments=attachments,
),
permissions=dict(
users=[dict(
user=current_user.objectid,
methods=['PUT'])
]
)
)
r, _, _, status = current_app.post_internal('nodes', comment)
if status != 201:
log.warning('Unable to post comment on %s as %s: %s',
parent_id, current_user.objectid, r)
raise wz_exceptions.InternalServerError('Unable to create comment')
comment_do = get_comment(parent_id, r['_id'])
return jsonify_data_object(comment_do), 201
def find_node_or_raise(node_id, *args):
nodes_coll = current_app.db('nodes')
node_to_comment = nodes_coll.find_one({
'_id': node_id,
'_deleted': {'$ne': True},
})
if not node_to_comment:
log.warning(args)
raise wz_exceptions.UnprocessableEntity()
return node_to_comment
def patch_node_comment(parent_id: bson.ObjectId,
comment_id: bson.ObjectId,
markdown_msg: str,
attachments: dict):
_, _ = find_parent_and_comment_or_raise(parent_id, comment_id)
patch = dict(
op='edit',
content=markdown_msg,
attachments=attachments
)
json_result = patch_comment(comment_id, patch)
if json_result.json['result'] != 200:
raise wz_exceptions.InternalServerError('Failed to update comment')
comment_do = get_comment(parent_id, comment_id)
return jsonify_data_object(comment_do), 200
def find_parent_and_comment_or_raise(parent_id, comment_id):
parent = find_node_or_raise(parent_id,
'User %s tried to update comment with bad parent_id %s',
current_user.objectid,
parent_id)
comment = find_node_or_raise(comment_id,
'User %s tried to update comment with bad id %s',
current_user.objectid,
comment_id)
validate_comment_parent_relation(comment, parent)
return parent, comment
def validate_comment_parent_relation(comment, parent):
if comment['parent'] != parent['_id']:
log.warning('User %s tried to update comment with bad parent/comment pair.'
' parent_id: %s comment_id: %s',
current_user.objectid, parent['_id'], comment['_id'])
raise wz_exceptions.BadRequest()
def get_comment(parent_id: bson.ObjectId, comment_id: bson.ObjectId) -> CommentDO:
nodes_coll = current_app.db('nodes')
mongo_comment = list(nodes_coll.aggregate([
{'$match': {'node_type': 'comment',
'_deleted': {'$ne': True},
'properties.status': 'published',
'parent': parent_id,
'_id': comment_id}},
{'$lookup': {"from": "users",
"localField": "user",
"foreignField": "_id",
"as": "user"}},
{'$unwind': {'path': "$user"}},
]))[0]
return to_comment_data_object(mongo_comment)
def to_comment_data_object(mongo_comment: dict) -> CommentDO:
def current_user_rating():
if current_user.is_authenticated:
for rating in mongo_comment['properties'].get('ratings', ()):
if str(rating['user']) != current_user.objectid:
continue
return rating['is_positive']
return None
user_dict = mongo_comment['user']
user = UserDO(
id=str(mongo_comment['user']['_id']),
full_name=user_dict['full_name'],
avatar_url=pillar.api.users.avatar.url(user_dict),
badges_html=user_dict.get('badges', {}).get('html', '')
)
html = _get_markdowned_html(mongo_comment['properties'], 'content')
html = shortcodes.render_commented(html, context=mongo_comment['properties'])
return CommentDO(
id=mongo_comment['_id'],
parent=mongo_comment['parent'],
project=mongo_comment['project'],
user=user,
msg_html=html,
msg_markdown=mongo_comment['properties']['content'],
current_user_rating=current_user_rating(),
created=mongo_comment['_created'],
updated=mongo_comment['_updated'],
etag=mongo_comment['_etag'],
properties=CommentPropertiesDO(
attachments=mongo_comment['properties'].get('attachments', {}),
rating_positive=mongo_comment['properties']['rating_positive'],
rating_negative=mongo_comment['properties']['rating_negative']
)
)
def post_node_comment_vote(parent_id: bson.ObjectId, comment_id: bson.ObjectId, vote: int):
normalized_vote = min(max(vote, -1), 1)
_, _ = find_parent_and_comment_or_raise(parent_id, comment_id)
actions = {
1: 'upvote',
0: 'revoke',
-1: 'downvote',
}
patch = dict(
op=actions[normalized_vote]
)
json_result = patch_comment(comment_id, patch)
if json_result.json['_status'] != 'OK':
raise wz_exceptions.InternalServerError('Failed to vote on comment')
comment_do = get_comment(parent_id, comment_id)
return jsonify_data_object(comment_do), 200


@ -5,7 +5,8 @@ import logging
from flask import current_app
import werkzeug.exceptions as wz_exceptions
from pillar.api.utils import authorization, authentication, jsonify, remove_private_keys
from pillar.api.utils import authorization, authentication, jsonify
from pillar.api.utils.rating import confidence
from . import register_patch_handler
@ -25,6 +26,13 @@ def patch_comment(node_id, patch):
assert patch['op'] == 'edit', 'Invalid patch operation %s' % patch['op']
result, node = edit_comment(user_id, node_id, patch)
# Calculate and update confidence.
rating_confidence = confidence(
node['properties']['rating_positive'], node['properties']['rating_negative'])
current_app.data.driver.db['nodes'].update_one(
{'_id': node_id},
{'$set': {'properties.confidence': rating_confidence}})
return jsonify({'_status': 'OK',
'result': result,
'properties': node['properties']
@ -135,7 +143,10 @@ def edit_comment(user_id, node_id, patch):
# we can pass this stuff to Eve's patch_internal; that way the validation &
# authorisation system has enough info to work.
nodes_coll = current_app.data.driver.db['nodes']
node = nodes_coll.find_one(node_id)
projection = {'user': 1,
'project': 1,
'node_type': 1}
node = nodes_coll.find_one(node_id, projection=projection)
if node is None:
log.warning('User %s wanted to patch non-existing node %s' % (user_id, node_id))
raise wz_exceptions.NotFound('Node %s not found' % node_id)
@ -143,14 +154,14 @@ def edit_comment(user_id, node_id, patch):
if node['user'] != user_id and not authorization.user_has_role('admin'):
raise wz_exceptions.Forbidden('You can only edit your own comments.')
node = remove_private_keys(node)
node['properties']['content'] = patch['content']
node['properties']['attachments'] = patch.get('attachments', {})
# Use Eve to PUT this node, as that also updates the etag and we want to replace attachments.
r, _, _, status = current_app.put_internal('nodes',
node,
concurrency_check=False,
_id=node_id)
# Use Eve to PATCH this node, as that also updates the etag.
r, _, _, status = current_app.patch_internal('nodes',
{'properties.content': patch['content'],
'project': node['project'],
'user': node['user'],
'node_type': node['node_type']},
concurrency_check=False,
_id=node_id)
if status != 200:
log.error('Error %i editing comment %s for user %s: %s',
status, node_id, user_id, r)
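
The `confidence()` helper imported from `pillar.api.utils.rating` is not included in this compare. The usual choice for ranking by up/down votes is the lower bound of the Wilson score interval; a sketch under that assumption (the actual implementation may differ):

```
import math

def confidence(ups: int, downs: int) -> float:
    # Lower bound of the Wilson score interval at z=1.96 (95%).
    n = ups + downs
    if n == 0:
        return 0.0
    z = 1.96
    phat = ups / n
    return ((phat + z * z / (2 * n)
             - z * math.sqrt((phat * (1 - phat) + z * z / (4 * n)) / n))
            / (1 + z * z / n))

# e.g. confidence(10, 2) ≈ 0.55; more votes tighten the bound.
```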


@ -7,6 +7,7 @@ from bson import ObjectId
from werkzeug import exceptions as wz_exceptions
from pillar import current_app
import pillar.markdown
from pillar.api.activities import activity_subscribe, activity_object_add
from pillar.api.file_storage_backends.gcs import update_file_name
from pillar.api.node_types import PILLAR_NAMED_NODE_TYPES
@ -122,49 +123,46 @@ def before_inserting_nodes(items):
item.setdefault('user', current_user.user_id)
def get_comment_verb_and_context_object_id(comment):
nodes_collection = current_app.data.driver.db['nodes']
verb = 'commented'
parent = nodes_collection.find_one({'_id': comment['parent']})
context_object_id = comment['parent']
while parent['node_type'] == 'comment':
# If the parent is a comment, we provide its own parent as
# context. We do this in order to point the user to an asset
# or group when viewing the notification.
verb = 'replied'
context_object_id = parent['parent']
parent = nodes_collection.find_one({'_id': parent['parent']})
return verb, context_object_id
def after_inserting_nodes(items):
for item in items:
context_object_id = None
# Skip subscriptions for first level items (since the context is not a
# node, but a project).
# TODO: support should be added for mixed context
if item['node_type'] in PILLAR_NAMED_NODE_TYPES:
activity_subscribe(item['user'], 'node', item['_id'])
verb = 'posted'
context_object_id = item.get('parent')
if item['node_type'] == 'comment':
# Always subscribe to the parent node
activity_subscribe(item['user'], 'node', item['parent'])
verb, context_object_id = get_comment_verb_and_context_object_id(item)
if 'parent' not in item:
return
context_object_id = item['parent']
if item['node_type'] == 'comment':
nodes_collection = current_app.data.driver.db['nodes']
parent = nodes_collection.find_one({'_id': item['parent']})
# Always subscribe to the parent node
activity_subscribe(item['user'], 'node', item['parent'])
if parent['node_type'] == 'comment':
# If the parent is a comment, we provide its own parent as
# context. We do this in order to point the user to an asset
# or group when viewing the notification.
verb = 'replied'
context_object_id = parent['parent']
# Subscribe to the parent of the parent comment (post or group)
activity_subscribe(item['user'], 'node', context_object_id)
if context_object_id and item['node_type'] in PILLAR_NAMED_NODE_TYPES:
# * Skip activity for first level items (since the context is not a
# node, but a project).
# * Don't automatically create activities for non-Pillar node types,
activity_subscribe(item['user'], 'node', parent['parent'])
else:
activity_subscribe(item['user'], 'node', item['_id'])
verb = 'commented'
elif item['node_type'] in PILLAR_NAMED_NODE_TYPES:
verb = 'posted'
activity_subscribe(item['user'], 'node', item['_id'])
else:
# Don't automatically create activities for non-Pillar node types,
# as we don't know what would be a suitable verb (among other things).
activity_object_add(
item['user'],
verb,
'node',
item['_id'],
'node',
context_object_id
)
continue
activity_object_add(
item['user'],
verb,
'node',
item['_id'],
'node',
context_object_id
)
def deduct_content_type_and_duration(node_doc, original=None):
@ -324,6 +322,46 @@ def textures_sort_files(nodes):
texture_sort_files(node)
def parse_markdown(node, original=None):
import copy
projects_collection = current_app.data.driver.db['projects']
project = projects_collection.find_one({'_id': node['project']}, {'node_types': 1})
# Query node type directly using the key
node_type = next(nt for nt in project['node_types']
if nt['name'] == node['node_type'])
# Create a copy to not overwrite the actual schema.
schema = copy.deepcopy(current_app.config['DOMAIN']['nodes']['schema'])
schema['properties'] = node_type['dyn_schema']
def find_markdown_fields(schema, node):
"""Find and process all makrdown validated fields."""
for k, v in schema.items():
if not isinstance(v, dict):
continue
if v.get('validator') == 'markdown':
# If there is a match with the validator: markdown pair, assign the sibling
# property (following the naming convention _<property>_html)
# the processed value.
if k in node:
html = pillar.markdown.markdown(node[k])
field_name = pillar.markdown.cache_field_name(k)
node[field_name] = html
if isinstance(node, dict) and k in node:
find_markdown_fields(v, node[k])
find_markdown_fields(schema, node)
return 'ok'
def parse_markdowns(items):
for item in items:
parse_markdown(item)
def short_link_info(short_code):
"""Returns the short link info in a dict."""


@ -153,7 +153,7 @@ class OrgManager:
org_coll = current_app.db('organizations')
users_coll = current_app.db('users')
if users_coll.count_documents({'_id': user_id}) == 0:
if users_coll.count({'_id': user_id}) == 0:
raise ValueError('User not found')
self._log.info('Updating organization %s, setting admin user to %s', org_id, user_id)
@ -189,7 +189,7 @@ class OrgManager:
if user_doc is not None:
user_id = user_doc['_id']
if user_id and not users_coll.count_documents({'_id': user_id}):
if user_id and not users_coll.count({'_id': user_id}):
raise wz_exceptions.UnprocessableEntity('User does not exist')
self._log.info('Removing user %s / %s from organization %s', user_id, email, org_id)
@ -374,7 +374,7 @@ class OrgManager:
member_ids = [str2id(uid) for uid in member_sting_ids]
users_coll = current_app.db('users')
users = users_coll.find({'_id': {'$in': member_ids}},
projection={'_id': 1, 'full_name': 1, 'email': 1, 'avatar': 1})
projection={'_id': 1, 'full_name': 1, 'email': 1})
return list(users)
def user_has_organizations(self, user_id: bson.ObjectId) -> bool:
@ -385,7 +385,7 @@ class OrgManager:
org_coll = current_app.db('organizations')
org_count = org_coll.count_documents({'$or': [
org_count = org_coll.count({'$or': [
{'admin_uid': user_id},
{'members': user_id}
]})
@ -396,7 +396,7 @@ class OrgManager:
"""Return True iff the email is an unknown member of some org."""
org_coll = current_app.db('organizations')
org_count = org_coll.count_documents({'unknown_members': member_email})
org_count = org_coll.count({'unknown_members': member_email})
return bool(org_count)
def roles_for_ip_address(self, remote_addr: str) -> typing.Set[str]:


@ -194,7 +194,7 @@ class OrganizationPatchHandler(patch_handler.AbstractPatchHandler):
self.log.info('User %s edits Organization %s: %s', current_user_id, org_id, update)
validator = current_app.validator_for_resource('organizations')
if not validator.validate_update(update, org_id, persisted_document={}):
if not validator.validate_update(update, org_id):
resp = jsonify({
'_errors': validator.errors,
'_message': ', '.join(f'{field}: {error}'


@ -9,7 +9,6 @@ def setup_app(app, api_prefix):
app.on_replace_projects += hooks.override_is_private_field
app.on_replace_projects += hooks.before_edit_check_permissions
app.on_replace_projects += hooks.protect_sensitive_fields
app.on_replace_projects += hooks.parse_markdown
app.on_update_projects += hooks.override_is_private_field
app.on_update_projects += hooks.before_edit_check_permissions
@ -20,8 +19,6 @@ def setup_app(app, api_prefix):
app.on_insert_projects += hooks.before_inserting_override_is_private_field
app.on_insert_projects += hooks.before_inserting_projects
app.on_insert_projects += hooks.parse_markdowns
app.on_inserted_projects += hooks.after_inserting_projects
app.on_fetched_item_projects += hooks.before_returning_project_permissions

View File

@ -3,7 +3,6 @@ import logging
from flask import request, abort
import pillar
from pillar import current_app
from pillar.api.node_types.asset import node_type_asset
from pillar.api.node_types.comment import node_type_comment
@ -247,37 +246,3 @@ def project_node_type_has_method(response):
def projects_node_type_has_method(response):
for project in response['_items']:
project_node_type_has_method(project)
def parse_markdown(project, original=None):
schema = current_app.config['DOMAIN']['projects']['schema']
def find_markdown_fields(schema, project):
"""Find and process all Markdown coerced fields.
- look for fields with a 'coerce': 'markdown' property
- parse the name of the field and generate the sibling field name (_<field_name>_html -> <field_name>)
- parse the content of the <field_name> field as markdown and save it in _<field_name>_html
"""
for field_name, field_value in schema.items():
if not isinstance(field_value, dict):
continue
if field_value.get('coerce') != 'markdown':
continue
if field_name not in project:
continue
# Construct markdown source field name (strip the leading '_' and the trailing '_html')
source_field_name = field_name[1:-5]
html = pillar.markdown.markdown(project[source_field_name])
project[field_name] = html
if isinstance(project, dict) and field_name in project:
find_markdown_fields(field_value, project[field_name])
find_markdown_fields(schema, project)
def parse_markdowns(items):
for item in items:
parse_markdown(item)
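# Illustrative before/after for this project-level variant, where the schema
# names the cache field itself ('coerce': 'markdown' on a '_<field>_html'
# key); the 'description' field is an assumption for the example:
#
#   project == {'description': '*hi*', '_description_html': ''}
#   parse_markdown(project)
#   # project['_description_html'] == '<p><em>hi</em></p>\n' (roughly)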

View File

@ -25,11 +25,8 @@ def merge_project(pid_from: ObjectId, pid_to: ObjectId):
# Move the files first. Since this requires API calls to an external
# service, this is more likely to go wrong than moving the nodes.
query = {'project': pid_from}
to_move = files_coll.find(query, projection={'_id': 1})
to_move_count = files_coll.count_documents(query)
log.info('Moving %d files to project %s', to_move_count, pid_to)
to_move = files_coll.find({'project': pid_from}, projection={'_id': 1})
log.info('Moving %d files to project %s', to_move.count(), pid_to)
for file_doc in to_move:
fid = file_doc['_id']
log.debug('moving file %s to project %s', fid, pid_to)
@ -38,7 +35,7 @@ def merge_project(pid_from: ObjectId, pid_to: ObjectId):
# Mass-move the nodes.
etag = random_etag()
result = nodes_coll.update_many(
query,
{'project': pid_from},
{'$set': {'project': pid_to,
'_etag': etag,
'_updated': utcnow(),

View File

@ -5,7 +5,6 @@ from bson import ObjectId
from flask import Blueprint, request, current_app, make_response, url_for
from werkzeug import exceptions as wz_exceptions
import pillar.api.users.avatar
from pillar.api.utils import authorization, jsonify, str2id
from pillar.api.utils import mongo
from pillar.api.utils.authorization import require_login, check_permissions
@ -55,13 +54,10 @@ def project_manage_users():
project = projects_collection.find_one({'_id': ObjectId(project_id)})
admin_group_id = project['permissions']['groups'][0]['group']
users = list(users_collection.find(
users = users_collection.find(
{'groups': {'$in': [admin_group_id]}},
{'username': 1, 'email': 1, 'full_name': 1, 'avatar': 1}))
for user in users:
user['avatar_url'] = pillar.api.users.avatar.url(user)
user.pop('avatar', None)
return jsonify({'_status': 'OK', '_items': users})
{'username': 1, 'email': 1, 'full_name': 1})
return jsonify({'_status': 'OK', '_items': list(users)})
# The request is not a form, since it comes from the API sdk
data = json.loads(request.data)
@ -96,8 +92,8 @@ def project_manage_users():
action, current_user_id)
raise wz_exceptions.UnprocessableEntity()
users_collection.update_one({'_id': target_user_id},
{operation: {'groups': admin_group['_id']}})
users_collection.update({'_id': target_user_id},
{operation: {'groups': admin_group['_id']}})
user = users_collection.find_one({'_id': target_user_id},
{'username': 1, 'email': 1,
@ -145,3 +141,5 @@ def get_allowed_methods(project_id=None, node_type=None):
resp.status_code = 204
return resp

View File

@ -7,7 +7,6 @@ from werkzeug.exceptions import abort
from pillar import current_app
from pillar.auth import current_user
from pillar.api import file_storage_backends
log = logging.getLogger(__name__)
@ -156,18 +155,6 @@ def project_id(project_url: str) -> ObjectId:
return proj['_id']
def get_project_url(project_id: ObjectId) -> str:
"""Returns the project URL, or raises a ValueError when not found."""
proj_coll = current_app.db('projects')
proj = proj_coll.find_one({'_id': project_id, '_deleted': {'$ne': True}},
projection={'url': True})
if not proj:
raise ValueError(f'project with id={project_id} not found')
return proj['url']
def get_project(project_url: str) -> dict:
"""Find a project in the database, raises ValueError if not found.
@ -198,17 +185,5 @@ def put_project(project: dict):
result, _, _, status_code = current_app.put_internal('projects', proj_no_none, _id=pid)
if status_code != 200:
message = f"Can't update project {pid}, status {status_code} with issues: {result}"
log.error(message)
raise ValueError(message)
def storage(project_id: ObjectId) -> file_storage_backends.Bucket:
"""Return the storage bucket for this project.
For now this returns a bucket in the default storage backend, since
individual projects do not have a 'storage backend' setting (this is
set per file, not per project).
"""
return file_storage_backends.default_storage_backend(str(project_id))
raise ValueError(f"Can't update project {pid}, "
f"status {status_code} with issues: {result}")

View File

@ -49,12 +49,13 @@ def search_nodes():
result = queries.do_node_search(searchword, terms, page_idx, project_id)
return jsonify(result)
@blueprint_search.route('/multisearch', methods=['POST'])
@blueprint_search.route('/multisearch', methods=['GET'])
def multi_search_nodes():
import json
if len(request.args) != 1:
log.info(f'Expected 1 argument, received {len(request.args)}')
json_obj = request.json
json_obj = json.loads([a for a in request.args][0])
q = []
for row in json_obj:
q.append({

View File

@ -46,7 +46,6 @@ class SearchHelper:
created = {'_created': {'$gt': continue_from}}
return {'_deleted': {'$ne': True},
'node_type': {'$in': ['asset', 'post']},
'properties.status': {'$eq': 'published'},
'project': {'$in': self._project_ids},
**created,
}
@ -91,7 +90,7 @@ class SearchHelper:
def has_more(self, continue_from: datetime) -> bool:
nodes_coll = current_app.db('nodes')
result = nodes_coll.count_documents(self._match(continue_from))
result = nodes_coll.count(self._match(continue_from))
return bool(result)

View File

@ -61,9 +61,6 @@ def _update_search_user_changed_role(sender, user: dict):
def setup_app(app, api_prefix):
from pillar.api import service
from . import patch
patch.setup_app(app, url_prefix=api_prefix)
app.on_pre_GET_users += hooks.check_user_access
app.on_post_GET_users += hooks.post_GET_user

View File

@ -1,159 +0,0 @@
import functools
import io
import logging
import mimetypes
import typing
from bson import ObjectId
from eve.methods.get import getitem_internal
import flask
from pillar import current_app
from pillar.api import blender_id
from pillar.api.blender_cloud import home_project
import pillar.api.file_storage
from werkzeug.datastructures import FileStorage
log = logging.getLogger(__name__)
DEFAULT_AVATAR = 'assets/img/default_user_avatar.png'
def url(user: dict) -> str:
"""Return the avatar URL for this user.
:param user: dictionary from the MongoDB 'users' collection.
"""
assert isinstance(user, dict), f'user must be dict, not {type(user)}'
avatar_id = user.get('avatar', {}).get('file')
if not avatar_id:
return _default_avatar()
# The file may not exist, in which case we get an empty string back.
return pillar.api.file_storage.get_file_url(avatar_id) or _default_avatar()
@functools.lru_cache(maxsize=1)
def _default_avatar() -> str:
"""Return the URL path of the default avatar.
Doesn't change after the app has started, so we just cache it.
"""
return flask.url_for('static_pillar', filename=DEFAULT_AVATAR)
def _extension_for_mime(mime_type: str) -> str:
# Take the longest extension. I'd rather have '.jpeg' than the weird '.jpe'.
extensions: typing.List[str] = mimetypes.guess_all_extensions(mime_type)
try:
return max(extensions, key=len)
except ValueError:
# Raised when extensions is empty, e.g. when the mime type is unknown.
return ''
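# For example, for 'image/jpeg' guess_all_extensions() typically returns
# ['.jpg', '.jpe', '.jpeg'], so max(..., key=len) picks '.jpeg'; for an
# unknown MIME type the list is empty and '' is returned.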
def _get_file_link(file_id: ObjectId) -> str:
# Get the file document via Eve to make it update the link.
file_doc, _, _, status = getitem_internal('files', _id=file_id)
assert status == 200
return file_doc['link']
def sync_avatar(user_id: ObjectId) -> str:
"""Fetch the user's avatar from Blender ID and save to storage.
Errors are logged but do not raise an exception.
:return: the link to the avatar, or '' if it was not processed.
"""
users_coll = current_app.db('users')
db_user = users_coll.find_one({'_id': user_id})
old_avatar_info = db_user.get('avatar', {})
if isinstance(old_avatar_info, ObjectId):
old_avatar_info = {'file': old_avatar_info}
home_proj = home_project.get_home_project(user_id)
if not home_proj:
log.error('Home project of user %s does not exist, unable to store avatar', user_id)
return ''
bid_userid = blender_id.get_user_blenderid(db_user)
if not bid_userid:
log.error('User %s has no Blender ID user-id, unable to fetch avatar', user_id)
return ''
avatar_url = blender_id.avatar_url(bid_userid)
bid_session = blender_id.Session()
# Avoid re-downloading the same avatar.
request_headers = {}
if avatar_url == old_avatar_info.get('last_downloaded_url') and \
old_avatar_info.get('last_modified'):
request_headers['If-Modified-Since'] = old_avatar_info.get('last_modified')
log.info('Downloading avatar for user %s from %s', user_id, avatar_url)
resp = bid_session.get(avatar_url, headers=request_headers, allow_redirects=True)
if resp.status_code == 304:
# File was not modified, we can keep the old file.
log.debug('Avatar for user %s was not modified on Blender ID, not re-downloading', user_id)
return _get_file_link(old_avatar_info['file'])
resp.raise_for_status()
mime_type = resp.headers['Content-Type']
file_extension = _extension_for_mime(mime_type)
if not file_extension:
log.error('No file extension known for mime type %s, unable to handle avatar of user %s',
mime_type, user_id)
return ''
filename = f'avatar-{user_id}{file_extension}'
fake_local_file = io.BytesIO(resp.content)
fake_local_file.name = filename
# Act as if this file was just uploaded by the user, so we can reuse
# existing Pillar file-handling code.
log.debug("Uploading avatar for user %s to storage", user_id)
uploaded_file = FileStorage(
stream=fake_local_file,
filename=filename,
headers=resp.headers,
content_type=mime_type,
content_length=resp.headers['Content-Length'],
)
with pillar.auth.temporary_user(db_user):
upload_data = pillar.api.file_storage.upload_and_process(
fake_local_file,
uploaded_file,
str(home_proj['_id']),
# Disallow image processing, as it's a tiny file anyway and
# we'll just serve the original.
may_process_file=False,
)
file_id = ObjectId(upload_data['file_id'])
avatar_info = {
'file': file_id,
'last_downloaded_url': resp.url,
'last_modified': resp.headers.get('Last-Modified'),
}
# Update the user to store the reference to their avatar.
old_avatar_file_id = old_avatar_info.get('file')
update_result = users_coll.update_one({'_id': user_id},
{'$set': {'avatar': avatar_info}})
if update_result.matched_count == 1:
log.debug('Updated avatar for user ID %s to file %s', user_id, file_id)
else:
log.warning('Matched %d users while setting avatar for user ID %s to file %s',
update_result.matched_count, user_id, file_id)
if old_avatar_file_id:
current_app.delete_internal('files', _id=old_avatar_file_id)
return _get_file_link(file_id)

View File

@ -1,12 +1,13 @@
import copy
import json
import bson
from eve.utils import parse_request
from werkzeug import exceptions as wz_exceptions
from pillar import current_app
from pillar.api.users.routes import log
import pillar.api.users.avatar
from pillar.api.utils.authorization import user_has_role
import pillar.auth
USER_EDITABLE_FIELDS = {'full_name', 'username', 'email', 'settings'}
@ -125,7 +126,7 @@ def check_put_access(request, lookup):
raise wz_exceptions.Forbidden()
def after_fetching_user(user: dict) -> None:
def after_fetching_user(user):
# Deny access to auth block; authentication stuff is managed by
# custom end-points.
user.pop('auth', None)

View File

@ -1,45 +0,0 @@
"""User patching support."""
import logging
import bson
from flask import Blueprint
import werkzeug.exceptions as wz_exceptions
from pillar import current_app
from pillar.auth import current_user
from pillar.api.utils import authorization, jsonify, remove_private_keys
from pillar.api import patch_handler
log = logging.getLogger(__name__)
patch_api_blueprint = Blueprint('users.patch', __name__)
class UserPatchHandler(patch_handler.AbstractPatchHandler):
item_name = 'user'
@authorization.require_login()
def patch_set_username(self, user_id: bson.ObjectId, patch: dict):
"""Updates a user's username."""
if user_id != current_user.user_id:
log.info('User %s tried to change username of user %s',
current_user.user_id, user_id)
raise wz_exceptions.Forbidden('You may only change your own username')
new_username = patch['username']
log.info('User %s uses PATCH to set username to %r', current_user.user_id, new_username)
users_coll = current_app.db('users')
db_user = users_coll.find_one({'_id': user_id})
db_user['username'] = new_username
# Save via Eve to check the schema and trigger update hooks.
response, _, _, status = current_app.put_internal(
'users', remove_private_keys(db_user), _id=user_id)
return jsonify(response), status
def setup_app(app, url_prefix):
UserPatchHandler(patch_api_blueprint)
app.register_api_blueprint(patch_api_blueprint, url_prefix=url_prefix)

View File

@ -8,7 +8,6 @@ import logging
import random
import typing
import urllib.request, urllib.parse, urllib.error
import warnings
import bson.objectid
import bson.tz_util
@ -45,16 +44,10 @@ def remove_private_keys(document):
"""Removes any key that starts with an underscore, returns result as new
dictionary.
"""
def do_remove(doc):
for key in list(doc.keys()):
if key.startswith('_'):
del doc[key]
elif isinstance(doc[key], dict):
doc[key] = do_remove(doc[key])
return doc
doc_copy = copy.deepcopy(document)
do_remove(doc_copy)
for key in list(doc_copy.keys()):
if key.startswith('_'):
del doc_copy[key]
try:
del doc_copy['allowed_methods']
@ -64,7 +57,7 @@ def remove_private_keys(document):
return doc_copy
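# Behaviour shared by both variants above, on a hypothetical document (the
# recursive variant additionally cleans nested dicts):
#
#   remove_private_keys({'_id': 1, 'name': 'x', 'props': {'_updated': 2}})
#   # top-level only -> {'name': 'x', 'props': {'_updated': 2}}
#   # recursive      -> {'name': 'x', 'props': {}}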
def pretty_duration(seconds: typing.Union[None, int, float]):
def pretty_duration(seconds):
if seconds is None:
return ''
seconds = round(seconds)
@ -76,27 +69,6 @@ def pretty_duration(seconds: typing.Union[None, int, float]):
return f'{minutes:02}:{seconds:02}'
def pretty_duration_fractional(seconds: typing.Union[None, int, float]):
if seconds is None:
return ''
# Remove fraction of seconds from the seconds so that the rest is done as integers.
seconds, fracs = divmod(seconds, 1)
hours, seconds = divmod(int(seconds), 3600)
minutes, seconds = divmod(seconds, 60)
msec = int(round(fracs * 1000))
if msec == 0:
msec_str = ''
else:
msec_str = f'.{msec:03}'
if hours > 0:
return f'{hours:02}:{minutes:02}:{seconds:02}{msec_str}'
else:
return f'{minutes:02}:{seconds:02}{msec_str}'
class PillarJSONEncoder(json.JSONEncoder):
"""JSON encoder with support for Pillar resources."""
@ -187,16 +159,6 @@ def str2id(document_id: str) -> bson.ObjectId:
def gravatar(email: str, size=64) -> typing.Optional[str]:
"""Deprecated: return the Gravatar URL.
.. deprecated::
Use of Gravatar is deprecated, in favour of our self-hosted avatars.
See pillar.api.users.avatar.url(user).
"""
warnings.warn('pillar.api.utils.gravatar() is deprecated, '
'use pillar.api.users.avatar.url() instead',
category=DeprecationWarning)
if email is None:
return None
@ -234,8 +196,7 @@ def doc_diff(doc1, doc2, *, falsey_is_equal=True, superkey: str = None):
function won't report differences between DoesNotExist, False, '', and 0.
"""
def is_private(key):
return str(key).startswith('_')
private_keys = {'_id', '_etag', '_deleted', '_updated', '_created'}
def combine_key(some_key):
"""Combine this key with the superkey.
@ -256,7 +217,7 @@ def doc_diff(doc1, doc2, *, falsey_is_equal=True, superkey: str = None):
if isinstance(doc1, dict) and isinstance(doc2, dict):
for key in set(doc1.keys()).union(set(doc2.keys())):
if is_private(key):
if key in private_keys:
continue
val1 = doc1.get(key, DoesNotExist)

View File

@ -60,7 +60,7 @@ def find_user_in_db(user_info: dict, provider='blender-id') -> dict:
email address.
Does NOT update the user in the database.
:param user_info: Information (id, email and full_name) from the auth provider
:param provider: One of the supported providers
"""
@ -169,6 +169,8 @@ def validate_this_token(token, oauth_subclient=None):
# Check the users to see if there is one with this Blender ID token.
db_token = find_token(token, oauth_subclient)
if not db_token:
log.debug('Token %r not found in our local database.', token)
# If no valid token is found in our local database, we issue a new
# request to the Blender ID server to verify the validity of the token
# passed via the HTTP header. We will get basic user info if the user
@ -375,10 +377,6 @@ def current_user():
def setup_app(app):
@app.before_request
def validate_token_at_each_request():
# Skip token validation if this is a static asset
# to avoid spamming Blender ID for no good reason
if request.path.startswith('/static/'):
return
validate_token()

View File

@ -331,9 +331,8 @@ def require_login(*, require_roles=set(),
def render_error() -> Response:
if error_view is None:
resp = Forbidden().get_response()
else:
resp = error_view()
abort(403)
resp: Response = error_view()
resp.status_code = 403
return resp

View File

@ -9,8 +9,12 @@ string = functools.partial(attr.ib, validator=attr.validators.instance_of(str))
def log(name):
"""Returns a logger
"""Returns a logger attr.ib
:param name: name to pass to logging.getLogger()
:rtype: attr.ib
"""
return logging.getLogger(name)
return attr.ib(default=logging.getLogger(name),
repr=False,
hash=False,
cmp=False)
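# Hypothetical usage of the attr.ib-returning variant above (class and
# logger names are illustrative):
#
#   @attr.s
#   class PatchHandler:
#       _log = log('%s.PatchHandler' % __name__)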

View File

@ -1,14 +1,11 @@
"""Authentication code common to the web and api modules."""
import collections
import contextlib
import copy
import functools
import logging
import typing
import blinker
from bson import ObjectId
import bson
from flask import session, g
import flask_login
from werkzeug.local import LocalProxy
@ -34,22 +31,19 @@ class UserClass(flask_login.UserMixin):
def __init__(self, token: typing.Optional[str]):
# We store the Token instead of ID
self.id = token
self.auth_token = token
self.username: str = None
self.full_name: str = None
self.user_id: ObjectId = None
self.user_id: bson.ObjectId = None
self.objectid: str = None
self.gravatar: str = None
self.email: str = None
self.roles: typing.List[str] = []
self.groups: typing.List[str] = [] # NOTE: these are stringified object IDs.
self.group_ids: typing.List[ObjectId] = []
self.group_ids: typing.List[bson.ObjectId] = []
self.capabilities: typing.Set[str] = set()
self.nodes: dict = {} # see the 'nodes' key in eve_settings.py::user_schema.
self.badges_html: str = ''
# Stored when constructing a user from the database
self._db_user = {}
# Lazily evaluated
self._has_organizations: typing.Optional[bool] = None
@ -57,9 +51,10 @@ class UserClass(flask_login.UserMixin):
def construct(cls, token: str, db_user: dict) -> 'UserClass':
"""Constructs a new UserClass instance from a Mongo user document."""
from ..api import utils
user = cls(token)
user._db_user = copy.deepcopy(db_user)
user.user_id = db_user.get('_id')
user.roles = db_user.get('roles') or []
user.group_ids = db_user.get('groups') or []
@ -68,13 +63,14 @@ class UserClass(flask_login.UserMixin):
user.full_name = db_user.get('full_name') or ''
user.badges_html = db_user.get('badges', {}).get('html') or ''
# Be a little more specific than just db_user['nodes'] or db_user['avatar']
# Be a little more specific than just db_user['nodes']
user.nodes = {
'view_progress': db_user.get('nodes', {}).get('view_progress', {}),
}
# Derived properties
user.objectid = str(user.user_id or '')
user.gravatar = utils.gravatar(user.email)
user.groups = [str(g) for g in user.group_ids]
user.collect_capabilities()
@ -167,31 +163,6 @@ class UserClass(flask_login.UserMixin):
return bool(self._has_organizations)
def frontend_info(self) -> dict:
"""Return a dictionary of user info for injecting into the page."""
return {
'user_id': str(self.user_id),
'username': self.username,
'full_name': self.full_name,
'avatar_url': self.avatar_url,
'email': self.email,
'capabilities': list(self.capabilities),
'badges_html': self.badges_html,
'is_authenticated': self.is_authenticated,
}
@property
@functools.lru_cache(maxsize=1)
def avatar_url(self) -> str:
"""Return the Avatar image URL for this user.
:return: The avatar URL (the default one if the user has no avatar).
"""
import pillar.api.users.avatar
return pillar.api.users.avatar.url(self._db_user)
class AnonymousUser(flask_login.AnonymousUserMixin, UserClass):
def __init__(self):
@ -275,25 +246,6 @@ def logout_user():
g.current_user = AnonymousUser()
@contextlib.contextmanager
def temporary_user(db_user: dict):
"""Temporarily sets the given user as 'current user'.
Does not trigger login signals, as this is not a real login action.
"""
try:
actual_current_user = g.current_user
except AttributeError:
actual_current_user = AnonymousUser()
temp_user = UserClass.construct('', db_user)
try:
g.current_user = temp_user
yield
finally:
g.current_user = actual_current_user
def get_blender_id_oauth_token() -> str:
"""Returns the Blender ID auth token, or an empty string if there is none."""

View File

@ -1,48 +0,0 @@
"""Support for adding CORS headers to responses."""
import functools
import flask
import werkzeug.wrappers as wz_wrappers
import werkzeug.exceptions as wz_exceptions
def allow(*, allow_credentials=False):
"""Flask endpoint decorator, adds CORS headers to the response.
If the request has a non-empty 'Origin' header, the response header
'Access-Control-Allow-Origin' is set to the value of that request header,
and some other CORS headers are set.
"""
def decorator(wrapped):
@functools.wraps(wrapped)
def wrapper(*args, **kwargs):
request_origin = flask.request.headers.get('Origin')
if not request_origin:
# No CORS headers requested, so don't bother touching the response.
return wrapped(*args, **kwargs)
try:
response = wrapped(*args, **kwargs)
except wz_exceptions.HTTPException as ex:
response = ex.get_response()
else:
if isinstance(response, tuple):
response = flask.make_response(*response)
elif isinstance(response, str):
response = flask.make_response(response)
elif isinstance(response, wz_wrappers.Response):
pass
else:
raise TypeError(f'unknown response type {type(response)}')
assert isinstance(response, wz_wrappers.Response)
response.headers.set('Access-Control-Allow-Origin', request_origin)
response.headers.set('Access-Control-Allow-Headers', 'x-requested-with')
if allow_credentials:
response.headers.set('Access-Control-Allow-Credentials', 'true')
return response
return wrapper
return decorator
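# A hypothetical endpoint using the decorator above (route and function
# name are assumptions, not part of this module):
#
#   @blueprint.route('/badges/<user_id>.html')
#   @allow(allow_credentials=True)
#   def user_badges_html(user_id):
#       return '<section>...</section>'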

View File

@ -1,29 +0,0 @@
"""Avatar synchronisation.
Note that this module can only be imported when an application context is
active. Best to late-import this in the functions where it's needed.
"""
import logging
from bson import ObjectId
import celery
from pillar import current_app
from pillar.api.users.avatar import sync_avatar
log = logging.getLogger(__name__)
@current_app.celery.task(bind=True, ignore_result=True, acks_late=True)
def sync_avatar_for_user(self: celery.Task, user_id: str):
"""Downloads the user's avatar from Blender ID."""
# WARNING: when changing the signature of this function, also change the
# self.retry() call below.
uid = ObjectId(user_id)
try:
sync_avatar(uid)
except (IOError, OSError):
log.exception('Error downloading Blender ID avatar for user %s, will retry later', user_id)
self.retry((user_id, ), countdown=current_app.config['AVATAR_DOWNLOAD_CELERY_RETRY'])
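# A typical way to enqueue the task above from request-handling code, using
# Celery's standard .delay() call:
#
#   sync_avatar_for_user.delay(str(user_id))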

View File

@ -1,9 +1,7 @@
import collections
import copy
import datetime
import json
import logging
from pathlib import PurePosixPath, Path
from pathlib import PurePosixPath
import re
import typing
@ -14,7 +12,6 @@ from flask_script import Manager
import pymongo
from pillar import current_app
import pillar.api.utils
# Collections to skip when finding file references (during orphan file detection).
# This collection can be added to from PillarExtension.setup_app().
@ -306,7 +303,7 @@ def purge_home_projects(go=False):
yield pid
continue
if users_coll.count_documents({'_id': uid, '_deleted': {'$ne': True}}) == 0:
if users_coll.find({'_id': uid, '_deleted': {'$ne': True}}).count() == 0:
log.info('Project %s has non-existing owner %s', pid, uid)
bad += 1
yield pid
@ -727,7 +724,7 @@ def iter_markdown(proj_node_types: dict, some_node: dict, callback: typing.Calla
to_visit.append((subdoc, definition['schema']))
continue
coerce = definition.get('coerce') # Eve < 0.8
validator = definition.get('check_with') or definition.get('validator') # Eve >= 0.8
validator = definition.get('validator') # Eve >= 0.8
if coerce != 'markdown' and validator != 'markdown':
continue
@ -739,6 +736,113 @@ def iter_markdown(proj_node_types: dict, some_node: dict, callback: typing.Calla
doc[key] = new_value
@manager_maintenance.option('-p', '--project', dest='proj_url', nargs='?',
help='Project URL')
@manager_maintenance.option('-a', '--all', dest='all_projects', action='store_true', default=False,
help='Replace on all projects.')
@manager_maintenance.option('-g', '--go', dest='go', action='store_true', default=False,
help='Actually perform the changes (otherwise just show as dry-run).')
def upgrade_attachment_usage(proj_url=None, all_projects=False, go=False):
"""Replaces '@[slug]' with '{attachment slug}'.
Also moves links from the attachment dict to the attachment shortcode.
"""
if bool(proj_url) == all_projects:
log.error('Use either --project or --all.')
return 1
import html
from pillar.api.projects.utils import node_type_dict
from pillar.api.utils import remove_private_keys
from pillar.api.utils.authentication import force_cli_user
force_cli_user()
nodes_coll = current_app.db('nodes')
total_nodes = 0
failed_node_ids = set()
# Use a mixture of the old slug RE that still allows spaces in the slug
# name and the new RE that allows dashes.
old_slug_re = re.compile(r'@\[([a-zA-Z0-9_\- ]+)\]')
for proj in _db_projects(proj_url, all_projects, go=go):
proj_id = proj['_id']
proj_url = proj.get('url', '-no-url-')
nodes = nodes_coll.find({
'_deleted': {'$ne': True},
'project': proj_id,
'properties.attachments': {'$exists': True},
})
node_count = nodes.count()
if node_count == 0:
log.debug('Skipping project %s (%s)', proj_url, proj_id)
continue
proj_node_types = node_type_dict(proj)
for node in nodes:
attachments = node['properties']['attachments']
replaced = False
# Inner functions because of access to the node's attachments.
def replace(match):
nonlocal replaced
slug = match.group(1)
log.debug(' - OLD STYLE attachment slug %r', slug)
try:
att = attachments[slug]
except KeyError:
log.info("Attachment %r not found for node %s", slug, node['_id'])
link = ''
else:
link = att.get('link', '')
if link == 'self':
link = " link='self'"
elif link == 'custom':
url = att.get('link_custom')
if url:
link = " link='%s'" % html.escape(url)
replaced = True
return '{attachment %r%s}' % (slug.replace(' ', '-'), link)
def update_markdown(value: str) -> str:
return old_slug_re.sub(replace, value)
iter_markdown(proj_node_types, node, update_markdown)
# Remove no longer used properties from attachments
new_attachments = {}
for slug, attachment in attachments.items():
replaced |= 'link' in attachment # link_custom implies link
attachment.pop('link', None)
attachment.pop('link_custom', None)
new_attachments[slug.replace(' ', '-')] = attachment
node['properties']['attachments'] = new_attachments
if replaced:
total_nodes += 1
else:
# Nothing got replaced, so there is nothing to save for this node.
continue
if go:
# Use Eve to PUT, so we have schema checking.
db_node = remove_private_keys(node)
r, _, _, status = current_app.put_internal('nodes', db_node, _id=node['_id'])
if status != 200:
log.error('Error %i storing altered node %s %s', status, node['_id'], r)
failed_node_ids.add(node['_id'])
# raise SystemExit('Error storing node; see log.')
log.debug('Updated node %s: %s', node['_id'], r)
log.info('Project %s (%s) has %d nodes with attachments',
proj_url, proj_id, node_count)
log.info('%s %d nodes', 'Updated' if go else 'Would update', total_nodes)
if failed_node_ids:
log.warning('Failed to update %d of %d nodes: %s', len(failed_node_ids), total_nodes,
', '.join(str(nid) for nid in failed_node_ids))
def _db_projects(proj_url: str, all_projects: bool, project_id='', *, go: bool) \
-> typing.Iterable[dict]:
"""Yields a subset of the projects in the database.
@ -778,38 +882,14 @@ def _db_projects(proj_url: str, all_projects: bool, project_id='', *, go: bool)
log.info('Command took %s', duration)
def find_object_ids(something: typing.Any) -> typing.Iterable[bson.ObjectId]:
"""Generator, yields all ObjectIDs referenced by the given object.
Assumes 'something' comes from a MongoDB. This function wasn't made for
generic Python objects.
"""
if isinstance(something, bson.ObjectId):
yield something
elif isinstance(something, str) and len(something) == 24:
try:
yield bson.ObjectId(something)
except (bson.objectid.InvalidId, TypeError):
# It apparently wasn't an ObjectID after all.
pass
elif isinstance(something, (list, set, tuple)):
for item in something:
yield from find_object_ids(item)
elif isinstance(something, dict):
for item in something.keys():
yield from find_object_ids(item)
for item in something.values():
yield from find_object_ids(item)
def _find_orphan_files() -> typing.Set[bson.ObjectId]:
"""Finds all non-referenced files.
"""Finds all non-referenced files for the given project.
Returns an iterable of all orphan file IDs.
"""
log.debug('Finding orphan files')
# Get all file IDs and make a set; we'll remove any referenced object ID later.
# Get all file IDs that belong to this project.
files_coll = current_app.db('files')
cursor = files_coll.find({'_deleted': {'$ne': True}}, projection={'_id': 1})
file_ids = {doc['_id'] for doc in cursor}
@ -820,10 +900,26 @@ def _find_orphan_files() -> typing.Set[bson.ObjectId]:
total_file_count = len(file_ids)
log.debug('Found %d files in total', total_file_count)
def find_object_ids(something: typing.Any) -> typing.Iterable[bson.ObjectId]:
if isinstance(something, bson.ObjectId):
yield something
elif isinstance(something, str) and len(something) == 24:
try:
yield bson.ObjectId(something)
except (bson.objectid.InvalidId, TypeError):
# It apparently wasn't an ObjectID after all.
pass
elif isinstance(something, (list, set, tuple)):
for item in something:
yield from find_object_ids(item)
elif isinstance(something, dict):
for item in something.values():
yield from find_object_ids(item)
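# For instance, for a hypothetical document
#   {'picture': bson.ObjectId('5673f260c379cf0007b31bc4'),
#    'refs': ['5673f256c379cf0007b31bc3', 'not-an-id']}
# the generator yields the embedded ObjectId, parses the 24-character hex
# string into a second one, and skips 'not-an-id'.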
# Find all references by iterating through the project itself and every document that has a
# 'project' key set to this ObjectId.
db = current_app.db()
for coll_name in sorted(db.list_collection_names()):
for coll_name in sorted(db.collection_names(include_system_collections=False)):
if coll_name in ORPHAN_FINDER_SKIP_COLLECTIONS:
continue
@ -849,6 +945,7 @@ def find_orphan_files():
This is a heavy operation that inspects *everything* in MongoDB. Use with care.
"""
from jinja2.filters import do_filesizeformat
from pathlib import Path
output_fpath = Path(current_app.config['STORAGE_DIR']) / 'orphan-files.txt'
if output_fpath.exists():
@ -894,6 +991,7 @@ def delete_orphan_files():
Use 'find_orphan_files' first to generate orphan-files.txt.
"""
import pymongo.results
from pathlib import Path
output_fpath = Path(current_app.config['STORAGE_DIR']) / 'orphan-files.txt'
with output_fpath.open('r', encoding='ascii') as infile:
@ -932,6 +1030,7 @@ def find_video_files_without_duration():
This is a heavy operation. Use with care.
"""
from pathlib import Path
output_fpath = Path(current_app.config['STORAGE_DIR']) / 'video_files_without_duration.txt'
if output_fpath.exists():
@ -963,13 +1062,13 @@ def find_video_files_without_duration():
with output_fpath.open('w', encoding='ascii') as outfile:
outfile.write('\n'.join(sorted(file_ids)))
@manager_maintenance.command
def find_video_nodes_without_duration():
"""Finds video nodes without any duration
This is a heavy operation. Use with care.
"""
from pathlib import Path
output_fpath = Path(current_app.config['STORAGE_DIR']) / 'video_nodes_without_duration.txt'
if output_fpath.exists():
@ -1041,8 +1140,7 @@ def reconcile_node_video_duration(nodes_to_update=None, all_nodes=False, go=Fals
{'$unwind': '$_files.variations'},
{'$match': {'_files.variations.duration': {'$gt': 0}}},
{'$addFields': {
'need_update': {
'$ne': ['$_files.variations.duration', '$properties.duration_seconds']}
'need_update': {'$ne': ['$_files.variations.duration', '$properties.duration_seconds']}
}},
{'$match': {'need_update': True}},
{'$project': {
@ -1077,257 +1175,3 @@ def reconcile_node_video_duration(nodes_to_update=None, all_nodes=False, go=Fals
duration = end_timestamp - start_timestamp
log.info('Operation took %s', duration)
return 0
@manager_maintenance.option('-g', '--go', dest='go', action='store_true', default=False,
help='Actually perform the changes (otherwise just show as dry-run).')
def delete_projectless_files(go=False):
"""Soft-deletes file documents of projects that have been deleted.
WARNING: this also soft-deletes file documents that do not have a project
property at all.
"""
start_timestamp = datetime.datetime.now()
files_coll = current_app.db('files')
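# The pipeline below left-joins each live file document with its project and
# keeps only files whose '_project' lookup came back empty (no such project)
# or soft-deleted.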
aggr = files_coll.aggregate([
{'$match': {'_deleted': {'$ne': True}}},
{'$lookup': {
'from': 'projects',
'localField': 'project',
'foreignField': '_id',
'as': '_project'
}},
{'$match': {'$or': [
{'_project': []},
{'_project._deleted': True},
]}},
{'$project': {'_id': True}},
])
files_to_delete: typing.List[ObjectId] = [doc['_id'] for doc in aggr]
orphan_count = len(files_to_delete)
log.info('Total number of files to soft-delete: %d', orphan_count)
total_count = files_coll.count_documents({'_deleted': {'$ne': True}})
log.info('Total nr of orphan files: %d', orphan_count)
log.info('Total nr of files : %d', total_count)
log.info('Orphan percentage : %d%%', 100 * orphan_count / total_count)
if go:
log.info('Soft-deleting all %d projectless files', orphan_count)
now = pillar.api.utils.utcnow()
etag = pillar.api.utils.random_etag()
result = files_coll.update_many(
{'_id': {'$in': files_to_delete}},
{'$set': {
'_deleted': True,
'_updated': now,
'_etag': etag,
}},
)
log.info('Matched count: %d', result.matched_count)
log.info('Modified count: %d', result.modified_count)
end_timestamp = datetime.datetime.now()
duration = end_timestamp - start_timestamp
if go:
verb = 'Soft-deleting'
else:
verb = 'Finding'
log.info('%s orphans took %s', verb, duration)
@manager_maintenance.command
def find_projects_for_files():
"""For file documents without project, tries to find in which project files are used.
This is a heavy operation that inspects *everything* in MongoDB. Use with care.
"""
output_fpath = Path(current_app.config['STORAGE_DIR']) / 'files-without-project.json'
if output_fpath.exists():
log.error('Output filename %s already exists, remove it first.', output_fpath)
return 1
start_timestamp = datetime.datetime.now()
log.info('Finding files to fix...')
files_coll = current_app.db('files')
query = {'project': {'$exists': False},
'_deleted': {'$ne': True}}
files_to_fix = {file_doc['_id']: None for file_doc in files_coll.find(query)}
if not files_to_fix:
log.info('No files without projects found, congratulations.')
return 0
# Find all references by iterating through every node and project, and
# hoping that they reference the file.
projects_coll = current_app.db('projects')
existing_projects: typing.MutableSet[ObjectId] = set()
for doc in projects_coll.find():
project_id = doc['_id']
existing_projects.add(project_id)
for obj_id in find_object_ids(doc):
if obj_id not in files_to_fix:
continue
files_to_fix[obj_id] = project_id
nodes_coll = current_app.db('nodes')
for doc in nodes_coll.find():
project_id = doc.get('project')
if not project_id:
log.warning('Skipping node %s, as it is not part of any project', doc['_id'])
continue
if project_id not in existing_projects:
log.warning('Skipping node %s, as its project %s does not exist',
doc['_id'], project_id)
continue
for obj_id in find_object_ids(doc):
if obj_id not in files_to_fix:
continue
files_to_fix[obj_id] = project_id
orphans = {oid for oid, project_id in files_to_fix.items()
if project_id is None}
fixable = {str(oid): str(project_id)
for oid, project_id in files_to_fix.items()
if project_id is not None}
log.info('Total nr of orphan files : %d', len(orphans))
log.info('Total nr of fixable files: %d', len(fixable))
projects = set(fixable.values())
log.info('Fixable project count : %d', len(projects))
for project_id in projects:
project = projects_coll.find_one(ObjectId(project_id))
log.info(' - %40s /p/%-20s created on %s, ',
project['name'], project['url'], project['_created'])
end_timestamp = datetime.datetime.now()
duration = end_timestamp - start_timestamp
log.info('Finding projects took %s', duration)
log.info('Writing {file_id: project_id} mapping to %s', output_fpath)
with output_fpath.open('w', encoding='ascii') as outfile:
json.dump(fixable, outfile, indent=4, sort_keys=True)
@manager_maintenance.option('filepath', type=Path,
help='JSON file produced by find_projects_for_files')
@manager_maintenance.option('-g', '--go', dest='go', action='store_true', default=False,
help='Actually perform the changes (otherwise just show as dry-run).')
def fix_projects_for_files(filepath: Path, go=False):
"""Assigns file documents to projects.
Use 'manage.py maintenance find_projects_for_files` to produce the JSON
file that contains the file ID to project ID mapping.
"""
log.info('Loading %s', filepath)
with filepath.open('r', encoding='ascii') as infile:
mapping: typing.Mapping[str, str] = json.load(infile)
# Group IDs per project for more efficient querying.
log.info('Grouping per project')
project_to_file_ids: typing.Mapping[ObjectId, typing.List[ObjectId]] = \
collections.defaultdict(list)
for file_id, project_id in mapping.items():
project_to_file_ids[ObjectId(project_id)].append(ObjectId(file_id))
MockUpdateResult = collections.namedtuple('MockUpdateResult', 'matched_count modified_count')
files_coll = current_app.db('files')
total_matched = total_modified = 0
for project_oid, file_oids in project_to_file_ids.items():
query = {'_id': {'$in': file_oids}}
if go:
result = files_coll.update_many(query, {'$set': {'project': project_oid}})
else:
found = files_coll.count_documents(query)
result = MockUpdateResult(found, 0)
total_matched += result.matched_count
total_modified += result.modified_count
if result.matched_count != len(file_oids):
log.warning('Matched only %d of %d files; modified %d; for project %s',
result.matched_count, len(file_oids), result.modified_count, project_oid)
else:
log.info('Matched all %d files; modified %d; for project %s',
result.matched_count, result.modified_count, project_oid)
log.info('Done updating %d files (found %d, modified %d) on %d projects',
len(mapping), total_matched, total_modified, len(project_to_file_ids))
@manager_maintenance.option('-u', '--user', dest='user', nargs='?',
help='Update subscriptions for single user.')
@manager_maintenance.option('-o', '--object', dest='context_object', nargs='?',
help='Update subscriptions for context_object.')
@manager_maintenance.option('-g', '--go', dest='go', action='store_true', default=False,
help='Actually perform the changes (otherwise just show as dry-run).')
def fix_missing_activities_subscription_defaults(user=None, context_object=None, go=False):
"""Assign default values to activities-subscriptions documents where values are missing.
"""
subscriptions_collection = current_app.db('activities-subscriptions')
lookup_is_subscribed = {
'is_subscribed': {'$exists': False},
}
lookup_notifications = {
'notifications.web': {'$exists': False},
}
if user:
lookup_is_subscribed['user'] = ObjectId(user)
lookup_notifications['user'] = ObjectId(user)
if context_object:
lookup_is_subscribed['context_object'] = ObjectId(context_object)
lookup_notifications['context_object'] = ObjectId(context_object)
num_need_is_subscribed_update = subscriptions_collection.count_documents(lookup_is_subscribed)
log.info("Found %d documents that needs to be update 'is_subscribed'", num_need_is_subscribed_update)
num_need_notification_web_update = subscriptions_collection.count_documents(lookup_notifications)
log.info("Found %d documents that needs to be update 'notifications.web'", num_need_notification_web_update)
if not go:
return
if num_need_is_subscribed_update > 0:
log.info("Updating 'is_subscribed'")
resp = subscriptions_collection.update_many(
lookup_is_subscribed,
{
'$set': {'is_subscribed': True}
},
upsert=False
)
if resp.modified_count != num_need_is_subscribed_update:
log.warning("Expected % documents to be update, was %d",
num_need_is_subscribed_update, resp['nModified'])
if num_need_notification_web_update > 0:
log.info("Updating 'notifications.web'")
resp = subscriptions_collection.update_many(
lookup_notifications,
{
'$set': {'notifications.web': True}
},
upsert=False
)
if resp.modified_count != num_need_notification_web_update:
log.warning("Expected % documents to be update, was %d",
num_need_notification_web_update, resp['nModified'])
log.info("Done updating 'activities-subscriptions' documents")

View File

@ -165,6 +165,49 @@ def merge_project(src_proj_url, dest_proj_url):
log.info('Done moving.')
@manager_operations.command
def index_users_rebuild():
"""Clear users index, update settings and reindex all users."""
import concurrent.futures
from pillar.api.utils.algolia import algolia_index_user_save
users_index = current_app.algolia_index_users
if users_index is None:
log.error('Algolia is not configured properly, unable to do anything!')
return 1
log.info('Dropping existing index: %s', users_index)
users_index.clear_index()
index_users_update_settings()
db = current_app.db()
users = db['users'].find({'_deleted': {'$ne': True}})
user_count = users.count()
log.info('Reindexing all %i users', user_count)
real_current_app = current_app._get_current_object()._get_current_object()
def do_user(user):
with real_current_app.app_context():
algolia_index_user_save(user)
with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
future_to_user = {executor.submit(do_user, user): user
for user in users}
for idx, future in enumerate(concurrent.futures.as_completed(future_to_user)):
user = future_to_user[future]
user_ident = user.get('email') or user.get('_id')
try:
future.result()
except Exception:
log.exception('Error updating user %i/%i %s', idx + 1, user_count, user_ident)
else:
log.info('Updated user %i/%i %s', idx + 1, user_count, user_ident)
@manager_operations.command
def index_users_update_settings():
"""Configure indexing backend as required by the project"""
@ -191,7 +234,7 @@ def hash_auth_tokens():
tokens_coll = current_app.db('tokens')
query = {'token': {'$exists': True}}
cursor = tokens_coll.find(query, projection={'token': 1, '_id': 1})
log.info('Updating %d tokens', tokens_coll.count_documents(query))
log.info('Updating %d tokens', cursor.count())
for token_doc in cursor:
hashed_token = hash_auth_token(token_doc['token'])

View File

@ -195,7 +195,7 @@ BLENDER_CLOUD_ADDON_VERSION = '1.4'
TLS_CERT_FILE = requests.certs.where()
CELERY_BACKEND = 'redis://redis/1'
CELERY_BROKER = 'redis://redis/2'
CELERY_BROKER = 'amqp://guest:guest@rabbit//'
# This configures the Celery task scheduler in such a way that we don't
# have to import the pillar.celery.XXX modules. Remember to run
@ -217,8 +217,6 @@ CELERY_BEAT_SCHEDULE = {
# TODO(Sybren): A proper value should be determined after we actually have users with badges.
BLENDER_ID_BADGE_EXPIRY = datetime.timedelta(hours=4)
# How many times the Celery task for downloading an avatar is retried.
AVATAR_DOWNLOAD_CELERY_RETRY = 3
# Mapping from user role to capabilities obtained by users with that role.
USER_CAPABILITIES = defaultdict(**{

View File

@ -4,7 +4,7 @@ This is for user-generated stuff, like comments.
"""
import bleach
import commonmark
import CommonMark
from . import shortcodes
@ -44,7 +44,7 @@ ALLOWED_STYLES = [
def markdown(s: str) -> str:
commented_shortcodes = shortcodes.comment_shortcodes(s)
tainted_html = commonmark.commonmark(commented_shortcodes)
tainted_html = CommonMark.commonmark(commented_shortcodes)
# Create a Cleaner that supports parsing of bare links (see filters).
cleaner = bleach.Cleaner(tags=ALLOWED_TAGS,

View File

@ -174,10 +174,6 @@ class AbstractPillarTest(TestMinimal):
for modname in remove:
del sys.modules[modname]
def url_for(self, endpoint, **values):
with self.app.app_context():
return flask.url_for(endpoint, **values)
def ensure_file_exists(self, file_overrides=None, *, example_file=None) -> (ObjectId, dict):
if example_file is None:
example_file = ctd.EXAMPLE_FILE
@ -355,15 +351,13 @@ class AbstractPillarTest(TestMinimal):
# TODO: rename to 'create_auth_token' now that 'expire_in_days' can be negative.
def create_valid_auth_token(self,
user_id: typing.Union[str, ObjectId],
user_id: ObjectId,
token='token',
*,
oauth_scopes: typing.Optional[typing.List[str]]=None,
expire_in_days=1) -> dict:
from pillar.api.utils import utcnow
if isinstance(user_id, str):
user_id = ObjectId(user_id)
future = utcnow() + datetime.timedelta(days=expire_in_days)
with self.app.test_request_context():

View File

@ -73,9 +73,9 @@ EXAMPLE_PROJECT = {
'nodes_featured': [],
'nodes_latest': [],
'permissions': {'groups': [{'group': EXAMPLE_ADMIN_GROUP_ID,
'methods': ['GET', 'POST', 'PUT', 'DELETE']}],
'users': [],
'world': ['GET']},
'methods': ['GET', 'POST', 'PUT', 'DELETE']}],
'users': [],
'world': ['GET']},
'picture_header': ObjectId('5673f260c379cf0007b31bc4'),
'picture_square': ObjectId('5673f256c379cf0007b31bc3'),
'status': 'published',

View File

@ -1,6 +1,7 @@
"""Our custom Jinja filters and other template stuff."""
import functools
import json
import logging
import typing
import urllib.parse
@ -10,8 +11,6 @@ import flask_login
import jinja2.filters
import jinja2.utils
import werkzeug.exceptions as wz_exceptions
from werkzeug.local import LocalProxy
import pillarsdk
import pillar.api.utils
@ -35,10 +34,6 @@ def format_pretty_duration(s):
return pretty_duration(s)
def format_pretty_duration_fractional(s):
return pillar.api.utils.pretty_duration_fractional(s)
def format_undertitle(s):
"""Underscore-replacing title filter.
@ -211,23 +206,16 @@ def do_yesno(value, arg=None):
return no
def do_json(some_object: typing.Any) -> str:
import pillar.auth
if isinstance(some_object, LocalProxy):
return do_json(some_object._get_current_object())
def do_json(some_object) -> str:
if isinstance(some_object, pillarsdk.Resource):
some_object = some_object.to_dict()
if isinstance(some_object, pillar.auth.UserClass):
some_object = some_object.frontend_info()
return pillar.api.utils.dumps(some_object)
return json.dumps(some_object)
def setup_jinja_env(jinja_env, app_config: dict):
jinja_env.filters['pretty_date'] = format_pretty_date
jinja_env.filters['pretty_date_time'] = format_pretty_date_time
jinja_env.filters['pretty_duration'] = format_pretty_duration
jinja_env.filters['pretty_duration_fractional'] = format_pretty_duration_fractional
jinja_env.filters['undertitle'] = format_undertitle
jinja_env.filters['hide_none'] = do_hide_none
jinja_env.filters['pluralize'] = do_pluralize

View File

@ -1,6 +1,5 @@
import logging
import urllib.parse
import warnings
from pillarsdk import Node
from flask import Blueprint
@ -8,6 +7,7 @@ from flask import current_app
from flask import render_template
from flask import redirect
from flask import request
from werkzeug.contrib.atom import AtomFeed
from pillar.flask_extra import ensure_schema
from pillar.web.utils import system_util
@ -91,11 +91,6 @@ def error_403():
@blueprint.route('/feeds/blogs.atom')
def feeds_blogs():
"""Global feed generator for latest blogposts across all projects"""
# Werkzeug deprecated their Atom feed. Tracked in https://developer.blender.org/T65274.
with warnings.catch_warnings():
from werkzeug.contrib.atom import AtomFeed
@current_app.cache.cached(60*5)
def render_page():
feed = AtomFeed('Blender Cloud - Latest updates',

View File

@ -19,19 +19,10 @@ def attachment_form_group_create(schema_prop):
def _attachment_build_single_field(schema_prop):
# 'keyschema' was renamed to 'keysrules' in Cerberus 1.3, but our data may still have the old
# names. Same for 'valueschema' and 'valuesrules'.
keysrules = schema_prop.get('keysrules') or schema_prop.get('keyschema')
if keysrules is None:
raise KeyError(f"missing 'keysrules' key in schema {schema_prop}")
valuesrules = schema_prop.get('valuesrules') or schema_prop.get('valueschema')
if valuesrules is None:
raise KeyError(f"missing 'valuesrules' key in schema {schema_prop}")
# Ugly hard-coded schema.
fake_schema = {
'slug': keysrules,
'oid': valuesrules['schema']['oid'],
'slug': schema_prop['keyschema'],
'oid': schema_prop['valueschema']['schema']['oid'],
}
file_select_form_group = build_file_select_form(fake_schema)
return file_select_form_group
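# Illustrative schema fragment showing why both spellings must be accepted
# (the field layout is an assumption, not a real node type):
#
#   'attachments': {
#       'type': 'dict',
#       'keysrules': {'type': 'string'},                 # Cerberus >= 1.3
#       'valueschema': {'type': 'dict',
#                       'schema': {'oid': {'type': 'objectid'}}},  # pre-1.3
#   }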

View File

@ -0,0 +1,246 @@
import logging
from flask import current_app
from flask import request
from flask import jsonify
from flask import render_template
from flask_login import login_required, current_user
from pillarsdk import Node
from pillarsdk import Project
import werkzeug.exceptions as wz_exceptions
from pillar.api.utils import utcnow
from pillar.web import subquery
from pillar.web.nodes.routes import blueprint
from pillar.web.utils import gravatar
from pillar.web.utils import pretty_date
from pillar.web.utils import system_util
log = logging.getLogger(__name__)
@blueprint.route('/comments/create', methods=['POST'])
@login_required
def comments_create():
content = request.form['content']
parent_id = request.form.get('parent_id')
if not parent_id:
log.warning('User %s tried to create comment without parent_id', current_user.objectid)
raise wz_exceptions.UnprocessableEntity()
api = system_util.pillar_api()
parent_node = Node.find(parent_id, api=api)
if not parent_node:
log.warning('Unable to create comment for user %s, parent node %r not found',
current_user.objectid, parent_id)
raise wz_exceptions.UnprocessableEntity()
log.info('Creating comment for user %s on parent node %r',
current_user.objectid, parent_id)
comment_props = dict(
project=parent_node.project,
name='Comment',
user=current_user.objectid,
node_type='comment',
properties=dict(
content=content,
status='published',
confidence=0,
rating_positive=0,
rating_negative=0))
if parent_id:
comment_props['parent'] = parent_id
# Get the parent node and check whether it's a comment, in which case we
# flag the current comment as a reply.
parent_node = Node.find(parent_id, api=api)
if parent_node.node_type == 'comment':
comment_props['properties']['is_reply'] = True
comment = Node(comment_props)
comment.create(api=api)
return jsonify({'node_id': comment._id}), 201
@blueprint.route('/comments/<string(length=24):comment_id>', methods=['POST'])
@login_required
def comment_edit(comment_id):
"""Allows a user to edit their comment."""
from pillar.web import jinja
api = system_util.pillar_api()
comment = Node({'_id': comment_id})
result = comment.patch({'op': 'edit', 'content': request.form['content']}, api=api)
assert result['_status'] == 'OK'
return jsonify({
'status': 'success',
'data': {
'content': result.properties.content or '',
'content_html': jinja.do_markdowned(result.properties, 'content'),
}})
def format_comment(comment, is_reply=False, is_team=False, replies=None):
"""Format a comment node into a simpler dictionary.
:param comment: the comment object
:param is_reply: True if the comment is a reply to another comment
:param is_team: True if the author belongs to the group that owns the node
:param replies: list of replies (formatted with this function)
"""
try:
is_own = (current_user.objectid == comment.user._id) \
if current_user.is_authenticated else False
except AttributeError:
current_app.bugsnag.notify(Exception(
'Missing user for embedded user ObjectId'),
meta_data={'nodes_info': {'node_id': comment['_id']}})
return
is_rated = False
is_rated_positive = None
if comment.properties.ratings:
for rating in comment.properties.ratings:
if current_user.is_authenticated and rating.user == current_user.objectid:
is_rated = True
is_rated_positive = rating.is_positive
break
return dict(_id=comment._id,
gravatar=gravatar(comment.user.email, size=32),
time_published=pretty_date(comment._created or utcnow(), detail=True),
rating=comment.properties.rating_positive - comment.properties.rating_negative,
author=comment.user.full_name,
author_username=comment.user.username,
content=comment.properties.content,
is_reply=is_reply,
is_own=is_own,
is_rated=is_rated,
is_rated_positive=is_rated_positive,
is_team=is_team,
replies=replies)
@blueprint.route('/<string(length=24):node_id>/comments')
def comments_for_node(node_id):
"""Shows the comments attached to the given node.
The URL can be overridden in order to define can_post_comments in a different way
"""
api = system_util.pillar_api()
node = Node.find(node_id, api=api)
project = Project({'_id': node.project})
can_post_comments = project.node_type_has_method('comment', 'POST', api=api)
can_comment_override = request.args.get('can_comment', 'True') == 'True'
can_post_comments = can_post_comments and can_comment_override
return render_comments_for_node(node_id, can_post_comments=can_post_comments)
def render_comments_for_node(node_id: str, *, can_post_comments: bool):
"""Render the list of comments for a node.
Comments are first sorted by confidence, see:
https://redditblog.com/2009/10/15/reddits-new-comment-sorting-system/
and then by creation date. A sketch of one possible confidence formula
follows this function.
"""
# TODO(fsiddi) Implement confidence calculation on node rating in Pillar core.
# Currently this feature is being developed in the Dillo extension.
api = system_util.pillar_api()
# Query for all children, i.e. comments on the node.
comments = Node.all({
'where': {'node_type': 'comment', 'parent': node_id},
'sort': [('properties.confidence', -1), ('_created', -1)],
}, api=api)
def enrich(some_comment):
some_comment['_user'] = subquery.get_user_info(some_comment['user'])
some_comment['_is_own'] = some_comment['user'] == current_user.objectid
some_comment['_current_user_rating'] = None # tri-state boolean
some_comment['_rating'] = (some_comment.properties.rating_positive
                           - some_comment.properties.rating_negative)
if current_user.is_authenticated:
for rating in some_comment.properties.ratings or ():
if rating.user != current_user.objectid:
continue
some_comment['_current_user_rating'] = rating.is_positive
for comment in comments['_items']:
# Query for all grandchildren, i.e. replies to comments on the node.
comment['_replies'] = Node.all({
'where': {'node_type': 'comment', 'parent': comment['_id']},
'sort': [('properties.confidence', -1), ('_created', -1)],
}, api=api)
enrich(comment)
for reply in comment['_replies']['_items']:
enrich(reply)
nr_of_comments = sum(1 + comment['_replies']['_meta']['total']
for comment in comments['_items'])
return render_template('nodes/custom/comment/list_embed.html',
node_id=node_id,
comments=comments,
nr_of_comments=nr_of_comments,
show_comments=True,
can_post_comments=can_post_comments)
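# A sketch of one possible confidence formula for the sort above: the lower
# bound of the Wilson score interval described in the linked Reddit post.
# The function name and its wiring into properties.confidence are
# assumptions; Pillar core does not compute this yet (see the TODO above).
import math


def comment_confidence(ups: int, downs: int, z: float = 1.281551565545) -> float:
    """Lower bound of the Wilson score interval; z defaults to 80% confidence."""
    n = ups + downs
    if n == 0:
        return 0.0
    phat = ups / n
    return ((phat + z * z / (2 * n)
             - z * math.sqrt((phat * (1 - phat) + z * z / (4 * n)) / n))
            / (1 + z * z / n))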
@blueprint.route('/<string(length=24):node_id>/commentform')
def commentform_for_node(node_id):
"""Shows only the comment for for comments attached to the given node.
i.e. does not show the comments themselves, just the form to post a new comment.
"""
api = system_util.pillar_api()
node = Node.find(node_id, api=api)
project = Project({'_id': node.project})
can_post_comments = project.node_type_has_method('comment', 'POST', api=api)
return render_template('nodes/custom/comment/list_embed.html',
node_id=node_id,
show_comments=False,
can_post_comments=can_post_comments)
@blueprint.route("/comments/<comment_id>/rate/<operation>", methods=['POST'])
@login_required
def comments_rate(comment_id, operation):
"""Comment rating function
:param comment_id: the comment id
:type comment_id: str
:param operation: the rating operation, one of 'revoke', 'upvote' or 'downvote'
:type operation: str
"""
if operation not in {'revoke', 'upvote', 'downvote'}:
raise wz_exceptions.BadRequest('Invalid operation')
api = system_util.pillar_api()
# PATCH the node and return the result.
comment = Node({'_id': comment_id})
result = comment.patch({'op': operation}, api=api)
assert result['_status'] == 'OK'
return jsonify({
'status': 'success',
'data': {
'op': operation,
'rating_positive': result.properties.rating_positive,
'rating_negative': result.properties.rating_negative,
}})
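For completeness, a hedged sketch of calling this rating endpoint from a script. The host, session cookie and comment ID are placeholders, a logged-in session is required (see @login_required above), and the '/nodes' prefix is assumed from the blueprint:

```python
import requests

BASE_URL = 'https://cloud.blender.org'   # placeholder host
comment_id = '5c1cc4a5a013573d9787164b'  # placeholder comment ID

resp = requests.post(f'{BASE_URL}/nodes/comments/{comment_id}/rate/upvote',
                     cookies={'session': 'SESSION_COOKIE'})  # placeholder auth
resp.raise_for_status()
data = resp.json()['data']
print(data['rating_positive'], data['rating_negative'])
```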

View File

@ -109,7 +109,6 @@ def posts_view(project_id=None, project_url=None, url=None, *, archive=False, pa
project.blog_archive_prev = None
navigation_links = project_navigation_links(project, api)
extension_sidebar_links = current_app.extension_sidebar_links(project)
return render_template(
template_path,
@ -122,7 +121,6 @@ def posts_view(project_id=None, project_url=None, url=None, *, archive=False, pa
node_type_post=project.get_node_type('post'),
can_create_blog_posts=can_create_blog_posts,
navigation_links=navigation_links,
extension_sidebar_links=extension_sidebar_links,
api=api)

View File

@ -48,10 +48,7 @@ def find_for_comment(project, node):
continue
try:
parent = Node.find_one({'where': {
'_id': parent.parent,
'_deleted': {'$ne': True}
}}, api=api)
parent = Node.find(parent.parent, api=api)
except ResourceNotFound:
log.warning(
'url_for_node(node_id=%r): Unable to find parent node %r',

View File

@ -1,9 +1,9 @@
import os
import json
import logging
from datetime import datetime
import pillarsdk
from pillar import shortcodes
from pillarsdk import Node
from pillarsdk import Project
from pillarsdk.exceptions import ResourceNotFound
@ -17,12 +17,15 @@ from flask import request
from flask import jsonify
from flask import abort
from flask_login import current_user
from flask_wtf.csrf import validate_csrf
import werkzeug.exceptions as wz_exceptions
from wtforms import SelectMultipleField
from flask_login import login_required
from jinja2.exceptions import TemplateNotFound
from pillar.api.utils.authorization import check_permissions
from pillar.web.utils import caching
from pillar.markdown import markdown
from pillar.web.nodes.forms import get_node_form
from pillar.web.nodes.forms import process_node_form
@ -105,11 +108,6 @@ def view(node_id, extra_template_args: dict=None):
node_type_name = node.node_type
if node_type_name == 'page':
# HACK: The 'edit node' page GETs this endpoint, but for pages it's plain wrong,
# so we just redirect to the correct URL.
return redirect(url_for_node(node=node))
if node_type_name == 'post' and not request.args.get('embed'):
# Posts shouldn't be shown at this route (unless viewed embedded, typically
# after an edit. Redirect to the correct one.
@ -489,14 +487,11 @@ def preview_markdown():
current_app.csrf.protect()
try:
content = request.json['content']
content = request.form['content']
except KeyError:
return jsonify({'_status': 'ERR',
'message': 'The field "content" was not specified.'}), 400
html = markdown(content)
attachmentsdict = request.json.get('attachments', {})
html = shortcodes.render_commented(html, context={'attachments': attachmentsdict})
return jsonify(content=html)
return jsonify(content=markdown(content))
def ensure_lists_exist_as_empty(node_doc, node_type):
@ -609,94 +604,5 @@ def url_for_node(node_id=None, node=None):
return finders.find_url_for_node(node)
@blueprint.route("/<node_id>/breadcrumbs")
def breadcrumbs(node_id: str):
"""Return breadcrumbs for the given node, as JSON.
Note that a missing parent is still returned in the breadcrumbs,
but with `{_exists: false, name: '-unknown-'}`.
The breadcrumbs start with the top-level parent, and end with the node
itself (marked by {_self: true}). Returns JSON like this:
{breadcrumbs: [
...,
{_id: "parentID",
name: "The Parent Node",
node_type: "group",
url: "/p/project/parentID"},
{_id: "deadbeefbeefbeefbeeffeee",
_self: true,
name: "The Node Itself",
node_type: "asset",
url: "/p/project/nodeID"},
]}
When a parent node is missing, it has a breadcrumb like this:
{_id: "deadbeefbeefbeefbeeffeee",
_exists: false,
name: "-unknown-"}
"""
api = system_util.pillar_api()
is_self = True
def make_crumb(some_node: Node) -> dict:
"""Construct a breadcrumb for this node."""
nonlocal is_self
crumb = {
'_id': some_node._id,
'name': some_node.name,
'node_type': some_node.node_type,
'url': finders.find_url_for_node(some_node),
}
if is_self:
crumb['_self'] = True
is_self = False
return crumb
def make_missing_crumb(some_node_id: str) -> dict:
"""Construct 'missing parent' breadcrumb."""
return {
'_id': some_node_id,
'_exists': False,
'name': '-unknown-',
}
# The first node MUST exist.
try:
node = Node.find(node_id, api=api)
except ResourceNotFound:
log.warning('breadcrumbs(node_id=%r): Unable to find node', node_id)
raise wz_exceptions.NotFound(f'Unable to find node {node_id}')
except ForbiddenAccess:
log.warning('breadcrumbs(node_id=%r): access denied to current user', node_id)
raise wz_exceptions.Forbidden(f'No access to node {node_id}')
crumbs = []
while True:
crumbs.append(make_crumb(node))
child_id = node._id
node_id = node.parent
if not node_id:
break
# If a subsequent node doesn't exist any more, include that in the breadcrumbs.
# Forbidden nodes are handled as if they don't exist.
try:
node = Node.find(node_id, api=api)
except (ResourceNotFound, ForbiddenAccess):
log.warning('breadcrumbs: Unable to find node %r but it is marked as parent of %r',
node_id, child_id)
crumbs.append(make_missing_crumb(node_id))
break
return jsonify({'breadcrumbs': list(reversed(crumbs))})
# Import of custom modules (using the same nodes decorator)
from .custom import groups, storage, posts
from .custom import comments, groups, storage, posts

View File

@ -6,8 +6,7 @@ from flask_login import current_user
import pillar.flask_extra
from pillar import current_app
import pillar.api.users.avatar
from pillar.api.utils import authorization, str2id, jsonify
from pillar.api.utils import authorization, str2id, gravatar, jsonify
from pillar.web.system_util import pillar_api
from pillarsdk import Organization, User
@ -48,7 +47,7 @@ def view_embed(organization_id: str):
members = om.org_members(organization.members)
for member in members:
member['avatar'] = pillar.api.users.avatar.url(member)
member['avatar'] = gravatar(member.get('email'))
member['_id'] = str(member['_id'])
admin_user = User.find(organization.admin_uid, api=api)

View File

@ -30,7 +30,6 @@ class ProjectForm(FlaskForm):
('deleted', 'Deleted')])
picture_header = FileSelectField('Picture header', file_format='image')
picture_square = FileSelectField('Picture square', file_format='image')
picture_16_9 = FileSelectField('Picture 16:9', file_format='image')
def validate(self):
rv = FlaskForm.validate(self)

View File

@ -22,7 +22,6 @@ import werkzeug.exceptions as wz_exceptions
from pillar import current_app
from pillar.api.utils import utcnow
import pillar.api.users.avatar
from pillar.web import system_util
from pillar.web import utils
from pillar.web.nodes import finders
@ -110,6 +109,7 @@ def index():
return render_template(
'projects/index_dashboard.html',
gravatar=utils.gravatar(current_user.email, size=128),
projects_user=projects_user['_items'],
projects_deleted=projects_deleted['_items'],
projects_shared=projects_shared['_items'],
@ -349,7 +349,8 @@ def project_navigation_links(project: typing.Type[Project], api) -> list:
def render_project(project, api, extra_context=None, template_name=None):
utils.attach_project_pictures(project, api)
project.picture_square = utils.get_file(project.picture_square, api=api)
project.picture_header = utils.get_file(project.picture_header, api=api)
def load_latest(list_of_ids, node_type=None):
"""Loads a list of IDs in reversed order."""
@ -402,6 +403,7 @@ def render_project(project, api, extra_context=None, template_name=None):
template_name = template_name or 'projects/home_index.html'
return render_template(
template_name,
gravatar=utils.gravatar(current_user.email, size=128),
project=project,
api=system_util.pillar_api(),
**extra_context)
@ -413,16 +415,17 @@ def render_project(project, api, extra_context=None, template_name=None):
embed_string = ''
template_name = "projects/view{0}.html".format(embed_string)
navigation_links = project_navigation_links(project, api)
extension_sidebar_links = current_app.extension_sidebar_links(project)
navigation_links = project_navigation_links(project, api)
return render_template(template_name,
api=api,
project=project,
node=None,
show_node=False,
show_project=True,
og_picture=project.picture_16_9,
og_picture=project.picture_header,
activity_stream=activity_stream,
navigation_links=navigation_links,
extension_sidebar_links=extension_sidebar_links,
@ -487,14 +490,12 @@ def view_node(project_url, node_id):
raise wz_exceptions.NotFound('No such project')
navigation_links = []
extension_sidebar_links = ''
og_picture = node.picture = utils.get_file(node.picture, api=api)
if project:
utils.attach_project_pictures(project, api)
if not node.picture:
og_picture = project.picture_16_9
og_picture = utils.get_file(project.picture_header, api=api)
project.picture_square = utils.get_file(project.picture_square, api=api)
navigation_links = project_navigation_links(project, api)
extension_sidebar_links = current_app.extension_sidebar_links(project)
# Append _theatre to load the proper template
theatre = '_theatre' if theatre_mode else ''
@ -505,9 +506,10 @@ def view_node(project_url, node_id):
node=node,
project=project,
navigation_links=navigation_links,
extension_sidebar_links=extension_sidebar_links,
og_picture=og_picture,)
extension_sidebar_links = current_app.extension_sidebar_links(project)
return render_template('projects/view{}.html'.format(theatre),
api=api,
project=project,
@ -516,7 +518,7 @@ def view_node(project_url, node_id):
show_project=False,
og_picture=og_picture,
navigation_links=navigation_links,
extension_sidebar_links=extension_sidebar_links,)
extension_sidebar_links=extension_sidebar_links)
def find_project_or_404(project_url, embedded=None, api=None):
@ -539,7 +541,8 @@ def search(project_url):
"""Search into a project"""
api = system_util.pillar_api()
project = find_project_or_404(project_url, api=api)
utils.attach_project_pictures(project, api)
project.picture_square = utils.get_file(project.picture_square, api=api)
project.picture_header = utils.get_file(project.picture_header, api=api)
return render_template('nodes/search.html',
project=project,
@ -580,8 +583,6 @@ def edit(project_url):
project.picture_square = form.picture_square.data
if form.picture_header.data:
project.picture_header = form.picture_header.data
if form.picture_16_9.data:
project.picture_16_9 = form.picture_16_9.data
# Update world permissions from is_private checkbox
if form.is_private.data:
@ -597,8 +598,6 @@ def edit(project_url):
form.picture_square.data = project.picture_square._id
if project.picture_header:
form.picture_header.data = project.picture_header._id
if project.picture_16_9:
form.picture_16_9.data = project.picture_16_9._id
# List of fields from the form that should be hidden to regular users
if current_user.has_role('admin'):
@ -707,12 +706,15 @@ def sharing(project_url):
api = system_util.pillar_api()
# Fetch the project or 404
try:
project = Project.find_one({'where': {'url': project_url}}, api=api)
project = Project.find_one({
'where': '{"url" : "%s"}' % (project_url)}, api=api)
except ResourceNotFound:
return abort(404)
# Fetch users that are part of the admin group
users = project.get_users(api=api)
for user in users['_items']:
user['avatar'] = utils.gravatar(user['email'])
if request.method == 'POST':
user_id = request.form['user_id']
@ -722,14 +724,13 @@ def sharing(project_url):
user = project.add_user(user_id, api=api)
elif action == 'remove':
user = project.remove_user(user_id, api=api)
else:
raise wz_exceptions.BadRequest(f'invalid action {action}')
except ResourceNotFound:
log.info('/p/%s/edit/sharing: User %s not found', project_url, user_id)
return jsonify({'_status': 'ERROR',
'message': 'User %s not found' % user_id}), 404
user['avatar'] = pillar.api.users.avatar.url(user)
# Add gravatar to user
user['avatar'] = utils.gravatar(user['email'])
return jsonify(user)
utils.attach_project_pictures(project, api)

View File

@ -1,18 +1,13 @@
import json
import logging
import urllib.parse
from flask import Blueprint, flash, render_template
from flask_login import login_required
from flask_login import login_required, current_user
from werkzeug.exceptions import abort
from pillar import current_app
from pillar.api.utils import jsonify
import pillar.api.users.avatar
from pillar.auth import current_user
from pillar.web import system_util
from pillar.web.users import forms
from pillarsdk import File, User, exceptions as sdk_exceptions
from pillarsdk import User, exceptions as sdk_exceptions
log = logging.getLogger(__name__)
blueprint = Blueprint('settings', __name__)
@ -32,20 +27,14 @@ def profile():
if form.validate_on_submit():
try:
response = user.set_username(form.username.data, api=api)
log.info('updated username of %s: %s', current_user, response)
user.username = form.username.data
user.update(api=api)
flash("Profile updated", 'success')
except sdk_exceptions.ResourceInvalid as ex:
log.warning('unable to set username %s to %r: %s', current_user, form.username.data, ex)
message = json.loads(ex.content)
except sdk_exceptions.ResourceInvalid as e:
message = json.loads(e.content)
flash(message)
blender_id_endpoint = current_app.config['BLENDER_ID_ENDPOINT']
blender_profile_url = urllib.parse.urljoin(blender_id_endpoint, 'settings/profile')
return render_template('users/settings/profile.html',
form=form, title='profile',
blender_profile_url=blender_profile_url)
return render_template('users/settings/profile.html', form=form, title='profile')
@blueprint.route('/roles')
@ -53,19 +42,3 @@ def profile():
def roles():
"""Show roles and capabilties of the current user."""
return render_template('users/settings/roles.html', title='roles')
@blueprint.route('/profile/sync-avatar', methods=['POST'])
@login_required
def sync_avatar():
"""Fetch the user's avatar from Blender ID and save to storage.
This is an API-like endpoint, in the sense that it returns JSON.
It lives in this file to keep it close to the endpoint that
serves the only page calling it.
"""
new_url = pillar.api.users.avatar.sync_avatar(current_user.user_id)
if not new_url:
return jsonify({'_message': 'Your avatar could not be updated'})
return new_url
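A hypothetical invocation of this endpoint; the host, session cookie and the '/settings' URL prefix of this blueprint are assumptions:

```python
import requests

resp = requests.post('https://cloud.blender.org/settings/profile/sync-avatar',
                     cookies={'session': 'SESSION_COOKIE'})  # placeholder auth
# On success the body is the new avatar URL; on failure it is JSON with a
# '_message' key explaining that the avatar could not be updated.
print(resp.text)
```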

File diff suppressed because one or more lines are too long

Binary file not shown.

View File

@ -31,10 +31,8 @@ def check_oauth_provider(provider):
@blueprint.route('/authorize/<provider>')
def oauth_authorize(provider):
if current_user.is_authenticated:
next_after_login = session.pop('next_after_login', None) or url_for('main.homepage')
log.debug('Redirecting user to %s', next_after_login)
return redirect(next_after_login)
if not current_user.is_anonymous:
return redirect(url_for('main.homepage'))
try:
oauth = OAuthSignIn.get_provider(provider)
@ -54,10 +52,8 @@ def oauth_callback(provider):
from pillar.api.utils.authentication import store_token
from pillar.api.utils import utcnow
next_after_login = session.pop('next_after_login', None) or url_for('main.homepage')
if current_user.is_authenticated:
log.debug('Redirecting user to %s', next_after_login)
return redirect(next_after_login)
return redirect(url_for('main.homepage'))
oauth = OAuthSignIn.get_provider(provider)
try:
@ -67,7 +63,7 @@ def oauth_callback(provider):
raise wz_exceptions.Forbidden()
if oauth_user.id is None:
log.debug('Authentication failed for user with {}'.format(provider))
return redirect(next_after_login)
return redirect(url_for('main.homepage'))
# Find or create user
user_info = {'id': oauth_user.id, 'email': oauth_user.email, 'full_name': ''}
@ -92,8 +88,11 @@ def oauth_callback(provider):
# Check with Blender ID to update certain user roles.
update_subscription()
log.debug('Redirecting user to %s', next_after_login)
return redirect(next_after_login)
next_after_login = session.pop('next_after_login', None)
if next_after_login:
log.debug('Redirecting user to %s', next_after_login)
return redirect(next_after_login)
return redirect(url_for('main.homepage'))
@blueprint.route('/login')

View File

@ -43,38 +43,8 @@ def attach_project_pictures(project, api):
This function should be moved into the API and attached to a new Project object.
"""
# When adding to the list of pictures dealt with here, make sure
# you update unattach_project_pictures() too.
project.picture_square = get_file(project.picture_square, api=api)
project.picture_header = get_file(project.picture_header, api=api)
project.picture_16_9 = get_file(project.picture_16_9, api=api)
def unattach_project_pictures(project: dict):
"""Reverts the operation of 'attach_project_pictures'.
This makes it possible to PUT the project again.
"""
def unattach(property_name: str):
picture_info = project.get(property_name, None)
if not picture_info:
project.pop(property_name, None)
return
if not isinstance(picture_info, dict):
# Assume it's already an ID.
return
try:
picture_id = picture_info['_id']
project[property_name] = picture_id
except KeyError:
return
unattach('picture_square')
unattach('picture_header')
unattach('picture_16_9')
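A minimal illustration of the round trip, with a made-up file document: attach_project_pictures() swaps picture IDs for full file documents, and unattach_project_pictures() reduces them back to plain IDs so the project validates on PUT:

```python
project = {'picture_square': {'_id': '5c1cc4a5a013573d9787164b',  # made-up ID
                              'link': 'https://example.com/img.jpg'}}
unattach_project_pictures(project)
assert project['picture_square'] == '5c1cc4a5a013573d9787164b'

# A key holding no picture is dropped entirely.
empty = {'picture_header': None}
unattach_project_pictures(empty)
assert 'picture_header' not in empty
```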
def mass_attach_project_pictures(projects: typing.Iterable[pillarsdk.Project], *,
@ -136,16 +106,9 @@ def mass_attach_project_pictures(projects: typing.Iterable[pillarsdk.Project], *
def gravatar(email: str, size=64):
"""Deprecated: return the Gravatar URL.
.. deprecated::
Use of Gravatar is deprecated, in favour of our self-hosted avatars.
See pillar.api.users.avatar.url(user).
"""
import warnings
warnings.warn('pillar.web.utils.gravatar() is deprecated, '
'use pillar.api.users.avatar.url() instead',
category=DeprecationWarning, stacklevel=2)
warnings.warn("the pillar.web.gravatar function is deprecated; use hashlib instead",
DeprecationWarning, 2)
from pillar.api.utils import gravatar as api_gravatar
return api_gravatar(email, size)
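The replacement pattern, as this diff applies it in the organizations and project-sharing views; a sketch assuming a user document as stored in MongoDB:

```python
import pillar.api.users.avatar

def avatar_url(user_doc) -> str:
    # Before: utils.gravatar(user_doc['email'])
    # After, using self-hosted avatars:
    return pillar.api.users.avatar.url(user_doc)
```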

View File

@ -1,64 +0,0 @@
[tool.poetry]
name = "pillar"
version = "2.0"
description = ""
authors = [
"Francesco Siddi <francesco@blender.org>",
"Pablo Vazquez <pablo@blender.studio>",
"Sybren Stüvel <sybren@blender.studio>",
]
[tool.poetry.scripts]
# Must be run after installing/updating:
translations = 'pillar.cli.translations:main'
[tool.poetry.dependencies]
python = "~3.6"
attrs = "~19"
algoliasearch = "~1"
bcrypt = "~3"
blinker = "~1.4"
bleach = "~3.1"
celery = {version = "~4.3",extras = ["redis"]}
cryptography = "2.7"
commonmark = "~0.9"
# These must match the version of ElasticSearch used:
elasticsearch = "~6.1"
elasticsearch-dsl = "~6.1"
Eve = "~0.9"
Flask = "~1.0"
Flask-Babel = "~0.12"
Flask-Caching = "~1.7"
Flask-DebugToolbar = "~0.10"
Flask-Script = "~2.0"
Flask-Login = "~0.4"
Flask-WTF = "~0.14"
gcloud = "~0.18"
google-apitools = "~0.5"
IPy = "~1.00"
MarkupSafe = "~1.1"
ndg-httpsclient = "~0.5"
Pillow = "~6.0"
python-dateutil = "~2.8"
rauth = "~0.7"
raven = {version = "~6.10",extras = ["flask"]}
redis = "~3.2"
shortcodes = "~2.5"
zencoder = "~0.6"
pillarsdk = {path = "../pillar-python-sdk"}
# Secondary requirements that weren't installed automatically:
idna = "~2.8"
[tool.poetry.dev-dependencies]
pillar-devdeps = {path = "./devdeps"}
[build-system]
requires = ["poetry==1.0","cryptography==2.7","setuptools==51.0.0","wheel==0.35.1"]
build-backend = "poetry.masonry.api"

17
requirements-dev.txt Normal file
View File

@ -0,0 +1,17 @@
-r requirements.txt
-r ../pillar-python-sdk/requirements-dev.txt
-e ../pillar # also works from parent project, like blender-cloud
# Development requirements
pytest==3.0.6
responses==0.5.1
pytest-cov==2.4.0
mock==2.0.0
mypy==0.501
# Secondary development requirements
cookies==2.2.1
coverage==4.3.4
pbr==2.0.0
py==1.4.32
typed-ast==1.0.2

76
requirements.txt Normal file
View File

@ -0,0 +1,76 @@
# Primary requirements
-r ../pillar-python-sdk/requirements.txt
attrs==18.2.0
algoliasearch==1.12.0
bcrypt==3.1.3
blinker==1.4
bleach==2.1.3
celery[redis]==4.2.1
CommonMark==0.7.2
elasticsearch==6.1.1
elasticsearch-dsl==6.1.0
Eve==0.8
Flask==1.0.2
Flask-Babel==0.11.2
Flask-Caching==1.4.0
Flask-DebugToolbar==0.10.1
Flask-Script==2.0.6
Flask-Login==0.4.1
Flask-WTF==0.14.2
gcloud==0.12.0
google-apitools==0.4.11
httplib2==0.9.2
IPy==0.83
MarkupSafe==0.23
ndg-httpsclient==0.4.0
Pillow==4.1.1
python-dateutil==2.5.3
rauth==0.7.3
raven[flask]==6.3.0
requests==2.13.0
redis==2.10.5
shortcodes==2.5.0
WebOb==1.5.0
wheel==0.29.0
zencoder==0.6.5
# Secondary requirements
amqp==2.3.2
asn1crypto==0.24.0
Babel==2.6.0
billiard==3.5.0.4
Cerberus==1.2
cffi==1.10.0
click==6.7
cryptography==2.0.3
Events==0.3
future==0.16.0
googleapis-common-protos==1.5.3
html5lib==1.0.1
idna==2.5
ipaddress==1.0.22
itsdangerous==0.24
Jinja2==2.10
kombu==4.2.1
oauth2client==4.1.2
oauthlib==2.1.0
olefile==0.45.1
protobuf==3.6.0
protorpc==0.12.0
pyasn1==0.4.4
pyasn1-modules==0.2.2
pycparser==2.17
pymongo==3.7.0
pyOpenSSL==16.2.0
pytz==2018.5
requests-oauthlib==1.0.0
rsa==3.4.2
simplejson==3.16.0
six==1.10.0
urllib3==1.22
vine==1.1.4
webencodings==0.5.1
Werkzeug==0.14.1
WTForms==2.2.1

77
setup.py Normal file
View File

@ -0,0 +1,77 @@
#!/usr/bin/env python
"""Setup file for testing, not for packaging/distribution."""
import setuptools
from setuptools.command.develop import develop
from setuptools.command.install import install
def translations_compile():
"""Compile any existent translation.
"""
from pillar import cli
cli.translations.compile()
class PostDevelopCommand(develop):
"""Post-installation for develop mode."""
def run(self):
super().run()
translations_compile()
class PostInstallCommand(install):
"""Post-installation for installation mode."""
def run(self):
super().run()
translations_compile()
setuptools.setup(
name='pillar',
version='2.0',
packages=setuptools.find_packages('.', exclude=['test']),
install_requires=[
'Flask>=0.12',
'Eve>=0.7.3',
'Flask-Caching>=1.4.0',
'Flask-Script>=2.0.5',
'Flask-Login>=0.3.2',
'Flask-OAuthlib>=0.9.3',
'Flask-WTF>=0.14.2',
'algoliasearch>=1.12.0',
# Limit the major version to the major version of ElasticSearch we're using.
'elasticsearch>=6.0.0,<7.0.0',
'elasticsearch_dsl>=6.0.0,<7.0.0',
'attrs>=16.2.0',
'bugsnag>=2.3.1',
'gcloud>=0.12.0',
'google-apitools>=0.4.11',
'MarkupSafe>=0.23',
'Pillow>=2.8.1',
'requests>=2.9.1',
'rsa>=3.3',
'shortcodes>=2.5', # 2.4.0 and earlier corrupted unicode
'zencoder>=0.6.5',
'bcrypt>=2.0.0',
'blinker>=1.4',
'pillarsdk',
],
tests_require=[
'pytest>=2.9.1',
'responses>=0.5.1',
'pytest-cov>=2.2.1',
'mock>=2.0.0',
],
entry_points = {'console_scripts': [
'translations = pillar.cli.translations:main',
]},
cmdclass={
'install': PostInstallCommand,
'develop': PostDevelopCommand,
},
zip_safe=False,
)

View File

@ -1,2 +0,0 @@
Gulp will transpile everything in this folder. Every subfolder containing an init.js file exporting functions/classes
will be packed into a module in tutti.js under the namespace pillar.FOLDER_NAME.

View File

@ -1,46 +0,0 @@
function thenGetComments(parentId) {
return $.getJSON(`/api/nodes/${parentId}/comments`);
}
function thenCreateComment(parentId, msg, attachments) {
let data = JSON.stringify({
msg: msg,
attachments: attachments
});
return $.ajax({
url: `/api/nodes/${parentId}/comments`,
type: 'POST',
data: data,
dataType: 'json',
contentType: 'application/json; charset=UTF-8'
});
}
function thenUpdateComment(parentId, commentId, msg, attachments) {
let data = JSON.stringify({
msg: msg,
attachments: attachments
});
return $.ajax({
url: `/api/nodes/${parentId}/comments/${commentId}`,
type: 'PATCH',
data: data,
dataType: 'json',
contentType: 'application/json; charset=UTF-8'
});
}
function thenVoteComment(parentId, commentId, vote) {
let data = JSON.stringify({
vote: vote
});
return $.ajax({
url: `/api/nodes/${parentId}/comments/${commentId}/vote`,
type: 'POST',
data: data,
dataType: 'json',
contentType: 'application/json; charset=UTF-8'
});
}
export { thenGetComments, thenCreateComment, thenUpdateComment, thenVoteComment }

View File

@ -1,54 +0,0 @@
function thenUploadFile(projectId, file, progressCB=(total, loaded)=>{}) {
let formData = createFormData(file)
return $.ajax({
url: `/api/storage/stream/${projectId}`,
type: 'POST',
data: formData,
cache: false,
contentType: false,
processData: false,
xhr: () => {
let myxhr = $.ajaxSettings.xhr();
if (myxhr.upload) {
// For handling the progress of the upload
myxhr.upload.addEventListener('progress', function(e) {
if (e.lengthComputable) {
progressCB(e.total, e.loaded);
}
}, false);
}
return myxhr;
}
});
}
function createFormData(file) {
let formData = new FormData();
formData.append('file', file);
return formData;
}
function thenGetFileDocument(fileId) {
return $.get(`/api/files/${fileId}`);
}
function getFileVariation(fileDoc, size = 'm') {
var show_variation = null;
if (typeof fileDoc.variations != 'undefined') {
for (var variation of fileDoc.variations) {
if (variation.size != size) continue;
show_variation = variation;
break;
}
}
if (show_variation == null) {
throw 'Image not found: ' + fileDoc._id + ' size: ' + size;
}
return show_variation;
}
export { thenUploadFile, thenGetFileDocument, getFileVariation }

View File

@ -1,7 +0,0 @@
/**
* Functions for communicating with the pillar server api
*/
export { thenMarkdownToHtml } from './markdown'
export { thenGetProject } from './projects'
export { thenGetNodes, thenGetNode, thenGetNodeActivities, thenUpdateNode, thenDeleteNode } from './nodes'
export { thenGetProjectUsers } from './users'

View File

@ -1,17 +0,0 @@
function thenMarkdownToHtml(markdown, attachments={}) {
let data = JSON.stringify({
content: markdown,
attachments: attachments
});
return $.ajax({
url: "/nodes/preview-markdown",
type: 'POST',
headers: {"X-CSRFToken": csrf_token},
headers: {},
data: data,
dataType: 'json',
contentType: 'application/json; charset=UTF-8'
})
}
export { thenMarkdownToHtml }

View File

@ -1,82 +0,0 @@
function thenGetNodes(where, embedded={}, sort='') {
let encodedWhere = encodeURIComponent(JSON.stringify(where));
let encodedEmbedded = encodeURIComponent(JSON.stringify(embedded));
let encodedSort = encodeURIComponent(sort);
return $.ajax({
url: `/api/nodes?where=${encodedWhere}&embedded=${encodedEmbedded}&sort=${encodedSort}`,
cache: false,
});
}
function thenGetNode(nodeId) {
return $.ajax({
url: `/api/nodes/${nodeId}`,
cache: false,
});
}
function thenGetNodeActivities(nodeId, sort='[("_created", -1)]', max_results=20, page=1) {
let encodedSort = encodeURIComponent(sort);
return $.ajax({
url: `/api/nodes/${nodeId}/activities?sort=${encodedSort}&max_results=${max_results}&page=${page}`,
cache: false,
});
}
function thenUpdateNode(node) {
let id = node['_id'];
let etag = node['_etag'];
let nodeToSave = removePrivateKeys(node);
let data = JSON.stringify(nodeToSave);
return $.ajax({
url: `/api/nodes/${id}`,
type: 'PUT',
data: data,
dataType: 'json',
contentType: 'application/json; charset=UTF-8',
headers: {'If-Match': etag},
}).then(updatedInfo => {
return thenGetNode(updatedInfo['_id'])
.then(node => {
pillar.events.Nodes.triggerUpdated(node);
return node;
})
});
}
function thenDeleteNode(node) {
let id = node['_id'];
let etag = node['_etag'];
return $.ajax({
url: `/api/nodes/${id}`,
type: 'DELETE',
headers: {'If-Match': etag},
}).then(() => {
pillar.events.Nodes.triggerDeleted(id);
});
}
function removePrivateKeys(doc) {
function doRemove(d) {
for (const key in d) {
if (key.startsWith('_')) {
delete d[key];
continue;
}
let val = d[key];
if(typeof val === 'object') {
doRemove(val);
}
}
}
let docCopy = JSON.parse(JSON.stringify(doc));
doRemove(docCopy);
delete docCopy['allowed_methods']
return docCopy;
}
export { thenGetNodes, thenGetNode, thenGetNodeActivities, thenUpdateNode, thenDeleteNode }

View File

@ -1,5 +0,0 @@
function thenGetProject(projectId) {
return $.get(`/api/projects/${projectId}`);
}
export { thenGetProject }

View File

@ -1,7 +0,0 @@
function thenGetProjectUsers(projectId) {
return $.ajax({
url: `/api/p/users?project_id=${projectId}`,
});
}
export { thenGetProjectUsers }

View File

@ -1,167 +0,0 @@
/**
* Helper class to trigger/listen to global events on new/updated/deleted nodes.
*
* @example
* function myCallback(event) {
* console.log('Updated node:', event.detail);
* }
* // Register a callback:
* Nodes.onUpdated('5c1cc4a5a013573d9787164b', myCallback);
* // When changing the node, notify the listeners:
* Nodes.triggerUpdated(myUpdatedNode);
*/
class EventName {
static parentCreated(parentId, node_type) {
return `pillar:node:${parentId}:created-${node_type}`;
}
static globalCreated(node_type) {
return `pillar:node:created-${node_type}`;
}
static updated(nodeId) {
return `pillar:node:${nodeId}:updated`;
}
static deleted(nodeId) {
return `pillar:node:${nodeId}:deleted`;
}
static loaded() {
return `pillar:node:loaded`;
}
}
function trigger(eventName, data) {
document.dispatchEvent(new CustomEvent(eventName, {detail: data}));
}
function on(eventName, cb) {
document.addEventListener(eventName, cb);
}
function off(eventName, cb) {
document.removeEventListener(eventName, cb);
}
class Nodes {
/**
* Trigger events that node has been created
* @param {Object} node
*/
static triggerCreated(node) {
if (node.parent) {
trigger(
EventName.parentCreated(node.parent, node.node_type),
node);
}
trigger(
EventName.globalCreated(node.node_type),
node);
}
/**
* Get notified when new nodes with parent === parentId and node_type === node_type are created
* @param {String} parentId
* @param {String} node_type
* @param {Function(Event)} cb
*/
static onParentCreated(parentId, node_type, cb){
on(
EventName.parentCreated(parentId, node_type),
cb);
}
static offParentCreated(parentId, node_type, cb){
off(
EventName.parentCreated(parentId, node_type),
cb);
}
/**
* Get notified when a new node with node_type === node_type is created
* @param {String} node_type
* @param {Function(Event)} cb
*/
static onCreated(node_type, cb){
on(
EventName.globalCreated(node_type),
cb);
}
static offCreated(node_type, cb){
off(
EventName.globalCreated(node_type),
cb);
}
static triggerUpdated(node) {
trigger(
EventName.updated(node._id),
node);
}
/**
* Get notified when node with _id === nodeId is updated
* @param {String} nodeId
* @param {Function(Event)} cb
*/
static onUpdated(nodeId, cb) {
on(
EventName.updated(nodeId),
cb);
}
static offUpdated(nodeId, cb) {
off(
EventName.updated(nodeId),
cb);
}
/**
* Notify that node has been deleted.
* @param {String} nodeId
*/
static triggerDeleted(nodeId) {
trigger(
EventName.deleted(nodeId),
nodeId);
}
/**
* Listen to events of nodes being deleted where _id === nodeId
* @param {String} nodeId
* @param {Function(Event)} cb
*/
static onDeleted(nodeId, cb) {
on(
EventName.deleted(nodeId),
cb);
}
static offDeleted(nodeId, cb) {
off(
EventName.deleted(nodeId),
cb);
}
static triggerLoaded(nodeId) {
trigger(EventName.loaded(), {nodeId: nodeId});
}
/**
* Listen to events of nodes being loaded for display
* @param {Function(Event)} cb
*/
static onLoaded(cb) {
on(EventName.loaded(), cb);
}
static offLoaded(cb) {
off(EventName.loaded(), cb);
}
}
export { Nodes }

View File

@ -1,4 +0,0 @@
/**
* Collecting Custom Pillar events here
*/
export {Nodes} from './Nodes'

View File

@ -44,13 +44,7 @@ export class MultiSearch {
thenExecute() {
let data = JSON.stringify(this.getAllParams());
let rawAjax = $.ajax({
url: this.apiUrl,
type: 'POST',
data: data,
dataType: 'json',
contentType: 'application/json; charset=UTF-8'
});
let rawAjax = $.getJSON(this.apiUrl, data);
let prettyPromise = rawAjax.then(this.parseResult.bind(this));
prettyPromise['abort'] = rawAjax.abort.bind(rawAjax); // Hack to be able to abort the promise down the road
return prettyPromise;

View File

@ -1,2 +0,0 @@
This module is used to render nodes/users dynamically. It was written before we introduced vue.js into the project.
Current best practice is to use vue for this type of work.

View File

@ -1,4 +1,4 @@
import { prettyDate } from '../init'
import { prettyDate } from '../utils'
describe('prettydate', () => {
beforeEach(() => {
@ -28,7 +28,7 @@ describe('prettydate', () => {
expect(pd({minutes: -5, detailed: true})).toBe('5m ago')
expect(pd({days: -7, detailed: true})).toBe('last Tuesday at 11:46')
expect(pd({days: -8, detailed: true})).toBe('1 week ago at 11:46')
// summer time below
// summer time bellow
expect(pd({days: -14, detailed: true})).toBe('2 weeks ago at 10:46')
expect(pd({days: -31, detailed: true})).toBe('8 Oct at 10:46')
expect(pd({days: -(31 + 366), detailed: true})).toBe('8 Oct 2015 at 10:46')

View File

@ -2,50 +2,25 @@ import { ComponentCreatorInterface } from './ComponentCreatorInterface'
const REGISTERED_CREATORS = []
/**
* Create a jQuery renderable element from a mongo document using registered creators.
* @deprecated use vue instead
*/
export class Component extends ComponentCreatorInterface {
/**
*
* @param {Object} doc
* @returns {$element}
*/
static create$listItem(doc) {
let creator = Component.getCreator(doc);
return creator.create$listItem(doc);
}
/**
* @param {Object} doc
* @returns {$element}
*/
static create$item(doc) {
let creator = Component.getCreator(doc);
return creator.create$item(doc);
}
/**
* @param {Object} candidate
* @returns {Boolean}
*/
static canCreate(candidate) {
return !!Component.getCreator(candidate);
}
/**
* Register component creator to handle a node type
* @param {ComponentCreatorInterface} creator
*/
static regiseterCreator(creator) {
REGISTERED_CREATORS.push(creator);
}
/**
* @param {Object} doc
* @returns {ComponentCreatorInterface}
*/
static getCreator(doc) {
if (doc) {
for (let candidate of REGISTERED_CREATORS) {
@ -56,4 +31,4 @@ export class Component extends ComponentCreatorInterface {
}
throw 'Can not create component using: ' + JSON.stringify(doc);
}
}
}

View File

@ -1,10 +1,6 @@
/**
* @deprecated use vue instead
*/
export class ComponentCreatorInterface {
/**
* Create a $element to render document in a list
* @param {Object} doc
* @param {JSON} doc
* @returns {$element}
*/
static create$listItem(doc) {
@ -12,8 +8,8 @@ export class ComponentCreatorInterface {
}
/**
* Create a $element to render the full doc
* @param {Object} doc
*
* @param {JSON} doc
* @returns {$element}
*/
static create$item(doc) {
@ -21,10 +17,11 @@ export class ComponentCreatorInterface {
}
/**
* @param {Object} candidate
* @returns {boolean}
*
* @param {JSON} candidate
* @returns {boolean}
*/
static canCreate(candidate) {
throw 'Not Implemented';
}
}
}

View File

@ -1,10 +1,6 @@
import { NodesBase } from "./NodesBase";
import { thenLoadVideoProgress } from '../utils';
/**
* Create $element from a node of type asset
* @deprecated use vue instead
*/
export class Assets extends NodesBase{
static create$listItem(node) {
var markIfPublic = true;

View File

@ -3,10 +3,6 @@ import { ComponentCreatorInterface } from '../component/ComponentCreatorInterfac
let CREATE_NODE_ITEM_MAP = {}
/**
* Create $element from node object
* @deprecated use vue instead
*/
export class Nodes extends ComponentCreatorInterface {
/**
* Creates a small list item out of a node document
@ -41,7 +37,7 @@ export class Nodes extends ComponentCreatorInterface {
let $link = $('<a>')
.addClass('btn btn-outline-primary px-5 mb-auto btn-block js-load-next')
.attr('href', 'javascript:void(0);')
.click((e)=> {
.click((e)=> {
let $target = $(e.target);
$target.replaceWith(Nodes.createListOf$nodeItems(nodesLeftToRender, loadNext, loadNext));
})
@ -64,4 +60,4 @@ export class Nodes extends ComponentCreatorInterface {
static registerTemplate(node_type, klass) {
CREATE_NODE_ITEM_MAP[node_type] = klass;
}
}
}

View File

@ -1,9 +1,6 @@
import { prettyDate } from '../../utils/prettydate';
import { thenLoadImage, prettyDate } from '../utils';
import { ComponentCreatorInterface } from '../component/ComponentCreatorInterface'
/**
* @deprecated use vue instead
*/
export class NodesBase extends ComponentCreatorInterface {
static create$listItem(node) {
let nid = (node._id || node.objectID); // To support both mongo and elastic nodes
@ -22,7 +19,7 @@ export class NodesBase extends ComponentCreatorInterface {
}
else {
$(window).trigger('pillar:workStart');
pillar.utils.thenLoadImage(node.picture)
thenLoadImage(node.picture)
.fail(warnNoPicture)
.then((imgVariation) => {
let img = $('<img class="card-img-top">')

View File

@ -1,16 +1,10 @@
import { NodesBase } from "./NodesBase";
/**
* Create $element from a node of type post
* @deprecated use vue instead
*/
export class Posts extends NodesBase {
static create$item(post) {
let content = [];
let $title = $('<a>')
.attr('href', '/nodes/' + post._id + '/redir')
.attr('title', post.name)
.addClass('timeline-post-title')
let $title = $('<div>')
.addClass('h1 text-uppercase mt-4 mb-3')
.text(post.name);
content.push($title);
let $post = $('<div>')

View File

@ -1,12 +1,7 @@
import { ComponentCreatorInterface } from '../component/ComponentCreatorInterface'
/**
* Create $elements from user objects
* @deprecated use vue instead
*/
export class Users extends ComponentCreatorInterface {
static create$listItem(userDoc) {
let roles = userDoc.roles || [];
return $('<div>')
.addClass('users p-2 border-bottom')
.attr('data-user-id', userDoc._id || userDoc.objectID )
@ -18,11 +13,11 @@ export class Users extends ComponentCreatorInterface {
.text(userDoc.username),
$('<small>')
.addClass('d-block roles text-info')
.text(roles.join(', '))
.text(userDoc.roles.join(', '))
)
}
static canCreate(candidate) {
return !!candidate.username;
}
}
}

View File

@ -1,5 +1,122 @@
function thenLoadImage(imgId, size = 'm') {
return $.get('/api/files/' + imgId)
.then((resp)=> {
var show_variation = null;
if (typeof resp.variations != 'undefined') {
for (var variation of resp.variations) {
if (variation.size != size) continue;
show_variation = variation;
break;
}
}
if (show_variation == null) {
throw 'Image not found: ' + imgId + ' size: ' + size;
}
return show_variation;
})
}
function thenLoadVideoProgress(nodeId) {
return $.get('/api/users/video/' + nodeId + '/progress')
}
export { thenLoadVideoProgress };
function prettyDate(time, detail=false) {
/**
* time is anything Date can parse, and we return a
pretty string like 'an hour ago', 'Yesterday', '3 months ago',
'just now', etc
*/
let theDate = new Date(time);
if (!time || isNaN(theDate)) {
return
}
let pretty = '';
let now = new Date(Date.now()); // Easier to mock Date.now() in tests
let second_diff = Math.round((now - theDate) / 1000);
let day_diff = Math.round(second_diff / 86400); // seconds per day (60*60*24)
if ((day_diff < 0) && (theDate.getFullYear() !== now.getFullYear())) {
// "Jul 16, 2018"
pretty = theDate.toLocaleDateString('en-NL',{day: 'numeric', month: 'short', year: 'numeric'});
}
else if ((day_diff < -21) && (theDate.getFullYear() == now.getFullYear())) {
// "Jul 16"
pretty = theDate.toLocaleDateString('en-NL',{day: 'numeric', month: 'short'});
}
else if (day_diff < -7){
let week_count = Math.round(-day_diff / 7);
if (week_count == 1)
pretty = "in 1 week";
else
pretty = "in " + week_count +" weeks";
}
else if (day_diff < -1)
// "next Tuesday"
pretty = 'next ' + theDate.toLocaleDateString('en-NL',{weekday: 'long'});
else if (day_diff === 0) {
if (second_diff < 0) {
let seconds = Math.abs(second_diff);
if (seconds < 10)
return 'just now';
if (seconds < 60)
return 'in ' + seconds +'s';
if (seconds < 120)
return 'in a minute';
if (seconds < 3600)
return 'in ' + Math.round(seconds / 60) + 'm';
if (seconds < 7200)
return 'in an hour';
if (seconds < 86400)
return 'in ' + Math.round(seconds / 3600) + 'h';
} else {
let seconds = second_diff;
if (seconds < 10)
return "just now";
if (seconds < 60)
return seconds + "s ago";
if (seconds < 120)
return "a minute ago";
if (seconds < 3600)
return Math.round(seconds / 60) + "m ago";
if (seconds < 7200)
return "an hour ago";
if (seconds < 86400)
return Math.round(seconds / 3600) + "h ago";
}
}
else if (day_diff == 1)
pretty = "yesterday";
else if (day_diff <= 7)
// "last Tuesday"
pretty = 'last ' + theDate.toLocaleDateString('en-NL',{weekday: 'long'});
else if (day_diff <= 22) {
let week_count = Math.round(day_diff / 7);
if (week_count == 1)
pretty = "1 week ago";
else
pretty = week_count + " weeks ago";
}
else if (theDate.getFullYear() === now.getFullYear())
// "Jul 16"
pretty = theDate.toLocaleDateString('en-NL',{day: 'numeric', month: 'short'});
else
// "Jul 16", 2009
pretty = theDate.toLocaleDateString('en-NL',{day: 'numeric', month: 'short', year: 'numeric'});
if (detail){
// "Tuesday at 04:20"
let paddedHour = ('00' + theDate.getUTCHours()).substr(-2);
let paddedMin = ('00' + theDate.getUTCMinutes()).substr(-2);
return pretty + ' at ' + paddedHour + ':' + paddedMin;
}
return pretty;
}
export { thenLoadImage, thenLoadVideoProgress, prettyDate };

View File

@ -1,45 +0,0 @@
export const UserEvents = {
USER_LOADED: 'user-loaded',
}
let currentUserEventBus = new Vue();
class User{
constructor(kwargs) {
this.user_id = kwargs['user_id'] || '';
this.username = kwargs['username'] || '';
this.full_name = kwargs['full_name'] || '';
this.avatar_url = kwargs['avatar_url'] || '';
this.email = kwargs['email'] || '';
this.capabilities = kwargs['capabilities'] || [];
this.badges_html = kwargs['badges_html'] || '';
this.is_authenticated = kwargs['is_authenticated'] || false;
}
/**
* """Returns True iff the user has one or more of the given capabilities."""
* @param {...String} args
*/
hasCap(...args) {
for(let cap of args) {
if (this.capabilities.indexOf(cap) != -1) return true;
}
return false;
}
}
let currentUser;
function initCurrentUser(kwargs){
currentUser = new User(kwargs);
currentUserEventBus.$emit(UserEvents.USER_LOADED, currentUser);
}
function getCurrentUser() {
return currentUser;
}
function updateCurrentUser(user) {
currentUser = user;
currentUserEventBus.$emit(UserEvents.USER_LOADED, currentUser);
}
export { getCurrentUser, initCurrentUser, updateCurrentUser, currentUserEventBus }

View File

@ -1,20 +0,0 @@
function thenLoadImage(imgId, size = 'm') {
return $.get('/api/files/' + imgId)
.then((resp)=> {
var show_variation = null;
if (typeof resp.variations != 'undefined') {
for (var variation of resp.variations) {
if (variation.size != size) continue;
show_variation = variation;
break;
}
}
if (show_variation == null) {
throw 'Image not found: ' + imgId + ' size: ' + size;
}
return show_variation;
})
}
export { thenLoadImage }

View File

@ -1,36 +1 @@
export { transformPlaceholder } from './placeholder'
export { prettyDate } from './prettydate'
export { getCurrentUser, initCurrentUser, updateCurrentUser, currentUserEventBus, UserEvents } from './currentuser'
export { thenLoadImage } from './files'
export function debounced(fn, delay=1000) {
let timerId;
return function (...args) {
if (timerId) {
clearTimeout(timerId);
}
timerId = setTimeout(() => {
fn(...args);
timerId = null;
}, delay);
}
}
/**
* Extracts error message from error of type String, Error or xhrError
* @param {*} err
* @returns {String}
*/
export function messageFromError(err){
if (typeof err === "string") {
// type String
return err;
} else if(typeof err.message === "string") {
// type Error
return err.message;
} else {
// type xhr probably
return xhrErrorResponseMessage(err);
}
}
export { transformPlaceholder } from './placeholder'

View File

@ -1,97 +0,0 @@
export function prettyDate(time, detail=false) {
/**
* time is anything Date can parse, and we return a
pretty string like 'an hour ago', 'Yesterday', '3 months ago',
'just now', etc
*/
let theDate = new Date(time);
if (!time || isNaN(theDate)) {
return
}
let pretty = '';
let now = new Date(Date.now()); // Easier to mock Date.now() in tests
let second_diff = Math.round((now - theDate) / 1000);
let day_diff = Math.round(second_diff / 86400); // seconds per day (60*60*24)
if ((day_diff < 0) && (theDate.getFullYear() !== now.getFullYear())) {
// "Jul 16, 2018"
pretty = theDate.toLocaleDateString('en-NL',{day: 'numeric', month: 'short', year: 'numeric'});
}
else if ((day_diff < -21) && (theDate.getFullYear() == now.getFullYear())) {
// "Jul 16"
pretty = theDate.toLocaleDateString('en-NL',{day: 'numeric', month: 'short'});
}
else if (day_diff < -7){
let week_count = Math.round(-day_diff / 7);
if (week_count == 1)
pretty = "in 1 week";
else
pretty = "in " + week_count +" weeks";
}
else if (day_diff < 0)
// "next Tuesday"
pretty = 'next ' + theDate.toLocaleDateString('en-NL',{weekday: 'long'});
else if (day_diff === 0) {
if (second_diff < 0) {
let seconds = Math.abs(second_diff);
if (seconds < 10)
return 'just now';
if (seconds < 60)
return 'in ' + seconds +'s';
if (seconds < 120)
return 'in a minute';
if (seconds < 3600)
return 'in ' + Math.round(seconds / 60) + 'm';
if (seconds < 7200)
return 'in an hour';
if (seconds < 86400)
return 'in ' + Math.round(seconds / 3600) + 'h';
} else {
let seconds = second_diff;
if (seconds < 10)
return "just now";
if (seconds < 60)
return seconds + "s ago";
if (seconds < 120)
return "a minute ago";
if (seconds < 3600)
return Math.round(seconds / 60) + "m ago";
if (seconds < 7200)
return "an hour ago";
if (seconds < 86400)
return Math.round(seconds / 3600) + "h ago";
}
}
else if (day_diff == 1)
pretty = "yesterday";
else if (day_diff <= 7)
// "last Tuesday"
pretty = 'last ' + theDate.toLocaleDateString('en-NL',{weekday: 'long'});
else if (day_diff <= 22) {
let week_count = Math.round(day_diff / 7);
if (week_count == 1)
pretty = "1 week ago";
else
pretty = week_count + " weeks ago";
}
else if (theDate.getFullYear() === now.getFullYear())
// "Jul 16"
pretty = theDate.toLocaleDateString('en-NL',{day: 'numeric', month: 'short'});
else
// "Jul 16", 2009
pretty = theDate.toLocaleDateString('en-NL',{day: 'numeric', month: 'short', year: 'numeric'});
if (detail){
// "Tuesday at 04:20"
let paddedHour = ('00' + theDate.getUTCHours()).substr(-2);
let paddedMin = ('00' + theDate.getUTCMinutes()).substr(-2);
return pretty + ' at ' + paddedHour + ':' + paddedMin;
}
return pretty;
}

View File

@ -1,35 +0,0 @@
# Vue components
[Vue.js](https://vuejs.org/) is a javascript framework for writing interactive ui components.
Vue.js is packed into tutti.js, and hence available site wide.
### Absolute must read
- https://vuejs.org/v2/api/#Options-Data
- https://vuejs.org/v2/api/#v-bind
- https://vuejs.org/v2/api/#v-model
- https://vuejs.org/v2/guide/conditional.html
- https://vuejs.org/v2/guide/list.html#v-for-with-an-Object
- https://vuejs.org/v2/api/#vm-emit
- https://vuejs.org/v2/api/#v-on
### Styling and animation of components
- https://vuejs.org/v2/guide/class-and-style.html#Binding-HTML-Classes
- https://vuejs.org/v2/guide/transitions.html
### More advanced, but important topics
- https://vuejs.org/v2/api/#is
- https://vuejs.org/v2/guide/components-slots.html#Slot-Content
- https://vuejs.org/v2/guide/mixins.html
### Rules of thumb
- [Have a dash in your component name](https://vuejs.org/v2/guide/components-registration.html#Component-Names)
- Have one prop binding per line in component templates.
~~~
// Good!
<my-component
:propA="propX"
:propB="propY"
/>
// Bad!
<my-component :propA="propX" :propB="propY"/>
~~~

View File

@ -1,52 +0,0 @@
const TEMPLATE = `
<div class='breadcrumbs' v-if="breadcrumbs.length">
<ul>
<li v-for="crumb in breadcrumbs">
<a :href="crumb.url" v-if="!crumb._self" @click.prevent="navigateToNode(crumb._id)">{{ crumb.name }}</a>
<span v-else>{{ crumb.name }}</span>
</li>
</ul>
</div>
`
Vue.component("node-breadcrumbs", {
template: TEMPLATE,
created() {
this.loadBreadcrumbs();
pillar.events.Nodes.onLoaded(event => {
this.nodeId = event.detail.nodeId;
});
},
props: {
nodeId: String,
},
data() { return {
breadcrumbs: [],
}},
watch: {
nodeId() {
this.loadBreadcrumbs();
},
},
methods: {
loadBreadcrumbs() {
// The node ID may not exist (when at project level, for example).
if (!this.nodeId) {
this.breadcrumbs = [];
return;
}
$.get(`/nodes/${this.nodeId}/breadcrumbs`)
.done(data => {
this.breadcrumbs = data.breadcrumbs;
})
.fail(error => {
toastr.error(xhrErrorResponseMessage(error), "Unable to load breadcrumbs");
})
;
},
navigateToNode(nodeId) {
this.$emit('navigate', nodeId);
},
},
});

View File

@ -1,120 +0,0 @@
import { thenGetFileDocument, getFileVariation } from '../../api/files'
import { UnitOfWorkTracker } from '../mixins/UnitOfWorkTracker'
const VALID_NAME_REGEXP = /[a-zA-Z0-9_\-]+/g;
const NON_VALID_NAME_REGEXP = /[^a-zA-Z0-9_\-]+/g;
const TEMPLATE = `
<div class="attachment"
:class="{error: !isSlugOk}"
>
<div class="thumbnail-container"
@click="$emit('insert', oid)"
title="Click to add to comment"
>
<i :class="thumbnailBackup"
v-show="!thumbnail"
/>
<img class="preview-thumbnail"
v-if="!!thumbnail"
:src="thumbnail"
width=50
height=50
/>
</div>
<input class="form-control"
title="Slug"
v-model="newSlug"
/>
<div class="actions">
<div class="action delete"
@click="$emit('delete', oid)"
>
<i class="pi-trash"/>
Delete
</div>
</div>
</div>
`;
Vue.component('comment-attachment-editor', {
template: TEMPLATE,
mixins: [UnitOfWorkTracker],
props: {
slug: String,
allSlugs: Array,
oid: String
},
data() {
return {
newSlug: this.slug,
thumbnail: '',
thumbnailBackup: 'pi-spin spin',
}
},
computed: {
isValidAttachmentName() {
let regexpMatch = this.slug.match(VALID_NAME_REGEXP);
return !!regexpMatch && regexpMatch.length === 1 && regexpMatch[0] === this.slug;
},
isUnique() {
let countOccurrences = 0;
for (let s of this.allSlugs) {
// Don't worry about unicode. isValidAttachmentName denies those anyway
if (s.toUpperCase() === this.slug.toUpperCase()) {
countOccurrences++;
}
}
return countOccurrences === 1;
},
isSlugOk() {
return this.isValidAttachmentName && this.isUnique;
}
},
watch: {
newSlug(newValue, oldValue) {
this.$emit('rename', newValue, this.oid);
},
isSlugOk(newValue, oldValue) {
this.$emit('validation', this.oid, newValue);
}
},
created() {
this.newSlug = this.makeSafeAttachmentString(this.slug);
this.$emit('validation', this.oid, this.isSlugOk);
this.unitOfWork(
thenGetFileDocument(this.oid)
.then((fileDoc) => {
let content_type = fileDoc.content_type
if (content_type.startsWith('image')) {
try {
let imgFile = getFileVariation(fileDoc, 's');
this.thumbnail = imgFile.link;
} catch (error) {
this.thumbnailBackup = 'pi-image';
}
} else if(content_type.startsWith('video')) {
this.thumbnailBackup = 'pi-video';
} else {
this.thumbnailBackup = 'pi-file';
}
})
);
},
methods: {
/**
* Replaces all spaces with underscores and strips all other characters that are not alphanumeric, '_' or '-'.
* @param {String} unsafe
* @returns {String}
*/
makeSafeAttachmentString(unsafe) {
let candidate = (unsafe);
let matchSpace = / /g;
candidate = candidate
.replace(matchSpace, '_')
.replace(NON_VALID_NAME_REGEXP, '')
return candidate || `${this.oid}`
}
}
});

Some files were not shown because too many files have changed in this diff.