Compare commits

..

21 Commits

Author SHA1 Message Date
b969854592 Prevent deleted users from logging in 2020-07-24 12:45:06 +02:00
4e21b41ba6 On node save, detect changes in the download attr
If a change is detected, mark the previous file as _deleted. This
does not delete any file on the system or database document.
2019-08-28 21:41:05 +02:00
db9cb09c68 Further tweaks to notifications layout 2019-07-02 14:23:50 +02:00
d424febfeb Improve comments parsing
As part of #108 - dillo_post now becomes post, and the title of a
post, or the parsed content of a comment are displayed.
2019-06-27 01:22:45 +02:00
defa5abd18 PEP8 formatting 2019-06-27 01:17:15 +02:00
26858f01b7 Update package-lock.json 2019-05-16 19:23:10 +02:00
3bb35d0ab8 Merge branch 'master' into dillo 2019-04-24 22:24:23 +02:00
38e4c7c937 Merge branch 'master' into dillo
# Conflicts:
#	pillar/api/nodes/__init__.py
2019-04-20 22:26:51 +02:00
312b0a276a Merge branch 'master' into dillo 2019-04-08 23:24:56 +02:00
32361a0e70 Merge branch 'master' into dillo 2019-04-01 18:53:28 +02:00
b26402412b UI: Vertically center badges under comment avatar. 2019-03-21 01:04:21 +01:00
d5f2996704 Remove package-lock.json 2019-03-20 14:19:36 +01:00
d1143bad3e Merge branch 'master' into dillo 2019-03-12 20:27:54 +01:00
c64e24d80d Merge branch 'master' into dillo 2019-03-12 14:28:00 +01:00
446d31d807 Merge branch 'master' into dillo 2019-03-11 19:24:01 +01:00
145d512aa7 UI: Fix emojis margin-top on node description utility. 2019-03-11 03:13:01 +01:00
bf63148852 CSS: Remove primary buttons gradient.
Doesn't always look nice, fallback to default bootstrap primary color instead.
2019-03-11 01:32:17 +01:00
812d911195 Merge branch 'master' into dillo 2019-02-20 23:26:04 +01:00
f0031d44b2 Merge branch 'master' into dillo 2019-02-03 15:51:22 +01:00
5660f4b606 Turn log warning message into debug 2019-02-03 15:50:48 +01:00
6b6a5310f8 Temp fixes for Dillo integration 2019-02-01 19:49:58 +01:00
87 changed files with 5170 additions and 3375 deletions

4
.gitignore vendored
View File

@ -13,11 +13,10 @@ config_local.py
/build
/.cache
/.pytest_cache/
*.egg-info/
/*.egg-info/
profile.stats
/dump/
/.eggs
/devdeps/pip-wheel-metadata/
/node_modules
/.sass-cache
@ -32,4 +31,3 @@ pillar/web/static/assets/js/vendor/video.min.js
pillar/web/static/storage/
pillar/web/static/uploads/
pillar/web/templates/
/poetry.lock

View File

@ -3,7 +3,7 @@ Pillar
This is the latest iteration on the Attract project. We are building a unified
framework called Pillar. Pillar will combine Blender Cloud and Attract. You
can see Pillar in action on the [Blender Cloud](https://cloud.blender.org).
can see Pillar in action on the [Blender Cloud](https://cloud.bender.org).
## Custom fonts
@ -25,16 +25,15 @@ Don't forget to Gulp!
## Installation
Dependencies are managed via [Poetry](https://poetry.eustace.io/).
Make sure your /data directory exists and is writable by the current user.
Alternatively, provide a `pillar/config_local.py` that changes the relevant
settings.
```
git clone git@git.blender.org:pillar-python-sdk.git ../pillar-python-sdk
pip install -U --user poetry
poetry install
pip install -e ../pillar-python-sdk
pip install -U -r requirements.txt
pip install -e .
```
## HDRi viewer

View File

@ -1,16 +0,0 @@
[tool.poetry]
name = "pillar-devdeps"
version = "1.0"
description = ""
authors = [
"Francesco Siddi <francesco@blender.org>",
"Pablo Vazquez <pablo@blender.studio>",
"Sybren Stüvel <sybren@blender.studio>",
]
[tool.poetry.dependencies]
python = "~3.6"
mypy = "^0.501"
pytest = "~4.4"
pytest-cov = "~2.7"
responses = "~0.10"

6835
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -25,18 +25,18 @@
"gulp-plumber": "1.2.0",
"gulp-pug": "4.0.1",
"gulp-rename": "1.4.0",
"gulp-sass": "4.1.0",
"gulp-sass": "4.0.1",
"gulp-sourcemaps": "2.6.4",
"gulp-uglify-es": "1.0.4",
"jest": "^24.8.0",
"jest": "23.6.0",
"minimist": "1.2.0",
"vinyl-buffer": "1.0.1",
"vinyl-source-stream": "2.0.0"
},
"dependencies": {
"bootstrap": "^4.3.1",
"bootstrap": "4.1.3",
"glob": "7.1.3",
"jquery": "^3.4.1",
"jquery": "3.3.1",
"natives": "^1.1.6",
"popper.js": "1.14.4",
"video.js": "7.2.2",

View File

@ -12,25 +12,10 @@ import typing
import os
import os.path
import pathlib
import warnings
# These warnings have to be suppressed before the first import.
# Eve is falling behind on Cerberus. See https://github.com/pyeve/eve/issues/1278
warnings.filterwarnings(
'ignore', category=DeprecationWarning,
message="Methods for type testing are deprecated, use TypeDefinition and the "
"'types_mapping'-property of a Validator-instance instead")
# Werkzeug deprecated Request.is_xhr, but it works fine with jQuery and we don't need a reminder
# every time a unit test is run.
warnings.filterwarnings('ignore', category=DeprecationWarning,
message="'Request.is_xhr' is deprecated as of version 0.13 and will be "
"removed in version 1.0.")
import jinja2
import flask
from eve import Eve
import flask
from flask import g, render_template, request
from flask_babel import Babel, gettext as _
from flask.templating import TemplateNotFound
@ -85,7 +70,7 @@ class BlinkerCompatibleEve(Eve):
class PillarServer(BlinkerCompatibleEve):
def __init__(self, app_root: str, **kwargs) -> None:
def __init__(self, app_root, **kwargs):
from .extension import PillarExtension
from celery import Celery
from flask_wtf.csrf import CSRFProtect
@ -492,7 +477,6 @@ class PillarServer(BlinkerCompatibleEve):
# Pillar-defined Celery task modules:
celery_task_modules = [
'pillar.celery.avatar',
'pillar.celery.badges',
'pillar.celery.email_tasks',
'pillar.celery.file_link_tasks',
@ -663,7 +647,7 @@ class PillarServer(BlinkerCompatibleEve):
return self.pillar_error_handler(error)
def handle_sdk_resource_invalid(self, error):
self.log.exception('Forwarding ResourceInvalid exception to client: %s', error, exc_info=True)
self.log.info('Forwarding ResourceInvalid exception to client: %s', error, exc_info=True)
# Raising a Werkzeug 422 exception doesn't work, as Flask turns it into a 500.
return _('The submitted data could not be validated.'), 422
@ -811,7 +795,6 @@ class PillarServer(BlinkerCompatibleEve):
url = self.config['URLS'][resource]
path = '%s/%s' % (self.api_prefix, url)
with self.__fake_request_url_rule('POST', path):
return post_internal(resource, payl=payl, skip_validation=skip_validation)[:4]
@ -920,8 +903,7 @@ class PillarServer(BlinkerCompatibleEve):
yield ctx
def validator_for_resource(self,
resource_name: str) -> custom_field_validation.ValidateCustomFields:
def validator_for_resource(self, resource_name: str) -> custom_field_validation.ValidateCustomFields:
schema = self.config['DOMAIN'][resource_name]['schema']
validator = self.validator(schema, resource_name)
return validator

View File

@ -1,12 +1,22 @@
import logging
from html.parser import HTMLParser
from flask import request, current_app
import pillar.api.users.avatar
from pillar.api.utils import gravatar
from pillar.auth import current_user
log = logging.getLogger(__name__)
class CommentHTMLParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.data = []
def handle_data(self, data):
self.data.append(data)
def notification_parse(notification):
activities_collection = current_app.data.driver.db['activities']
activities_subscriptions_collection = \
@ -30,9 +40,14 @@ def notification_parse(notification):
object_type = 'comment'
object_name = ''
object_id = activity['object']
context_object_type = node['parent']['node_type']
# If node_type is 'dillo_post', just call it 'post'
node_type = 'post' if context_object_type.endswith('_post') else \
context_object_type
if node['parent']['user'] == current_user.user_id:
owner = "your {0}".format(node['parent']['node_type'])
owner = f"your {node_type}"
else:
parent_comment_user = users_collection.find_one(
{'_id': node['parent']['user']})
@ -40,10 +55,22 @@ def notification_parse(notification):
user_name = 'their'
else:
user_name = "{0}'s".format(parent_comment_user['username'])
owner = "{0} {1}".format(user_name, node['parent']['node_type'])
context_object_type = node['parent']['node_type']
context_object_name = owner
owner = f"{user_name} {node_type}"
context_object_name = f"{node['parent']['name'][:50]}..."
if context_object_type == 'comment':
# Parse the comment content, which might be HTML and extract
# some text from it.
parser = CommentHTMLParser()
# Trim the comment content to 50 chars, the parser will handle it
parser.feed(node['properties']['content'][:50])
try:
comment_content = parser.data[0]
except KeyError:
comment_content = '...'
# Trim the parsed text down to 50 chars
context_object_name = f"{comment_content[:50]}..."
context_object_id = activity['context_object']
if activity['verb'] == 'replied':
action = 'replied to'
@ -52,13 +79,15 @@ def notification_parse(notification):
else:
action = activity['verb']
action = f'{action} {owner}'
lookup = {
'user': current_user.user_id,
'context_object_type': 'node',
'context_object': context_object_id,
}
subscription = activities_subscriptions_collection.find_one(lookup)
if subscription and subscription['notifications']['web'] == True:
if subscription and subscription['notifications']['web'] is True:
is_subscribed = True
else:
is_subscribed = False
@ -68,7 +97,7 @@ def notification_parse(notification):
if actor:
parsed_actor = {
'username': actor['username'],
'avatar': pillar.api.users.avatar.url(actor)}
'avatar': gravatar(actor['email'])}
else:
parsed_actor = None
@ -91,14 +120,14 @@ def notification_parse(notification):
def notification_get_subscriptions(context_object_type, context_object_id, actor_user_id):
subscriptions_collection = current_app.db('activities-subscriptions')
subscriptions_collection = current_app.data.driver.db['activities-subscriptions']
lookup = {
'user': {"$ne": actor_user_id},
'context_object_type': context_object_type,
'context_object': context_object_id,
'is_subscribed': True,
}
return subscriptions_collection.find(lookup), subscriptions_collection.count_documents(lookup)
return subscriptions_collection.find(lookup)
def activity_subscribe(user_id, context_object_type, context_object_id):
@ -140,10 +169,10 @@ def activity_object_add(actor_user_id, verb, object_type, object_id,
:param object_id: object id, to be traced with object_type_id
"""
subscriptions, subscription_count = notification_get_subscriptions(
subscriptions = notification_get_subscriptions(
context_object_type, context_object_id, actor_user_id)
if subscription_count == 0:
if subscriptions.count() == 0:
return
info, status = register_activity(actor_user_id, verb, object_type, object_id,

View File

@ -257,10 +257,10 @@ def has_home_project(user_id):
"""Returns True iff the user has a home project."""
proj_coll = current_app.data.driver.db['projects']
return proj_coll.count_documents({'user': user_id, 'category': 'home', '_deleted': False}) > 0
return proj_coll.count({'user': user_id, 'category': 'home', '_deleted': False}) > 0
def get_home_project(user_id: ObjectId, projection=None) -> dict:
def get_home_project(user_id, projection=None):
"""Returns the home project"""
proj_coll = current_app.data.driver.db['projects']
@ -272,10 +272,10 @@ def is_home_project(project_id, user_id):
"""Returns True iff the given project exists and is the user's home project."""
proj_coll = current_app.data.driver.db['projects']
return proj_coll.count_documents({'_id': project_id,
'user': user_id,
'category': 'home',
'_deleted': False}) > 0
return proj_coll.count({'_id': project_id,
'user': user_id,
'category': 'home',
'_deleted': False}) > 0
def mark_node_updated(node_id):

View File

@ -104,7 +104,7 @@ def has_texture_node(proj, return_hdri=True):
if return_hdri:
node_types.append('group_hdri')
count = nodes_collection.count_documents(
count = nodes_collection.count(
{'node_type': {'$in': node_types},
'project': proj['_id'],
'parent': None})

View File

@ -13,10 +13,8 @@ from bson import tz_util
from rauth import OAuth2Session
from flask import Blueprint, request, jsonify, session
from requests.adapters import HTTPAdapter
import urllib3.util.retry
from pillar import current_app
from pillar.auth import get_blender_id_oauth_token
from pillar.api.utils import authentication, utcnow
from pillar.api.utils.authentication import find_user_in_db, upsert_user
@ -31,30 +29,6 @@ class LogoutUser(Exception):
"""
class Session(requests.Session):
"""Requests Session suitable for Blender ID communication."""
def __init__(self):
super().__init__()
retries = urllib3.util.retry.Retry(
total=10,
backoff_factor=0.05,
)
http_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
self.mount('https://', http_adapter)
self.mount('http://', http_adapter)
def authenticate(self):
"""Attach the current user's authentication token to the request."""
bid_token = get_blender_id_oauth_token()
if not bid_token:
raise TypeError('authenticate() requires current user to be logged in with Blender ID')
self.headers['Authorization'] = f'Bearer {bid_token}'
@blender_id.route('/store_scst', methods=['POST'])
def store_subclient_token():
"""Verifies & stores a user's subclient-specific token."""
@ -145,8 +119,12 @@ def validate_token(user_id, token, oauth_subclient_id):
url = urljoin(blender_id_endpoint, 'u/validate_token')
log.debug('POSTing to %r', url)
# Retry a few times when POSTing to BlenderID fails.
# Source: http://stackoverflow.com/a/15431343/875379
s = requests.Session()
s.mount(blender_id_endpoint, HTTPAdapter(max_retries=5))
# POST to Blender ID, handling errors as negative verification results.
s = Session()
try:
r = s.post(url, data=payload, timeout=5,
verify=current_app.config['TLS_CERT_FILE'])
@ -280,16 +258,6 @@ def fetch_blenderid_user() -> dict:
return payload
def avatar_url(blenderid_user_id: str) -> str:
"""Return the URL to the user's avatar on Blender ID.
This avatar should be downloaded, and not served from the Blender ID URL.
"""
bid_url = urljoin(current_app.config['BLENDER_ID_ENDPOINT'],
f'api/user/{blenderid_user_id}/avatar')
return bid_url
def setup_app(app, url_prefix):
app.register_api_blueprint(blender_id, url_prefix=url_prefix)

View File

@ -29,11 +29,7 @@ class ValidateCustomFields(Validator):
dict_valueschema = schema_prop['schema']
properties[prop] = self.convert_properties(properties[prop], dict_valueschema)
except KeyError:
# Cerberus 1.3 changed valueschema to valuesrules.
dict_valueschema = schema_prop.get('valuesrules') or \
schema_prop.get('valueschema')
if dict_valueschema is None:
raise KeyError(f"missing 'valuesrules' key in schema of property {prop}")
dict_valueschema = schema_prop['valueschema']
self.convert_dict_values(properties[prop], dict_valueschema)
elif prop_type == 'list':
@ -144,7 +140,7 @@ class ValidateCustomFields(Validator):
if not value:
self._error(field, "Value is required once the document was created")
def _check_with_iprange(self, field_name: str, value: str):
def _validator_iprange(self, field_name: str, value: str):
"""Ensure the field contains a valid IP address.
Supports both IPv6 and IPv4 ranges. Requires the IPy module.
@ -181,12 +177,12 @@ if __name__ == '__main__':
v = ValidateCustomFields()
v.schema = {
'foo': {'type': 'string', 'check_with': 'markdown'},
'foo': {'type': 'string', 'validator': 'markdown'},
'foo_html': {'type': 'string'},
'nested': {
'type': 'dict',
'schema': {
'bar': {'type': 'string', 'check_with': 'markdown'},
'bar': {'type': 'string', 'validator': 'markdown'},
'bar_html': {'type': 'string'},
}
}

View File

@ -125,25 +125,6 @@ users_schema = {
'type': 'dict',
'allow_unknown': True,
},
'avatar': {
'type': 'dict',
'schema': {
'file': {
'type': 'objectid',
'data_relation': {
'resource': 'files',
'field': '_id',
},
},
# For only downloading when things really changed:
'last_downloaded_url': {
'type': 'string',
},
'last_modified': {
'type': 'string',
},
},
},
# Node-specific information for this user.
'nodes': {
@ -154,8 +135,8 @@ users_schema = {
'type': 'dict',
# Keyed by Node ID of the video asset. MongoDB doesn't support using
# ObjectIds as key, so we cast them to string instead.
'keysrules': {'type': 'string'},
'valuesrules': {
'keyschema': {'type': 'string'},
'valueschema': {
'type': 'dict',
'schema': {
'progress_in_sec': {'type': 'float', 'min': 0},
@ -275,7 +256,7 @@ organizations_schema = {
'start': {'type': 'binary', 'required': True},
'end': {'type': 'binary', 'required': True},
'prefix': {'type': 'integer', 'required': True},
'human': {'type': 'string', 'required': True, 'check_with': 'iprange'},
'human': {'type': 'string', 'required': True, 'validator': 'iprange'},
}
},
},

View File

@ -470,7 +470,7 @@ def before_returning_files(response):
ensure_valid_link(item)
def ensure_valid_link(response: dict) -> None:
def ensure_valid_link(response):
"""Ensures the file item has valid file links using generate_link(...)."""
# Log to function-specific logger, so we can easily turn it off.
@ -495,13 +495,12 @@ def ensure_valid_link(response: dict) -> None:
generate_all_links(response, now)
def generate_all_links(response: dict, now: datetime.datetime) -> None:
def generate_all_links(response, now):
"""Generate a new link for the file and all its variations.
:param response: the file document that should be updated.
:param now: datetime that reflects 'now', for consistent expiry generation.
"""
assert isinstance(response, dict), f'response must be dict, is {response!r}'
project_id = str(
response['project']) if 'project' in response else None
@ -566,10 +565,13 @@ def on_pre_get_files(_, lookup):
lookup_expired = lookup.copy()
lookup_expired['link_expires'] = {'$lte': now}
cursor, _ = current_app.data.find('files', parsed_req, lookup_expired, perform_count=False)
for idx, file_doc in enumerate(cursor):
if idx == 0:
log.debug('Updating expired links for files that matched lookup %s', lookup_expired)
cursor = current_app.data.find('files', parsed_req, lookup_expired)
if cursor.count() == 0:
return
log.debug('Updating expired links for %d files that matched lookup %s',
cursor.count(), lookup_expired)
for file_doc in cursor:
# log.debug('Updating expired links for file %r.', file_doc['_id'])
generate_all_links(file_doc, now)
@ -593,14 +595,15 @@ def refresh_links_for_project(project_uuid, chunk_size, expiry_seconds):
'link_expires': {'$lt': expire_before},
}).sort([('link_expires', pymongo.ASCENDING)]).limit(chunk_size)
refresh_count = 0
if to_refresh.count() == 0:
log.info('No links to refresh.')
return
for file_doc in to_refresh:
log.debug('Refreshing links for file %s', file_doc['_id'])
generate_all_links(file_doc, now)
refresh_count += 1
if refresh_count:
log.info('Refreshed %i links', refresh_count)
log.info('Refreshed %i links', min(chunk_size, to_refresh.count()))
def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
@ -618,13 +621,14 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
my_log.info('Limiting to links that expire before %s', expire_before)
base_query = {'backend': backend_name, '_deleted': {'$ne': True}}
to_refresh_query = {
'$or': [{'link_expires': None, **base_query},
{'link_expires': {'$lt': expire_before}, **base_query},
{'link': None, **base_query}]
}
to_refresh = files_collection.find(
{'$or': [{'link_expires': None, **base_query},
{'link_expires': {'$lt': expire_before}, **base_query},
{'link': None, **base_query}]
}).sort([('link_expires', pymongo.ASCENDING)]).limit(
chunk_size).batch_size(5)
document_count = files_collection.count_documents(to_refresh_query)
document_count = to_refresh.count()
if document_count == 0:
my_log.info('No links to refresh.')
return
@ -635,11 +639,6 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
else:
my_log.info('Found %d documents to refresh, chunk size=%d', document_count, chunk_size)
to_refresh = files_collection.find(to_refresh_query)\
.sort([('link_expires', pymongo.ASCENDING)])\
.limit(chunk_size)\
.batch_size(5)
refreshed = 0
report_chunks = min(max(5, document_count // 25), 100)
for file_doc in to_refresh:
@ -650,7 +649,7 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
my_log.debug('Skipping file %s, it has no project.', file_id)
continue
count = proj_coll.count_documents({'_id': project_id, '$or': [
count = proj_coll.count({'_id': project_id, '$or': [
{'_deleted': {'$exists': False}},
{'_deleted': False},
]})
@ -821,10 +820,6 @@ def stream_to_storage(project_id: str):
local_file = uploaded_file.stream
result = upload_and_process(local_file, uploaded_file, project_id)
# Local processing is done, we can close the local file so it is removed.
local_file.close()
resp = jsonify(result)
resp.status_code = result['status_code']
add_access_control_headers(resp)
@ -833,9 +828,7 @@ def stream_to_storage(project_id: str):
def upload_and_process(local_file: typing.Union[io.BytesIO, typing.BinaryIO],
uploaded_file: werkzeug.datastructures.FileStorage,
project_id: str,
*,
may_process_file=True) -> dict:
project_id: str):
# Figure out the file size, as we need to pass this in explicitly to GCloud.
# Otherwise it always uses os.fstat(file_obj.fileno()).st_size, which isn't
# supported by a BytesIO object (even though it does have a fileno
@ -862,15 +855,18 @@ def upload_and_process(local_file: typing.Union[io.BytesIO, typing.BinaryIO],
'size=%i as "queued_for_processing"',
file_id, internal_fname, file_size)
update_file_doc(file_id,
status='queued_for_processing' if may_process_file else 'complete',
status='queued_for_processing',
file_path=internal_fname,
length=blob.size,
content_type=uploaded_file.mimetype)
if may_process_file:
log.debug('Processing uploaded file id=%s, fname=%s, size=%i', file_id,
internal_fname, blob.size)
process_file(bucket, file_id, local_file)
log.debug('Processing uploaded file id=%s, fname=%s, size=%i', file_id,
internal_fname, blob.size)
process_file(bucket, file_id, local_file)
# Local processing is done, we can close the local file so it is removed.
if local_file is not None:
local_file.close()
log.debug('Handled uploaded file id=%s, fname=%s, size=%i, status=%i',
file_id, internal_fname, blob.size, status)
@ -984,50 +980,7 @@ def compute_aggregate_length_items(file_docs):
compute_aggregate_length(file_doc)
def get_file_url(file_id: ObjectId, variation='') -> str:
"""Return the URL of a file in storage.
Note that this function is cached, see setup_app().
:param file_id: the ID of the file
:param variation: if non-empty, indicates the variation of of the file
to return the URL for; if empty, returns the URL of the original.
:return: the URL, or an empty string if the file/variation does not exist.
"""
file_coll = current_app.db('files')
db_file = file_coll.find_one({'_id': file_id})
if not db_file:
return ''
ensure_valid_link(db_file)
if variation:
variations = file_doc.get('variations', ())
for file_var in variations:
if file_var['size'] == variation:
return file_var['link']
return ''
return db_file['link']
def update_file_doc(file_id, **updates):
files = current_app.data.driver.db['files']
res = files.update_one({'_id': ObjectId(file_id)},
{'$set': updates})
log.debug('update_file_doc(%s, %s): %i matched, %i updated.',
file_id, updates, res.matched_count, res.modified_count)
return res
def setup_app(app, url_prefix):
global get_file_url
cached = app.cache.memoize(timeout=10)
get_file_url = cached(get_file_url)
app.on_pre_GET_files += on_pre_get_files
app.on_fetched_item_files += before_returning_file
@ -1038,3 +991,12 @@ def setup_app(app, url_prefix):
app.on_insert_files += compute_aggregate_length_items
app.register_api_blueprint(file_storage, url_prefix=url_prefix)
def update_file_doc(file_id, **updates):
files = current_app.data.driver.db['files']
res = files.update_one({'_id': ObjectId(file_id)},
{'$set': updates})
log.debug('update_file_doc(%s, %s): %i matched, %i updated.',
file_id, updates, res.matched_count, res.modified_count)
return res

View File

@ -11,11 +11,12 @@ ATTACHMENT_SLUG_REGEX = r'[a-zA-Z0-9_\-]+'
attachments_embedded_schema = {
'type': 'dict',
'keysrules': {
# TODO: will be renamed to 'keyschema' in Cerberus 1.0
'keyschema': {
'type': 'string',
'regex': '^%s$' % ATTACHMENT_SLUG_REGEX,
},
'valuesrules': {
'valueschema': {
'type': 'dict',
'schema': {
'oid': {

View File

@ -13,7 +13,8 @@ from pillar.web.utils import pretty_date
log = logging.getLogger(__name__)
blueprint = Blueprint('nodes_api', __name__)
ROLES_FOR_SHARING = ROLES_FOR_COMMENTING = {'subscriber', 'demo'}
# TODO(fsiddi) Propose changes to make commenting roles a configuration value.
ROLES_FOR_SHARING = ROLES_FOR_COMMENTING = set()
@blueprint.route('/<node_id>/share', methods=['GET', 'POST'])

View File

@ -1,6 +1,6 @@
from eve.methods import get
import pillar.api.users.avatar
from pillar.api.utils import gravatar
def for_node(node_id):
@ -25,9 +25,9 @@ def _user_info(user_id):
users, _, _, status, _ = get('users', {'_id': user_id})
if len(users['_items']) > 0:
user = users['_items'][0]
user['avatar'] = pillar.api.users.avatar.url(user)
user['gravatar'] = gravatar(user['email'])
public_fields = {'full_name', 'username', 'avatar'}
public_fields = {'full_name', 'username', 'gravatar'}
for field in list(user.keys()):
if field not in public_fields:
del user[field]

View File

@ -10,11 +10,10 @@ import werkzeug.exceptions as wz_exceptions
import pillar
from pillar import current_app, shortcodes
import pillar.api.users.avatar
from pillar.api.nodes.custom.comment import patch_comment
from pillar.api.utils import jsonify
from pillar.api.utils import jsonify, gravatar
from pillar.auth import current_user
import pillar.markdown
log = logging.getLogger(__name__)
@ -23,7 +22,7 @@ log = logging.getLogger(__name__)
class UserDO:
id: str
full_name: str
avatar_url: str
gravatar: str
badges_html: str
@ -80,10 +79,9 @@ class CommentTreeBuilder:
self.nbr_of_Comments: int = 0
def build(self) -> CommentTreeDO:
enriched_comments = self.child_comments(
self.node_id,
sort={'properties.rating_positive': pymongo.DESCENDING,
'_created': pymongo.DESCENDING})
enriched_comments = self.child_comments(self.node_id,
sort={'properties.rating_positive': pymongo.DESCENDING,
'_created': pymongo.DESCENDING})
project_id = self.get_project_id()
return CommentTreeDO(
node_id=self.node_id,
@ -183,10 +181,7 @@ def find_node_or_raise(node_id, *args):
return node_to_comment
def patch_node_comment(parent_id: bson.ObjectId,
comment_id: bson.ObjectId,
markdown_msg: str,
attachments: dict):
def patch_node_comment(parent_id: bson.ObjectId, comment_id: bson.ObjectId, markdown_msg: str, attachments: dict):
_, _ = find_parent_and_comment_or_raise(parent_id, comment_id)
patch = dict(
@ -219,9 +214,10 @@ def find_parent_and_comment_or_raise(parent_id, comment_id):
def validate_comment_parent_relation(comment, parent):
if comment['parent'] != parent['_id']:
log.warning('User %s tried to update comment with bad parent/comment pair.'
' parent_id: %s comment_id: %s',
current_user.objectid, parent['_id'], comment['_id'])
log.warning('User %s tried to update comment with bad parent/comment pair. parent_id: %s comment_id: %s',
current_user.objectid,
parent['_id'],
comment['_id'])
raise wz_exceptions.BadRequest()
@ -256,7 +252,7 @@ def to_comment_data_object(mongo_comment: dict) -> CommentDO:
user = UserDO(
id=str(mongo_comment['user']['_id']),
full_name=user_dict['full_name'],
avatar_url=pillar.api.users.avatar.url(user_dict),
gravatar=gravatar(user_dict['email']),
badges_html=user_dict.get('badges', {}).get('html', '')
)
html = _get_markdowned_html(mongo_comment['properties'], 'content')

View File

@ -69,6 +69,22 @@ def before_replacing_node(item, original):
check_permissions('nodes', original, 'PUT')
update_file_name(item)
# XXX Dillo specific feature (for Graphicall)
if 'download' in original['properties']:
# Check if the file referenced in the download property was updated.
# If so, mark the old file as deleted. A cronjob will take care of
# removing the actual file based on the _delete status of file docs.
original_file_id = original['properties']['download']
new_file_id = item['properties']['download']
if original_file_id == new_file_id:
return
# Mark the original file as _deleted
files = current_app.data.driver.db['files']
files.update_one({'_id': original_file_id}, {'$set': {'_deleted': True}})
log.info('Marking file %s as _deleted' % original_file_id)
def after_replacing_node(item, original):
"""Push an update to the Algolia index when a node item is updated. If the

View File

@ -153,7 +153,7 @@ class OrgManager:
org_coll = current_app.db('organizations')
users_coll = current_app.db('users')
if users_coll.count_documents({'_id': user_id}) == 0:
if users_coll.count({'_id': user_id}) == 0:
raise ValueError('User not found')
self._log.info('Updating organization %s, setting admin user to %s', org_id, user_id)
@ -189,7 +189,7 @@ class OrgManager:
if user_doc is not None:
user_id = user_doc['_id']
if user_id and not users_coll.count_documents({'_id': user_id}):
if user_id and not users_coll.count({'_id': user_id}):
raise wz_exceptions.UnprocessableEntity('User does not exist')
self._log.info('Removing user %s / %s from organization %s', user_id, email, org_id)
@ -374,7 +374,7 @@ class OrgManager:
member_ids = [str2id(uid) for uid in member_sting_ids]
users_coll = current_app.db('users')
users = users_coll.find({'_id': {'$in': member_ids}},
projection={'_id': 1, 'full_name': 1, 'email': 1, 'avatar': 1})
projection={'_id': 1, 'full_name': 1, 'email': 1})
return list(users)
def user_has_organizations(self, user_id: bson.ObjectId) -> bool:
@ -385,7 +385,7 @@ class OrgManager:
org_coll = current_app.db('organizations')
org_count = org_coll.count_documents({'$or': [
org_count = org_coll.count({'$or': [
{'admin_uid': user_id},
{'members': user_id}
]})
@ -396,7 +396,7 @@ class OrgManager:
"""Return True iff the email is an unknown member of some org."""
org_coll = current_app.db('organizations')
org_count = org_coll.count_documents({'unknown_members': member_email})
org_count = org_coll.count({'unknown_members': member_email})
return bool(org_count)
def roles_for_ip_address(self, remote_addr: str) -> typing.Set[str]:

View File

@ -194,7 +194,7 @@ class OrganizationPatchHandler(patch_handler.AbstractPatchHandler):
self.log.info('User %s edits Organization %s: %s', current_user_id, org_id, update)
validator = current_app.validator_for_resource('organizations')
if not validator.validate_update(update, org_id, persisted_document={}):
if not validator.validate_update(update, org_id):
resp = jsonify({
'_errors': validator.errors,
'_message': ', '.join(f'{field}: {error}'

View File

@ -25,11 +25,8 @@ def merge_project(pid_from: ObjectId, pid_to: ObjectId):
# Move the files first. Since this requires API calls to an external
# service, this is more likely to go wrong than moving the nodes.
query = {'project': pid_from}
to_move = files_coll.find(query, projection={'_id': 1})
to_move_count = files_coll.count_documents(query)
log.info('Moving %d files to project %s', to_move_count, pid_to)
to_move = files_coll.find({'project': pid_from}, projection={'_id': 1})
log.info('Moving %d files to project %s', to_move.count(), pid_to)
for file_doc in to_move:
fid = file_doc['_id']
log.debug('moving file %s to project %s', fid, pid_to)
@ -38,7 +35,7 @@ def merge_project(pid_from: ObjectId, pid_to: ObjectId):
# Mass-move the nodes.
etag = random_etag()
result = nodes_coll.update_many(
query,
{'project': pid_from},
{'$set': {'project': pid_to,
'_etag': etag,
'_updated': utcnow(),

View File

@ -5,7 +5,6 @@ from bson import ObjectId
from flask import Blueprint, request, current_app, make_response, url_for
from werkzeug import exceptions as wz_exceptions
import pillar.api.users.avatar
from pillar.api.utils import authorization, jsonify, str2id
from pillar.api.utils import mongo
from pillar.api.utils.authorization import require_login, check_permissions
@ -55,13 +54,10 @@ def project_manage_users():
project = projects_collection.find_one({'_id': ObjectId(project_id)})
admin_group_id = project['permissions']['groups'][0]['group']
users = list(users_collection.find(
users = users_collection.find(
{'groups': {'$in': [admin_group_id]}},
{'username': 1, 'email': 1, 'full_name': 1, 'avatar': 1}))
for user in users:
user['avatar_url'] = pillar.api.users.avatar.url(user)
user.pop('avatar', None)
return jsonify({'_status': 'OK', '_items': users})
{'username': 1, 'email': 1, 'full_name': 1})
return jsonify({'_status': 'OK', '_items': list(users)})
# The request is not a form, since it comes from the API sdk
data = json.loads(request.data)
@ -96,8 +92,8 @@ def project_manage_users():
action, current_user_id)
raise wz_exceptions.UnprocessableEntity()
users_collection.update_one({'_id': target_user_id},
{operation: {'groups': admin_group['_id']}})
users_collection.update({'_id': target_user_id},
{operation: {'groups': admin_group['_id']}})
user = users_collection.find_one({'_id': target_user_id},
{'username': 1, 'email': 1,
@ -145,3 +141,5 @@ def get_allowed_methods(project_id=None, node_type=None):
resp.status_code = 204
return resp

View File

@ -198,9 +198,8 @@ def put_project(project: dict):
result, _, _, status_code = current_app.put_internal('projects', proj_no_none, _id=pid)
if status_code != 200:
message = f"Can't update project {pid}, status {status_code} with issues: {result}"
log.error(message)
raise ValueError(message)
raise ValueError(f"Can't update project {pid}, "
f"status {status_code} with issues: {result}")
def storage(project_id: ObjectId) -> file_storage_backends.Bucket:

View File

@ -91,7 +91,7 @@ class SearchHelper:
def has_more(self, continue_from: datetime) -> bool:
nodes_coll = current_app.db('nodes')
result = nodes_coll.count_documents(self._match(continue_from))
result = nodes_coll.count(self._match(continue_from))
return bool(result)

View File

@ -61,9 +61,6 @@ def _update_search_user_changed_role(sender, user: dict):
def setup_app(app, api_prefix):
from pillar.api import service
from . import patch
patch.setup_app(app, url_prefix=api_prefix)
app.on_pre_GET_users += hooks.check_user_access
app.on_post_GET_users += hooks.post_GET_user

View File

@ -1,159 +0,0 @@
import functools
import io
import logging
import mimetypes
import typing
from bson import ObjectId
from eve.methods.get import getitem_internal
import flask
from pillar import current_app
from pillar.api import blender_id
from pillar.api.blender_cloud import home_project
import pillar.api.file_storage
from werkzeug.datastructures import FileStorage
log = logging.getLogger(__name__)
DEFAULT_AVATAR = 'assets/img/default_user_avatar.png'
def url(user: dict) -> str:
    """Return the avatar URL for this user.

    :param user: dictionary from the MongoDB 'users' collection.
    """
    assert isinstance(user, dict), f'user must be dict, not {type(user)}'

    file_id = user.get('avatar', {}).get('file')
    if file_id:
        # get_file_url() returns an empty string when the file does not
        # exist; fall back to the default avatar in that case as well.
        found_url = pillar.api.file_storage.get_file_url(file_id)
        if found_url:
            return found_url
    return _default_avatar()
@functools.lru_cache(maxsize=1)
def _default_avatar() -> str:
    """Return the static URL path of the default avatar image.

    The value cannot change while the app is running, hence the
    single-entry LRU cache.
    """
    default_path = flask.url_for('static_pillar', filename=DEFAULT_AVATAR)
    return default_path
def _extension_for_mime(mime_type: str) -> str:
# Take the longest extension. I'd rather have '.jpeg' than the weird '.jpe'.
extensions: typing.List[str] = mimetypes.guess_all_extensions(mime_type)
try:
return max(extensions, key=len)
except ValueError:
# Raised when extensions is empty, e.g. when the mime type is unknown.
return ''
def _get_file_link(file_id: ObjectId) -> str:
    """Return the (refreshed) download link of the given file document.

    :param file_id: ID of the document in the 'files' collection.
    :raises AssertionError: when the internal GET does not return HTTP 200.
    """
    # Get the file document via Eve to make it update the link.
    file_doc, _, _, status = getitem_internal('files', _id=file_id)
    assert status == 200
    return file_doc['link']
def sync_avatar(user_id: ObjectId) -> str:
    """Fetch the user's avatar from Blender ID and save to storage.

    Errors are logged but do not raise an exception.

    :param user_id: ID of the user document whose avatar to sync.
    :return: the link to the avatar, or '' if it was not processed.
    """
    users_coll = current_app.db('users')
    db_user = users_coll.find_one({'_id': user_id})

    old_avatar_info = db_user.get('avatar', {})
    if isinstance(old_avatar_info, ObjectId):
        # Legacy documents store just the file ObjectId instead of an info dict.
        old_avatar_info = {'file': old_avatar_info}

    home_proj = home_project.get_home_project(user_id)
    # BUG FIX: this used to test `home_project` — the imported module, which is
    # always truthy — so a missing home project was never detected. Test the
    # fetched project document instead.
    if not home_proj:
        log.error('Home project of user %s does not exist, unable to store avatar', user_id)
        return ''

    bid_userid = blender_id.get_user_blenderid(db_user)
    if not bid_userid:
        log.error('User %s has no Blender ID user-id, unable to fetch avatar', user_id)
        return ''

    avatar_url = blender_id.avatar_url(bid_userid)
    bid_session = blender_id.Session()

    # Avoid re-downloading the same avatar by sending a conditional request.
    request_headers = {}
    if avatar_url == old_avatar_info.get('last_downloaded_url') and \
            old_avatar_info.get('last_modified'):
        request_headers['If-Modified-Since'] = old_avatar_info.get('last_modified')

    log.info('Downloading avatar for user %s from %s', user_id, avatar_url)
    resp = bid_session.get(avatar_url, headers=request_headers, allow_redirects=True)
    if resp.status_code == 304:
        # File was not modified, we can keep the old file.
        log.debug('Avatar for user %s was not modified on Blender ID, not re-downloading', user_id)
        return _get_file_link(old_avatar_info['file'])

    resp.raise_for_status()

    mime_type = resp.headers['Content-Type']
    file_extension = _extension_for_mime(mime_type)
    if not file_extension:
        log.error('No file extension known for mime type %s, unable to handle avatar of user %s',
                  mime_type, user_id)
        return ''

    filename = f'avatar-{user_id}{file_extension}'
    fake_local_file = io.BytesIO(resp.content)
    fake_local_file.name = filename

    # Act as if this file was just uploaded by the user, so we can reuse
    # existing Pillar file-handling code.
    log.debug("Uploading avatar for user %s to storage", user_id)
    uploaded_file = FileStorage(
        stream=fake_local_file,
        filename=filename,
        headers=resp.headers,
        content_type=mime_type,
        content_length=resp.headers['Content-Length'],
    )

    with pillar.auth.temporary_user(db_user):
        upload_data = pillar.api.file_storage.upload_and_process(
            fake_local_file,
            uploaded_file,
            str(home_proj['_id']),
            # Disallow image processing, as it's a tiny file anyway and
            # we'll just serve the original.
            may_process_file=False,
        )

    file_id = ObjectId(upload_data['file_id'])
    avatar_info = {
        'file': file_id,
        'last_downloaded_url': resp.url,
        'last_modified': resp.headers.get('Last-Modified'),
    }

    # Update the user to store the reference to their avatar.
    old_avatar_file_id = old_avatar_info.get('file')
    update_result = users_coll.update_one({'_id': user_id},
                                          {'$set': {'avatar': avatar_info}})
    if update_result.matched_count == 1:
        log.debug('Updated avatar for user ID %s to file %s', user_id, file_id)
    else:
        log.warning('Matched %d users while setting avatar for user ID %s to file %s',
                    update_result.matched_count, user_id, file_id)

    # Only delete the previous avatar file after the user document points at
    # the new one.
    if old_avatar_file_id:
        current_app.delete_internal('files', _id=old_avatar_file_id)

    return _get_file_link(file_id)

View File

@ -1,12 +1,13 @@
import copy
import json
import bson
from eve.utils import parse_request
from werkzeug import exceptions as wz_exceptions
from pillar import current_app
from pillar.api.users.routes import log
import pillar.api.users.avatar
from pillar.api.utils.authorization import user_has_role
import pillar.auth
USER_EDITABLE_FIELDS = {'full_name', 'username', 'email', 'settings'}
@ -125,7 +126,7 @@ def check_put_access(request, lookup):
raise wz_exceptions.Forbidden()
def after_fetching_user(user: dict) -> None:
def after_fetching_user(user):
# Deny access to auth block; authentication stuff is managed by
# custom end-points.
user.pop('auth', None)

View File

@ -1,45 +0,0 @@
"""User patching support."""
import logging
import bson
from flask import Blueprint
import werkzeug.exceptions as wz_exceptions
from pillar import current_app
from pillar.auth import current_user
from pillar.api.utils import authorization, jsonify, remove_private_keys
from pillar.api import patch_handler
log = logging.getLogger(__name__)
patch_api_blueprint = Blueprint('users.patch', __name__)
class UserPatchHandler(patch_handler.AbstractPatchHandler):
    """PATCH handler for the 'user' resource."""

    # Name used by the patch-handler machinery to identify this resource.
    item_name = 'user'

    @authorization.require_login()
    def patch_set_username(self, user_id: bson.ObjectId, patch: dict):
        """Updates a user's username.

        Only the logged-in user may change their own username; attempting
        to change someone else's raises 403 Forbidden.

        :param user_id: ID of the user document to update.
        :param patch: PATCH payload; must contain a 'username' key.
        """
        if user_id != current_user.user_id:
            log.info('User %s tried to change username of user %s',
                     current_user.user_id, user_id)
            raise wz_exceptions.Forbidden('You may only change your own username')

        new_username = patch['username']
        log.info('User %s uses PATCH to set username to %r', current_user.user_id, new_username)

        users_coll = current_app.db('users')
        db_user = users_coll.find_one({'_id': user_id})
        db_user['username'] = new_username

        # Save via Eve to check the schema and trigger update hooks.
        response, _, _, status = current_app.put_internal(
            'users', remove_private_keys(db_user), _id=user_id)

        return jsonify(response), status
def setup_app(app, url_prefix):
    """Register the user-PATCH blueprint on the application.

    :param app: the Pillar application.
    :param url_prefix: URL prefix under which to register the blueprint.
    """
    # NOTE(review): instantiating the handler presumably registers its
    # patch_* methods on the blueprint via AbstractPatchHandler — confirm.
    UserPatchHandler(patch_api_blueprint)
    app.register_api_blueprint(patch_api_blueprint, url_prefix=url_prefix)

View File

@ -8,7 +8,6 @@ import logging
import random
import typing
import urllib.request, urllib.parse, urllib.error
import warnings
import bson.objectid
import bson.tz_util
@ -187,16 +186,6 @@ def str2id(document_id: str) -> bson.ObjectId:
def gravatar(email: str, size=64) -> typing.Optional[str]:
"""Deprecated: return the Gravatar URL.
.. deprecated::
Use of Gravatar is deprecated, in favour of our self-hosted avatars.
See pillar.api.users.avatar.url(user).
"""
warnings.warn('pillar.api.utils.gravatar() is deprecated, '
'use pillar.api.users.avatar.url() instead',
category=DeprecationWarning)
if email is None:
return None

View File

@ -169,6 +169,8 @@ def validate_this_token(token, oauth_subclient=None):
# Check the users to see if there is one with this Blender ID token.
db_token = find_token(token, oauth_subclient)
if not db_token:
log.debug('Token %r not found in our local database.', token)
# If no valid token is found in our local database, we issue a new
# request to the Blender ID server to verify the validity of the token
# passed via the HTTP header. We will get basic user info if the user
@ -375,10 +377,6 @@ def current_user():
def setup_app(app):
@app.before_request
def validate_token_at_each_request():
# Skip token validation if this is a static asset
# to avoid spamming Blender ID for no good reason
if request.path.startswith('/static/'):
return
validate_token()

View File

@ -1,14 +1,11 @@
"""Authentication code common to the web and api modules."""
import collections
import contextlib
import copy
import functools
import logging
import typing
import blinker
from bson import ObjectId
import bson
from flask import session, g
import flask_login
from werkzeug.local import LocalProxy
@ -34,22 +31,19 @@ class UserClass(flask_login.UserMixin):
def __init__(self, token: typing.Optional[str]):
# We store the Token instead of ID
self.id = token
self.auth_token = token
self.username: str = None
self.full_name: str = None
self.user_id: ObjectId = None
self.user_id: bson.ObjectId = None
self.objectid: str = None
self.gravatar: str = None
self.email: str = None
self.roles: typing.List[str] = []
self.groups: typing.List[str] = [] # NOTE: these are stringified object IDs.
self.group_ids: typing.List[ObjectId] = []
self.group_ids: typing.List[bson.ObjectId] = []
self.capabilities: typing.Set[str] = set()
self.nodes: dict = {} # see the 'nodes' key in eve_settings.py::user_schema.
self.badges_html: str = ''
# Stored when constructing a user from the database
self._db_user = {}
# Lazily evaluated
self._has_organizations: typing.Optional[bool] = None
@ -57,9 +51,10 @@ class UserClass(flask_login.UserMixin):
def construct(cls, token: str, db_user: dict) -> 'UserClass':
"""Constructs a new UserClass instance from a Mongo user document."""
from ..api import utils
user = cls(token)
user._db_user = copy.deepcopy(db_user)
user.user_id = db_user.get('_id')
user.roles = db_user.get('roles') or []
user.group_ids = db_user.get('groups') or []
@ -68,13 +63,14 @@ class UserClass(flask_login.UserMixin):
user.full_name = db_user.get('full_name') or ''
user.badges_html = db_user.get('badges', {}).get('html') or ''
# Be a little more specific than just db_user['nodes'] or db_user['avatar']
# Be a little more specific than just db_user['nodes']
user.nodes = {
'view_progress': db_user.get('nodes', {}).get('view_progress', {}),
}
# Derived properties
user.objectid = str(user.user_id or '')
user.gravatar = utils.gravatar(user.email)
user.groups = [str(g) for g in user.group_ids]
user.collect_capabilities()
@ -167,31 +163,6 @@ class UserClass(flask_login.UserMixin):
return bool(self._has_organizations)
def frontend_info(self) -> dict:
"""Return a dictionary of user info for injecting into the page."""
return {
'user_id': str(self.user_id),
'username': self.username,
'full_name': self.full_name,
'avatar_url': self.avatar_url,
'email': self.email,
'capabilities': list(self.capabilities),
'badges_html': self.badges_html,
'is_authenticated': self.is_authenticated,
}
@property
@functools.lru_cache(maxsize=1)
def avatar_url(self) -> str:
"""Return the Avatar image URL for this user.
:return: The avatar URL (the default one if the user has no avatar).
"""
import pillar.api.users.avatar
return pillar.api.users.avatar.url(self._db_user)
class AnonymousUser(flask_login.AnonymousUserMixin, UserClass):
def __init__(self):
@ -275,25 +246,6 @@ def logout_user():
g.current_user = AnonymousUser()
@contextlib.contextmanager
def temporary_user(db_user: dict):
"""Temporarily sets the given user as 'current user'.
Does not trigger login signals, as this is not a real login action.
"""
try:
actual_current_user = g.current_user
except AttributeError:
actual_current_user = AnonymousUser()
temp_user = UserClass.construct('', db_user)
try:
g.current_user = temp_user
yield
finally:
g.current_user = actual_current_user
def get_blender_id_oauth_token() -> str:
"""Returns the Blender ID auth token, or an empty string if there is none."""

View File

@ -1,29 +0,0 @@
"""Avatar synchronisation.
Note that this module can only be imported when an application context is
active. Best to late-import this in the functions where it's needed.
"""
import logging
from bson import ObjectId
import celery
from pillar import current_app
from pillar.api.users.avatar import sync_avatar
log = logging.getLogger(__name__)
@current_app.celery.task(bind=True, ignore_result=True, acks_late=True)
def sync_avatar_for_user(self: celery.Task, user_id: str):
    """Downloads the user's avatar from Blender ID.

    On I/O errors the task is retried later via Celery, using the
    AVATAR_DOWNLOAD_CELERY_RETRY config value as the retry delay.

    :param user_id: string form of the user's ObjectId.
    """
    # WARNING: when changing the signature of this function, also change the
    # self.retry() call below.

    uid = ObjectId(user_id)
    try:
        sync_avatar(uid)
    except (IOError, OSError):
        # BUG FIX: the log message contained a %s placeholder but no argument,
        # which would make the logging call itself error out.
        log.exception('Error downloading Blender ID avatar for user %s, will retry later',
                      user_id)
        self.retry((user_id, ), countdown=current_app.config['AVATAR_DOWNLOAD_CELERY_RETRY'])

View File

@ -306,7 +306,7 @@ def purge_home_projects(go=False):
yield pid
continue
if users_coll.count_documents({'_id': uid, '_deleted': {'$ne': True}}) == 0:
if users_coll.find({'_id': uid, '_deleted': {'$ne': True}}).count() == 0:
log.info('Project %s has non-existing owner %s', pid, uid)
bad += 1
yield pid
@ -727,7 +727,7 @@ def iter_markdown(proj_node_types: dict, some_node: dict, callback: typing.Calla
to_visit.append((subdoc, definition['schema']))
continue
coerce = definition.get('coerce') # Eve < 0.8
validator = definition.get('check_with') or definition.get('validator') # Eve >= 0.8
validator = definition.get('validator') # Eve >= 0.8
if coerce != 'markdown' and validator != 'markdown':
continue
@ -823,7 +823,7 @@ def _find_orphan_files() -> typing.Set[bson.ObjectId]:
# Find all references by iterating through the project itself and every document that has a
# 'project' key set to this ObjectId.
db = current_app.db()
for coll_name in sorted(db.list_collection_names()):
for coll_name in sorted(db.collection_names(include_system_collections=False)):
if coll_name in ORPHAN_FINDER_SKIP_COLLECTIONS:
continue
@ -1296,9 +1296,9 @@ def fix_missing_activities_subscription_defaults(user=None, context_object=None,
lookup_is_subscribed['context_object'] = ObjectId(context_object)
lookup_notifications['context_object'] = ObjectId(context_object)
num_need_is_subscribed_update = subscriptions_collection.count_documents(lookup_is_subscribed)
num_need_is_subscribed_update = subscriptions_collection.count(lookup_is_subscribed)
log.info("Found %d documents that needs to be update 'is_subscribed'", num_need_is_subscribed_update)
num_need_notification_web_update = subscriptions_collection.count_documents(lookup_notifications)
num_need_notification_web_update = subscriptions_collection.count(lookup_notifications)
log.info("Found %d documents that needs to be update 'notifications.web'", num_need_notification_web_update)
if not go:
@ -1306,27 +1306,29 @@ def fix_missing_activities_subscription_defaults(user=None, context_object=None,
if num_need_is_subscribed_update > 0:
log.info("Updating 'is_subscribed'")
resp = subscriptions_collection.update_many(
resp = subscriptions_collection.update(
lookup_is_subscribed,
{
'$set': {'is_subscribed': True}
},
multi=True,
upsert=False
)
if resp.modified_count != num_need_is_subscribed_update:
if resp['nModified'] is not num_need_is_subscribed_update:
log.warning("Expected % documents to be update, was %d",
num_need_is_subscribed_update, resp['nModified'])
if num_need_notification_web_update > 0:
log.info("Updating 'notifications.web'")
resp = subscriptions_collection.update_many(
resp = subscriptions_collection.update(
lookup_notifications,
{
'$set': {'notifications.web': True}
},
multi=True,
upsert=False
)
if resp.modified_count != num_need_notification_web_update:
if resp['nModified'] is not num_need_notification_web_update:
log.warning("Expected % documents to be update, was %d",
num_need_notification_web_update, resp['nModified'])

View File

@ -165,6 +165,49 @@ def merge_project(src_proj_url, dest_proj_url):
log.info('Done moving.')
@manager_operations.command
def index_users_rebuild():
    """Clear users index, update settings and reindex all users."""
    import concurrent.futures

    from pillar.api.utils.algolia import algolia_index_user_save

    users_index = current_app.algolia_index_users
    if users_index is None:
        log.error('Algolia is not configured properly, unable to do anything!')
        return 1

    log.info('Dropping existing index: %s', users_index)
    users_index.clear_index()
    index_users_update_settings()

    db = current_app.db()
    # Only index users that are not soft-deleted.
    users = db['users'].find({'_deleted': {'$ne': True}})
    user_count = users.count()

    log.info('Reindexing all %i users', user_count)

    # NOTE(review): current_app appears to be doubly proxied here, hence the
    # two _get_current_object() calls to reach the real Flask app — confirm.
    real_current_app = current_app._get_current_object()._get_current_object()

    def do_user(user):
        # Each worker thread needs its own application context.
        with real_current_app.app_context():
            algolia_index_user_save(user)

    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
        future_to_user = {executor.submit(do_user, user): user
                          for user in users}
        for idx, future in enumerate(concurrent.futures.as_completed(future_to_user)):
            user = future_to_user[future]
            user_ident = user.get('email') or user.get('_id')
            try:
                future.result()
            except Exception:
                # Log and continue; one failing user should not stop the rebuild.
                log.exception('Error updating user %i/%i %s', idx + 1, user_count, user_ident)
            else:
                log.info('Updated user %i/%i %s', idx + 1, user_count, user_ident)
@manager_operations.command
def index_users_update_settings():
"""Configure indexing backend as required by the project"""
@ -191,7 +234,7 @@ def hash_auth_tokens():
tokens_coll = current_app.db('tokens')
query = {'token': {'$exists': True}}
cursor = tokens_coll.find(query, projection={'token': 1, '_id': 1})
log.info('Updating %d tokens', tokens_coll.count_documents(query))
log.info('Updating %d tokens', cursor.count())
for token_doc in cursor:
hashed_token = hash_auth_token(token_doc['token'])

View File

@ -217,8 +217,6 @@ CELERY_BEAT_SCHEDULE = {
# TODO(Sybren): A proper value should be determined after we actually have users with badges.
BLENDER_ID_BADGE_EXPIRY = datetime.timedelta(hours=4)
# How many times the Celery task for downloading an avatar is retried.
AVATAR_DOWNLOAD_CELERY_RETRY = 3
# Mapping from user role to capabilities obtained by users with that role.
USER_CAPABILITIES = defaultdict(**{

View File

@ -4,7 +4,7 @@ This is for user-generated stuff, like comments.
"""
import bleach
import commonmark
import CommonMark
from . import shortcodes
@ -44,7 +44,7 @@ ALLOWED_STYLES = [
def markdown(s: str) -> str:
commented_shortcodes = shortcodes.comment_shortcodes(s)
tainted_html = commonmark.commonmark(commented_shortcodes)
tainted_html = CommonMark.commonmark(commented_shortcodes)
# Create a Cleaner that supports parsing of bare links (see filters).
cleaner = bleach.Cleaner(tags=ALLOWED_TAGS,

View File

@ -1,6 +1,7 @@
"""Our custom Jinja filters and other template stuff."""
import functools
import json
import logging
import typing
import urllib.parse
@ -10,11 +11,10 @@ import flask_login
import jinja2.filters
import jinja2.utils
import werkzeug.exceptions as wz_exceptions
from werkzeug.local import LocalProxy
import pillarsdk
import pillar.api.utils
from pillar import auth
from pillar.api.utils import pretty_duration
from pillar.web.utils import pretty_date
from pillar.web.nodes.routes import url_for_node
@ -211,16 +211,25 @@ def do_yesno(value, arg=None):
return no
def do_json(some_object: typing.Any) -> str:
import pillar.auth
def user_to_dict(user: auth.UserClass) -> dict:
    """Serialise the user into a plain dict suitable for JSON embedding."""
    return {
        'user_id': str(user.user_id),
        'username': user.username,
        'full_name': user.full_name,
        'gravatar': user.gravatar,
        'email': user.email,
        'capabilities': list(user.capabilities),
        'badges_html': user.badges_html,
        'is_authenticated': user.is_authenticated,
    }
if isinstance(some_object, LocalProxy):
return do_json(some_object._get_current_object())
def do_json(some_object) -> str:
if isinstance(some_object, pillarsdk.Resource):
some_object = some_object.to_dict()
if isinstance(some_object, pillar.auth.UserClass):
some_object = some_object.frontend_info()
return pillar.api.utils.dumps(some_object)
if isinstance(some_object, auth.UserClass):
some_object = user_to_dict(some_object)
return json.dumps(some_object)
def setup_jinja_env(jinja_env, app_config: dict):

View File

@ -1,6 +1,5 @@
import logging
import urllib.parse
import warnings
from pillarsdk import Node
from flask import Blueprint
@ -8,6 +7,7 @@ from flask import current_app
from flask import render_template
from flask import redirect
from flask import request
from werkzeug.contrib.atom import AtomFeed
from pillar.flask_extra import ensure_schema
from pillar.web.utils import system_util
@ -91,11 +91,6 @@ def error_403():
@blueprint.route('/feeds/blogs.atom')
def feeds_blogs():
"""Global feed generator for latest blogposts across all projects"""
# Werkzeug deprecated their Atom feed. Tracked in https://developer.blender.org/T65274.
with warnings.catch_warnings():
from werkzeug.contrib.atom import AtomFeed
@current_app.cache.cached(60*5)
def render_page():
feed = AtomFeed('Blender Cloud - Latest updates',

View File

@ -19,19 +19,10 @@ def attachment_form_group_create(schema_prop):
def _attachment_build_single_field(schema_prop):
# 'keyschema' was renamed to 'keysrules' in Cerberus 1.3, but our data may still have the old
# names. Same for 'valueschema' and 'valuesrules'.
keysrules = schema_prop.get('keysrules') or schema_prop.get('keyschema')
if keysrules is None:
raise KeyError(f"missing 'keysrules' key in schema {schema_prop}")
valuesrules = schema_prop.get('valuesrules') or schema_prop.get('valueschema')
if valuesrules is None:
raise KeyError(f"missing 'valuesrules' key in schema {schema_prop}")
# Ugly hard-coded schema.
fake_schema = {
'slug': keysrules,
'oid': valuesrules['schema']['oid'],
'slug': schema_prop['keyschema'],
'oid': schema_prop['valueschema']['schema']['oid'],
}
file_select_form_group = build_file_select_form(fake_schema)
return file_select_form_group

View File

@ -53,7 +53,7 @@ def find_for_comment(project, node):
'_deleted': {'$ne': True}
}}, api=api)
except ResourceNotFound:
log.warning(
log.debug(
'url_for_node(node_id=%r): Unable to find parent node %r',
node['_id'], parent.parent)
raise ValueError('Unable to find parent node %r' % parent.parent)

View File

@ -50,6 +50,7 @@ def iter_node_properties(node_type):
@functools.lru_cache(maxsize=1)
def tag_choices() -> typing.List[typing.Tuple[str, str]]:
    """Return (value, label) tuples for the NODE_TAGS config setting."""
    # TODO(fsiddi) consider allowing tags based on custom_properties in the project.
    configured_tags = current_app.config.get('NODE_TAGS') or []
    # The label is simply the title-cased tag value.
    return [(value, value.title()) for value in configured_tags]
@ -70,9 +71,7 @@ def add_form_properties(form_class, node_type):
# Recursive call if detects a dict
field_type = schema_prop['type']
if prop_name == 'tags' and field_type == 'list':
field = SelectMultipleField(choices=tag_choices())
elif field_type == 'dict':
if field_type == 'dict':
assert prop_name == 'attachments'
field = attachments.attachment_form_group_create(schema_prop)
elif field_type == 'list':

View File

@ -6,8 +6,7 @@ from flask_login import current_user
import pillar.flask_extra
from pillar import current_app
import pillar.api.users.avatar
from pillar.api.utils import authorization, str2id, jsonify
from pillar.api.utils import authorization, str2id, gravatar, jsonify
from pillar.web.system_util import pillar_api
from pillarsdk import Organization, User
@ -48,7 +47,7 @@ def view_embed(organization_id: str):
members = om.org_members(organization.members)
for member in members:
member['avatar'] = pillar.api.users.avatar.url(member)
member['avatar'] = gravatar(member.get('email'))
member['_id'] = str(member['_id'])
admin_user = User.find(organization.admin_uid, api=api)

View File

@ -22,7 +22,6 @@ import werkzeug.exceptions as wz_exceptions
from pillar import current_app
from pillar.api.utils import utcnow
import pillar.api.users.avatar
from pillar.web import system_util
from pillar.web import utils
from pillar.web.nodes import finders
@ -110,6 +109,7 @@ def index():
return render_template(
'projects/index_dashboard.html',
gravatar=utils.gravatar(current_user.email, size=128),
projects_user=projects_user['_items'],
projects_deleted=projects_deleted['_items'],
projects_shared=projects_shared['_items'],
@ -402,6 +402,7 @@ def render_project(project, api, extra_context=None, template_name=None):
template_name = template_name or 'projects/home_index.html'
return render_template(
template_name,
gravatar=utils.gravatar(current_user.email, size=128),
project=project,
api=system_util.pillar_api(),
**extra_context)
@ -707,12 +708,15 @@ def sharing(project_url):
api = system_util.pillar_api()
# Fetch the project or 404
try:
project = Project.find_one({'where': {'url': project_url}}, api=api)
project = Project.find_one({
'where': '{"url" : "%s"}' % (project_url)}, api=api)
except ResourceNotFound:
return abort(404)
# Fetch users that are part of the admin group
users = project.get_users(api=api)
for user in users['_items']:
user['avatar'] = utils.gravatar(user['email'])
if request.method == 'POST':
user_id = request.form['user_id']
@ -722,14 +726,13 @@ def sharing(project_url):
user = project.add_user(user_id, api=api)
elif action == 'remove':
user = project.remove_user(user_id, api=api)
else:
raise wz_exceptions.BadRequest(f'invalid action {action}')
except ResourceNotFound:
log.info('/p/%s/edit/sharing: User %s not found', project_url, user_id)
return jsonify({'_status': 'ERROR',
'message': 'User %s not found' % user_id}), 404
user['avatar'] = pillar.api.users.avatar.url(user)
# Add gravatar to user
user['avatar'] = utils.gravatar(user['email'])
return jsonify(user)
utils.attach_project_pictures(project, api)

View File

@ -1,18 +1,13 @@
import json
import logging
import urllib.parse
from flask import Blueprint, flash, render_template
from flask_login import login_required
from flask_login import login_required, current_user
from werkzeug.exceptions import abort
from pillar import current_app
from pillar.api.utils import jsonify
import pillar.api.users.avatar
from pillar.auth import current_user
from pillar.web import system_util
from pillar.web.users import forms
from pillarsdk import File, User, exceptions as sdk_exceptions
from pillarsdk import User, exceptions as sdk_exceptions
log = logging.getLogger(__name__)
blueprint = Blueprint('settings', __name__)
@ -32,20 +27,14 @@ def profile():
if form.validate_on_submit():
try:
response = user.set_username(form.username.data, api=api)
log.info('updated username of %s: %s', current_user, response)
user.username = form.username.data
user.update(api=api)
flash("Profile updated", 'success')
except sdk_exceptions.ResourceInvalid as ex:
log.warning('unable to set username %s to %r: %s', current_user, form.username.data, ex)
message = json.loads(ex.content)
except sdk_exceptions.ResourceInvalid as e:
message = json.loads(e.content)
flash(message)
blender_id_endpoint = current_app.config['BLENDER_ID_ENDPOINT']
blender_profile_url = urllib.parse.urljoin(blender_id_endpoint, 'settings/profile')
return render_template('users/settings/profile.html',
form=form, title='profile',
blender_profile_url=blender_profile_url)
return render_template('users/settings/profile.html', form=form, title='profile')
@blueprint.route('/roles')
@ -53,19 +42,3 @@ def profile():
def roles():
"""Show roles and capabilties of the current user."""
return render_template('users/settings/roles.html', title='roles')
@blueprint.route('/profile/sync-avatar', methods=['POST'])
@login_required
def sync_avatar():
"""Fetch the user's avatar from Blender ID and save to storage.
This is an API-like endpoint, in the sense that it returns JSON.
It's here in this file to have it close to the endpoint that
serves the only page that calls on this endpoint.
"""
new_url = pillar.api.users.avatar.sync_avatar(current_user.user_id)
if not new_url:
return jsonify({'_message': 'Your avatar could not be updated'})
return new_url

Binary file not shown.

Before

Width:  |  Height:  |  Size: 496 B

View File

@ -72,6 +72,9 @@ def oauth_callback(provider):
# Find or create user
user_info = {'id': oauth_user.id, 'email': oauth_user.email, 'full_name': ''}
db_user = find_user_in_db(user_info, provider=provider)
if '_deleted' in db_user and db_user['_deleted'] is True:
log.debug('User has been deleted and will not be logge in')
return redirect(next_after_login)
db_id, status = upsert_user(db_user)
# TODO(Sybren): If the user doesn't have any badges, but the access token

View File

@ -43,40 +43,11 @@ def attach_project_pictures(project, api):
This function should be moved in the API, attached to a new Project object.
"""
# When adding to the list of pictures dealt with here, make sure
# you update unattach_project_pictures() too.
project.picture_square = get_file(project.picture_square, api=api)
project.picture_header = get_file(project.picture_header, api=api)
project.picture_16_9 = get_file(project.picture_16_9, api=api)
def unattach_project_pictures(project: dict):
    """Reverts the operation of 'attach_project_pictures'.

    This makes it possible to PUT the project again.
    """
    def revert_to_id(key: str):
        # Drop the property when it is empty, otherwise reduce an attached
        # file document back to just its '_id'.
        info = project.get(key, None)
        if not info:
            project.pop(key, None)
            return
        if not isinstance(info, dict):
            # Already just an ID; nothing to revert.
            return
        try:
            project[key] = info['_id']
        except KeyError:
            # A dict without '_id': leave it untouched.
            return

    for key in ('picture_square', 'picture_header', 'picture_16_9'):
        revert_to_id(key)
def mass_attach_project_pictures(projects: typing.Iterable[pillarsdk.Project], *,
api, header=True, square=True):
"""Attach file object to all projects in the list.
@ -136,16 +107,9 @@ def mass_attach_project_pictures(projects: typing.Iterable[pillarsdk.Project], *
def gravatar(email: str, size=64):
"""Deprecated: return the Gravatar URL.
.. deprecated::
Use of Gravatar is deprecated, in favour of our self-hosted avatars.
See pillar.api.users.avatar.url(user).
"""
import warnings
warnings.warn('pillar.web.utils.gravatar() is deprecated, '
'use pillar.api.users.avatar.url() instead',
category=DeprecationWarning, stacklevel=2)
warnings.warn("the pillar.web.gravatar function is deprecated; use hashlib instead",
DeprecationWarning, 2)
from pillar.api.utils import gravatar as api_gravatar
return api_gravatar(email, size)

View File

@ -1,64 +0,0 @@
[tool.poetry]
name = "pillar"
version = "2.0"
description = ""
authors = [
"Francesco Siddi <francesco@blender.org>",
"Pablo Vazquez <pablo@blender.studio>",
"Sybren Stüvel <sybren@blender.studio>",
]
[tool.poetry.scripts]
# Must be run after installing/updating:
translations = 'pillar.cli.translations:main'
[tool.poetry.dependencies]
python = "~3.6"
attrs = "~19"
algoliasearch = "~1"
bcrypt = "~3"
blinker = "~1.4"
bleach = "~3.1"
celery = {version = "~4.3",extras = ["redis"]}
cryptography = "2.7"
commonmark = "~0.9"
# These must match the version of ElasticSearch used:
elasticsearch = "~6.1"
elasticsearch-dsl = "~6.1"
Eve = "~0.9"
Flask = "~1.0"
Flask-Babel = "~0.12"
Flask-Caching = "~1.7"
Flask-DebugToolbar = "~0.10"
Flask-Script = "~2.0"
Flask-Login = "~0.4"
Flask-WTF = "~0.14"
gcloud = "~0.18"
google-apitools = "~0.5"
IPy = "~1.00"
MarkupSafe = "~1.1"
ndg-httpsclient = "~0.5"
Pillow = "~6.0"
python-dateutil = "~2.8"
rauth = "~0.7"
raven = {version = "~6.10",extras = ["flask"]}
redis = "~3.2"
shortcodes = "~2.5"
zencoder = "~0.6"
pillarsdk = {path = "../pillar-python-sdk"}
# Secondary requirements that weren't installed automatically:
idna = "~2.8"
[tool.poetry.dev-dependencies]
pillar-devdeps = {path = "./devdeps"}
[build-system]
requires = ["poetry==1.0","cryptography==2.7","setuptools==51.0.0","wheel==0.35.1"]
build-backend = "poetry.masonry.api"

17
requirements-dev.txt Normal file
View File

@ -0,0 +1,17 @@
-r requirements.txt
-r ../pillar-python-sdk/requirements-dev.txt
-e ../pillar # also works from parent project, like blender-cloud
# Development requirements
pytest==3.0.6
responses==0.5.1
pytest-cov==2.4.0
mock==2.0.0
mypy==0.501
# Secondary development requirements
cookies==2.2.1
coverage==4.3.4
pbr==2.0.0
py==1.4.32
typed-ast==1.0.2

76
requirements.txt Normal file
View File

@ -0,0 +1,76 @@
# Primary requirements
-r ../pillar-python-sdk/requirements.txt
attrs==18.2.0
algoliasearch==1.12.0
bcrypt==3.1.3
blinker==1.4
bleach==2.1.3
celery[redis]==4.2.1
CommonMark==0.7.2
elasticsearch==6.1.1
elasticsearch-dsl==6.1.0
Eve==0.8
Flask==1.0.2
Flask-Babel==0.11.2
Flask-Caching==1.4.0
Flask-DebugToolbar==0.10.1
Flask-Script==2.0.6
Flask-Login==0.4.1
Flask-WTF==0.14.2
gcloud==0.12.0
google-apitools==0.4.11
httplib2==0.9.2
IPy==0.83
MarkupSafe==0.23
ndg-httpsclient==0.4.0
Pillow==4.1.1
python-dateutil==2.5.3
rauth==0.7.3
raven[flask]==6.3.0
requests==2.13.0
redis==2.10.5
shortcodes==2.5.0
WebOb==1.5.0
wheel==0.29.0
zencoder==0.6.5
# Secondary requirements
amqp==2.3.2
asn1crypto==0.24.0
Babel==2.6.0
billiard==3.5.0.4
Cerberus==1.2
cffi==1.12.2
click==6.7
cryptography==2.6.1
Events==0.3
future==0.16.0
googleapis-common-protos==1.5.3
html5lib==1.0.1
idna==2.5
ipaddress==1.0.22
itsdangerous==0.24
Jinja2==2.10.1
kombu==4.2.1
oauth2client==4.1.2
oauthlib==2.1.0
olefile==0.45.1
protobuf==3.6.0
protorpc==0.12.0
pyasn1==0.4.4
pyasn1-modules==0.2.2
pycparser==2.19
pymongo==3.7.0
pyOpenSSL==16.2.0
pytz==2018.5
requests-oauthlib==1.0.0
rsa==3.4.2
simplejson==3.16.0
six==1.12.0
urllib3==1.22
vine==1.1.4
webencodings==0.5.1
Werkzeug==0.14.1
WTForms==2.2.1

77
setup.py Normal file
View File

@ -0,0 +1,77 @@
#!/usr/bin/env python
"""Setup file for testing, not for packaging/distribution."""
import setuptools
from setuptools.command.develop import develop
from setuptools.command.install import install
def translations_compile():
"""Compile any existent translation.
"""
from pillar import cli
cli.translations.compile()
class PostDevelopCommand(develop):
"""Post-installation for develop mode."""
def run(self):
super().run()
translations_compile()
class PostInstallCommand(install):
"""Post-installation for installation mode."""
def run(self):
super().run()
translations_compile()
setuptools.setup(
name='pillar',
version='2.0',
packages=setuptools.find_packages('.', exclude=['test']),
install_requires=[
'Flask>=0.12',
'Eve>=0.7.3',
'Flask-Caching>=1.4.0',
'Flask-Script>=2.0.5',
'Flask-Login>=0.3.2',
'Flask-OAuthlib>=0.9.3',
'Flask-WTF>=0.14.2',
'algoliasearch>=1.12.0',
# Limit the major version to the major version of ElasticSearch we're using.
'elasticsearch>=6.0.0,<7.0.0',
'elasticsearch_dsl>=6.0.0,<7.0.0',
'attrs>=16.2.0',
'bugsnag>=2.3.1',
'gcloud>=0.12.0',
'google-apitools>=0.4.11',
'MarkupSafe>=0.23',
'Pillow>=2.8.1',
'requests>=2.9.1',
'rsa>=3.3',
'shortcodes>=2.5', # 2.4.0 and earlier corrupted unicode
'zencoder>=0.6.5',
'bcrypt>=2.0.0',
'blinker>=1.4',
'pillarsdk',
],
tests_require=[
'pytest>=2.9.1',
'responses>=0.5.1',
'pytest-cov>=2.2.1',
'mock>=2.0.0',
],
entry_points = {'console_scripts': [
'translations = pillar.cli.translations:main',
]},
cmdclass={
'install': PostInstallCommand,
'develop': PostDevelopCommand,
},
zip_safe=False,
)

View File

@ -10,7 +10,7 @@ export class Posts extends NodesBase {
let $title = $('<a>')
.attr('href', '/nodes/' + post._id + '/redir')
.attr('title', post.name)
.addClass('timeline-post-title')
.addClass('h1 text-uppercase font-weight-bold d-block pt-5 pb-2')
.text(post.name);
content.push($title);
let $post = $('<div>')

View File

@ -1,14 +1,9 @@
export const UserEvents = {
USER_LOADED: 'user-loaded',
}
let currentUserEventBus = new Vue();
class User{
constructor(kwargs) {
this.user_id = kwargs['user_id'] || '';
this.username = kwargs['username'] || '';
this.full_name = kwargs['full_name'] || '';
this.avatar_url = kwargs['avatar_url'] || '';
this.gravatar = kwargs['gravatar'] || '';
this.email = kwargs['email'] || '';
this.capabilities = kwargs['capabilities'] || [];
this.badges_html = kwargs['badges_html'] || '';
@ -30,16 +25,10 @@ class User{
let currentUser;
function initCurrentUser(kwargs){
currentUser = new User(kwargs);
currentUserEventBus.$emit(UserEvents.USER_LOADED, currentUser);
}
function getCurrentUser() {
return currentUser;
}
function updateCurrentUser(user) {
currentUser = user;
currentUserEventBus.$emit(UserEvents.USER_LOADED, currentUser);
}
export { getCurrentUser, initCurrentUser, updateCurrentUser, currentUserEventBus }
export { getCurrentUser, initCurrentUser }

View File

@ -1,6 +1,6 @@
export { transformPlaceholder } from './placeholder'
export { prettyDate } from './prettydate'
export { getCurrentUser, initCurrentUser, updateCurrentUser, currentUserEventBus, UserEvents } from './currentuser'
export { getCurrentUser, initCurrentUser } from './currentuser'
export { thenLoadImage } from './files'

View File

@ -19,7 +19,6 @@ import { StatusFilter } from './table/rows/filter/StatusFilter'
import { TextFilter } from './table/rows/filter/TextFilter'
import { NameFilter } from './table/rows/filter/NameFilter'
import { UserAvatar } from './user/Avatar'
import './user/CurrentUserAvatar'
let mixins = {
UnitOfWorkTracker,

View File

@ -1,7 +1,7 @@
const TEMPLATE = `
<div class="user-avatar">
<img
:src="user.avatar_url"
:src="user.gravatar"
:alt="user.full_name">
</div>
`;

View File

@ -1,23 +0,0 @@
const TEMPLATE = `
<img class="user-avatar" :src="avatarUrl" alt="Your avatar">
`
export let CurrentUserAvatar = Vue.component("current-user-avatar", {
data: function() { return {
avatarUrl: "",
}},
template: TEMPLATE,
created: function() {
pillar.utils.currentUserEventBus.$on(pillar.utils.UserEvents.USER_LOADED, this.updateAvatarURL);
this.updateAvatarURL(pillar.utils.getCurrentUser());
},
methods: {
updateAvatarURL(user) {
if (typeof user === 'undefined') {
this.avatarUrl = '';
return;
}
this.avatarUrl = user.avatar_url;
},
},
});

View File

@ -1,39 +0,0 @@
// The <i> is given a fixed width so that the button doesn't resize when we change the icon.
const TEMPLATE = `
<button class="btn btn-outline-primary" type="button" @click="syncAvatar"
:disabled="isSyncing">
<i style="width: 2em; display: inline-block"
:class="{'pi-refresh': !isSyncing, 'pi-spin': isSyncing, spin: isSyncing}"></i>
Fetch Avatar from Blender ID
</button>
`
Vue.component("avatar-sync-button", {
template: TEMPLATE,
data() { return {
isSyncing: false,
}},
methods: {
syncAvatar() {
this.isSyncing = true;
$.ajax({
type: 'POST',
url: `/settings/profile/sync-avatar`,
})
.then(response => {
toastr.info("sync was OK");
let user = pillar.utils.getCurrentUser();
user.avatar_url = response;
pillar.utils.updateCurrentUser(user);
})
.catch(err => {
toastr.error(xhrErrorResponseMessage(err), "There was an error syncing your avatar");
})
.then(() => {
this.isSyncing = false;
})
},
},
});

View File

@ -1 +0,0 @@
export { AvatarSync } from './AvatarSync';

View File

@ -1,3 +1,5 @@
/**
* Store the number of unread notifications on load.
* That way, if the number got higher while the page was
@ -17,24 +19,8 @@ function clearNotificationIcon(){
}
/**
* Get notifications by fetching /notifications/ and update ul#notifications-list
*
* This is called every 60 seconds by getNotificationsLoop() but the endpoint is queried only if the
* doNotQueryNotifications cookie is expired. If so, the cookie is created again, with a lifetime
* of 65 seconds.
*/
// Get notifications by fetching /notifications/ JSON every 30 seconds
function getNotifications(){
//- Check if the cookie is still valid, in which case, return
if( Cookies('doNotQueryNotifications') != null ) {return;}
//- Create a new cookie, which expires in 65 seconds
Cookies.set('doNotQueryNotifications', 'true', {
expires: new Date(new Date().getTime() + 65 * 1000)
});
//- Fetch data and update the interface
$.getJSON( "/notifications/", function( data ) {
if (!first_load) {
@ -368,12 +354,11 @@ $(function() {
function getNotificationsLoop() {
//- Fetch the actual notifications
getNotifications();
//- Call itself again in 60 seconds
setTimeout(function () {
var getLoop = setTimeout(function () {
getNotificationsLoop();
}, 60 * 1000);
}, 30000);
}
/* Returns a more-or-less reasonable message given an error response object. */

View File

@ -1,13 +1,12 @@
::-webkit-scrollbar
width: 8px
height: 8px
width: 5px
height: 5px
::-webkit-scrollbar-track
background-color: transparent
::-webkit-scrollbar-thumb
background-color: darken($color-background, 40%)
border-radius: 999em
::placeholder
color: $color-text-dark-hint

View File

@ -137,6 +137,7 @@
& .nc-text
width: 90%
white-space: normal
& .nc-date
display: block

View File

@ -101,9 +101,3 @@
color: $color-success
&.fail
color: $color-danger
img.user-avatar
border-radius: 1em
box-shadow: 0 0 0 0.2em $color-background-light
height: 160px
width: 160px

View File

@ -55,15 +55,6 @@ $pillar-font-path: "../../../../static/pillar/assets/font"
@import _notifications
body.blog
+media-xs
.comments-tree,
.node-details-description
padding-left: $spacer
padding-right: $spacer
.lead
font-size: initial
.node-details-description
font:
size: 1.3em

View File

@ -32,8 +32,7 @@
user-select: none
position: relative
img.user-avatar
border-radius: 4px
img.gravatar
height: 28px
position: relative
width: 28px
@ -85,10 +84,6 @@
width: 30px
&.subscription-status
+media-xs
padding-left: $spacer
padding-right: $spacer
a, a:hover
color: $white
@ -140,9 +135,6 @@ $nav-secondary-bar-size: -2px
.nav-secondary
align-items: center
+media-xs
margin-right: $spacer
.nav-link
color: $color-text
cursor: pointer
@ -325,13 +317,6 @@ body.has-overlay
.navbar-toggler
border: none
.navbar
.navbar-collapse
display: none
&.show
display: initial
// Mobile layout
@include media-breakpoint-down(sm)
.navbar

View File

@ -28,20 +28,6 @@
margin-bottom: 2rem
margin-top: 2rem
.timeline-post-title
display: block
font-size: $h3-font-size
font-weight: bold
line-height: initial
padding-top: 1rem
padding-bottom: 1rem
text-transform: uppercase
+media-sm
font-size: $h1-font-size
padding-top: 3rem
padding-bottom: 2rem
body.homepage
.timeline
.sticky-top

View File

@ -4,9 +4,9 @@
li.dropdown
| {% block menu_avatar %}
a.navbar-item.dropdown-toggle(href="#", data-toggle="dropdown", title="{{ current_user.email }}")
current-user-avatar
script.
new Vue({el: 'current-user-avatar'})
img.gravatar(
src="{{ current_user.gravatar }}",
alt="Avatar")
| {% endblock menu_avatar %}
ul.dropdown-menu.dropdown-menu-right

View File

@ -165,7 +165,7 @@ h4 Organization members
| {% for email in organization.unknown_members %}
li.sharing-users-item.unknown-member(data-user-email='{{ email }}')
.sharing-users-avatar
img(src="{{ url_for('static_pillar', filename='assets/img/default_user_avatar.png') }}")
img(src="{{ email | gravatar }}")
.sharing-users-details
span.sharing-users-email {{ email }}
.sharing-users-action

View File

@ -19,7 +19,7 @@
user-id="{{ user['_id'] }}",
class="{% if current_user.objectid == user['_id'] %}self{% endif %}")
.sharing-users-avatar
img(src="{{ user['avatar_url'] }}")
img(src="{{ user['avatar'] }}")
.sharing-users-details
span.sharing-users-name
| {{user['full_name']}}

View File

@ -21,50 +21,38 @@ style.
| {% block settings_page_content %}
.settings-form
form#settings-form(method='POST', action="{{url_for('settings.profile')}}")
.row
.form-group.col-md-6
.pb-3
.form-group
| {{ form.username.label }}
| {{ form.username(size=20, class='form-control') }}
| {% if form.username.errors %}
| {% for error in form.username.errors %}{{ error|e }}{% endfor %}
| {% endif %}
button.mt-3.btn.btn-outline-success.px-5.button-submit(type='submit')
i.pi-check.pr-2
| {{ _("Save Changes") }}
.form-group
label {{ _("Full name") }}
p {{ current_user.full_name }}
.form-group
label {{ _("E-mail") }}
p {{ current_user.email }}
.row.mt-3
.col-md-9
.form-group
label {{ _("Full name") }}
p {{ current_user.full_name }}
.form-group
label {{ _("E-mail") }}
p {{ current_user.email }}
.form-group
| {{ _("Change your full name, email, avatar, and password at") }} #[a(href="{{ blender_profile_url }}",target='_blank') Blender ID].
.form-group
| {{ _("Change your full name, email, and password at") }} #[a(href="https://www.blender.org/id/settings/profile",target='_blank') Blender ID].
| {% if current_user.badges_html %}
.form-group
p Your Blender ID badges:
| {{ current_user.badges_html|safe }}
p.hint-text Note that updates to these badges may take a few minutes to be visible here.
| {% endif %}
| {% if current_user.badges_html %}
.form-group
p Your Blender ID badges:
| {{ current_user.badges_html|safe }}
p.hint-text Note that updates to these badges may take a few minutes to be visible here.
| {% endif %}
.col-md-3
a(href="{{ blender_profile_url }}",target='_blank')
current-user-avatar
p
small Your #[a(href="{{ blender_profile_url }}",target='_blank') Blender ID] avatar
//- Avatar Sync button is commented out here, because it's not used by Blender Cloud.
//- This tag, and the commented-out script tag below, are just examples.
//- avatar-sync-button
.py-3
a(href="https://gravatar.com/")
img.rounded-circle(src="{{ current_user.gravatar }}")
span.p-3 {{ _("Change Gravatar") }}
| {% endblock %}
| {% block footer_scripts %}
| {{ super() }}
//- script(src="{{ url_for('static_pillar', filename='assets/js/avatar.min.js') }}")
script.
new Vue({el:'#settings-form'});
.py-3
button.btn.btn-outline-success.px-5.button-submit(type='submit')
i.pi-check.pr-2
| {{ _("Save Changes") }}
| {% endblock %}

View File

@ -187,6 +187,16 @@ class AuthenticationTests(AbstractPillarTest):
db_user = fetch_user()
self.assertEqual(['subscriber'], db_user['roles'])
# PATCH should not be allowed.
updated_fields = {'roles': ['admin', 'subscriber', 'demo']}
self.patch('/api/users/%s' % user_id,
json=updated_fields,
auth_token='token',
etag=db_user['_etag'],
expected_status=405)
db_user = fetch_user()
self.assertEqual(['subscriber'], db_user['roles'])
def test_token_expiry(self):
"""Expired tokens should be deleted from the database."""
@ -320,7 +330,7 @@ class UserListTests(AbstractPillarTest):
user_info = json.loads(resp.data)
regular_info = remove_private_keys(user_info)
self.assertEqual(set(), set(regular_info.keys()) - PUBLIC_USER_FIELDS)
self.assertEqual(PUBLIC_USER_FIELDS, set(regular_info.keys()))
def test_own_user_subscriber(self):
# Regular access should result in only your own info.
@ -342,7 +352,7 @@ class UserListTests(AbstractPillarTest):
self.assertNotIn('auth', user_info)
regular_info = remove_private_keys(user_info)
self.assertEqual(set(), set(regular_info.keys()) - PUBLIC_USER_FIELDS)
self.assertEqual(PUBLIC_USER_FIELDS, set(regular_info.keys()))
def test_put_user(self):
from pillar.api.utils import remove_private_keys
@ -731,7 +741,7 @@ class UserCreationTest(AbstractPillarTest):
with self.app.test_request_context():
users_coll = self.app.db().users
self.assertEqual(0, users_coll.count_documents({}))
self.assertEqual(0, users_coll.count())
self.mock_blenderid_validate_happy()
token = 'this is my life now'
@ -739,7 +749,7 @@ class UserCreationTest(AbstractPillarTest):
with self.app.test_request_context():
users_coll = self.app.db().users
self.assertEqual(1, users_coll.count_documents({}))
self.assertEqual(1, users_coll.count())
db_user = users_coll.find()[0]
self.assertEqual(db_user['email'], TEST_EMAIL_ADDRESS)
@ -750,7 +760,7 @@ class UserCreationTest(AbstractPillarTest):
with self.app.test_request_context():
users_coll = self.app.db().users
self.assertEqual(0, users_coll.count_documents({}))
self.assertEqual(0, users_coll.count())
bid_resp = {'status': 'success',
'user': {'email': TEST_EMAIL_ADDRESS,
@ -768,7 +778,7 @@ class UserCreationTest(AbstractPillarTest):
with self.app.test_request_context():
users_coll = self.app.db().users
self.assertEqual(1, users_coll.count_documents({}))
self.assertEqual(1, users_coll.count())
db_user = users_coll.find()[0]
self.assertEqual(db_user['email'], TEST_EMAIL_ADDRESS)
@ -779,7 +789,7 @@ class UserCreationTest(AbstractPillarTest):
"""Blender ID does not require full name, we do."""
with self.app.app_context():
users_coll = self.app.db().users
self.assertEqual(0, users_coll.count_documents({}))
self.assertEqual(0, users_coll.count())
# First request will create the user, the 2nd request will update.
self.mock_blenderid_validate_happy()
@ -808,7 +818,7 @@ class UserCreationTest(AbstractPillarTest):
self.get('/api/users/me', auth_token=token)
with self.app.app_context():
self.assertEqual(1, users_coll.count_documents({}))
self.assertEqual(1, users_coll.count())
db_user = users_coll.find()[0]
self.assertEqual(db_user['email'], TEST_EMAIL_ADDRESS)

View File

@ -185,7 +185,7 @@ class AbstractSchemaValidationTest(AbstractValidationTest):
class IPRangeValidatorTest(AbstractSchemaValidationTest):
schema = {'iprange': {'type': 'string', 'required': True, 'check_with': 'iprange'}}
schema = {'iprange': {'type': 'string', 'required': True, 'validator': 'iprange'}}
def test_ipv6(self):
self.assertValid({'iprange': '2a03:b0c0:0:1010::8fe:6ef1'})
@ -209,10 +209,10 @@ class IPRangeValidatorTest(AbstractSchemaValidationTest):
def test_descriptive_error_message(self):
is_valid = self.validator.validate({'iprange': '::/0'}, self.schema)
self.assertFalse(is_valid)
self.assertEqual(1, len(self.validator._errors))
self.assertEquals(1, len(self.validator._errors))
err = self.validator._errors[0]
self.assertEqual(('iprange', ), err.document_path)
self.assertEqual(('Zero-length prefix is not allowed',), err.info)
self.assertEquals(('iprange', ), err.document_path)
self.assertEquals(('Zero-length prefix is not allowed',), err.info)
class MarkdownValidatorTest(AbstractSchemaValidationTest):

View File

@ -81,7 +81,7 @@ class CommentEditTest(AbstractPillarTest):
with self.app.app_context():
proj_coll = self.app.db('projects')
proj_coll.update_one(
proj_coll.update(
{'_id': self.pid},
{'$set': {
'node_types': self.project['node_types'],

View File

@ -143,8 +143,8 @@ class LatestAssetsTest(AbstractPillarTest):
latest_assets = self.get(url).json['_items']
asset = latest_assets[0]
self.assertEqual(str(ok_id), asset['_id'])
self.assertEqual('Just a node name', asset['name'])
self.assertEquals(str(ok_id), asset['_id'])
self.assertEquals('Just a node name', asset['name'])
self.assertNotIn('user', asset)
@ -299,7 +299,7 @@ class LatestCommentsTest(AbstractPillarTest):
latest_comments = self.get(url).json['_items']
comment = latest_comments[0]
self.assertEqual(str(ok_id), comment['_id'])
self.assertEqual('Comment', comment['name'])
self.assertEqual('एनिमेशन is animation in Hindi', comment['properties']['content'])
self.assertEqual('คนรักของผัดไทย', comment['user']['full_name'])
self.assertEquals(str(ok_id), comment['_id'])
self.assertEquals('Comment', comment['name'])
self.assertEquals('एनिमेशन is animation in Hindi', comment['properties']['content'])
self.assertEquals('คนรักของผัดไทย', comment['user']['full_name'])

View File

@ -669,10 +669,10 @@ class TaggedNodesTest(AbstractPillarTest):
mock_utcnow.return_value = self.fake_now
url = flask.url_for('nodes_api.tagged', tag='एनिमेशन')
resp = self.get(url).json[0]
self.assertEqual('01:01:01', resp['properties']['duration'])
self.assertEqual('Unittest project', resp['project']['name'])
self.assertEqual('default-project', resp['project']['url'])
self.assertEqual('5m ago', resp['pretty_created'])
self.assertEquals('01:01:01', resp['properties']['duration'])
self.assertEquals('Unittest project', resp['project']['name'])
self.assertEquals('default-project', resp['project']['url'])
self.assertEquals('5m ago', resp['pretty_created'])
class NodesReferencedByProjectTest(AbstractPillarTest):
@ -690,7 +690,7 @@ class NodesReferencedByProjectTest(AbstractPillarTest):
self.node_etag = node['_etag']
with self.app.app_context():
self.app.db('projects').update_one(
self.app.db('projects').update(
{'_id': self.pid},
{'$set': {
'header_node': self.node_id,

View File

@ -1,6 +1,6 @@
import unittest
from unittest import mock
import mock
from bson import ObjectId
import pymongo.database

View File

@ -140,10 +140,10 @@ class NotificationsTest(AbstractPillarTest):
'notifications.web': True,
}
subscriptions = list(subscriptions_col.find(lookup))
self.assertEqual(len(subscriptions), len(user_ids))
self.assertEquals(len(subscriptions), len(user_ids))
for s in subscriptions:
self.assertIn(s['user'], user_ids)
self.assertEqual(s['context_object_type'], 'node')
self.assertEquals(s['context_object_type'], 'node')
def assertNotSubscribed(self, node_id, user_id):
subscriptions_col = self.app.data.driver.db['activities-subscriptions']
@ -153,7 +153,7 @@ class NotificationsTest(AbstractPillarTest):
}
subscriptions = subscriptions_col.find(lookup)
for s in subscriptions:
self.assertNotEqual(s['user'], user_id)
self.assertNotEquals(s['user'], user_id)
def notification_for_object(self, node_id):
notifications_url = flask.url_for('notifications.index')

View File

@ -24,13 +24,13 @@ class OrganizationCruTest(AbstractPillarTest):
self.enter_app_context()
# There should be no organizations to begin with.
org_coll = self.app.db('organizations')
self.assertEqual(0, org_coll.count_documents({}))
db = self.app.db('organizations')
self.assertEqual(0, db.count())
admin_uid = self.create_user(24 * 'a')
org_doc = self.app.org_manager.create_new_org('Хакеры', admin_uid, 25)
self.assertIsNotNone(org_coll.find_one(org_doc['_id']))
self.assertIsNotNone(db.find_one(org_doc['_id']))
self.assertEqual(bson.ObjectId(24 * 'a'), org_doc['admin_uid'])
self.assertEqual('Хакеры', org_doc['name'])
self.assertEqual(25, org_doc['seat_count'])

View File

@ -73,7 +73,7 @@ class TestSearchNodesGlobal(AbstractPillarTest):
**PAGE_1
}
self.assertEqual(expected, search.to_dict())
self.assertEquals(expected, search.to_dict())
def test_empty_query_page_2(self):
with self.app.app_context():
@ -88,7 +88,7 @@ class TestSearchNodesGlobal(AbstractPillarTest):
**page_2
}
self.assertEqual(expected, search.to_dict())
self.assertEquals(expected, search.to_dict())
def test_empty_query_with_terms(self):
with self.app.app_context():
@ -118,7 +118,7 @@ class TestSearchNodesGlobal(AbstractPillarTest):
**PAGE_1
}
self.assertEqual(expected, search.to_dict())
self.assertEquals(expected, search.to_dict())
def test_query(self):
with self.app.app_context():
@ -169,7 +169,7 @@ class TestSearchNodesGlobal(AbstractPillarTest):
**PAGE_1
}
self.assertEqual(expected, search.to_dict())
self.assertEquals(expected, search.to_dict())
class TestSearchNodesInProject(AbstractPillarTest):
@ -186,7 +186,7 @@ class TestSearchNodesInProject(AbstractPillarTest):
**PAGE_1
}
self.assertEqual(expected, search.to_dict())
self.assertEquals(expected, search.to_dict())
def test_empty_query_page_2(self):
with self.app.app_context():
@ -203,7 +203,7 @@ class TestSearchNodesInProject(AbstractPillarTest):
**page_2
}
self.assertEqual(expected, search.to_dict())
self.assertEquals(expected, search.to_dict())
def test_empty_query_with_terms(self):
with self.app.app_context():
@ -236,7 +236,7 @@ class TestSearchNodesInProject(AbstractPillarTest):
**page_2
}
self.assertEqual(expected, search.to_dict())
self.assertEquals(expected, search.to_dict())
def test_query(self):
with self.app.app_context():
@ -288,7 +288,7 @@ class TestSearchNodesInProject(AbstractPillarTest):
**PAGE_1
}
self.assertEqual(expected, search.to_dict())
self.assertEquals(expected, search.to_dict())
class TestSearchMultiNodes(AbstractPillarTest):
@ -332,4 +332,4 @@ class TestSearchMultiNodes(AbstractPillarTest):
second
]
self.assertEqual(expected, search.to_dict())
self.assertEquals(expected, search.to_dict())

View File

@ -48,7 +48,7 @@ class TestSearchUsers(AbstractPillarTest):
**AGGREGATIONS,
**PAGE_1
}
self.assertEqual(expected, search.to_dict())
self.assertEquals(expected, search.to_dict())
def test_query(self):
with self.app.app_context():
@ -95,7 +95,7 @@ class TestSearchUsers(AbstractPillarTest):
**AGGREGATIONS,
**PAGE_1
}
self.assertEqual(expected, search.to_dict())
self.assertEquals(expected, search.to_dict())
def test_email_query(self):
with self.app.app_context():
@ -150,7 +150,7 @@ class TestSearchUsers(AbstractPillarTest):
**AGGREGATIONS,
**PAGE_1
}
self.assertEqual(expected, search.to_dict())
self.assertEquals(expected, search.to_dict())
class TestSearchUsersAdmin(AbstractPillarTest):
@ -163,7 +163,7 @@ class TestSearchUsersAdmin(AbstractPillarTest):
**AGGREGATIONS,
**PAGE_1
}
self.assertEqual(expected, search.to_dict())
self.assertEquals(expected, search.to_dict())
def test_query(self):
with self.app.app_context():
@ -209,7 +209,7 @@ class TestSearchUsersAdmin(AbstractPillarTest):
**AGGREGATIONS,
**PAGE_1
}
self.assertEqual(expected, search.to_dict())
self.assertEquals(expected, search.to_dict())
def test_terms(self):
with self.app.app_context():
@ -228,7 +228,7 @@ class TestSearchUsersAdmin(AbstractPillarTest):
**AGGREGATIONS,
**PAGE_1
}
self.assertEqual(expected, search.to_dict())
self.assertEquals(expected, search.to_dict())
def test_object_id_query(self):
with self.app.app_context():
@ -282,4 +282,4 @@ class TestSearchUsersAdmin(AbstractPillarTest):
**AGGREGATIONS,
**PAGE_1
}
self.assertEqual(expected, search.to_dict())
self.assertEquals(expected, search.to_dict())

View File

@ -47,18 +47,18 @@ class GlobalTimelineTest(AbstractPillarTest):
timeline = response['groups']
continue_from = response['continue_from']
self.assertEqual(1520229908.0, continue_from)
self.assertEqual(3, len(timeline))
self.assertEqual('Week 11, 2018', timeline[1]['label'])
self.assertEqual('Week 10, 2018', timeline[2]['label'])
self.assertEqual('Unittest project', timeline[0]['groups'][0]['label'])
self.assertEqual('Another Project', timeline[0]['groups'][1]['label'])
self.assertEqual('/p/default-project/', timeline[0]['groups'][0]['url'])
self.assertEqual('/p/another-url/', timeline[0]['groups'][1]['url'])
self.assertEquals(1520229908.0, continue_from)
self.assertEquals(3, len(timeline))
self.assertEquals('Week 11, 2018', timeline[1]['label'])
self.assertEquals('Week 10, 2018', timeline[2]['label'])
self.assertEquals('Unittest project', timeline[0]['groups'][0]['label'])
self.assertEquals('Another Project', timeline[0]['groups'][1]['label'])
self.assertEquals('/p/default-project/', timeline[0]['groups'][0]['url'])
self.assertEquals('/p/another-url/', timeline[0]['groups'][1]['url'])
# week 12
week = timeline[0]
self.assertEqual('Week 12, 2018', week['label'])
self.assertEquals('Week 12, 2018', week['label'])
proj_pid1 = week['groups'][0]
expected_post_ids = self.all_post_pid1_ids[0:2]
@ -74,7 +74,7 @@ class GlobalTimelineTest(AbstractPillarTest):
# week 11
week = timeline[1]
self.assertEqual('Week 11, 2018', week['label'])
self.assertEquals('Week 11, 2018', week['label'])
proj_pid1 = week['groups'][0]
expected_post_ids = self.all_post_pid1_ids[2:9]
@ -90,7 +90,7 @@ class GlobalTimelineTest(AbstractPillarTest):
# week 10
week = timeline[2]
self.assertEqual('Week 10, 2018', week['label'])
self.assertEquals('Week 10, 2018', week['label'])
proj_pid1 = week['groups'][0]
expected_post_ids = self.all_post_pid1_ids[9:16]
@ -111,16 +111,16 @@ class GlobalTimelineTest(AbstractPillarTest):
timeline = response['groups']
self.assertNotIn('continue_from', response)
self.assertEqual(2, len(timeline))
self.assertEqual('Week 9, 2018', timeline[0]['label'])
self.assertEqual('Week 8, 2018', timeline[1]['label'])
self.assertEqual('Unittest project', timeline[0]['groups'][0]['label'])
self.assertEqual('Another Project', timeline[0]['groups'][1]['label'])
self.assertEqual('/p/default-project/', timeline[0]['groups'][0]['url'])
self.assertEquals(2, len(timeline))
self.assertEquals('Week 9, 2018', timeline[0]['label'])
self.assertEquals('Week 8, 2018', timeline[1]['label'])
self.assertEquals('Unittest project', timeline[0]['groups'][0]['label'])
self.assertEquals('Another Project', timeline[0]['groups'][1]['label'])
self.assertEquals('/p/default-project/', timeline[0]['groups'][0]['url'])
# week 9
week = timeline[0]
self.assertEqual('Week 9, 2018', week['label'])
self.assertEquals('Week 9, 2018', week['label'])
proj_pid1 = week['groups'][0]
expected_post_ids = self.all_post_pid1_ids[16:23]
@ -136,7 +136,7 @@ class GlobalTimelineTest(AbstractPillarTest):
# week 8
week = timeline[1]
self.assertEqual('Week 8, 2018', week['label'])
self.assertEquals('Week 8, 2018', week['label'])
proj_pid1 = week['groups'][0]
expected_post_ids = self.all_post_pid1_ids[23:25]
@ -151,14 +151,14 @@ class GlobalTimelineTest(AbstractPillarTest):
expected_post_ids, expected_asset_ids)
def assertProjectEquals(self, proj, label, url, expected_post_ids, expected_asset_ids):
self.assertEqual(label, proj['label'])
self.assertEqual(url, proj['url'])
self.assertEquals(label, proj['label'])
self.assertEquals(url, proj['url'])
actual_ids = [n['_id'] for n in proj['items']['post']]
self.assertEqual(expected_post_ids, actual_ids)
self.assertEquals(expected_post_ids, actual_ids)
actual_ids = [n['_id'] for n in proj['items']['asset']]
self.assertEqual(expected_asset_ids, actual_ids)
self.assertEquals(expected_asset_ids, actual_ids)
def create_asset(self, pid, days, hours):
asset_node = {

View File

@ -218,11 +218,11 @@ class TestPrettyDuration(unittest.TestCase):
def test_formatting(self):
from pillar.api.utils import pretty_duration
pretty_duration(500)
self.assertEqual('00:00', pretty_duration(0))
self.assertEqual('00:15', pretty_duration(15))
self.assertEqual('01:05', pretty_duration(65))
self.assertEqual('42:53', pretty_duration(2573))
self.assertEqual('01:11:22', pretty_duration(4282))
self.assertEqual('01:41', pretty_duration(100.85))
self.assertEqual('25:00:00', pretty_duration(90000)) # More than a day
self.assertEqual('', pretty_duration(None))
self.assertEquals('00:00', pretty_duration(0))
self.assertEquals('00:15', pretty_duration(15))
self.assertEquals('01:05', pretty_duration(65))
self.assertEquals('42:53', pretty_duration(2573))
self.assertEquals('01:11:22', pretty_duration(4282))
self.assertEquals('01:41', pretty_duration(100.85))
self.assertEquals('25:00:00', pretty_duration(90000)) # More than a day
self.assertEquals('', pretty_duration(None))

View File

@ -63,8 +63,8 @@ class UpgradeAttachmentSchemaTest(AbstractPillarTest):
"url": {"type": "string"},
"attachments": {
"type": "dict",
"keysrules": {"type": "string", "regex": "^[a-zA-Z0-9_ ]+$"},
"valuesrules": {
"keyschema": {"type": "string", "regex": "^[a-zA-Z0-9_ ]+$"},
"valueschema": {
"type": "dict",
"schema": {
"oid": {"type": "objectid", "required": True},
@ -195,8 +195,8 @@ class ReconcileNodeDurationTest(AbstractPillarTest):
new_node = nodes_coll.find_one({'_id': nid})
orig_node = self.orig_nodes[nid]
self.assertNotEqual(orig_node['_etag'], new_node['_etag'])
self.assertEqual(self.fake_now, new_node['_updated'])
self.assertEqual(duration_seconds, new_node['properties']['duration_seconds'])
self.assertEquals(self.fake_now, new_node['_updated'])
self.assertEquals(duration_seconds, new_node['properties']['duration_seconds'])
def assertAllUnchanged(self):
self.assertUnChanged(*self.orig_nodes.keys())
@ -206,7 +206,7 @@ class ReconcileNodeDurationTest(AbstractPillarTest):
for nid in node_ids:
new_node = nodes_coll.find_one({'_id': nid})
orig_node = self.orig_nodes[nid]
self.assertEqual(orig_node, new_node)
self.assertEquals(orig_node, new_node)
def _create_video_node(self, file_duration=None, node_duration=None, include_file=True):
file_id, _ = self.ensure_file_exists(file_overrides={

View File

@ -19,7 +19,7 @@ class OrphanFilesTest(AbstractPillarTest):
24 * 'c', project_overrides={'_id': ObjectId(), 'is_private': True})
private2, _ = self.create_project_with_admin(
24 * 'd', project_overrides={'_id': ObjectId(), 'is_private': None})
self.assertEqual(4, self.app.db('projects').count_documents({}))
self.assertEqual(4, self.app.db('projects').count())
# Create files, some orphan and some used.
project_ids = (public1, public2, private1, private2)

View File

@ -1,37 +0,0 @@
import flask
import flask_login
from pillar.tests import AbstractPillarTest
class UsernameTest(AbstractPillarTest):
def setUp(self, **kwargs) -> None:
super().setUp(**kwargs)
self.user_id = self.create_user()
def test_update_via_web(self) -> None:
from pillar.auth import current_user
import pillar.web.settings.routes
with self.app.app_context():
url = flask.url_for('settings.profile')
with self.app.test_request_context(
path=url,
data={'username': 'je.moeder'},
method='POST',
):
self.login_api_as(self.user_id)
flask_login.login_user(current_user)
pillar.web.settings.routes.profile()
db_user = self.fetch_user_from_db(self.user_id)
self.assertEqual('je.moeder', db_user['username'])
def test_update_via_patch(self) -> None:
self.create_valid_auth_token(self.user_id, 'user-token')
self.patch(f'/api/users/{self.user_id}',
json={'op': 'set-username', 'username': 'je.moeder'},
auth_token='user-token')
db_user = self.fetch_user_from_db(self.user_id)
self.assertEqual('je.moeder', db_user['username'])

View File

@ -1,3 +1,5 @@
# -*- encoding: utf-8 -*-
import unittest
import datetime
@ -118,40 +120,3 @@ class EvePaginationTest(unittest.TestCase):
self.assertEqual(2, lpi({'total': 10, 'max_results': 5}))
self.assertEqual(3, lpi({'total': 11, 'max_results': 5}))
self.assertEqual(404129352, lpi({'total': 2828905463, 'max_results': 7}))
class UnattachPicturesTest(unittest.TestCase):
def test_unattach_pictures(self):
project = {
'picture_square': {'_id': 'PICTURE_SQUARE_ID', 'je': 'moeder'},
'picture_header': 'PICTURE_HEADER_ID',
'picture_16_9': {},
'_id': 'PROJECT_ID',
'name': 'Op je Hoofd™',
}
from pillar.web.utils import unattach_project_pictures
unattach_project_pictures(project)
self.assertEqual({
'picture_square': 'PICTURE_SQUARE_ID',
'picture_header': 'PICTURE_HEADER_ID',
'_id': 'PROJECT_ID',
'name': 'Op je Hoofd™',
}, project)
def test_missing_pictures(self):
project = {
'picture_square': None,
'picture_16_9': {},
'_id': 'PROJECT_ID',
'name': 'Op je Hoofd™',
}
from pillar.web.utils import unattach_project_pictures
unattach_project_pictures(project)
self.assertEqual({
'_id': 'PROJECT_ID',
'name': 'Op je Hoofd™',
}, project)