Notifications regression: Notifications not created

Notifications for when someone posts a comment on your node
were not created.

Root cause was that default values defined in the schema were not set,
resulting in activity subscriptions not being active.
Two bugs prevented them from being set (a sketch follows the list):
* The way the caching of markdown as HTML was implemented caused
  default values not to be set.
* An Eve/Cerberus regression causes nested default values to fail:
  https://github.com/pyeve/eve/issues/1174
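
A minimal sketch of both issues using plain Cerberus (the trivial
'markdown' coercer and the schema below are illustrative only, not
Pillar's actual code):

from cerberus import Validator

class SketchValidator(Validator):
    def _normalize_coerce_markdown(self, value):
        # Stand-in for a real markdown renderer: just wrap the text.
        return '<p>%s</p>' % value

schema = {
    'content': {'type': 'string', 'coerce': 'markdown'},
    'notifications': {
        'type': 'dict',
        'schema': {'web': {'type': 'boolean', 'default': True}},
    },
}

v = SketchValidator(schema)
# Nested defaults are only filled in when the parent dict is present,
# hence the workaround of posting 'notifications': {} explicitly.
print(v.normalized({'content': 'hi'}))
# {'content': '<p>hi</p>'}
print(v.normalized({'content': 'hi', 'notifications': {}}))
# {'content': '<p>hi</p>', 'notifications': {'web': True}}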

Additionally, a third bug caused nodes without a parent not to get
a subscription at all.

Migration scripts:
How markdown fields are cached has changed, and unused properties
of attachments have been removed:
./manage.py maintenance replace_pillar_node_type_schemas

Set the default values of activities-subscriptions documents:
./manage.py maintenance fix_missing_activities_subscription_defaults
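Note: fix_missing_activities_subscription_defaults is a dry run unless
--go is passed (see its CLI options in the diff below), e.g.:
./manage.py maintenance fix_missing_activities_subscription_defaults --go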
Tobias Johansson 2019-02-19 14:16:28 +01:00
parent 250c7e2631
commit 32e25ce129
14 changed files with 424 additions and 427 deletions

View File

@@ -119,6 +119,8 @@ def activity_subscribe(user_id, context_object_type, context_object_id):
# If no subscription exists, we create one
if not subscription:
# Workaround for issue: https://github.com/pyeve/eve/issues/1174
lookup['notifications'] = {}
current_app.post_internal('activities-subscriptions', lookup)

View File

@@ -1,4 +1,3 @@
import copy
from datetime import datetime
import logging
@@ -6,36 +5,12 @@ from bson import ObjectId, tz_util
from eve.io.mongo import Validator
from flask import current_app
import pillar.markdown
from pillar import markdown
log = logging.getLogger(__name__)
class ValidateCustomFields(Validator):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Will be reference to the actual document being validated, so that we can
# modify it during validation.
self.__real_document = None
def validate(self, document, *args, **kwargs):
# Keep a reference to the actual document, because Cerberus validates copies.
self.__real_document = document
result = super().validate(document, *args, **kwargs)
# Store the in-place modified document as self.document, so that Eve's post_internal
# can actually pick it up as the validated document. We need to make a copy so that
# further modifications (like setting '_etag' etc.) aren't done in-place.
self.document = copy.deepcopy(document)
return result
def _get_child_validator(self, *args, **kwargs):
child = super()._get_child_validator(*args, **kwargs)
# Pass along our reference to the actual document.
child.__real_document = self.__real_document
return child
# TODO: split this into a convert_property(property, schema) and call that from this function.
def convert_properties(self, properties, node_schema):
@@ -137,8 +112,7 @@ class ValidateCustomFields(Validator):
if val:
# This ensures the modifications made by v's coercion rules are
# visible to this validator's output.
# TODO(fsiddi): this no longer works due to Cerberus internal changes.
# self.current[field] = v.current
self.document[field] = v.document
return True
log.warning('Error validating properties for node %s: %s', self.document, v.errors)
@@ -183,36 +157,18 @@ class ValidateCustomFields(Validator):
if ip.prefixlen() == 0:
self._error(field_name, 'Zero-length prefix is not allowed')
def _validator_markdown(self, field, value):
"""Convert MarkDown.
"""
my_log = log.getChild('_validator_markdown')
# Find this field inside the original document
my_subdoc = self._subdoc_in_real_document()
if my_subdoc is None:
# If self.update==True we are validating an update document, which
# may not contain all fields, so then a missing field is fine.
if not self.update:
self._error(field, f'validator_markdown: unable to find sub-document '
f'for path {self.document_path}')
return
my_log.debug('validating field %r with value %r', field, value)
save_to = pillar.markdown.cache_field_name(field)
html = pillar.markdown.markdown(value)
my_log.debug('saving result to %r in doc with id %s', save_to, id(my_subdoc))
my_subdoc[save_to] = html
def _subdoc_in_real_document(self):
"""Return a reference to the current sub-document inside the real document.
This allows modification of the document being validated.
"""
my_subdoc = getattr(self, 'persisted_document') or self.__real_document
for item in self.document_path:
my_subdoc = my_subdoc[item]
return my_subdoc

def _normalize_coerce_markdown(self, markdown_field: str) -> str:
"""
Cache markdown as html.
:param markdown_field: name of the field containing mark down
:return: html string
"""
my_log = log.getChild('_normalize_coerce_markdown')
mdown = self.document.get(markdown_field, '')
html = markdown.markdown(mdown)
my_log.debug('Generated html for markdown field %s in doc with id %s', markdown_field, id(self.document))
return html
if __name__ == '__main__':

View File

@@ -1,5 +1,7 @@
import os
from pillar.api.node_types.utils import markdown_fields
STORAGE_BACKENDS = ["local", "pillar", "cdnsun", "gcs", "unittest"]
URL_PREFIX = 'api'
@@ -184,12 +186,7 @@ organizations_schema = {
'maxlength': 128,
'required': True
},
'description': {
'type': 'string',
'maxlength': 256,
'validator': 'markdown',
},
'_description_html': {'type': 'string'},
**markdown_fields('description', maxlength=256),
'website': {
'type': 'string',
'maxlength': 256,
@@ -322,11 +319,7 @@ nodes_schema = {
'maxlength': 128,
'required': True,
},
'description': {
'type': 'string',
'validator': 'markdown',
},
'_description_html': {'type': 'string'},
**markdown_fields('description'),
'picture': _file_embedded_schema,
'order': {
'type': 'integer',
@@ -576,11 +569,7 @@ projects_schema = {
'maxlength': 128,
'required': True,
},
'description': {
'type': 'string',
'validator': 'markdown',
},
'_description_html': {'type': 'string'},
**markdown_fields('description'),
# Short summary for the project
'summary': {
'type': 'string',

View File

@@ -23,14 +23,6 @@ attachments_embedded_schema = {
'type': 'objectid',
'required': True,
},
'link': {
'type': 'string',
'allowed': ['self', 'none', 'custom'],
'default': 'self',
},
'link_custom': {
'type': 'string',
},
'collection': {
'type': 'string',
'allowed': ['files'],

View File

@@ -1,17 +1,15 @@
from pillar.api.node_types import attachments_embedded_schema
from pillar.api.node_types.utils import markdown_fields
node_type_comment = {
'name': 'comment',
'description': 'Comments for asset nodes, pages, etc.',
'dyn_schema': {
# The actual comment content
'content': {
'type': 'string',
'minlength': 5,
'required': True,
'validator': 'markdown',
},
'_content_html': {'type': 'string'},
**markdown_fields(
'content',
minlength=5,
required=True),
'status': {
'type': 'string',
'allowed': [

View File

@@ -1,17 +1,14 @@
from pillar.api.node_types import attachments_embedded_schema
from pillar.api.node_types.utils import markdown_fields
node_type_post = {
'name': 'post',
'description': 'A blog post, for any project',
'dyn_schema': {
'content': {
'type': 'string',
'minlength': 5,
'maxlength': 90000,
'required': True,
'validator': 'markdown',
},
'_content_html': {'type': 'string'},
**markdown_fields('content',
minlength=5,
maxlength=90000,
required=True),
'status': {
'type': 'string',
'allowed': [

View File

@@ -0,0 +1,34 @@
from pillar import markdown
def markdown_fields(field: str, **kwargs) -> dict:
"""
Creates a field for the markdown, and a field for the cached html.
Example usage:
schema = {'myDoc': {
'type': 'list',
'schema': {
'type': 'dict',
'schema': {
**markdown_fields('content', required=True),
}
},
}}
:param field:
:return:
"""
cache_field = markdown.cache_field_name(field)
return {
field: {
'type': 'string',
**kwargs
},
cache_field: {
'type': 'string',
'readonly': True,
'default': field, # Name of the field containing the markdown. Will be input to the coerce function.
'coerce': 'markdown',
}
}
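
For reference (not part of the file above), this is roughly what the
helper expands to for the organizations schema change earlier in this
commit, assuming markdown.cache_field_name() follows the _<field>_html
naming used by the old schemas:

markdown_fields('description', maxlength=256) == {
    'description': {
        'type': 'string',
        'maxlength': 256,
    },
    '_description_html': {
        'type': 'string',
        'readonly': True,
        'default': 'description',  # field name, fed to the 'markdown' coercer
        'coerce': 'markdown',
    },
}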

View File

@@ -247,14 +247,12 @@ def setup_app(app, url_prefix):
app.on_fetched_resource_nodes += eve_hooks.before_returning_nodes
app.on_replace_nodes += eve_hooks.before_replacing_node
app.on_replace_nodes += eve_hooks.parse_markdown
app.on_replace_nodes += eve_hooks.texture_sort_files
app.on_replace_nodes += eve_hooks.deduct_content_type_and_duration
app.on_replace_nodes += eve_hooks.node_set_default_picture
app.on_replaced_nodes += eve_hooks.after_replacing_node
app.on_insert_nodes += eve_hooks.before_inserting_nodes
app.on_insert_nodes += eve_hooks.parse_markdowns
app.on_insert_nodes += eve_hooks.nodes_deduct_content_type_and_duration
app.on_insert_nodes += eve_hooks.nodes_set_default_picture
app.on_insert_nodes += eve_hooks.textures_sort_files

View File

@@ -122,39 +122,42 @@ def before_inserting_nodes(items):
# Default the 'user' property to the current user.
item.setdefault('user', current_user.user_id)
def after_inserting_nodes(items):
for item in items:
# Skip subscriptions for first level items (since the context is not a
# node, but a project).
# TODO: support should be added for mixed context
if 'parent' not in item:
return
context_object_id = item['parent']
if item['node_type'] == 'comment':
nodes_collection = current_app.data.driver.db['nodes']
parent = nodes_collection.find_one({'_id': item['parent']})
# Always subscribe to the parent node
activity_subscribe(item['user'], 'node', item['parent'])
if parent['node_type'] == 'comment':
# If the parent is a comment, we provide its own parent as
# context. We do this in order to point the user to an asset
# or group when viewing the notification.
verb = 'replied'
context_object_id = parent['parent']
# Subscribe to the parent of the parent comment (post or group)
activity_subscribe(item['user'], 'node', parent['parent'])
else:
activity_subscribe(item['user'], 'node', item['_id'])
verb = 'commented'
elif item['node_type'] in PILLAR_NAMED_NODE_TYPES:
verb = 'posted'
activity_subscribe(item['user'], 'node', item['_id'])
else:
# Don't automatically create activities for non-Pillar node types,
# as we don't know what would be a suitable verb (among other things).
continue

def get_comment_verb_and_context_object_id(comment):
nodes_collection = current_app.data.driver.db['nodes']
verb = 'commented'
parent = nodes_collection.find_one({'_id': comment['parent']})
context_object_id = comment['parent']
while parent['node_type'] == 'comment':
# If the parent is a comment, we provide its own parent as
# context. We do this in order to point the user to an asset
# or group when viewing the notification.
verb = 'replied'
context_object_id = parent['parent']
parent = nodes_collection.find_one({'_id': parent['parent']})
return verb, context_object_id
def after_inserting_nodes(items):
for item in items:
context_object_id = None
# TODO: support should be added for mixed context
if item['node_type'] in PILLAR_NAMED_NODE_TYPES:
activity_subscribe(item['user'], 'node', item['_id'])
verb = 'posted'
context_object_id = item.get('parent')
if item['node_type'] == 'comment':
# Always subscribe to the parent node
activity_subscribe(item['user'], 'node', item['parent'])
verb, context_object_id = get_comment_verb_and_context_object_id(item)
# Subscribe to the parent of the parent comment (post or group)
activity_subscribe(item['user'], 'node', context_object_id)
if context_object_id and item['node_type'] in PILLAR_NAMED_NODE_TYPES:
# * Skip activity for first level items (since the context is not a
# node, but a project).
# * Don't automatically create activities for non-Pillar node types,
# as we don't know what would be a suitable verb (among other things).
activity_object_add(
item['user'],
verb,
@@ -322,46 +325,6 @@ def textures_sort_files(nodes):
texture_sort_files(node)
def parse_markdown(node, original=None):
import copy
projects_collection = current_app.data.driver.db['projects']
project = projects_collection.find_one({'_id': node['project']}, {'node_types': 1})
# Query node type directly using the key
node_type = next(nt for nt in project['node_types']
if nt['name'] == node['node_type'])
# Create a copy to not overwrite the actual schema.
schema = copy.deepcopy(current_app.config['DOMAIN']['nodes']['schema'])
schema['properties'] = node_type['dyn_schema']
def find_markdown_fields(schema, node):
"""Find and process all makrdown validated fields."""
for k, v in schema.items():
if not isinstance(v, dict):
continue
if v.get('validator') == 'markdown':
# If there is a match with the validator: markdown pair, assign the sibling
# property (following the naming convention _<property>_html)
# the processed value.
if k in node:
html = pillar.markdown.markdown(node[k])
field_name = pillar.markdown.cache_field_name(k)
node[field_name] = html
if isinstance(node, dict) and k in node:
find_markdown_fields(v, node[k])
find_markdown_fields(schema, node)
return 'ok'
def parse_markdowns(items):
for item in items:
parse_markdown(item)
def short_link_info(short_code):
"""Returns the short link info in a dict."""

View File

@@ -44,10 +44,16 @@ def remove_private_keys(document):
"""Removes any key that starts with an underscore, returns result as new
dictionary.
"""
doc_copy = copy.deepcopy(document)
for key in list(doc_copy.keys()):
if key.startswith('_'):
del doc_copy[key]

def do_remove(doc):
for key in list(doc.keys()):
if key.startswith('_'):
del doc[key]
elif isinstance(doc[key], dict):
doc[key] = do_remove(doc[key])
return doc
doc_copy = copy.deepcopy(document)
do_remove(doc_copy)
try:
del doc_copy['allowed_methods']
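
A quick sketch of the new recursive behaviour, with a made-up input
document (underscore-prefixed keys are now also stripped from nested
dicts):

doc = {'_id': 1, 'name': 'n',
       'properties': {'content': 'x', '_content_html': '<p>x</p>'}}
remove_private_keys(doc)
# -> {'name': 'n', 'properties': {'content': 'x'}}; 'doc' itself is unchanged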

View File

@@ -739,113 +739,6 @@ def iter_markdown(proj_node_types: dict, some_node: dict, callback: typing.Calla
doc[key] = new_value
@manager_maintenance.option('-p', '--project', dest='proj_url', nargs='?',
help='Project URL')
@manager_maintenance.option('-a', '--all', dest='all_projects', action='store_true', default=False,
help='Replace on all projects.')
@manager_maintenance.option('-g', '--go', dest='go', action='store_true', default=False,
help='Actually perform the changes (otherwise just show as dry-run).')
def upgrade_attachment_usage(proj_url=None, all_projects=False, go=False):
"""Replaces '@[slug]' with '{attachment slug}'.
Also moves links from the attachment dict to the attachment shortcode.
"""
if bool(proj_url) == all_projects:
log.error('Use either --project or --all.')
return 1
import html
from pillar.api.projects.utils import node_type_dict
from pillar.api.utils import remove_private_keys
from pillar.api.utils.authentication import force_cli_user
force_cli_user()
nodes_coll = current_app.db('nodes')
total_nodes = 0
failed_node_ids = set()
# Use a mixture of the old slug RE that still allows spaces in the slug
# name and the new RE that allows dashes.
old_slug_re = re.compile(r'@\[([a-zA-Z0-9_\- ]+)\]')
for proj in _db_projects(proj_url, all_projects, go=go):
proj_id = proj['_id']
proj_url = proj.get('url', '-no-url-')
nodes = nodes_coll.find({
'_deleted': {'$ne': True},
'project': proj_id,
'properties.attachments': {'$exists': True},
})
node_count = nodes.count()
if node_count == 0:
log.debug('Skipping project %s (%s)', proj_url, proj_id)
continue
proj_node_types = node_type_dict(proj)
for node in nodes:
attachments = node['properties']['attachments']
replaced = False
# Inner functions because of access to the node's attachments.
def replace(match):
nonlocal replaced
slug = match.group(1)
log.debug(' - OLD STYLE attachment slug %r', slug)
try:
att = attachments[slug]
except KeyError:
log.info("Attachment %r not found for node %s", slug, node['_id'])
link = ''
else:
link = att.get('link', '')
if link == 'self':
link = " link='self'"
elif link == 'custom':
url = att.get('link_custom')
if url:
link = " link='%s'" % html.escape(url)
replaced = True
return '{attachment %r%s}' % (slug.replace(' ', '-'), link)
def update_markdown(value: str) -> str:
return old_slug_re.sub(replace, value)
iter_markdown(proj_node_types, node, update_markdown)
# Remove no longer used properties from attachments
new_attachments = {}
for slug, attachment in attachments.items():
replaced |= 'link' in attachment # link_custom implies link
attachment.pop('link', None)
attachment.pop('link_custom', None)
new_attachments[slug.replace(' ', '-')] = attachment
node['properties']['attachments'] = new_attachments
if replaced:
total_nodes += 1
else:
# Nothing got replaced,
continue
if go:
# Use Eve to PUT, so we have schema checking.
db_node = remove_private_keys(node)
r, _, _, status = current_app.put_internal('nodes', db_node, _id=node['_id'])
if status != 200:
log.error('Error %i storing altered node %s %s', status, node['_id'], r)
failed_node_ids.add(node['_id'])
# raise SystemExit('Error storing node; see log.')
log.debug('Updated node %s: %s', node['_id'], r)
log.info('Project %s (%s) has %d nodes with attachments',
proj_url, proj_id, node_count)
log.info('%s %d nodes', 'Updated' if go else 'Would update', total_nodes)
if failed_node_ids:
log.warning('Failed to update %d of %d nodes: %s', len(failed_node_ids), total_nodes,
', '.join(str(nid) for nid in failed_node_ids))
def _db_projects(proj_url: str, all_projects: bool, project_id='', *, go: bool) \
-> typing.Iterable[dict]:
"""Yields a subset of the projects in the database.
@@ -1374,3 +1267,69 @@ def fix_projects_for_files(filepath: Path, go=False):
log.info('Done updating %d files (found %d, modified %d) on %d projects',
len(mapping), total_matched, total_modified, len(project_to_file_ids))
@manager_maintenance.option('-u', '--user', dest='user', nargs='?',
help='Update subscriptions for single user.')
@manager_maintenance.option('-o', '--object', dest='context_object', nargs='?',
help='Update subscriptions for context_object.')
@manager_maintenance.option('-g', '--go', dest='go', action='store_true', default=False,
help='Actually perform the changes (otherwise just show as dry-run).')
def fix_missing_activities_subscription_defaults(user=None, context_object=None, go=False):
"""Assign default values to activities-subscriptions documents where values are missing.
"""
subscriptions_collection = current_app.db('activities-subscriptions')
lookup_is_subscribed = {
'is_subscribed': {'$exists': False},
}
lookup_notifications = {
'notifications.web': {'$exists': False},
}
if user:
lookup_is_subscribed['user'] = ObjectId(user)
lookup_notifications['user'] = ObjectId(user)
if context_object:
lookup_is_subscribed['context_object'] = ObjectId(context_object)
lookup_notifications['context_object'] = ObjectId(context_object)
num_need_is_subscribed_update = subscriptions_collection.count(lookup_is_subscribed)
log.info("Found %d documents that needs to be update 'is_subscribed'", num_need_is_subscribed_update)
num_need_notification_web_update = subscriptions_collection.count(lookup_notifications)
log.info("Found %d documents that needs to be update 'notifications.web'", num_need_notification_web_update)
if not go:
return
if num_need_is_subscribed_update > 0:
log.info("Updating 'is_subscribed'")
resp = subscriptions_collection.update(
lookup_is_subscribed,
{
'$set': {'is_subscribed': True}
},
multi=True,
upsert=False
)
if resp['nModified'] != num_need_is_subscribed_update:
log.warning("Expected %d documents to be updated, got %d",
num_need_is_subscribed_update, resp['nModified'])
if num_need_notification_web_update > 0:
log.info("Updating 'notifications.web'")
resp = subscriptions_collection.update(
lookup_notifications,
{
'$set': {'notifications.web': True}
},
multi=True,
upsert=False
)
if resp['nModified'] != num_need_notification_web_update:
log.warning("Expected %d documents to be updated, got %d",
num_need_notification_web_update, resp['nModified'])
log.info("Done updating 'activities-subscriptions' documents")

View File

@@ -4,6 +4,8 @@ This'll help us upgrade to new versions of Cerberus.
"""
import unittest
from pillar.api.node_types.utils import markdown_fields
from pillar.tests import AbstractPillarTest
from bson import ObjectId
@@ -219,10 +221,9 @@ class MarkdownValidatorTest(AbstractSchemaValidationTest):
'schema': {
'type': 'dict',
'schema': {
'content': {'type': 'string', 'required': True, 'validator': 'markdown'},
'_content_html': {'type': 'string'},
'descr': {'type': 'string', 'required': True, 'validator': 'markdown'},
'_descr_html': {'type': 'string'},
**markdown_fields('content', required=True),
**markdown_fields('descr', required=True),
'my_default_value': {'type': 'string', 'default': 'my default value'},
}
},
}}
@@ -239,6 +240,7 @@ class MarkdownValidatorTest(AbstractSchemaValidationTest):
'_content_html': '<h1>Header</h1>\n<p>Some text</p>\n',
'descr': 'je moeder',
'_descr_html': '<p>je moeder</p>\n',
'my_default_value': 'my default value'
}]}
self.assertEqual(expect, doc)
self.assertEqual(expect, self.validator.document)

View File

@@ -0,0 +1,193 @@
import bson
import flask
from bson import ObjectId
from pillar.tests import AbstractPillarTest
class NotificationsTest(AbstractPillarTest):
def setUp(self, **kwargs):
super().setUp(**kwargs)
self.project_id, _ = self.ensure_project_exists()
self.user1_id = self.create_user(user_id=str(bson.ObjectId()))
self.user2_id = self.create_user(user_id=str(bson.ObjectId()))
def test_create_node(self):
"""When a node is created, a subscription should also be created"""
with self.app.app_context():
self.login_api_as(self.user1_id, roles={'subscriber', 'admin'},
# This group is hardcoded in the EXAMPLE_PROJECT.
group_ids=[ObjectId('5596e975ea893b269af85c0e')])
node_id = self.post_node(self.user1_id)
self.assertSubscribed(node_id, self.user1_id)
self.assertNotSubscribed(node_id, self.user2_id)
def test_comment_on_own_node(self):
"""A comment on my own node should not give me any notifications"""
with self.app.app_context():
self.login_api_as(self.user1_id, roles={'subscriber', 'admin'},
# This group is hardcoded in the EXAMPLE_PROJECT.
group_ids=[ObjectId('5596e975ea893b269af85c0e')])
node_id = self.post_node(self.user1_id)
comment_id = self.post_comment(node_id)
self.assertSubscribed(comment_id, self.user1_id)
self.assertNotSubscribed(comment_id, self.user2_id)
with self.login_as(self.user1_id):
notification = self.notification_for_object(comment_id)
self.assertIsNone(notification)
def test_comment_on_node(self):
"""A comment on some one else's node should give them a notification, and subscriptions should be created"""
with self.app.app_context():
self.login_api_as(self.user1_id, roles={'subscriber', 'admin'},
# This group is hardcoded in the EXAMPLE_PROJECT.
group_ids=[ObjectId('5596e975ea893b269af85c0e')])
node_id = self.post_node(self.user1_id)
with self.app.app_context():
self.login_api_as(self.user2_id, roles={'subscriber', 'admin'},
# This group is hardcoded in the EXAMPLE_PROJECT.
group_ids=[ObjectId('5596e975ea893b269af85c0e')])
comment_id = self.post_comment(node_id)
self.assertSubscribed(comment_id, self.user2_id)
self.assertNotSubscribed(comment_id, self.user1_id)
self.assertSubscribed(node_id, self.user1_id, self.user2_id)
with self.login_as(self.user1_id):
notification = self.notification_for_object(comment_id)
self.assertIsNotNone(notification)
def test_mark_notification_as_read(self):
with self.app.app_context():
self.login_api_as(self.user1_id, roles={'subscriber', 'admin'},
# This group is hardcoded in the EXAMPLE_PROJECT.
group_ids=[ObjectId('5596e975ea893b269af85c0e')])
node_id = self.post_node(self.user1_id)
with self.app.app_context():
self.login_api_as(self.user2_id, roles={'subscriber', 'admin'},
# This group is hardcoded in the EXAMPLE_PROJECT.
group_ids=[ObjectId('5596e975ea893b269af85c0e')])
comment_id = self.post_comment(node_id)
with self.login_as(self.user1_id):
notification = self.notification_for_object(comment_id)
self.assertFalse(notification['is_read'])
is_read_toggle_url = flask.url_for('notifications.action_read_toggle', notification_id=notification['_id'])
self.get(is_read_toggle_url)
notification = self.notification_for_object(comment_id)
self.assertTrue(notification['is_read'])
self.get(is_read_toggle_url)
notification = self.notification_for_object(comment_id)
self.assertFalse(notification['is_read'])
def test_unsubscribe(self):
"""It should be possible to unsubscribe to notifications"""
with self.app.app_context():
self.login_api_as(self.user1_id, roles={'subscriber', 'admin'},
# This group is hardcoded in the EXAMPLE_PROJECT.
group_ids=[ObjectId('5596e975ea893b269af85c0e')])
node_id = self.post_node(self.user1_id)
with self.app.app_context():
self.login_api_as(self.user2_id, roles={'subscriber', 'admin'},
# This group is hardcoded in the EXAMPLE_PROJECT.
group_ids=[ObjectId('5596e975ea893b269af85c0e')])
comment_id = self.post_comment(node_id)
with self.login_as(self.user1_id):
notification = self.notification_for_object(comment_id)
self.assertTrue(notification['is_subscribed'])
is_subscribed_toggle_url =\
flask.url_for('notifications.action_subscription_toggle', notification_id=notification['_id'])
self.get(is_subscribed_toggle_url)
notification = self.notification_for_object(comment_id)
self.assertFalse(notification['is_subscribed'])
with self.app.app_context():
self.login_api_as(self.user2_id, roles={'subscriber', 'admin'},
# This group is hardcoded in the EXAMPLE_PROJECT.
group_ids=[ObjectId('5596e975ea893b269af85c0e')])
comment2_id = self.post_comment(node_id)
with self.login_as(self.user1_id):
notification = self.notification_for_object(comment2_id)
self.assertFalse(notification['is_subscribed'])
def assertSubscribed(self, node_id, *user_ids):
subscriptions_col = self.app.data.driver.db['activities-subscriptions']
lookup = {
'context_object': node_id,
'notifications.web': True,
}
subscriptions = list(subscriptions_col.find(lookup))
self.assertEquals(len(subscriptions), len(user_ids))
for s in subscriptions:
self.assertIn(s['user'], user_ids)
self.assertEquals(s['context_object_type'], 'node')
def assertNotSubscribed(self, node_id, user_id):
subscriptions_col = self.app.data.driver.db['activities-subscriptions']
lookup = {
'context_object': node_id,
}
subscriptions = subscriptions_col.find(lookup)
for s in subscriptions:
self.assertNotEquals(s['user'], user_id)
def notification_for_object(self, node_id):
notifications_url = flask.url_for('notifications.index')
notification_items = self.get(notifications_url).json['items']
object_url = flask.url_for('nodes.redirect_to_context', node_id=str(node_id), _external=False)
for candidate in notification_items:
if candidate['object_url'] == object_url:
return candidate
def post_node(self, user_id):
node_doc = {'description': '',
'project': self.project_id,
'node_type': 'asset',
'user': user_id,
'properties': {'status': 'published',
'tags': [],
'order': 0,
'categories': '',
},
'name': 'My first test node'}
r, _, _, status = self.app.post_internal('nodes', node_doc)
self.assertEqual(status, 201, r)
node_id = r['_id']
return node_id
def post_comment(self, node_id):
comment_url = flask.url_for('nodes_api.post_node_comment', node_path=str(node_id))
comment = self.post(
comment_url,
json={
'msg': 'je möder lives at [home](https://cloud.blender.org/)',
},
expected_status=201,
)
return ObjectId(comment.json['id'])

View File

@@ -114,147 +114,6 @@ class UpgradeAttachmentSchemaTest(AbstractPillarTest):
self.assertEqual({}, db_node_type['form_schema'])
class UpgradeAttachmentUsageTest(AbstractPillarTest):
def setUp(self, **kwargs):
super().setUp(**kwargs)
self.pid, self.uid = self.create_project_with_admin(user_id=24 * 'a')
with self.app.app_context():
files_coll = self.app.db('files')
res = files_coll.insert_one({
**ctd.EXAMPLE_FILE,
'project': self.pid,
'user': self.uid,
})
self.fid = res.inserted_id
def test_image_link(self):
with self.app.app_context():
nodes_coll = self.app.db('nodes')
res = nodes_coll.insert_one({
**ctd.EXAMPLE_NODE,
'picture': self.fid,
'project': self.pid,
'user': self.uid,
'description': "# Title\n\n@[slug 0]\n@[slug1]\n@[slug2]\nEitje van Fabergé.",
'properties': {
'status': 'published',
'content_type': 'image',
'file': self.fid,
'attachments': {
'slug 0': {
'oid': self.fid,
'link': 'self',
},
'slug1': {
'oid': self.fid,
'link': 'custom',
'link_custom': 'https://cloud.blender.org/',
},
'slug2': {
'oid': self.fid,
},
}
}
})
nid = res.inserted_id
from pillar.cli.maintenance import upgrade_attachment_usage
with self.app.app_context():
upgrade_attachment_usage(proj_url=ctd.EXAMPLE_PROJECT['url'], go=True)
node = nodes_coll.find_one({'_id': nid})
self.assertEqual(
"# Title\n\n"
"{attachment 'slug-0' link='self'}\n"
"{attachment 'slug1' link='https://cloud.blender.org/'}\n"
"{attachment 'slug2'}\n"
"Eitje van Fabergé.",
node['description'],
'The description should be updated')
self.assertEqual(
"<h1>Title</h1>\n"
"<!-- {attachment 'slug-0' link='self'} -->\n"
"<!-- {attachment 'slug1' link='https://cloud.blender.org/'} -->\n"
"<!-- {attachment 'slug2'} -->\n"
"<p>Eitje van Fabergé.</p>\n",
node['_description_html'],
'The _description_html should be updated')
self.assertEqual(
{'slug-0': {'oid': self.fid},
'slug1': {'oid': self.fid},
'slug2': {'oid': self.fid},
},
node['properties']['attachments'],
'The link should have been removed from the attachment')
def test_post(self):
"""This requires checking the dynamic schema of the node."""
with self.app.app_context():
nodes_coll = self.app.db('nodes')
res = nodes_coll.insert_one({
**ctd.EXAMPLE_NODE,
'node_type': 'post',
'project': self.pid,
'user': self.uid,
'picture': self.fid,
'description': "meh",
'properties': {
'status': 'published',
'content': "# Title\n\n@[slug0]\n@[slug1]\n@[slug2]\nEitje van Fabergé.",
'attachments': {
'slug0': {
'oid': self.fid,
'link': 'self',
},
'slug1': {
'oid': self.fid,
'link': 'custom',
'link_custom': 'https://cloud.blender.org/',
},
'slug2': {
'oid': self.fid,
},
}
}
})
nid = res.inserted_id
from pillar.cli.maintenance import upgrade_attachment_usage
with self.app.app_context():
upgrade_attachment_usage(proj_url=ctd.EXAMPLE_PROJECT['url'], go=True)
node = nodes_coll.find_one({'_id': nid})
self.assertEqual(
"# Title\n\n"
"{attachment 'slug0' link='self'}\n"
"{attachment 'slug1' link='https://cloud.blender.org/'}\n"
"{attachment 'slug2'}\n"
"Eitje van Fabergé.",
node['properties']['content'],
'The content should be updated')
self.assertEqual(
"<h1>Title</h1>\n"
"<!-- {attachment 'slug0' link='self'} -->\n"
"<!-- {attachment 'slug1' link='https://cloud.blender.org/'} -->\n"
"<!-- {attachment 'slug2'} -->\n"
"<p>Eitje van Fabergé.</p>\n",
node['properties']['_content_html'],
'The _content_html should be updated')
self.assertEqual(
{'slug0': {'oid': self.fid},
'slug1': {'oid': self.fid},
'slug2': {'oid': self.fid},
},
node['properties']['attachments'],
'The link should have been removed from the attachment')
class ReconcileNodeDurationTest(AbstractPillarTest):
def setUp(self, **kwargs):
super().setUp(**kwargs)
@@ -459,3 +318,52 @@ class DeleteProjectlessFilesTest(AbstractPillarTest):
self.assertEqual(file1_doc, found1)
self.assertEqual(file3_doc, found3)
class FixMissingActivitiesSubscription(AbstractPillarTest):
def test_fix_missing_activities_subscription(self):
from pillar.cli.maintenance import fix_missing_activities_subscription_defaults
with self.app.app_context():
subscriptions_collection = self.app.db('activities-subscriptions')
invalid_subscription = {
'user': ObjectId(),
'context_object_type': 'node',
'context_object': ObjectId(),
}
valid_subscription = {
'user': ObjectId(),
'context_object_type': 'node',
'context_object': ObjectId(),
'is_subscribed': False,
'notifications': {'web': False, }
}
result = subscriptions_collection.insert_one(
invalid_subscription,
bypass_document_validation=True)
id_invalid = result.inserted_id
result = subscriptions_collection.insert_one(
valid_subscription,
bypass_document_validation=True)
id_valid = result.inserted_id
fix_missing_activities_subscription_defaults() # Dry run. Nothing should change
invalid_subscription = subscriptions_collection.find_one({'_id': id_invalid})
self.assertNotIn('is_subscribed', invalid_subscription.keys())
self.assertNotIn('notifications', invalid_subscription.keys())
fix_missing_activities_subscription_defaults(go=True) # Real run. Invalid should be updated
invalid_subscription = subscriptions_collection.find_one({'_id': id_invalid})
self.assertTrue(invalid_subscription['is_subscribed'])
self.assertTrue(invalid_subscription['notifications']['web'])
# Was already ok. Should not have been updated
valid_subscription = subscriptions_collection.find_one({'_id': id_valid})
self.assertFalse(valid_subscription['is_subscribed'])
self.assertFalse(valid_subscription['notifications']['web'])