MongoCollection.count() and update() are deprecated
Eve doesn't have any counting methods on `current_app.data`, so there is no one-to-one translation for `cursor.count()` in `file_storage/__init__.py`. Since the call was only used in a debug log entry, I just removed it altogether.

I also removed `pillar.cli.operations.index_users_rebuild()`, as it was importing `pillar.api.utils.algolia.algolia_index_user_save`, which doesn't exist any more, so the code was dead anyway.
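For reference, a minimal sketch of the replacement pattern this commit applies, using PyMongo 3.7+ (the database, collection, and field names here are illustrative, not taken from the diff):

```python
from pymongo import MongoClient

coll = MongoClient().test_db.test_coll  # hypothetical database/collection

# Deprecated: Collection.count(), Cursor.count(), Collection.update()
# count = coll.find({'_deleted': False}).count()
# coll.update({'_deleted': False}, {'$set': {'touched': True}}, multi=True)

# Replacement: count on the collection, not the cursor ...
count = coll.count_documents({'_deleted': False})

# ... and update_one()/update_many(), which return an UpdateResult
# with a modified_count attribute instead of a dict with 'nModified'.
result = coll.update_many({'_deleted': False}, {'$set': {'touched': True}})
print(result.modified_count)
```

Because `count_documents()` lives on the collection rather than on the cursor, a sorted and limited query has to keep its filter in a separate dict so it can be counted independently; that is why `refresh_links_for_backend()` below factors out `to_refresh_query`.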
parent 47d5c6cbad
commit 1e823a9dbe
@@ -91,14 +91,14 @@ def notification_parse(notification):
 def notification_get_subscriptions(context_object_type, context_object_id, actor_user_id):
-    subscriptions_collection = current_app.data.driver.db['activities-subscriptions']
+    subscriptions_collection = current_app.db('activities-subscriptions')
     lookup = {
         'user': {"$ne": actor_user_id},
         'context_object_type': context_object_type,
         'context_object': context_object_id,
         'is_subscribed': True,
     }
-    return subscriptions_collection.find(lookup)
+    return subscriptions_collection.find(lookup), subscriptions_collection.count_documents(lookup)


 def activity_subscribe(user_id, context_object_type, context_object_id):
@@ -140,10 +140,10 @@ def activity_object_add(actor_user_id, verb, object_type, object_id,
     :param object_id: object id, to be traced with object_type_id
     """

-    subscriptions = notification_get_subscriptions(
+    subscriptions, subscription_count = notification_get_subscriptions(
         context_object_type, context_object_id, actor_user_id)

-    if subscriptions.count() == 0:
+    if subscription_count == 0:
         return

     info, status = register_activity(actor_user_id, verb, object_type, object_id,
@@ -257,7 +257,7 @@ def has_home_project(user_id):
     """Returns True iff the user has a home project."""

     proj_coll = current_app.data.driver.db['projects']
-    return proj_coll.count({'user': user_id, 'category': 'home', '_deleted': False}) > 0
+    return proj_coll.count_documents({'user': user_id, 'category': 'home', '_deleted': False}) > 0


 def get_home_project(user_id, projection=None):
@@ -272,10 +272,10 @@ def is_home_project(project_id, user_id):
     """Returns True iff the given project exists and is the user's home project."""

     proj_coll = current_app.data.driver.db['projects']
-    return proj_coll.count({'_id': project_id,
-                            'user': user_id,
-                            'category': 'home',
-                            '_deleted': False}) > 0
+    return proj_coll.count_documents({'_id': project_id,
+                                      'user': user_id,
+                                      'category': 'home',
+                                      '_deleted': False}) > 0


 def mark_node_updated(node_id):
@@ -566,12 +566,9 @@ def on_pre_get_files(_, lookup):
     lookup_expired['link_expires'] = {'$lte': now}

     cursor = current_app.data.find('files', parsed_req, lookup_expired)
-    if cursor.count() == 0:
-        return
-
-    log.debug('Updating expired links for %d files that matched lookup %s',
-              cursor.count(), lookup_expired)
-    for file_doc in cursor:
+    for idx, file_doc in enumerate(cursor):
+        if idx == 0:
+            log.debug('Updating expired links for files that matched lookup %s', lookup_expired)
         # log.debug('Updating expired links for file %r.', file_doc['_id'])
         generate_all_links(file_doc, now)

@@ -595,15 +592,14 @@ def refresh_links_for_project(project_uuid, chunk_size, expiry_seconds):
         'link_expires': {'$lt': expire_before},
     }).sort([('link_expires', pymongo.ASCENDING)]).limit(chunk_size)

-    if to_refresh.count() == 0:
-        log.info('No links to refresh.')
-        return
-
+    refresh_count = 0
     for file_doc in to_refresh:
         log.debug('Refreshing links for file %s', file_doc['_id'])
         generate_all_links(file_doc, now)
+        refresh_count += 1

-    log.info('Refreshed %i links', min(chunk_size, to_refresh.count()))
+    if refresh_count:
+        log.info('Refreshed %i links', refresh_count)


 def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
@@ -621,14 +617,13 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
     my_log.info('Limiting to links that expire before %s', expire_before)

     base_query = {'backend': backend_name, '_deleted': {'$ne': True}}
-    to_refresh = files_collection.find(
-        {'$or': [{'link_expires': None, **base_query},
-                 {'link_expires': {'$lt': expire_before}, **base_query},
-                 {'link': None, **base_query}]
-        }).sort([('link_expires', pymongo.ASCENDING)]).limit(
-        chunk_size).batch_size(5)
+    to_refresh_query = {
+        '$or': [{'link_expires': None, **base_query},
+                {'link_expires': {'$lt': expire_before}, **base_query},
+                {'link': None, **base_query}]
+    }

-    document_count = to_refresh.count()
+    document_count = files_collection.count_documents(to_refresh_query)
     if document_count == 0:
         my_log.info('No links to refresh.')
         return
@@ -639,6 +634,11 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
     else:
         my_log.info('Found %d documents to refresh, chunk size=%d', document_count, chunk_size)

+    to_refresh = files_collection.find(to_refresh_query)\
+        .sort([('link_expires', pymongo.ASCENDING)])\
+        .limit(chunk_size)\
+        .batch_size(5)
+
     refreshed = 0
     report_chunks = min(max(5, document_count // 25), 100)
     for file_doc in to_refresh:
@@ -649,7 +649,7 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
             my_log.debug('Skipping file %s, it has no project.', file_id)
             continue

-        count = proj_coll.count({'_id': project_id, '$or': [
+        count = proj_coll.count_documents({'_id': project_id, '$or': [
             {'_deleted': {'$exists': False}},
             {'_deleted': False},
         ]})
@@ -153,7 +153,7 @@ class OrgManager:
         org_coll = current_app.db('organizations')
         users_coll = current_app.db('users')

-        if users_coll.count({'_id': user_id}) == 0:
+        if users_coll.count_documents({'_id': user_id}) == 0:
             raise ValueError('User not found')

         self._log.info('Updating organization %s, setting admin user to %s', org_id, user_id)
@@ -189,7 +189,7 @@ class OrgManager:
         if user_doc is not None:
             user_id = user_doc['_id']

-        if user_id and not users_coll.count({'_id': user_id}):
+        if user_id and not users_coll.count_documents({'_id': user_id}):
             raise wz_exceptions.UnprocessableEntity('User does not exist')

         self._log.info('Removing user %s / %s from organization %s', user_id, email, org_id)
@@ -385,7 +385,7 @@ class OrgManager:

         org_coll = current_app.db('organizations')

-        org_count = org_coll.count({'$or': [
+        org_count = org_coll.count_documents({'$or': [
             {'admin_uid': user_id},
             {'members': user_id}
         ]})
@@ -396,7 +396,7 @@ class OrgManager:
         """Return True iff the email is an unknown member of some org."""

         org_coll = current_app.db('organizations')
-        org_count = org_coll.count({'unknown_members': member_email})
+        org_count = org_coll.count_documents({'unknown_members': member_email})
         return bool(org_count)

     def roles_for_ip_address(self, remote_addr: str) -> typing.Set[str]:
@@ -25,8 +25,11 @@ def merge_project(pid_from: ObjectId, pid_to: ObjectId):

     # Move the files first. Since this requires API calls to an external
     # service, this is more likely to go wrong than moving the nodes.
-    to_move = files_coll.find({'project': pid_from}, projection={'_id': 1})
-    log.info('Moving %d files to project %s', to_move.count(), pid_to)
+    query = {'project': pid_from}
+    to_move = files_coll.find(query, projection={'_id': 1})
+
+    to_move_count = files_coll.count_documents(query)
+    log.info('Moving %d files to project %s', to_move_count, pid_to)
     for file_doc in to_move:
         fid = file_doc['_id']
         log.debug('moving file %s to project %s', fid, pid_to)
@@ -35,7 +38,7 @@ def merge_project(pid_from: ObjectId, pid_to: ObjectId):
     # Mass-move the nodes.
     etag = random_etag()
     result = nodes_coll.update_many(
-        {'project': pid_from},
+        query,
         {'$set': {'project': pid_to,
                   '_etag': etag,
                   '_updated': utcnow(),
@@ -92,8 +92,8 @@ def project_manage_users():
                   action, current_user_id)
         raise wz_exceptions.UnprocessableEntity()

-    users_collection.update({'_id': target_user_id},
-                            {operation: {'groups': admin_group['_id']}})
+    users_collection.update_one({'_id': target_user_id},
+                                {operation: {'groups': admin_group['_id']}})

     user = users_collection.find_one({'_id': target_user_id},
                                      {'username': 1, 'email': 1,
@@ -141,5 +141,3 @@ def get_allowed_methods(project_id=None, node_type=None):
     resp.status_code = 204

     return resp
-
-
@@ -91,7 +91,7 @@ class SearchHelper:

     def has_more(self, continue_from: datetime) -> bool:
         nodes_coll = current_app.db('nodes')
-        result = nodes_coll.count(self._match(continue_from))
+        result = nodes_coll.count_documents(self._match(continue_from))
         return bool(result)


@@ -306,7 +306,7 @@ def purge_home_projects(go=False):
             yield pid
             continue

-        if users_coll.find({'_id': uid, '_deleted': {'$ne': True}}).count() == 0:
+        if users_coll.count_documents({'_id': uid, '_deleted': {'$ne': True}}) == 0:
             log.info('Project %s has non-existing owner %s', pid, uid)
             bad += 1
             yield pid
@@ -1296,9 +1296,9 @@ def fix_missing_activities_subscription_defaults(user=None, context_object=None,
         lookup_is_subscribed['context_object'] = ObjectId(context_object)
         lookup_notifications['context_object'] = ObjectId(context_object)

-    num_need_is_subscribed_update = subscriptions_collection.count(lookup_is_subscribed)
+    num_need_is_subscribed_update = subscriptions_collection.count_documents(lookup_is_subscribed)
     log.info("Found %d documents that needs to be update 'is_subscribed'", num_need_is_subscribed_update)
-    num_need_notification_web_update = subscriptions_collection.count(lookup_notifications)
+    num_need_notification_web_update = subscriptions_collection.count_documents(lookup_notifications)
     log.info("Found %d documents that needs to be update 'notifications.web'", num_need_notification_web_update)

     if not go:
@@ -1306,29 +1306,27 @@ def fix_missing_activities_subscription_defaults(user=None, context_object=None,

     if num_need_is_subscribed_update > 0:
         log.info("Updating 'is_subscribed'")
-        resp = subscriptions_collection.update(
+        resp = subscriptions_collection.update_many(
             lookup_is_subscribed,
             {
                 '$set': {'is_subscribed': True}
             },
-            multi=True,
             upsert=False
         )
-        if resp['nModified'] is not num_need_is_subscribed_update:
+        if resp.modified_count != num_need_is_subscribed_update:
             log.warning("Expected % documents to be update, was %d",
                         num_need_is_subscribed_update, resp['nModified'])

     if num_need_notification_web_update > 0:
         log.info("Updating 'notifications.web'")
-        resp = subscriptions_collection.update(
+        resp = subscriptions_collection.update_many(
             lookup_notifications,
             {
                 '$set': {'notifications.web': True}
             },
-            multi=True,
             upsert=False
         )
-        if resp['nModified'] is not num_need_notification_web_update:
+        if resp.modified_count != num_need_notification_web_update:
             log.warning("Expected % documents to be update, was %d",
                         num_need_notification_web_update, resp['nModified'])

@@ -165,49 +165,6 @@ def merge_project(src_proj_url, dest_proj_url):
     log.info('Done moving.')


-@manager_operations.command
-def index_users_rebuild():
-    """Clear users index, update settings and reindex all users."""
-
-    import concurrent.futures
-
-    from pillar.api.utils.algolia import algolia_index_user_save
-
-    users_index = current_app.algolia_index_users
-    if users_index is None:
-        log.error('Algolia is not configured properly, unable to do anything!')
-        return 1
-
-    log.info('Dropping existing index: %s', users_index)
-    users_index.clear_index()
-    index_users_update_settings()
-
-    db = current_app.db()
-    users = db['users'].find({'_deleted': {'$ne': True}})
-    user_count = users.count()
-
-    log.info('Reindexing all %i users', user_count)
-
-    real_current_app = current_app._get_current_object()._get_current_object()
-
-    def do_user(user):
-        with real_current_app.app_context():
-            algolia_index_user_save(user)
-
-    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
-        future_to_user = {executor.submit(do_user, user): user
-                          for user in users}
-        for idx, future in enumerate(concurrent.futures.as_completed(future_to_user)):
-            user = future_to_user[future]
-            user_ident = user.get('email') or user.get('_id')
-            try:
-                future.result()
-            except Exception:
-                log.exception('Error updating user %i/%i %s', idx + 1, user_count, user_ident)
-            else:
-                log.info('Updated user %i/%i %s', idx + 1, user_count, user_ident)
-
-
 @manager_operations.command
 def index_users_update_settings():
     """Configure indexing backend as required by the project"""
@@ -234,7 +191,7 @@ def hash_auth_tokens():
     tokens_coll = current_app.db('tokens')
     query = {'token': {'$exists': True}}
     cursor = tokens_coll.find(query, projection={'token': 1, '_id': 1})
-    log.info('Updating %d tokens', cursor.count())
+    log.info('Updating %d tokens', tokens_coll.count_documents(query))

     for token_doc in cursor:
         hashed_token = hash_auth_token(token_doc['token'])
@@ -741,7 +741,7 @@ class UserCreationTest(AbstractPillarTest):

         with self.app.test_request_context():
             users_coll = self.app.db().users
-            self.assertEqual(0, users_coll.count())
+            self.assertEqual(0, users_coll.count_documents({}))

         self.mock_blenderid_validate_happy()
         token = 'this is my life now'
@@ -749,7 +749,7 @@ class UserCreationTest(AbstractPillarTest):

         with self.app.test_request_context():
             users_coll = self.app.db().users
-            self.assertEqual(1, users_coll.count())
+            self.assertEqual(1, users_coll.count_documents({}))

             db_user = users_coll.find()[0]
             self.assertEqual(db_user['email'], TEST_EMAIL_ADDRESS)
@@ -760,7 +760,7 @@ class UserCreationTest(AbstractPillarTest):

         with self.app.test_request_context():
             users_coll = self.app.db().users
-            self.assertEqual(0, users_coll.count())
+            self.assertEqual(0, users_coll.count_documents({}))

         bid_resp = {'status': 'success',
                     'user': {'email': TEST_EMAIL_ADDRESS,
@@ -778,7 +778,7 @@ class UserCreationTest(AbstractPillarTest):

         with self.app.test_request_context():
             users_coll = self.app.db().users
-            self.assertEqual(1, users_coll.count())
+            self.assertEqual(1, users_coll.count_documents({}))

             db_user = users_coll.find()[0]
             self.assertEqual(db_user['email'], TEST_EMAIL_ADDRESS)
@@ -789,7 +789,7 @@ class UserCreationTest(AbstractPillarTest):
         """Blender ID does not require full name, we do."""
         with self.app.app_context():
             users_coll = self.app.db().users
-            self.assertEqual(0, users_coll.count())
+            self.assertEqual(0, users_coll.count_documents({}))

         # First request will create the user, the 2nd request will update.
         self.mock_blenderid_validate_happy()
@@ -818,7 +818,7 @@ class UserCreationTest(AbstractPillarTest):
         self.get('/api/users/me', auth_token=token)

         with self.app.app_context():
-            self.assertEqual(1, users_coll.count())
+            self.assertEqual(1, users_coll.count_documents({}))

             db_user = users_coll.find()[0]
             self.assertEqual(db_user['email'], TEST_EMAIL_ADDRESS)
@@ -81,7 +81,7 @@ class CommentEditTest(AbstractPillarTest):

         with self.app.app_context():
             proj_coll = self.app.db('projects')
-            proj_coll.update(
+            proj_coll.update_one(
                 {'_id': self.pid},
                 {'$set': {
                     'node_types': self.project['node_types'],
@@ -690,7 +690,7 @@ class NodesReferencedByProjectTest(AbstractPillarTest):
         self.node_etag = node['_etag']

         with self.app.app_context():
-            self.app.db('projects').update(
+            self.app.db('projects').update_one(
                 {'_id': self.pid},
                 {'$set': {
                     'header_node': self.node_id,
@@ -24,13 +24,13 @@ class OrganizationCruTest(AbstractPillarTest):
         self.enter_app_context()

         # There should be no organizations to begin with.
-        db = self.app.db('organizations')
-        self.assertEqual(0, db.count())
+        org_coll = self.app.db('organizations')
+        self.assertEqual(0, org_coll.count_documents({}))

         admin_uid = self.create_user(24 * 'a')
         org_doc = self.app.org_manager.create_new_org('Хакеры', admin_uid, 25)

-        self.assertIsNotNone(db.find_one(org_doc['_id']))
+        self.assertIsNotNone(org_coll.find_one(org_doc['_id']))
         self.assertEqual(bson.ObjectId(24 * 'a'), org_doc['admin_uid'])
         self.assertEqual('Хакеры', org_doc['name'])
         self.assertEqual(25, org_doc['seat_count'])