2016-04-13 15:33:54 +02:00
|
|
|
# -*- encoding: utf-8 -*-

"""Common base class and helpers for Pillar unit tests."""

import json
import copy
import sys
import logging
import datetime
import os
import base64

from bson import ObjectId, tz_util

# Override Eve settings before importing eve.tests.
import common_test_settings

common_test_settings.override_eve()

from eve.tests import TestMinimal
import pymongo.collection
from flask.testing import FlaskClient
import responses

from common_test_data import EXAMPLE_PROJECT, EXAMPLE_FILE
|
|
|
# Directory containing this file; used to locate the test settings files.
MY_PATH = os.path.dirname(os.path.abspath(__file__))

# Fake user identity used when mocking the Blender ID service.
TEST_EMAIL_USER = 'koro'
TEST_EMAIL_ADDRESS = '%s@testing.blender.org' % TEST_EMAIL_USER
TEST_FULL_NAME = u'врач Сергей'
TEST_SUBCLIENT_TOKEN = 'my-subclient-token-for-pillar'
BLENDER_ID_TEST_USERID = 1896

# Canned 'validate_token' response; served by mock_blenderid_validate_happy().
BLENDER_ID_USER_RESPONSE = {'status': 'success',
                            'user': {'email': TEST_EMAIL_ADDRESS,
                                     'full_name': TEST_FULL_NAME,
                                     'id': BLENDER_ID_TEST_USERID},
                            'token_expires': 'Mon, 1 Jan 2018 01:02:03 GMT'}

# Tests log verbosely; see also the logger levels set in AbstractPillarTest.setUp().
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)-15s %(levelname)8s %(name)s %(message)s')
|
|
|
class AbstractPillarTest(TestMinimal):
    """Base class for Pillar unit tests.

    Points Eve and Pillar at the test settings files in this directory,
    then imports the application and wraps it in a Flask test client.
    """

    def setUp(self, **kwargs):
        # Point Eve and Pillar at the test configuration, both via the
        # kwarg for the superclass and via environment variables.
        # NOTE(review): presumably these are read when 'application' is
        # imported below, so they must be set first — confirm.
        eve_settings_file = os.path.join(MY_PATH, 'common_test_settings.py')
        pillar_config_file = os.path.join(MY_PATH, 'config_testing.py')
        kwargs['settings_file'] = eve_settings_file
        os.environ['EVE_SETTINGS'] = eve_settings_file
        os.environ['PILLAR_CONFIG'] = pillar_config_file
        super(AbstractPillarTest, self).setUp(**kwargs)

        # Imported only now, after the environment is set up above.
        from application import app

        # Be as verbose as possible while testing.
        logging.getLogger('').setLevel(logging.DEBUG)
        logging.getLogger('application').setLevel(logging.DEBUG)
        logging.getLogger('werkzeug').setLevel(logging.DEBUG)
        logging.getLogger('eve').setLevel(logging.DEBUG)

        from eve.utils import config
        config.DEBUG = True

        self.app = app
        self.client = app.test_client()
        assert isinstance(self.client, FlaskClient)
|
|
|
def tearDown(self):
|
|
|
|
super(AbstractPillarTest, self).tearDown()
|
|
|
|
|
|
|
|
# Not only delete self.app (like the superclass does),
|
|
|
|
# but also un-import the application.
|
|
|
|
del sys.modules['application']
|
|
|
|
|
2016-03-25 16:05:36 +01:00
|
|
|
def ensure_file_exists(self, file_overrides=None):
|
2016-04-01 13:03:27 +02:00
|
|
|
self.ensure_project_exists()
|
2016-03-25 15:57:17 +01:00
|
|
|
with self.app.test_request_context():
|
|
|
|
files_collection = self.app.data.driver.db['files']
|
|
|
|
assert isinstance(files_collection, pymongo.collection.Collection)
|
|
|
|
|
|
|
|
file = copy.deepcopy(EXAMPLE_FILE)
|
|
|
|
if file_overrides is not None:
|
|
|
|
file.update(file_overrides)
|
2016-05-10 12:35:21 +02:00
|
|
|
if '_id' in file and file['_id'] is None:
|
|
|
|
del file['_id']
|
2016-03-25 15:57:17 +01:00
|
|
|
|
|
|
|
result = files_collection.insert_one(file)
|
|
|
|
file_id = result.inserted_id
|
2016-04-01 13:03:27 +02:00
|
|
|
|
|
|
|
# Re-fetch from the database, so that we're sure we return the same as is stored.
|
|
|
|
# This is necessary as datetimes are rounded by MongoDB.
|
|
|
|
from_db = files_collection.find_one(file_id)
|
|
|
|
return file_id, from_db
|
2016-03-25 18:23:01 +01:00
|
|
|
|
|
|
|
def ensure_project_exists(self, project_overrides=None):
|
|
|
|
with self.app.test_request_context():
|
|
|
|
projects_collection = self.app.data.driver.db['projects']
|
|
|
|
assert isinstance(projects_collection, pymongo.collection.Collection)
|
|
|
|
|
|
|
|
project = copy.deepcopy(EXAMPLE_PROJECT)
|
|
|
|
if project_overrides is not None:
|
|
|
|
project.update(project_overrides)
|
|
|
|
|
2016-04-01 13:03:27 +02:00
|
|
|
found = projects_collection.find_one(project['_id'])
|
|
|
|
if found is None:
|
|
|
|
result = projects_collection.insert_one(project)
|
|
|
|
return result.inserted_id, project
|
|
|
|
|
|
|
|
return found['_id'], found
|
2016-03-25 15:57:17 +01:00
|
|
|
|
2016-06-14 13:26:53 +02:00
|
|
|
def create_user(self, user_id='cafef00dc379cf10c4aaceaf', roles=('subscriber',)):
|
2016-04-26 10:45:54 +02:00
|
|
|
from application.utils.authentication import make_unique_username
|
|
|
|
|
2016-04-15 16:27:24 +02:00
|
|
|
with self.app.test_request_context():
|
|
|
|
users = self.app.data.driver.db['users']
|
|
|
|
assert isinstance(users, pymongo.collection.Collection)
|
|
|
|
|
|
|
|
result = users.insert_one({
|
2016-04-19 16:46:39 +02:00
|
|
|
'_id': ObjectId(user_id),
|
2016-04-15 16:27:24 +02:00
|
|
|
'_updated': datetime.datetime(2016, 4, 15, 13, 15, 11, tzinfo=tz_util.utc),
|
|
|
|
'_created': datetime.datetime(2016, 4, 15, 13, 15, 11, tzinfo=tz_util.utc),
|
2016-04-26 10:45:54 +02:00
|
|
|
'username': make_unique_username('tester'),
|
2016-04-15 16:27:24 +02:00
|
|
|
'groups': [],
|
2016-04-19 16:00:32 +02:00
|
|
|
'roles': list(roles),
|
2016-04-15 16:27:24 +02:00
|
|
|
'settings': {'email_communications': 1},
|
|
|
|
'auth': [{'token': '',
|
|
|
|
'user_id': unicode(BLENDER_ID_TEST_USERID),
|
|
|
|
'provider': 'blender-id'}],
|
|
|
|
'full_name': u'คนรักของผัดไทย',
|
|
|
|
'email': TEST_EMAIL_ADDRESS
|
|
|
|
})
|
|
|
|
|
|
|
|
return result.inserted_id
|
|
|
|
|
2016-04-19 16:00:32 +02:00
|
|
|
def create_valid_auth_token(self, user_id, token='token'):
|
|
|
|
now = datetime.datetime.now(tz_util.utc)
|
|
|
|
future = now + datetime.timedelta(days=1)
|
|
|
|
|
|
|
|
with self.app.test_request_context():
|
|
|
|
from application.utils import authentication as auth
|
|
|
|
|
|
|
|
token_data = auth.store_token(user_id, token, future, None)
|
|
|
|
|
|
|
|
return token_data
|
|
|
|
|
2016-06-24 14:32:12 +02:00
|
|
|
def badger(self, user_email, roles, action, srv_token=None):
|
2016-06-14 13:26:53 +02:00
|
|
|
"""Creates a service account, and uses it to grant or revoke a role to the user.
|
|
|
|
|
|
|
|
To skip creation of the service account, pass a srv_token.
|
|
|
|
|
|
|
|
:returns: the authentication token of the created service account.
|
|
|
|
:rtype: str
|
|
|
|
"""
|
|
|
|
|
2016-06-24 14:32:12 +02:00
|
|
|
if isinstance(roles, str):
|
|
|
|
roles = set(roles)
|
|
|
|
|
2016-06-14 13:26:53 +02:00
|
|
|
# Create a service account if needed.
|
|
|
|
if srv_token is None:
|
|
|
|
from application.modules.service import create_service_account
|
|
|
|
with self.app.test_request_context():
|
|
|
|
_, srv_token_doc = create_service_account('service@example.com',
|
|
|
|
{'badger'},
|
2016-06-24 14:32:12 +02:00
|
|
|
{'badger': list(roles)})
|
2016-06-14 13:26:53 +02:00
|
|
|
srv_token = srv_token_doc['token']
|
|
|
|
|
2016-06-24 14:32:12 +02:00
|
|
|
for role in roles:
|
|
|
|
resp = self.client.post('/service/badger',
|
|
|
|
headers={'Authorization': self.make_header(srv_token),
|
|
|
|
'Content-Type': 'application/json'},
|
|
|
|
data=json.dumps({'action': action,
|
|
|
|
'role': role,
|
|
|
|
'user_email': user_email}))
|
2016-06-14 13:26:53 +02:00
|
|
|
self.assertEqual(204, resp.status_code, resp.data)
|
|
|
|
|
|
|
|
return srv_token
|
|
|
|
|
2016-04-13 15:33:54 +02:00
|
|
|
def mock_blenderid_validate_unhappy(self):
|
|
|
|
"""Sets up Responses to mock unhappy validation flow."""
|
2016-03-25 15:57:17 +01:00
|
|
|
|
2016-04-13 15:33:54 +02:00
|
|
|
responses.add(responses.POST,
|
2016-04-15 12:19:43 +02:00
|
|
|
'%s/u/validate_token' % self.app.config['BLENDER_ID_ENDPOINT'],
|
2016-04-13 15:33:54 +02:00
|
|
|
json={'status': 'fail'},
|
2016-04-15 12:19:43 +02:00
|
|
|
status=403)
|
2016-03-25 15:57:17 +01:00
|
|
|
|
2016-04-13 15:33:54 +02:00
|
|
|
def mock_blenderid_validate_happy(self):
|
|
|
|
"""Sets up Responses to mock happy validation flow."""
|
2016-03-25 15:57:17 +01:00
|
|
|
|
2016-04-13 15:33:54 +02:00
|
|
|
responses.add(responses.POST,
|
2016-04-15 12:19:43 +02:00
|
|
|
'%s/u/validate_token' % self.app.config['BLENDER_ID_ENDPOINT'],
|
2016-04-13 15:33:54 +02:00
|
|
|
json=BLENDER_ID_USER_RESPONSE,
|
|
|
|
status=200)
|
2016-03-25 15:57:17 +01:00
|
|
|
|
2016-04-13 15:33:54 +02:00
|
|
|
def make_header(self, username, subclient_id=''):
|
2016-04-12 15:24:50 +02:00
|
|
|
"""Returns a Basic HTTP Authentication header value."""
|
|
|
|
|
2016-04-13 15:33:54 +02:00
|
|
|
return 'basic ' + base64.b64encode('%s:%s' % (username, subclient_id))
|
2016-06-15 13:08:45 +02:00
|
|
|
|
|
|
|
def create_standard_groups(self, additional_groups=()):
|
|
|
|
"""Creates standard admin/demo/subscriber groups, plus any additional.
|
|
|
|
|
|
|
|
:returns: mapping from group name to group ID
|
|
|
|
"""
|
|
|
|
from application.modules import service
|
|
|
|
|
|
|
|
with self.app.test_request_context():
|
|
|
|
group_ids = {}
|
|
|
|
groups_coll = self.app.data.driver.db['groups']
|
|
|
|
|
|
|
|
for group_name in ['admin', 'demo', 'subscriber'] + list(additional_groups):
|
|
|
|
result = groups_coll.insert_one({'name': group_name})
|
|
|
|
group_ids[group_name] = result.inserted_id
|
|
|
|
|
|
|
|
service.fetch_role_to_group_id_map()
|
|
|
|
|
|
|
|
return group_ids
|