diff --git a/pillar/application/modules/encoding.py b/pillar/application/modules/encoding.py
index af815f32..3a44e622 100644
--- a/pillar/application/modules/encoding.py
+++ b/pillar/application/modules/encoding.py
@@ -5,7 +5,7 @@ from eve.methods.put import put_internal
 from flask import Blueprint
 from flask import abort
 from flask import request
-from application import app
+from flask import current_app
 from application import utils
 
 encoding = Blueprint('encoding', __name__)
@@ -14,11 +14,11 @@ log = logging.getLogger(__name__)
 
 @encoding.route('/zencoder/notifications', methods=['POST'])
 def zencoder_notifications():
-    if app.config['ENCODING_BACKEND'] != 'zencoder':
+    if current_app.config['ENCODING_BACKEND'] != 'zencoder':
         log.warning('Received notification from Zencoder but app not configured for Zencoder.')
         return abort(403)
 
-    if not app.config['DEBUG']:
+    if not current_app.config['DEBUG']:
         # If we are in production, look for the Zencoder header secret
         try:
             notification_secret_request = request.headers[
@@ -27,14 +27,14 @@ def zencoder_notifications():
             log.warning('Received Zencoder notification without secret.')
             return abort(401)
         # If the header is found, check it agains the one in the config
-        notification_secret = app.config['ZENCODER_NOTIFICATIONS_SECRET']
+        notification_secret = current_app.config['ZENCODER_NOTIFICATIONS_SECRET']
         if notification_secret_request != notification_secret:
             log.warning('Received Zencoder notification with incorrect secret.')
             return abort(401)
 
     # Cast request data into a dict
     data = request.get_json()
-    files_collection = app.data.driver.db['files']
+    files_collection = current_app.data.driver.db['files']
     # Find the file object based on processing backend and job_id
     lookup = {'processing.backend': 'zencoder', 'processing.job_id': str(data['job']['id'])}
     f = files_collection.find_one(lookup)
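
The pattern in the hunk above repeats throughout this change: instead of importing the application object from the application package at import time, request handlers reach the active app through Flask's current_app proxy, which is only bound while an application context exists (during a request, or inside an explicit app.app_context() block). That removes the import-time dependency on a fully constructed app, and with it the circular imports. A minimal sketch of the pattern, using made-up names (make_app, example, EXAMPLE_SETTING) that are not part of Pillar:

from flask import Blueprint, Flask, current_app, jsonify

example = Blueprint('example', __name__)


@example.route('/example')
def show_setting():
    # current_app resolves to whichever application is handling this request;
    # importing this module never touches the application object itself.
    return jsonify(setting=current_app.config.get('EXAMPLE_SETTING'))


def make_app():
    # The app can now be built by a factory after all blueprints are imported.
    app = Flask(__name__)
    app.config['EXAMPLE_SETTING'] = 42
    app.register_blueprint(example)
    return app
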
diff --git a/pillar/application/modules/file_storage.py b/pillar/application/modules/file_storage.py
index cecf1de1..9421119e 100644
--- a/pillar/application/modules/file_storage.py
+++ b/pillar/application/modules/file_storage.py
@@ -16,6 +16,7 @@ from flask import request
 from flask import abort
 from flask import send_from_directory
 from flask import url_for, helpers
+from flask import current_app
 from application import utils
 from application.utils import remove_private_keys
 
@@ -67,9 +68,7 @@ def build_thumbnails(file_path=None, file_id=None):
     variation properties.
 
     """
-    from application import app
-
-    files_collection = app.data.driver.db['files']
+    files_collection = current_app.data.driver.db['files']
     if file_path:
         # Search file with backend "pillar" and path=file_path
         file_ = files_collection.find({"file_path": "{0}".format(file_path)})
@@ -79,7 +78,7 @@ def build_thumbnails(file_path=None, file_id=None):
         file_ = files_collection.find_one({"_id": ObjectId(file_id)})
         file_path = file_['name']
 
-    file_full_path = safe_join(safe_join(app.config['SHARED_DIR'], file_path[:2]),
+    file_full_path = safe_join(safe_join(current_app.config['SHARED_DIR'], file_path[:2]),
                                file_path)
     # Does the original file exist?
     if not os.path.isfile(file_full_path):
@@ -120,11 +119,10 @@ def build_thumbnails(file_path=None, file_id=None):
 
 @file_storage.route('/file', methods=['POST'])
 @file_storage.route('/file/<path:file_name>', methods=['GET', 'POST'])
 def index(file_name=None):
-    from application import app
     # GET file -> read it
     if request.method == 'GET':
-        return send_from_directory(app.config['STORAGE_DIR'], file_name)
+        return send_from_directory(current_app.config['STORAGE_DIR'], file_name)
 
     # POST file -> save it
 
@@ -137,7 +135,7 @@ def index(file_name=None):
 
     # Determine & create storage directory
     folder_name = file_name[:2]
-    file_folder_path = helpers.safe_join(app.config['STORAGE_DIR'], folder_name)
+    file_folder_path = helpers.safe_join(current_app.config['STORAGE_DIR'], folder_name)
     if not os.path.exists(file_folder_path):
         log.info('Creating folder path %r', file_folder_path)
         os.mkdir(file_folder_path)
@@ -157,12 +155,11 @@ def process_file(file_id, src_file):
     :param file_id: '_id' key of the file
     :param src_file: POSTed data of the file, lacks private properties.
     """
-    from application import app
 
     src_file = utils.remove_private_keys(src_file)
 
     filename = src_file['name']
-    file_abs_path = safe_join(safe_join(app.config['SHARED_DIR'], filename[:2]), filename)
+    file_abs_path = safe_join(safe_join(current_app.config['SHARED_DIR'], filename[:2]), filename)
 
     if not os.path.exists(file_abs_path):
         log.warning("POSTed file document %r refers to non-existant file on file system %s!",
@@ -227,7 +224,7 @@ def process_file(file_id, src_file):
     def encode(src_path, src_file, res_y):
         # For every variation in the list call video_encode
         # print "encoding {0}".format(variations)
-        if app.config['ENCODING_BACKEND'] == 'zencoder':
+        if current_app.config['ENCODING_BACKEND'] == 'zencoder':
             # Move the source file in place on the remote storage (which can
             # be accessed from zencoder)
             push_to_storage(str(src_file['project']), src_path)
@@ -244,7 +241,7 @@ def process_file(file_id, src_file):
                 pass
             except KeyError:
                 pass
-        elif app.config['ENCODING_BACKEND'] == 'local':
+        elif current_app.config['ENCODING_BACKEND'] == 'local':
            for v in src_file['variations']:
                path = ffmpeg_encode(src_path, v['format'], res_y)
                # Update size data after encoding
@@ -291,9 +288,7 @@ def delete_file(file_item):
     else:
         pass
 
-    from application import app
-
-    files_collection = app.data.driver.db['files']
+    files_collection = current_app.data.driver.db['files']
     # Collect children (variations) of the original file
     children = files_collection.find({'parent': file_item['_id']})
     for child in children:
@@ -306,7 +301,6 @@ def generate_link(backend, file_path, project_id=None, is_public=False):
     """Hook to check the backend of a file resource, to build an appropriate
     link that can be used by the client to retrieve the actual file.
     """
-    from application import app
 
     if backend == 'gcs':
         storage = GoogleCloudStorageBucket(project_id)
@@ -319,7 +313,7 @@ def generate_link(backend, file_path, project_id=None, is_public=False):
             link = None
     elif backend == 'pillar':
         link = url_for('file_storage.index', file_name=file_path, _external=True,
-                       _scheme=app.config['SCHEME'])
+                       _scheme=current_app.config['SCHEME'])
     elif backend == 'cdnsun':
         link = hash_file_path(file_path, None)
     elif backend == 'unittest':
@@ -367,8 +361,6 @@ def _generate_all_links(response, now):
 
     :param now: datetime that reflects 'now', for consistent expiry generation.
""" - from application import app - project_id = str( response['project']) if 'project' in response else None # TODO: add project id to all files backend = response['backend'] @@ -378,7 +370,7 @@ def _generate_all_links(response, now): variation['link'] = generate_link(backend, variation['file_path'], project_id) # Construct the new expiry datetime. - validity_secs = app.config['FILE_LINK_VALIDITY'][backend] + validity_secs = current_app.config['FILE_LINK_VALIDITY'][backend] response['link_expires'] = now + datetime.timedelta(seconds=validity_secs) patch_info = remove_private_keys(response) @@ -410,8 +402,6 @@ def before_deleting_file(item): def on_pre_get_files(_, lookup): - from application import app - # Override the HTTP header, we always want to fetch the document from MongoDB. parsed_req = eve.utils.parse_request('files') parsed_req.if_modified_since = None @@ -421,22 +411,20 @@ def on_pre_get_files(_, lookup): lookup_expired = lookup.copy() lookup_expired['link_expires'] = {'$lte': now} - cursor = app.data.find('files', parsed_req, lookup_expired) + cursor = current_app.data.find('files', parsed_req, lookup_expired) for file_doc in cursor: log.debug('Updating expired links for file %r.', file_doc['_id']) _generate_all_links(file_doc, now) def refresh_links_for_project(project_uuid, chunk_size, expiry_seconds): - from application import app - if chunk_size: log.info('Refreshing the first %i links for project %s', chunk_size, project_uuid) else: log.info('Refreshing all links for project %s', project_uuid) # Retrieve expired links. - files_collection = app.data.driver.db['files'] + files_collection = current_app.data.driver.db['files'] now = datetime.datetime.now(tz=bson.tz_util.utc) expire_before = now + datetime.timedelta(seconds=expiry_seconds) diff --git a/pillar/application/modules/projects.py b/pillar/application/modules/projects.py index 45bca6c5..0c87a2ed 100644 --- a/pillar/application/modules/projects.py +++ b/pillar/application/modules/projects.py @@ -3,7 +3,6 @@ import logging import json from eve.methods.post import post_internal -from eve.methods.put import put_internal from eve.methods.patch import patch_internal from flask import g, Blueprint, request, abort, current_app diff --git a/pillar/application/utils/__init__.py b/pillar/application/utils/__init__.py index f1b560c9..6dee8945 100644 --- a/pillar/application/utils/__init__.py +++ b/pillar/application/utils/__init__.py @@ -17,6 +17,11 @@ def remove_private_keys(document): if key.startswith('_'): del doc_copy[key] + try: + del doc_copy['allowed_methods'] + except KeyError: + pass + return doc_copy diff --git a/pillar/application/utils/activities.py b/pillar/application/utils/activities.py index 22be6d41..210ada4f 100644 --- a/pillar/application/utils/activities.py +++ b/pillar/application/utils/activities.py @@ -1,15 +1,15 @@ from flask import g +from flask import current_app from eve.methods.post import post_internal -from application import app from application.modules.users import gravatar def notification_parse(notification): - activities_collection = app.data.driver.db['activities'] + activities_collection = current_app.data.driver.db['activities'] activities_subscriptions_collection = \ - app.data.driver.db['activities-subscriptions'] - users_collection = app.data.driver.db['users'] - nodes_collection = app.data.driver.db['nodes'] + current_app.data.driver.db['activities-subscriptions'] + users_collection = current_app.data.driver.db['users'] + nodes_collection = current_app.data.driver.db['nodes'] activity = 
diff --git a/pillar/application/utils/activities.py b/pillar/application/utils/activities.py
index 22be6d41..210ada4f 100644
--- a/pillar/application/utils/activities.py
+++ b/pillar/application/utils/activities.py
@@ -1,15 +1,15 @@
 from flask import g
+from flask import current_app
 from eve.methods.post import post_internal
-from application import app
 from application.modules.users import gravatar
 
 
 def notification_parse(notification):
-    activities_collection = app.data.driver.db['activities']
+    activities_collection = current_app.data.driver.db['activities']
     activities_subscriptions_collection = \
-        app.data.driver.db['activities-subscriptions']
-    users_collection = app.data.driver.db['users']
-    nodes_collection = app.data.driver.db['nodes']
+        current_app.data.driver.db['activities-subscriptions']
+    users_collection = current_app.data.driver.db['users']
+    nodes_collection = current_app.data.driver.db['nodes']
     activity = activities_collection.find_one({'_id': notification['activity']})
 
     if activity['object_type'] != 'node':
@@ -83,7 +83,7 @@ def notification_parse(notification):
 
 
 def notification_get_subscriptions(context_object_type, context_object_id, actor_user_id):
-    subscriptions_collection = app.data.driver.db['activities-subscriptions']
+    subscriptions_collection = current_app.data.driver.db['activities-subscriptions']
     lookup = {
         'user': {"$ne": actor_user_id},
         'context_object_type': context_object_type,
@@ -101,7 +101,7 @@ def activity_subscribe(user_id, context_object_type, context_object_id):
     :param context_object_type: hardcoded index, check the notifications/model.py
     :param context_object_id: object id, to be traced with context_object_type_id
     """
-    subscriptions_collection = app.data.driver.db['activities-subscriptions']
+    subscriptions_collection = current_app.data.driver.db['activities-subscriptions']
     lookup = {
         'user': user_id,
         'context_object_type': context_object_type,
diff --git a/pillar/application/utils/algolia.py b/pillar/application/utils/algolia.py
index 58006a8d..473d788b 100644
--- a/pillar/application/utils/algolia.py
+++ b/pillar/application/utils/algolia.py
@@ -1,5 +1,5 @@
 from bson import ObjectId
-from application import app
+from flask import current_app
 from application import algolia_index_users
 from application import algolia_index_nodes
 from application.modules.file_storage import generate_link
@@ -33,10 +33,10 @@ def algolia_index_node_save(node):
             and node['properties']['status'] != 'published':
         return
 
-    projects_collection = app.data.driver.db['projects']
+    projects_collection = current_app.data.driver.db['projects']
    project = projects_collection.find_one({'_id': ObjectId(node['project'])})
 
-    users_collection = app.data.driver.db['users']
+    users_collection = current_app.data.driver.db['users']
    user = users_collection.find_one({'_id': ObjectId(node['user'])})
 
     node_ob = {
@@ -57,7 +57,7 @@ def algolia_index_node_save(node):
     if 'description' in node and node['description']:
         node_ob['description'] = node['description']
     if 'picture' in node and node['picture']:
-        files_collection = app.data.driver.db['files']
+        files_collection = current_app.data.driver.db['files']
         lookup = {'_id': ObjectId(node['picture'])}
         picture = files_collection.find_one(lookup)
         if picture['backend'] == 'gcs':
diff --git a/pillar/application/utils/authentication.py b/pillar/application/utils/authentication.py
index a3629ee8..303e0cca 100644
--- a/pillar/application/utils/authentication.py
+++ b/pillar/application/utils/authentication.py
@@ -11,10 +11,9 @@ from bson import tz_util
 from datetime import datetime
 from flask import g
 from flask import request
+from flask import current_app
 from eve.methods.post import post_internal
 
-from application import app
-
 log = logging.getLogger(__name__)
 
 
@@ -55,7 +54,7 @@ def validate_token():
         db_user, status = blender_id.validate_create_user('', token, oauth_subclient)
     else:
         log.debug("User is already in our database and token hasn't expired yet.")
-        users = app.data.driver.db['users']
+        users = current_app.data.driver.db['users']
         db_user = users.find_one(db_token['user'])
 
     if db_user is None:
@@ -72,7 +71,7 @@ def find_token(token, is_subclient_token=False, **extra_filters):
     """Returns the token document, or None if it doesn't exist (or is expired)."""
 
-    tokens_collection = app.data.driver.db['tokens']
+    tokens_collection = current_app.data.driver.db['tokens']
 
     # TODO: remove expired tokens from collection.
     lookup = {'token': token,
@@ -152,7 +151,7 @@ def make_unique_username(email):
     # Check for min length of username (otherwise validation fails)
     username = "___{0}".format(username) if len(username) < 3 else username
 
-    users = app.data.driver.db['users']
+    users = current_app.data.driver.db['users']
     user_from_username = users.find_one({'username': username})
 
     if not user_from_username:
diff --git a/pillar/application/utils/authorization.py b/pillar/application/utils/authorization.py
index 133e2d72..99eff9dd 100644
--- a/pillar/application/utils/authorization.py
+++ b/pillar/application/utils/authorization.py
@@ -2,7 +2,7 @@ import logging
 import functools
 from flask import g
 from flask import abort
-from application import app
+from flask import current_app
 
 log = logging.getLogger(__name__)
 
@@ -38,7 +38,7 @@ def check_permissions(resource, method, append_allowed_methods=False):
         if type(resource['project']) is dict:
             project = resource['project']
         else:
-            projects_collection = app.data.driver.db['projects']
+            projects_collection = current_app.data.driver.db['projects']
             project = projects_collection.find_one(resource['project'])
         node_type = next(
             (item for item in project['node_types'] if item.get('name') \
@@ -77,6 +77,7 @@ def check_permissions(resource, method, append_allowed_methods=False):
     permission_granted = method in allowed_methods
     if permission_granted:
         if append_allowed_methods:
+            # TODO: rename this field _allowed_methods
             resource['allowed_methods'] = list(set(allowed_methods))
         return
 
@@ -116,3 +117,5 @@ def user_has_role(role):
         return False
 
     return role in current_user['roles']
+
+
diff --git a/pillar/application/utils/cdn.py b/pillar/application/utils/cdn.py
index 5ee18b0d..dac50f7c 100644
--- a/pillar/application/utils/cdn.py
+++ b/pillar/application/utils/cdn.py
@@ -1,31 +1,31 @@
 import datetime
 from hashlib import md5
-from application import app
+from flask import current_app
 
 
 def hash_file_path(file_path, expiry_timestamp=None):
     if not file_path.startswith('/'):
-        file_path = '/' + file_path;
-    service_domain = app.config['CDN_SERVICE_DOMAIN']
-    domain_subfolder = app.config['CDN_CONTENT_SUBFOLDER']
-    asset_url = app.config['CDN_SERVICE_DOMAIN_PROTOCOL'] + \
+        file_path = '/' + file_path
+    service_domain = current_app.config['CDN_SERVICE_DOMAIN']
+    domain_subfolder = current_app.config['CDN_CONTENT_SUBFOLDER']
+    asset_url = current_app.config['CDN_SERVICE_DOMAIN_PROTOCOL'] + \
                 '://' + \
                 service_domain + \
                 domain_subfolder + \
                 file_path
 
-    if app.config['CDN_USE_URL_SIGNING']:
+    if current_app.config['CDN_USE_URL_SIGNING']:
 
-        url_signing_key = app.config['CDN_URL_SIGNING_KEY']
-        hash_string = domain_subfolder + file_path + url_signing_key;
+        url_signing_key = current_app.config['CDN_URL_SIGNING_KEY']
+        hash_string = domain_subfolder + file_path + url_signing_key
 
         if not expiry_timestamp:
             expiry_timestamp = datetime.datetime.now() + datetime.timedelta(hours=24)
             expiry_timestamp = expiry_timestamp.strftime('%s')
 
-        hash_string = expiry_timestamp + hash_string;
+        hash_string = expiry_timestamp + hash_string
 
-        expiry_timestamp = "," + str(expiry_timestamp);
+        expiry_timestamp = "," + str(expiry_timestamp)
 
         hashed_file_path = md5(hash_string).digest().encode('base64')[:-1]
         hashed_file_path = hashed_file_path.replace('+', '-')
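
The cdn.py hunk above only swaps the config access and drops stray trailing semicolons; the URL-signing scheme itself is unchanged. It concatenates the expiry timestamp, the CDN content subfolder, the file path and the shared signing key, takes the MD5 of that string, base64-encodes the digest and makes it URL-friendly by replacing '+' with '-'. A standalone sketch of just that computation (the function and parameter names here are made up, and the assembly of the final signed URL continues past the lines shown in the hunk):

import base64
from hashlib import md5


def cdn_signature(domain_subfolder, file_path, url_signing_key, expiry_unix_ts):
    # Same ordering as hash_file_path: expiry + subfolder + path + signing key.
    hash_string = str(expiry_unix_ts) + domain_subfolder + file_path + url_signing_key
    digest = base64.b64encode(md5(hash_string.encode('utf-8')).digest()).decode('ascii')
    # The hunk then swaps '+' for '-' so the digest can be embedded in a URL.
    return digest.replace('+', '-')
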
diff --git a/pillar/application/utils/encoding.py b/pillar/application/utils/encoding.py
index bd86d47f..9522a6a5 100644
--- a/pillar/application/utils/encoding.py
+++ b/pillar/application/utils/encoding.py
@@ -1,7 +1,9 @@
 import os
+
+from flask import current_app
 from zencoder import Zencoder
+
 from application import encoding_service_client
-from application import app
 
 
 class Encoder:
@@ -20,7 +22,7 @@ class Encoder:
         storage_base = "gcs://{0}/_/".format(src_file['project'])
         file_input = os.path.join(storage_base, src_file['file_path'])
         outputs = []
-        options = dict(notifications=app.config['ZENCODER_NOTIFICATIONS_URL'])
+        options = dict(notifications=current_app.config['ZENCODER_NOTIFICATIONS_URL'])
         for v in src_file['variations']:
             outputs.append({
                 'format': v['format'],
diff --git a/pillar/application/utils/gcs.py b/pillar/application/utils/gcs.py
index 697b5c92..551d768f 100644
--- a/pillar/application/utils/gcs.py
+++ b/pillar/application/utils/gcs.py
@@ -5,7 +5,7 @@ import bugsnag
 from bson import ObjectId
 from gcloud.storage.client import Client
 from gcloud.exceptions import NotFound
-from application import app
+from flask import current_app
 
 
 class GoogleCloudStorageBucket(object):
@@ -153,7 +153,7 @@ def update_file_name(item):
         return "{0}{1}{2}".format(root, size, ext)
 
     def _update_name(item, file_id):
-        files_collection = app.data.driver.db['files']
+        files_collection = current_app.data.driver.db['files']
         f = files_collection.find_one({'_id': ObjectId(file_id)})
         status = item['properties']['status']
         if f and f['backend'] == 'gcs' and status != 'processing':
diff --git a/pillar/application/utils/imaging.py b/pillar/application/utils/imaging.py
index e8e5e795..beb8f56f 100644
--- a/pillar/application/utils/imaging.py
+++ b/pillar/application/utils/imaging.py
@@ -2,7 +2,7 @@ import os
 import json
 import subprocess
 from PIL import Image
-from application import app
+from flask import current_app
 
 
 def generate_local_thumbnails(src, return_image_stats=False):
@@ -15,7 +15,7 @@ def generate_local_thumbnails(src, return_image_stats=False):
     resolution, format and path of the thumbnailed image
 
     """
-    thumbnail_settings = app.config['UPLOADS_LOCAL_STORAGE_THUMBNAILS']
+    thumbnail_settings = current_app.config['UPLOADS_LOCAL_STORAGE_THUMBNAILS']
     thumbnails = {}
     for size, settings in thumbnail_settings.iteritems():
         root, ext = os.path.splitext(src)
@@ -118,7 +118,7 @@ def get_video_data(filepath):
     """Return video duration and resolution given an input file path"""
     outdata = None
     ffprobe_inspect = [
-        app.config['BIN_FFPROBE'],
+        current_app.config['BIN_FFPROBE'],
         '-loglevel', 'error',
         '-show_streams',
@@ -195,13 +195,13 @@ def ffmpeg_encode(src, format, res_y=720):
     dst = "{0}-{1}p.{2}".format(dst[0], res_y, format)
     args.append(dst)
     print "Encoding {0} to {1}".format(src, format)
-    returncode = subprocess.call([app.config['BIN_FFMPEG']] + args)
+    returncode = subprocess.call([current_app.config['BIN_FFMPEG']] + args)
     if returncode == 0:
         print "Successfully encoded {0}".format(dst)
     else:
         print "Error during encode"
         print "Code: {0}".format(returncode)
-        print "Command: {0}".format(app.config['BIN_FFMPEG'] + " " + " ".join(args))
+        print "Command: {0}".format(current_app.config['BIN_FFMPEG'] + " " + " ".join(args))
         dst = None
     # return path of the encoded video
     return dst
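
The next file is the clearest example of why this refactor matters: utils/storage.py used to resolve BIN_FFPROBE, BIN_FFMPEG, BIN_SSH and BIN_RSYNC from app.config at import time, so merely importing the module required a fully configured application object. After the change, rsync() looks the binaries up through current_app when it actually runs, inside an application context. A minimal before/after sketch (the helper name below is invented; only the BIN_RSYNC config key comes from the diff):

from flask import current_app

# Before (module level): importing the module without a configured app fails.
#     from application import app
#     BIN_RSYNC = app.config['BIN_RSYNC']


def build_rsync_command(source, destination):
    # After: the lookup is deferred until the function runs, which needs an
    # active application context but no import-time app object.
    bin_rsync = current_app.config['BIN_RSYNC']
    return [bin_rsync, '--verbose', '--ignore-existing', source, destination]
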
diff --git a/pillar/application/utils/storage.py b/pillar/application/utils/storage.py
index 218729ef..cbc7f35e 100644
--- a/pillar/application/utils/storage.py
+++ b/pillar/application/utils/storage.py
@@ -1,13 +1,8 @@
 import os
 import subprocess
-# import logging
-from application import app
-from application.utils.gcs import GoogleCloudStorageBucket
 
-BIN_FFPROBE = app.config['BIN_FFPROBE']
-BIN_FFMPEG = app.config['BIN_FFMPEG']
-BIN_SSH = app.config['BIN_SSH']
-BIN_RSYNC = app.config['BIN_RSYNC']
+from flask import current_app
+from application.utils.gcs import GoogleCloudStorageBucket
 
 
 def get_sizedata(filepath):
@@ -18,13 +13,16 @@ def get_sizedata(filepath):
     return outdata
 
 
 def rsync(path, remote_dir=''):
+    BIN_SSH = current_app.config['BIN_SSH']
+    BIN_RSYNC = current_app.config['BIN_RSYNC']
+
     DRY_RUN = False
     arguments = ['--verbose', '--ignore-existing', '--recursive', '--human-readable']
-    logs_path = app.config['CDN_SYNC_LOGS']
-    storage_address = app.config['CDN_STORAGE_ADDRESS']
-    user = app.config['CDN_STORAGE_USER']
-    rsa_key_path = app.config['CDN_RSA_KEY']
-    known_hosts_path = app.config['CDN_KNOWN_HOSTS']
+    logs_path = current_app.config['CDN_SYNC_LOGS']
+    storage_address = current_app.config['CDN_STORAGE_ADDRESS']
+    user = current_app.config['CDN_STORAGE_USER']
+    rsa_key_path = current_app.config['CDN_RSA_KEY']
+    known_hosts_path = current_app.config['CDN_KNOWN_HOSTS']
     if DRY_RUN:
         arguments.append('--dry-run')
diff --git a/pillar/settings.py b/pillar/settings.py
index dd4024c7..628352b9 100644
--- a/pillar/settings.py
+++ b/pillar/settings.py
@@ -780,3 +780,4 @@ MONGO_PORT = os.environ.get('MONGO_PORT', 27017)
 MONGO_DBNAME = os.environ.get('MONGO_DBNAME', 'eve')
 CACHE_EXPIRES = 60
 HATEOAS = False
+UPSERT_ON_PUT = False  # do not create new document on PUT of non-existent URL.
diff --git a/requirements.txt b/requirements.txt
index aea88870..97dbff39 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,8 +1,9 @@
+# Primary requirements
 algoliasearch==1.8.0
 blinker==1.4
 bugsnag==2.3.1
-Cerberus==0.9.1
-Eve==0.5.3
+Cerberus==0.9.2
+Eve==0.6.3
 Events==0.2.1
 Flask-Script==2.0.5
 flup==1.0.2
@@ -18,11 +19,37 @@ pycrypto==2.6.1
 pyOpenSSL==0.15.1
 requests==2.9.1
 rsa==3.3
-simplejson==3.8.1
+simplejson==3.8.2
 WebOb==1.5.0
 wheel==0.24.0
 zencoder==0.6.5
 
-# development requirements
+# Development requirements
 pytest==2.9.1
 responses==0.5.1
+
+# Secondary requirements
+Flask==0.10.1
+Flask-PyMongo==0.4.1
+Jinja2==2.8
+Werkzeug==0.11.3
+argparse==1.2.1
+cffi==1.6.0
+cookies==2.2.1
+cryptography==1.3.1
+enum34==1.1.3
+funcsigs==1.0.1
+googleapis-common-protos==1.1.0
+ipaddress==1.0.16
+itsdangerous==0.24
+mock==2.0.0
+oauth2client==2.0.2
+pbr==1.9.1
+protobuf==3.0.0b2.post2
+protorpc==0.11.1
+py==1.4.31
+pyasn1==0.1.9
+pyasn1-modules==0.0.8
+pymongo==3.2.2
+six==1.10.0
+wsgiref==0.1.2
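
The Eve 0.5.3 to 0.6.3 bump is tied to the test changes below: as the new test comment notes, the response to a POST carries only the _xxx meta fields rather than the whole document, so the tests follow the Location header with a GET before asserting on fields such as name. The UPSERT_ON_PUT = False line added to settings.py likewise keeps a PUT against a non-existent URL from quietly creating a new document. The POST-then-GET pattern in isolation (a method-body sketch; json, self._create_project and self.client come from the surrounding test module):

def test_create_then_fetch_project(self):
    resp = self._create_project(u'Prøject El Niño', 'token')

    # The POST response only contains meta fields such as _id and _etag.
    project_info = json.loads(resp.data.decode('utf-8'))
    project_id = project_info['_id']

    # Fetch the complete document before asserting on its real fields.
    resp = self.client.get(resp.headers['Location'])
    project = json.loads(resp.data.decode('utf-8'))
    self.assertEqual(project_id, project['_id'])
    self.assertEqual(u'Prøject El Niño', project['name'])
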
diff --git a/tests/test_project_management.py b/tests/test_project_management.py
index ace05782..ff2f5649 100644
--- a/tests/test_project_management.py
+++ b/tests/test_project_management.py
@@ -41,15 +41,21 @@ class ProjectCreationTest(AbstractProjectTest):
     def test_project_creation_good_role(self):
         user_id = self._create_user_with_token([u'subscriber'], 'token')
         resp = self._create_project(u'Prøject El Niño', 'token')
-        self.assertEqual(201, resp.status_code)
 
-        project = json.loads(resp.data.decode('utf-8'))
-        project_id = project['_id']
+        # The response of a POST is not the entire document, just some _xxx fields.
+        project_info = json.loads(resp.data.decode('utf-8'))
+        project_id = project_info['_id']
 
         # Test that the Location header contains the location of the project document.
         self.assertEqual('http://localhost/projects/%s' % project_id,
                          resp.headers['Location'])
 
+        # Actually get the project.
+        resp = self.client.get(resp.headers['Location'])
+        project = json.loads(resp.data.decode('utf-8'))
+        project_id = project['_id']
+
         # Check some of the more complex/interesting fields.
         self.assertEqual(u'Prøject El Niño', project['name'])
         self.assertEqual(str(user_id), project['user'])
@@ -82,8 +88,11 @@ class ProjectEditTest(AbstractProjectTest):
         from application.utils import remove_private_keys, PillarJSONEncoder
         dumps = functools.partial(json.dumps, cls=PillarJSONEncoder)
 
-        project = self._create_user_and_project([u'subscriber'])
-        project_url = '/projects/%(_id)s' % project
+        project_info = self._create_user_and_project([u'subscriber'])
+        project_url = '/projects/%(_id)s' % project_info
+
+        resp = self.client.get(project_url)
+        project = json.loads(resp.data.decode('utf-8'))
 
         # Create another user we can try and assign the project to.
         other_user_id = 'f00dd00df00dd00df00dd00d'
@@ -133,8 +142,11 @@ class ProjectEditTest(AbstractProjectTest):
         from application.utils import remove_private_keys, PillarJSONEncoder
         dumps = functools.partial(json.dumps, cls=PillarJSONEncoder)
 
-        project = self._create_user_and_project([u'subscriber', u'admin'])
-        project_url = '/projects/%(_id)s' % project
+        project_info = self._create_user_and_project([u'subscriber', u'admin'])
+        project_url = '/projects/%(_id)s' % project_info
+
+        resp = self.client.get(project_url)
+        project = json.loads(resp.data.decode('utf-8'))
 
         # Create another user we can try and assign the project to.
         other_user_id = 'f00dd00df00dd00df00dd00d'
@@ -152,6 +164,7 @@ class ProjectEditTest(AbstractProjectTest):
         put_project['category'] = 'software'
         put_project['user'] = other_user_id
+
         resp = self.client.put(project_url,
                                data=dumps(put_project),
                                headers={'Authorization': self.make_header('token'),