Ran 2to3 on pillar + some manual fixups
The 'manual fixups' are:

- Incorrect use of dict.items() where dict.iteritems() was meant: 2to3 turns these into list(dict.items()), which I changed back to plain dict.items().
- Removal of 'from __future__ import' lines, which 2to3 changes into empty lines; I removed those empty lines.
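To illustrate the first fixup, a minimal standalone sketch (the dict name and values are invented, not taken from this commit):

    thumbnail_settings = {'s': 90, 'm': 320, 'l': 1024}  # invented example data

    # Python 2 original, building an unnecessary list:
    #     for size, pixels in thumbnail_settings.items(): ...
    # 2to3 output, preserving the Python 2 list behaviour:
    #     for size, pixels in list(thumbnail_settings.items()): ...
    # Manual fixup: iteration only needs the view, so plain .items() suffices.
    for size, pixels in thumbnail_settings.items():
        print(size, pixels)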
@@ -18,7 +18,7 @@ log = logging.getLogger(__name__)
 HOME_PROJECT_USERS = set()

 # Users with any of these roles will get full write access to their home project.
-HOME_PROJECT_WRITABLE_USERS = {u'subscriber', u'demo'}
+HOME_PROJECT_WRITABLE_USERS = {'subscriber', 'demo'}

 HOME_PROJECT_DESCRIPTION = ('# Your home project\n\n'
 'This is your home project. It allows synchronisation '
@@ -30,7 +30,7 @@ HOME_PROJECT_SUMMARY = 'This is your home project. Here you can sync your Blende
 # 'as a pastebin for text, images and other assets, and '
 # 'allows synchronisation of your Blender settings.')
 # HOME_PROJECT_SUMMARY = 'This is your home project. Pastebin and Blender settings sync in one!'
-SYNC_GROUP_NODE_NAME = u'Blender Sync'
+SYNC_GROUP_NODE_NAME = 'Blender Sync'
 SYNC_GROUP_NODE_DESC = ('The [Blender Cloud Addon](https://cloud.blender.org/services'
 '#blender-addon) will synchronize your Blender settings here.')

@@ -135,8 +135,8 @@ def create_home_project(user_id, write_access):
 # This allows people to comment on shared images and see comments.
 node_type_comment = assign_permissions(
 node_type_comment,
-subscriber_methods=[u'GET', u'POST'],
-world_methods=[u'GET'])
+subscriber_methods=['GET', 'POST'],
+world_methods=['GET'])

 project['node_types'] = [
 node_type_group,
@@ -215,7 +215,7 @@ def home_project():
 write_access = write_access_with_roles(roles)
 create_home_project(user_id, write_access)

-resp, _, _, status, _ = get('projects', category=u'home', user=user_id)
+resp, _, _, status, _ = get('projects', category='home', user=user_id)
 if status != 200:
 return utils.jsonify(resp), status

@@ -248,8 +248,8 @@ def home_project_permissions(write_access):
 """

 if write_access:
-return [u'GET', u'PUT', u'POST', u'DELETE']
-return [u'GET']
+return ['GET', 'PUT', 'POST', 'DELETE']
+return ['GET']


 def has_home_project(user_id):
@@ -86,7 +86,7 @@ def upsert_user(db_user, blender_id_user_id):
 :type: (ObjectId, int)
 """

-if u'subscriber' in db_user.get('groups', []):
+if 'subscriber' in db_user.get('groups', []):
 log.error('Non-ObjectID string found in user.groups: %s', db_user)
 raise wz_exceptions.InternalServerError('Non-ObjectID string found in user.groups: %s' % db_user)

@@ -117,8 +117,8 @@ def upsert_user(db_user, blender_id_user_id):
 if status == 422:
 # Probably non-unique username, so retry a few times with different usernames.
 log.info('Error creating new user: %s', r)
-username_issue = r.get('_issues', {}).get(u'username', '')
-if u'not unique' in username_issue:
+username_issue = r.get('_issues', {}).get('username', '')
+if 'not unique' in username_issue:
 # Retry
 db_user['username'] = authentication.make_unique_username(db_user['email'])
 continue
@@ -61,13 +61,13 @@ class ValidateCustomFields(Validator):
 Only validates the dict values, not the keys. Modifies the given dict in-place.
 """

-assert dict_valueschema[u'type'] == u'dict'
+assert dict_valueschema['type'] == 'dict'
 assert isinstance(dict_property, dict)

 for key, val in dict_property.items():
-item_schema = {u'item': dict_valueschema}
-item_prop = {u'item': val}
-dict_property[key] = self.convert_properties(item_prop, item_schema)[u'item']
+item_schema = {'item': dict_valueschema}
+item_prop = {'item': val}
+dict_property[key] = self.convert_properties(item_prop, item_schema)['item']

 def _validate_valid_properties(self, valid_properties, field, value):
 from pillar.api.utils import project_get_node_type
@@ -723,7 +723,7 @@ users = {

 # By default don't include the 'auth' field. It can still be obtained
 # using projections, though, so we block that in hooks.
-'datasource': {'projection': {u'auth': 0}},
+'datasource': {'projection': {'auth': 0}},

 'schema': users_schema
 }
@@ -222,7 +222,7 @@ def process_file(gcs, file_id, local_file):
 mime_category, src_file['format'] = src_file['content_type'].split('/', 1)

 # Prevent video handling for non-admins.
-if not user_has_role(u'admin') and mime_category == 'video':
+if not user_has_role('admin') and mime_category == 'video':
 if src_file['format'].startswith('x-'):
 xified = src_file['format']
 else:
@@ -29,7 +29,7 @@ def change_file_storage_backend(file_id, dest_backend):
 Files on the original backend are not deleted automatically.
 """

-dest_backend = unicode(dest_backend)
+dest_backend = str(dest_backend)
 file_id = ObjectId(file_id)

 # Fetch file document
@@ -87,7 +87,7 @@ def generate_and_store_token(user_id, days=15, prefix=''):


 def hash_password(password, salt):
-if isinstance(salt, unicode):
+if isinstance(salt, str):
 salt = salt.encode('utf-8')
 encoded_password = base64.b64encode(hashlib.sha256(password).digest())
 return bcrypt.hashpw(encoded_password, salt)
@@ -1,7 +1,7 @@
 import base64
 import functools
 import logging
-import urlparse
+import urllib.parse

 import pymongo.errors
 import rsa.randnum
@@ -20,7 +20,7 @@ from pillar.api.utils.gcs import update_file_name

 log = logging.getLogger(__name__)
 blueprint = Blueprint('nodes_api', __name__)
-ROLES_FOR_SHARING = {u'subscriber', u'demo'}
+ROLES_FOR_SHARING = {'subscriber', 'demo'}


 def only_for_node_type_decorator(*required_node_type_names):
@@ -138,7 +138,7 @@ def make_world_gettable(node):
 log.debug('Ensuring the world can read node %s', node_id)

 world_perms = set(node.get('permissions', {}).get('world', []))
-world_perms.add(u'GET')
+world_perms.add('GET')
 world_perms = list(world_perms)

 result = nodes_coll.update_one({'_id': node_id},
@@ -164,7 +164,7 @@ def create_short_code(node):
 def short_link_info(short_code):
 """Returns the short link info in a dict."""

-short_link = urlparse.urljoin(current_app.config['SHORT_LINK_BASE_URL'], short_code)
+short_link = urllib.parse.urljoin(current_app.config['SHORT_LINK_BASE_URL'], short_code)

 return {
 'short_code': short_code,
@@ -349,7 +349,7 @@ def node_set_default_picture(node, original=None):
 # Find the colour map, defaulting to the first image map available.
 image_file_id = None
 for image in props.get('files', []):
-if image_file_id is None or image.get('map_type') == u'color':
+if image_file_id is None or image.get('map_type') == 'color':
 image_file_id = image.get('file')
 else:
 log.debug('Not setting default picture on node type %s content type %s',
@@ -11,20 +11,20 @@ from pillar.api.utils import authorization, authentication, jsonify
 from . import register_patch_handler

 log = logging.getLogger(__name__)
-ROLES_FOR_COMMENT_VOTING = {u'subscriber', u'demo'}
-COMMENT_VOTING_OPS = {u'upvote', u'downvote', u'revoke'}
-VALID_COMMENT_OPERATIONS = COMMENT_VOTING_OPS.union({u'edit'})
+ROLES_FOR_COMMENT_VOTING = {'subscriber', 'demo'}
+COMMENT_VOTING_OPS = {'upvote', 'downvote', 'revoke'}
+VALID_COMMENT_OPERATIONS = COMMENT_VOTING_OPS.union({'edit'})


-@register_patch_handler(u'comment')
+@register_patch_handler('comment')
 def patch_comment(node_id, patch):
 assert_is_valid_patch(node_id, patch)
 user_id = authentication.current_user_id()

-if patch[u'op'] in COMMENT_VOTING_OPS:
+if patch['op'] in COMMENT_VOTING_OPS:
 result, node = vote_comment(user_id, node_id, patch)
 else:
-assert patch[u'op'] == u'edit', 'Invalid patch operation %s' % patch[u'op']
+assert patch['op'] == 'edit', 'Invalid patch operation %s' % patch['op']
 result, node = edit_comment(user_id, node_id, patch)

 return jsonify({'_status': 'OK',
@@ -95,9 +95,9 @@ def vote_comment(user_id, node_id, patch):
 return update

 actions = {
-u'upvote': upvote,
-u'downvote': downvote,
-u'revoke': revoke,
+'upvote': upvote,
+'downvote': downvote,
+'revoke': revoke,
 }
 action = actions[patch['op']]
 mongo_update = action()
@@ -141,7 +141,7 @@ def edit_comment(user_id, node_id, patch):
 log.warning('User %s wanted to patch non-existing node %s' % (user_id, node_id))
 raise wz_exceptions.NotFound('Node %s not found' % node_id)

-if node['user'] != user_id and not authorization.user_has_role(u'admin'):
+if node['user'] != user_id and not authorization.user_has_role('admin'):
 raise wz_exceptions.Forbidden('You can only edit your own comments.')

 # Use Eve to PATCH this node, as that also updates the etag.
@@ -173,8 +173,8 @@ def assert_is_valid_patch(node_id, patch):
 raise wz_exceptions.BadRequest("PATCH should have a key 'op' indicating the operation.")

 if op not in VALID_COMMENT_OPERATIONS:
-raise wz_exceptions.BadRequest(u'Operation should be one of %s',
-u', '.join(VALID_COMMENT_OPERATIONS))
+raise wz_exceptions.BadRequest('Operation should be one of %s',
+', '.join(VALID_COMMENT_OPERATIONS))

 if op not in COMMENT_VOTING_OPS:
 # We can't check here, we need the node owner for that.
@@ -28,7 +28,7 @@ def before_inserting_projects(items):
 """

 # Allow admin users to do whatever they want.
-if user_has_role(u'admin'):
+if user_has_role('admin'):
 return

 for item in items:
@@ -70,7 +70,7 @@ def protect_sensitive_fields(document, original):
 """When not logged in as admin, prevents update to certain fields."""

 # Allow admin users to do whatever they want.
-if user_has_role(u'admin'):
+if user_has_role('admin'):
 return

 def revert(name):
@@ -16,7 +16,7 @@ blueprint_api = Blueprint('projects_api', __name__)


 @blueprint_api.route('/create', methods=['POST'])
-@authorization.require_login(require_roles={u'admin', u'subscriber', u'demo'})
+@authorization.require_login(require_roles={'admin', 'subscriber', 'demo'})
 def create_project(overrides=None):
 """Creates a new project."""

@@ -65,7 +65,7 @@ def project_manage_users():
 project = projects_collection.find_one({'_id': project_id})

 # Check if the current_user is owner of the project, or removing themselves.
-if not authorization.user_has_role(u'admin'):
+if not authorization.user_has_role('admin'):
 remove_self = target_user_id == current_user_id and action == 'remove'
 if project['user'] != current_user_id and not remove_self:
 utils.abort_with_error(403)
@@ -13,7 +13,7 @@ blueprint = Blueprint('service', __name__)
 log = logging.getLogger(__name__)
 signal_user_changed_role = blinker.NamedSignal('badger:user_changed_role')

-ROLES_WITH_GROUPS = {u'admin', u'demo', u'subscriber'}
+ROLES_WITH_GROUPS = {'admin', 'demo', 'subscriber'}

 # Map of role name to group ID, for the above groups.
 role_to_group_id = {}
@@ -38,7 +38,7 @@ def fetch_role_to_group_id_map():


 @blueprint.route('/badger', methods=['POST'])
-@authorization.require_login(require_roles={u'service', u'badger'}, require_all=True)
+@authorization.require_login(require_roles={'service', 'badger'}, require_all=True)
 def badger():
 if request.mimetype != 'application/json':
 log.debug('Received %s instead of application/json', request.mimetype)
@@ -117,7 +117,7 @@ def do_badger(action, user_email, role):


 @blueprint.route('/urler/<project_id>', methods=['GET'])
-@authorization.require_login(require_roles={u'service', u'urler'}, require_all=True)
+@authorization.require_login(require_roles={'service', 'urler'}, require_all=True)
 def urler(project_id):
 """Returns the URL of any project."""

@@ -189,7 +189,7 @@ def create_service_account(email, roles, service, update_existing=None):
 raise ValueError('User %s already exists' % email)

 # Compute the new roles, and assign.
-roles = list(set(roles).union({u'service'}).union(user['roles']))
+roles = list(set(roles).union({'service'}).union(user['roles']))
 user['roles'] = list(roles)

 # Let the caller perform any required updates.
@@ -204,7 +204,7 @@ def create_service_account(email, roles, service, update_existing=None):
 expected_status = 200
 else:
 # Create a user with the correct roles.
-roles = list(set(roles).union({u'service'}))
+roles = list(set(roles).union({'service'}))
 user = {'username': email,
 'groups': [],
 'roles': roles,
@@ -60,7 +60,7 @@ def check_user_access(request, lookup):
 current_user_id = current_user['user_id'] if current_user else None

 # Admins can do anything and get everything, except the 'auth' block.
-if user_has_role(u'admin'):
+if user_has_role('admin'):
 return

 if not lookup and not current_user:
@@ -74,7 +74,7 @@ def check_user_access(request, lookup):
 def check_put_access(request, lookup):
 """Only allow PUT to the current user, or all users if admin."""

-if user_has_role(u'admin'):
+if user_has_role('admin'):
 return

 current_user = g.get('current_user')
@@ -94,7 +94,7 @@ def after_fetching_user(user):
 current_user_id = current_user['user_id'] if current_user else None

 # Admins can do anything and get everything, except the 'auth' block.
-if user_has_role(u'admin'):
+if user_has_role('admin'):
 return

 # Only allow full access to the current user.
@@ -1,7 +1,7 @@
 import copy
 import hashlib
 import json
-import urllib
+import urllib.request, urllib.parse, urllib.error

 import datetime
 import functools
@@ -103,7 +103,7 @@ def skip_when_testing(func):
 @functools.wraps(func)
 def wrapper(*args, **kwargs):
 if current_app.config['TESTING']:
-log.debug('Skipping call to %s(...) due to TESTING', func.func_name)
+log.debug('Skipping call to %s(...) due to TESTING', func.__name__)
 return None

 return func(*args, **kwargs)
@@ -145,19 +145,18 @@ def gravatar(email, size=64):
 parameters = {'s': str(size), 'd': 'mm'}
 return "https://www.gravatar.com/avatar/" + \
 hashlib.md5(str(email)).hexdigest() + \
-"?" + urllib.urlencode(parameters)
+"?" + urllib.parse.urlencode(parameters)



 class MetaFalsey(type):
-def __nonzero__(cls):
+def __bool__(cls):
 return False
-__bool__ = __nonzero__ # for Python 3


-class DoesNotExist(object):
+class DoesNotExist(object, metaclass=MetaFalsey):
 """Returned as value by doc_diff if a value does not exist."""
-__metaclass__ = MetaFalsey


 def doc_diff(doc1, doc2, falsey_is_equal=True):
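For context on the hunk above, a standalone sketch (runnable on its own, not part of the patch) of the Python 3 metaclass spelling it moves to:

    class MetaFalsey(type):
        # Python 3 calls __bool__ where Python 2 called __nonzero__.
        def __bool__(cls):
            return False


    # Python 3 takes the metaclass in the class header instead of a
    # __metaclass__ attribute in the class body, which Python 3 ignores.
    class DoesNotExist(object, metaclass=MetaFalsey):
        """Returned as value by doc_diff if a value does not exist."""


    print(bool(DoesNotExist))  # False: the class object itself is falsey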
@@ -175,7 +174,7 @@ def doc_diff(doc1, doc2, falsey_is_equal=True):
 """

 for key in set(doc1.keys()).union(set(doc2.keys())):
-if isinstance(key, basestring) and key[0] == u'_':
+if isinstance(key, str) and key[0] == '_':
 continue

 val1 = doc1.get(key, DoesNotExist)
@@ -124,7 +124,7 @@ def store_token(user_id, token, token_expiry, oauth_subclient_id=False):
 :returns: the token document from MongoDB
 """

-assert isinstance(token, (str, unicode)), 'token must be string type, not %r' % type(token)
+assert isinstance(token, str), 'token must be string type, not %r' % type(token)

 token_data = {
 'user': user_id,
@@ -238,22 +238,22 @@ def merge_permissions(*args):
 asdict0 = {permission[field_name]: permission['methods'] for permission in from0}
 asdict1 = {permission[field_name]: permission['methods'] for permission in from1}

-keys = set(asdict0.keys() + asdict1.keys())
+keys = set(asdict0.keys()).union(set(asdict1.keys()))
 for key in maybe_sorted(keys):
 methods0 = asdict0.get(key, [])
 methods1 = asdict1.get(key, [])
 methods = maybe_sorted(set(methods0).union(set(methods1)))
-effective.setdefault(plural_name, []).append({field_name: key, u'methods': methods})
+effective.setdefault(plural_name, []).append({field_name: key, 'methods': methods})

-merge(u'user')
-merge(u'group')
+merge('user')
+merge('group')

 # Gather permissions for world
 world0 = args[0].get('world', [])
 world1 = args[1].get('world', [])
 world_methods = set(world0).union(set(world1))
 if world_methods:
-effective[u'world'] = maybe_sorted(world_methods)
+effective['world'] = maybe_sorted(world_methods)

 # Recurse for longer merges
 if len(args) > 2:
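A standalone sketch of why the keys lookup above had to change (the permission dicts are invented for illustration):

    # In Python 2, dict.keys() returned lists, so they could be concatenated
    # with '+'. In Python 3 they are view objects and '+' raises TypeError,
    # hence the rewrite to a set union.
    asdict0 = {'group-a': ['GET'], 'group-b': ['GET', 'POST']}   # invented data
    asdict1 = {'group-b': ['DELETE'], 'group-c': ['GET']}        # invented data

    keys = set(asdict0.keys()).union(set(asdict1.keys()))
    print(sorted(keys))  # ['group-a', 'group-b', 'group-c']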
@@ -380,4 +380,4 @@ def user_matches_roles(require_roles=set(),
 def is_admin(user):
 """Returns True iff the given user has the admin role."""

-return user_has_role(u'admin', user)
+return user_has_role('admin', user)
@@ -173,7 +173,7 @@ class GoogleCloudStorageBucket(Bucket):
 """Set the ContentDisposition metadata so that when a file is downloaded
 it has a human-readable name.
 """
-blob.content_disposition = u'attachment; filename="{0}"'.format(name)
+blob.content_disposition = 'attachment; filename="{0}"'.format(name)
 blob.patch()

 def copy_blob(self, blob, to_bucket):
@@ -215,11 +215,11 @@ def update_file_name(node):
 if node['properties'].get('status', '') == 'processing':
 return

-def _format_name(name, override_ext, size=None, map_type=u''):
+def _format_name(name, override_ext, size=None, map_type=''):
 root, _ = os.path.splitext(name)
-size = u'-{}'.format(size) if size else u''
-map_type = u'-{}'.format(map_type) if map_type else u''
-return u'{}{}{}{}'.format(root, size, map_type, override_ext)
+size = '-{}'.format(size) if size else ''
+map_type = '-{}'.format(map_type) if map_type else ''
+return '{}{}{}{}'.format(root, size, map_type, override_ext)

 def _update_name(file_id, file_props):
 files_collection = current_app.data.driver.db['files']
@@ -229,7 +229,7 @@ def update_file_name(node):
 return

 # For textures -- the map type should be part of the name.
-map_type = file_props.get('map_type', u'')
+map_type = file_props.get('map_type', '')

 storage = GoogleCloudStorageBucket(str(node['project']))
 blob = storage.Get(file_doc['file_path'], to_dict=False)
@@ -21,7 +21,7 @@ def generate_local_thumbnails(name_base, src):
 save_to_base, _ = os.path.splitext(src)
 name_base, _ = os.path.splitext(name_base)

-for size, settings in thumbnail_settings.iteritems():
+for size, settings in thumbnail_settings.items():
 dst = '{0}-{1}{2}'.format(save_to_base, size, '.jpg')
 name = '{0}-{1}{2}'.format(name_base, size, '.jpg')

@@ -143,7 +143,7 @@ def get_video_data(filepath):
 res_y=video_stream['height'],
 )
 if video_stream['sample_aspect_ratio'] != '1:1':
-print '[warning] Pixel aspect ratio is not square!'
+print('[warning] Pixel aspect ratio is not square!')

 return outdata

@@ -190,14 +190,14 @@ def ffmpeg_encode(src, format, res_y=720):
 dst = os.path.splitext(src)
 dst = "{0}-{1}p.{2}".format(dst[0], res_y, format)
 args.append(dst)
-print "Encoding {0} to {1}".format(src, format)
+print("Encoding {0} to {1}".format(src, format))
 returncode = subprocess.call([current_app.config['BIN_FFMPEG']] + args)
 if returncode == 0:
-print "Successfully encoded {0}".format(dst)
+print("Successfully encoded {0}".format(dst))
 else:
-print "Error during encode"
-print "Code: {0}".format(returncode)
-print "Command: {0}".format(current_app.config['BIN_FFMPEG'] + " " + " ".join(args))
+print("Error during encode")
+print("Code: {0}".format(returncode))
+print("Command: {0}".format(current_app.config['BIN_FFMPEG'] + " " + " ".join(args)))
 dst = None
 # return path of the encoded video
 return dst