Cleanup: pep8

commit 53cbb0fa55
parent 7f57c74f02
2014-11-05 10:42:59 +01:00
7 changed files with 12 additions and 21 deletions
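Most of the hunks below only add or remove blank lines so the spacing matches PEP 8 (one blank line before a method, two blank lines around top-level classes and functions); the only non-whitespace change visible in these hunks drops a stray space before a dict colon and switches the value to double quotes. As a rough sketch of the spacing PEP 8 asks for, using the "fake module" class pattern that appears in several of these files (illustrative only, not copied verbatim from the changed code):

# Sketch: PEP 8 blank-line spacing around the "fake module" pattern.
class bam_config:
    # fake module
    __slots__ = ()

    def __new__(cls, *args, **kwargs):
        raise RuntimeError("%s should not be instantiated" % cls)


def create_argparse():
    # two blank lines separate top-level definitions
    import argparse
    return argparse.ArgumentParser()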

View File

@@ -40,6 +40,7 @@ del os, sys, path
class bam_config:
    # fake module
    __slots__ = ()
    def __new__(cls, *args, **kwargs):
        raise RuntimeError("%s should not be instantiated" % cls)
@@ -98,6 +99,7 @@ class bam_config:
class bam_utils:
    # fake module
    __slots__ = ()
    def __new__(cls, *args, **kwargs):
        raise RuntimeError("%s should not be instantiated" % cls)
@@ -187,9 +189,6 @@ class bam_utils:
print("Expected a directory (%r)" % path) print("Expected a directory (%r)" % path)
sys.exit(1) sys.exit(1)
# make a zipfile from session # make a zipfile from session
import json import json
with open(os.path.join(path, ".bam_paths_uuid.json")) as f: with open(os.path.join(path, ".bam_paths_uuid.json")) as f:
@@ -285,9 +284,11 @@ class bam_utils:
if file_type != "dir": if file_type != "dir":
print(" %s" % name_short) print(" %s" % name_short)
def subcommand_checkout_cb(args): def subcommand_checkout_cb(args):
bam_utils.checkout(args.paths) bam_utils.checkout(args.paths)
def subcommand_commit_cb(args): def subcommand_commit_cb(args):
bam_utils.commit(args.paths, args.message) bam_utils.commit(args.paths, args.message)
@@ -345,7 +346,6 @@ def create_argparse_revert(subparsers):
    subparse.set_defaults(func=subcommand_revert_cb)
def create_argparse_status(subparsers):
    subparse = subparsers.add_parser("status", aliases=("st",))
    subparse.add_argument(
@@ -362,8 +362,6 @@ def create_argparse_list(subparsers):
    subparse.set_defaults(func=subcommand_list_cb)
def create_argparse():
    import os
    import argparse

View File

@@ -10,6 +10,7 @@ with open(os.path.join(MODULE_DIR, 'config.json'), 'r') as config:
    import json
    config = json.load(config)
def request_url(path):
    return ('%s%s' % (config['BAM_SERVER'], path))
@@ -45,7 +46,7 @@ def request_url(path):
# )
args = {
-    'message' : 'Adding test file.'
+    'message': "Adding test file."
}
payload = {
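For context, a self-contained sketch of how the request_url() helper and the args dict in this hunk might be exercised with the requests library; the server address and the '/example' endpoint are assumptions for the sketch, not values taken from the script:

import requests

BAM_SERVER = "http://localhost:5000"  # assumed server address, for the sketch only


def request_url(path):
    # mirrors the helper in the hunk above, with the config lookup inlined
    return '%s%s' % (BAM_SERVER, path)


args = {
    'message': "Adding test file.",  # no space before ':' (pycodestyle E203)
}
r = requests.post(request_url('/example'), data=args)  # '/example' is a hypothetical endpoint
print(r.status_code)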

View File

@@ -103,7 +103,6 @@ class Application(tk.Frame):
menubar.add_command(label="Preferences", command=lambda: print("Preferences")) menubar.add_command(label="Preferences", command=lambda: print("Preferences"))
root.config(menu=menubar) root.config(menu=menubar)
# local data # local data
self.item_list = [] self.item_list = []
@@ -224,7 +223,6 @@ class Application(tk.Frame):
            self.grid_members.append(but)
            row += 1
    def OnFrameConfigure(self, event):
        '''Reset the scroll region to encompass the inner frame'''
        self.canvas.configure(scrollregion=self.canvas.bbox("all"))

View File

@@ -136,7 +136,6 @@ class BlendFile:
        # cache (could lazy init, incase we never use?)
        self.block_from_offset = {block.addr_old: block for block in self.blocks if block.code != b'ENDB'}
    def find_blocks_from_code(self, code):
        assert(type(code) == bytes)
        if code not in self.code_index:
@@ -243,7 +242,6 @@ class BlendFile:
            sdna_index_from_id[dna_struct.dna_type_id] = sdna_index
            structs.append(dna_struct)
            fields_len = d[1]
            dna_offset = 0
@@ -394,7 +392,6 @@ class BlendFileBlock:
        else:
            return None
    # ----------------------
    # Python convenience API
@@ -481,7 +478,6 @@ class BlendFileHeader:
            )))
class DNAName:
    """
    DNAName is a C-type name stored in the DNA

View File

@@ -21,8 +21,10 @@
VERBOSE = True
TIMEIT = True
class C_defs:
    __slots__ = ()
    def __new__(cls, *args, **kwargs):
        raise RuntimeError("%s should not be instantiated" % cls)
@@ -570,7 +572,6 @@ class ExpandID:
        yield block.get_pointer(b'group')
    @staticmethod
    def expand_TE(block): # 'Tex'
        yield from ExpandID._expand_generic_animdata(block)

View File

@@ -96,7 +96,6 @@ def pack(blendfile_src, blendfile_dst, mode='FILE',
        path_temp_files.add(filepath_tmp)
        return filepath_tmp
    # base_dir_src = os.path.dirname(blendfile_src)
    base_dir_dst = os.path.dirname(blendfile_dst)
@@ -184,7 +183,6 @@ def pack(blendfile_src, blendfile_dst, mode='FILE',
    del blendfile_dst_tmp
    del sha1_from_file
    # --------------------
    # Handle File Copy/Zip
@@ -232,6 +230,7 @@ def pack(blendfile_src, blendfile_dst, mode='FILE',
        if WRITE_JSON_REMAP:
            import json
            def write_dict_as_json(fn, dct):
                zip.writestr(
                    fn,
@@ -250,7 +249,6 @@ def pack(blendfile_src, blendfile_dst, mode='FILE',
            del write_dict_as_json
        yield report(" %s: %r\n" % (colorize("written", color='green'), blendfile_dst))
    else:
        raise Exception("%s not a known mode" % mode)
@@ -316,7 +314,6 @@ def main():
            sort_keys=True, indent=4, separators=(',', ': '),
            )
    if deps_remap is not None:
        write_dict_as_json(args.deps_remap, deps_remap)

View File

@@ -44,6 +44,7 @@ auth = HTTPBasicAuth()
import config
app.config.from_object(config.Development)
@api.representation('application/octet-stream')
def output_file(data, code, headers=None):
    """Makes a Flask response to return a file."""
@@ -154,7 +155,6 @@ class FileAPI(Resource):
        elif command == 'checkout':
            filepath = os.path.join(app.config['STORAGE_PATH'], filepath)
            if not os.path.exists(filepath):
                return jsonify(message="Path not found %r" % filepath)
            elif os.path.isdir(filepath):