Merge branch 'master' into blender2.8

2018-07-03 06:58:34 +02:00
56 changed files with 757 additions and 622 deletions


@@ -7,7 +7,7 @@
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
@@ -22,21 +22,21 @@
#
# Name:
# dna.py
#
#
# Description:
# Creates a browsable DNA output to HTML.
#
#
# Author:
# Jeroen Bakker
#
#
# Version:
# v0.1 (12-05-2009) - migration of original source code to python.
# Added code to support blender 2.5 branch
# v0.2 (25-05-2009) - integrated with BlendFileReader.py
#
#
# Input:
# blender build executable
#
#
# Output:
# dna.html
# dna.css (will only be created when not existing)
@@ -76,12 +76,12 @@ class DNACatalogHTML:
DNACatalog is a catalog of all information in the DNA1 file-block
'''
def __init__(self, catalog, bpy_module = None):
def __init__(self, catalog, bpy_module=None):
self.Catalog = catalog
self.bpy = bpy_module
def WriteToHTML(self, handle):
dna_html_template = """
<!DOCTYPE html PUBLIC -//W3C//DTD HTML 4.01 Transitional//EN http://www.w3.org/TR/html4/loose.dtd>
<html>
@@ -105,10 +105,10 @@ class DNACatalogHTML:
${structs_content}
</body>
</html>"""
header = self.Catalog.Header
bpy = self.bpy
# ${version} and ${revision}
if bpy:
version = '.'.join(map(str, bpy.app.version))
@@ -116,7 +116,7 @@ class DNACatalogHTML:
else:
version = str(header.Version)
revision = 'Unknown'
# ${bitness}
if header.PointerSize == 8:
bitness = '64 bit'
@@ -125,10 +125,10 @@ class DNACatalogHTML:
# ${endianness}
if header.LittleEndianness:
endianess= 'Little endianness'
endianess = 'Little endianness'
else:
endianess= 'Big endianness'
endianess = 'Big endianness'
# ${structs_list}
log.debug("Creating structs index")
structs_list = ''
@@ -136,7 +136,7 @@ class DNACatalogHTML:
structureIndex = 0
for structure in self.Catalog.Structs:
structs_list += list_item.format(structureIndex, structure.Type.Name)
structureIndex+=1
structureIndex += 1
# ${structs_content}
log.debug("Creating structs content")
@@ -144,20 +144,20 @@ class DNACatalogHTML:
for structure in self.Catalog.Structs:
log.debug(structure.Type.Name)
structs_content += self.Structure(structure)
d = dict(
version = version,
revision = revision,
bitness = bitness,
endianness = endianess,
structs_list = structs_list,
structs_content = structs_content
version=version,
revision=revision,
bitness=bitness,
endianness=endianess,
structs_list=structs_list,
structs_content=structs_content
)
dna_html = Template(dna_html_template).substitute(d)
dna_html = self.format(dna_html)
handle.write(dna_html)
def Structure(self, structure):
struct_table_template = """
<table><a name="${struct_name}"></a>
@@ -178,23 +178,23 @@ class DNACatalogHTML:
</table>
<label>Total size: ${size} bytes</label><br/>
<label>(<a href="#top">top</a>)</label><br/>"""
d = dict(
struct_name = structure.Type.Name,
fields = self.StructureFields(structure, None, 0),
size = str(structure.Type.Size)
struct_name=structure.Type.Name,
fields=self.StructureFields(structure, None, 0),
size=str(structure.Type.Size)
)
struct_table = Template(struct_table_template).substitute(d)
return struct_table
def StructureFields(self, structure, parentReference, offset):
fields = ''
for field in structure.Fields:
fields += self.StructureField(field, structure, parentReference, offset)
offset += field.Size(self.Catalog.Header)
return fields
def StructureField(self, field, structure, parentReference, offset):
structure_field_template = """
<tr>
@@ -205,7 +205,7 @@ class DNACatalogHTML:
<td>${offset}</td>
<td>${size}</td>
</tr>"""
if field.Type.Structure is None or field.Name.IsPointer():
# ${reference}
@@ -216,37 +216,37 @@ class DNACatalogHTML:
struct = '<a href="#{0}">{0}</a>'.format(structure.Type.Name)
else:
struct = structure.Type.Name
# ${type}
type = field.Type.Name
# ${name}
name = field.Name.Name
# ${offset}
# offset already set
# ${size}
size = field.Size(self.Catalog.Header)
d = dict(
reference = reference,
struct = struct,
type = type,
name = name,
offset = offset,
size = size
reference=reference,
struct=struct,
type=type,
name=name,
offset=offset,
size=size
)
structure_field = Template(structure_field_template).substitute(d)
elif field.Type.Structure is not None:
reference = field.Name.AsReference(parentReference)
structure_field = self.StructureFields(field.Type.Structure, reference, offset)
structure_field = self.StructureFields(field.Type.Structure, reference, offset)
return structure_field
def indent(self, input, dent, startswith = ''):
def indent(self, input, dent, startswith=''):
output = ''
if dent < 0:
for line in input.split('\n'):
@@ -257,19 +257,19 @@ class DNACatalogHTML:
output += line.lstrip() + '\n' # remove indentation completely
elif dent > 0:
for line in input.split('\n'):
output += ' '* dent + line + '\n'
output += ' ' * dent + line + '\n'
return output
def format(self, input):
diff = {
'\n<!DOCTYPE':'<!DOCTYPE',
'\n</ul>' :'</ul>',
'<a name' :'\n<a name',
'<tr>\n' :'<tr>',
'<tr>' :' <tr>',
'</th>\n' :'</th>',
'</td>\n' :'</td>',
'<tbody>\n' :'<tbody>'
'\n<!DOCTYPE': '<!DOCTYPE',
'\n</ul>': '</ul>',
'<a name': '\n<a name',
'<tr>\n': '<tr>',
'<tr>': ' <tr>',
'</th>\n': '</th>',
'</td>\n': '</td>',
'<tbody>\n': '<tbody>'
}
output = self.indent(input, 0)
for key, value in diff.items():
@@ -283,17 +283,17 @@ class DNACatalogHTML:
'''
css = """
@CHARSET "ISO-8859-1";
body {
font-family: verdana;
font-size: small;
}
div.title {
font-size: large;
text-align: center;
}
h1 {
page-break-before: always;
}
@@ -304,7 +304,7 @@ class DNACatalogHTML:
margin-right: 3%;
padding-left: 40px;
}
h1:hover{
background-color: #EBEBEB;
}
@@ -312,7 +312,7 @@ class DNACatalogHTML:
h3 {
padding-left: 40px;
}
table {
border-width: 1px;
border-style: solid;
@@ -321,21 +321,21 @@ class DNACatalogHTML:
width: 94%;
margin: 20px 3% 10px;
}
caption {
margin-bottom: 5px;
}
th {
background-color: #000000;
color:#ffffff;
padding-left:5px;
padding-right:5px;
}
tr {
}
td {
border-width: 1px;
border-style: solid;
@@ -343,12 +343,12 @@ class DNACatalogHTML:
padding-left:5px;
padding-right:5px;
}
label {
float:right;
margin-right: 3%;
}
ul.multicolumn {
list-style:none;
float:left;
@@ -361,18 +361,18 @@ class DNACatalogHTML:
width:200px;
margin-right:0px;
}
a {
color:#a000a0;
text-decoration:none;
}
a:hover {
color:#a000a0;
text-decoration:underline;
}
"""
css = self.indent(css, 0)
handle.write(css)
@@ -389,13 +389,13 @@ def usage():
print("\tdefault: % blender2.5 --background -noaudio --python BlendFileDnaExporter_25.py")
print("\twith options: % blender2.5 --background -noaudio --python BlendFileDnaExporter_25.py -- --dna-keep-blend --dna-debug\n")
######################################################
# Main
######################################################
def main():
import os, os.path
try:
@@ -408,37 +408,37 @@ def main():
else:
filename = 'dna'
dir = os.path.dirname(__file__)
Path_Blend = os.path.join(dir, filename + '.blend') # temporary blend file
Path_HTML = os.path.join(dir, filename + '.html') # output html file
Path_CSS = os.path.join(dir, 'dna.css') # output css file
Path_Blend = os.path.join(dir, filename + '.blend') # temporary blend file
Path_HTML = os.path.join(dir, filename + '.html') # output html file
Path_CSS = os.path.join(dir, 'dna.css') # output css file
# create a blend file for dna parsing
if not os.path.exists(Path_Blend):
log.info("1: write temp blend file with SDNA info")
log.info(" saving to: " + Path_Blend)
try:
bpy.ops.wm.save_as_mainfile(filepath = Path_Blend, copy = True, compress = False)
bpy.ops.wm.save_as_mainfile(filepath=Path_Blend, copy=True, compress=False)
except:
log.error("Filename {0} does not exist and can't be created... quitting".format(Path_Blend))
return
else:
log.info("1: found blend file with SDNA info")
log.info(" " + Path_Blend)
# read blend header from blend file
log.info("2: read file:")
if not dir in sys.path:
sys.path.append(dir)
import BlendFileReader
handle = BlendFileReader.openBlendFile(Path_Blend)
blendfile = BlendFileReader.BlendFile(handle)
catalog = DNACatalogHTML(blendfile.Catalog, bpy)
# close temp file
handle.close()
# deleting or not?
if '--dna-keep-blend' in sys.argv:
# keep the blend, useful for studying hexdumps
@@ -449,7 +449,7 @@ def main():
log.info("5: close and delete temp blend:")
log.info(" {0}".format(Path_Blend))
os.remove(Path_Blend)
# export dna to xhtml
log.info("6: export sdna to xhtml file: %r" % Path_HTML)
handleHTML = open(Path_HTML, "w")
@@ -466,12 +466,12 @@ def main():
if not bpy.app.background:
log.info("7: quit blender")
bpy.ops.wm.exit_blender()
except ImportError:
log.warning(" skipping, not running in Blender")
usage()
sys.exit(2)
if __name__ == '__main__':
main()
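
The exporter above fills the ${placeholder} names in its HTML templates with Python's standard string.Template, as in Template(dna_html_template).substitute(d). A minimal standalone sketch of that mechanism (the placeholder names here are illustrative, not the exporter's own):

from string import Template

row_template = Template("<tr><td>${name}</td><td>${size}</td></tr>")
# substitute() raises KeyError for any ${placeholder} missing from the mapping;
# safe_substitute() would leave unresolved placeholders in place instead.
row = row_template.substitute(name="ListBase", size=16)
print(row)  # <tr><td>ListBase</td><td>16</td></tr>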


@@ -7,7 +7,7 @@
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
@@ -34,6 +34,7 @@ log = logging.getLogger("BlendFileReader")
# module global routines
######################################################
def ReadString(handle, length):
'''
ReadString reads a String of given length or a zero terminating String
@@ -45,7 +46,7 @@ def ReadString(handle, length):
# length == 0 means we want a zero terminating string
result = ""
s = ReadString(handle, 1)
while s!="\0":
while s != "\0":
result += s
s = ReadString(handle, 1)
return result
@@ -57,7 +58,7 @@ def Read(type, handle, fileheader):
'''
def unpacked_bytes(type_char, size):
return struct.unpack(fileheader.StructPre + type_char, handle.read(size))[0]
if type == 'ushort':
return unpacked_bytes("H", 2) # unsigned short
elif type == 'short':
@@ -94,10 +95,10 @@ def openBlendFile(filename):
log.debug("decompressing started")
fs = gzip.open(filename, "rb")
handle = tempfile.TemporaryFile()
data = fs.read(1024*1024)
while data:
handle.write(data)
data = fs.read(1024*1024)
data = fs.read(1024 * 1024)
while data:
handle.write(data)
data = fs.read(1024 * 1024)
log.debug("decompressing finished")
fs.close()
log.debug("resetting decompressed file")
@@ -112,7 +113,7 @@ def Align(handle):
offset = handle.tell()
trim = offset % 4
if trim != 0:
handle.seek(4-trim, os.SEEK_CUR)
handle.seek(4 - trim, os.SEEK_CUR)
######################################################
@@ -121,14 +122,14 @@ def Align(handle):
class BlendFile:
'''
Reads a blendfile and store the header, all the fileblocks, and catalogue
Reads a blendfile and store the header, all the fileblocks, and catalogue
structs foound in the DNA fileblock
- BlendFile.Header (BlendFileHeader instance)
- BlendFile.Blocks (list of BlendFileBlock instances)
- BlendFile.Catalog (DNACatalog instance)
'''
def __init__(self, handle):
log.debug("initializing reading blend-file")
self.Header = BlendFileHeader(handle)
@@ -141,13 +142,13 @@ class BlendFile:
found_dna_block = True
else:
fileblock.Header.skip(handle)
self.Blocks.append(fileblock)
fileblock = BlendFileBlock(handle, self)
# appending last fileblock, "ENDB"
self.Blocks.append(fileblock)
# seems unused?
"""
def FindBlendFileBlocksWithCode(self, code):
@@ -164,27 +165,27 @@ class BlendFileHeader:
BlendFileHeader allocates the first 12 bytes of a blend file.
It contains information about the hardware architecture.
Header example: BLENDER_v254
BlendFileHeader.Magic (str)
BlendFileHeader.PointerSize (int)
BlendFileHeader.LittleEndianness (bool)
BlendFileHeader.StructPre (str) see http://docs.python.org/py3k/library/struct.html#byte-order-size-and-alignment
BlendFileHeader.Version (int)
'''
def __init__(self, handle):
log.debug("reading blend-file-header")
self.Magic = ReadString(handle, 7)
log.debug(self.Magic)
pointersize = ReadString(handle, 1)
log.debug(pointersize)
if pointersize == "-":
self.PointerSize = 8
if pointersize == "_":
self.PointerSize = 4
endianness = ReadString(handle, 1)
log.debug(endianness)
if endianness == "v":
@@ -193,11 +194,11 @@ class BlendFileHeader:
if endianness == "V":
self.LittleEndianness = False
self.StructPre = ">"
version = ReadString(handle, 3)
log.debug(version)
self.Version = int(version)
log.debug("{0} {1} {2} {3}".format(self.Magic, self.PointerSize, self.LittleEndianness, version))
@@ -206,11 +207,11 @@ class BlendFileBlock:
BlendFileBlock.File (BlendFile)
BlendFileBlock.Header (FileBlockHeader)
'''
def __init__(self, handle, blendfile):
self.File = blendfile
self.Header = FileBlockHeader(handle, blendfile.Header)
def Get(self, handle, path):
log.debug("find dna structure")
dnaIndex = self.Header.SDNAIndex
@@ -232,7 +233,7 @@ class FileBlockHeader:
Count (int)
FileOffset (= file pointer of datablock)
'''
def __init__(self, handle, fileheader):
self.Code = ReadString(handle, 4).strip()
if self.Code != "ENDB":
@@ -257,28 +258,28 @@ class FileBlockHeader:
class DNACatalog:
'''
DNACatalog is a catalog of all information in the DNA1 file-block
Header = None
Names = None
Types = None
Structs = None
'''
def __init__(self, fileheader, handle):
log.debug("building DNA catalog")
self.Names=[]
self.Types=[]
self.Structs=[]
self.Names = []
self.Types = []
self.Structs = []
self.Header = fileheader
SDNA = ReadString(handle, 4)
# names
NAME = ReadString(handle, 4)
numberOfNames = Read('uint', handle, fileheader)
log.debug("building #{0} names".format(numberOfNames))
for i in range(numberOfNames):
name = ReadString(handle,0)
name = ReadString(handle, 0)
self.Names.append(DNAName(name))
Align(handle)
@@ -287,7 +288,7 @@ class DNACatalog:
numberOfTypes = Read('uint', handle, fileheader)
log.debug("building #{0} types".format(numberOfTypes))
for i in range(numberOfTypes):
type = ReadString(handle,0)
type = ReadString(handle, 0)
self.Types.append(DNAType(type))
Align(handle)
@@ -321,24 +322,24 @@ class DNACatalog:
class DNAName:
'''
DNAName is a C-type name stored in the DNA.
Name = str
'''
def __init__(self, name):
self.Name = name
def AsReference(self, parent):
if parent is None:
result = ""
else:
result = parent+"."
result = parent + "."
result = result + self.ShortName()
return result
def ShortName(self):
result = self.Name;
result = self.Name
result = result.replace("*", "")
result = result.replace("(", "")
result = result.replace(")", "")
@@ -346,12 +347,12 @@ class DNAName:
if Index != -1:
result = result[0:Index]
return result
def IsPointer(self):
return self.Name.find("*")>-1
return self.Name.find("*") > -1
def IsMethodPointer(self):
return self.Name.find("(*")>-1
return self.Name.find("(*") > -1
def ArraySize(self):
result = 1
@@ -360,10 +361,10 @@ class DNAName:
while Index != -1:
Index2 = Temp.find("]")
result*=int(Temp[Index+1:Index2])
Temp = Temp[Index2+1:]
result *= int(Temp[Index + 1:Index2])
Temp = Temp[Index2 + 1:]
Index = Temp.find("[")
return result
@@ -375,46 +376,46 @@ class DNAType:
Size = int
Structure = DNAStructure
'''
def __init__(self, aName):
self.Name = aName
self.Structure=None
self.Structure = None
class DNAStructure:
'''
DNAType is a C-type structure stored in the DNA
Type = DNAType
Fields = [DNAField]
'''
def __init__(self, aType):
self.Type = aType
self.Type.Structure = self
self.Fields=[]
self.Fields = []
def GetField(self, header, handle, path):
splitted = path.partition(".")
name = splitted[0]
rest = splitted[2]
offset = 0;
offset = 0
for field in self.Fields:
if field.Name.ShortName() == name:
log.debug("found "+name+"@"+str(offset))
log.debug("found " + name + "@" + str(offset))
handle.seek(offset, os.SEEK_CUR)
return field.DecodeField(header, handle, rest)
else:
offset += field.Size(header)
log.debug("error did not find "+path)
log.debug("error did not find " + path)
return None
class DNAField:
'''
DNAField is a coupled DNAType and DNAName.
Type = DNAType
Name = DNAName
'''
@@ -422,25 +423,24 @@ class DNAField:
def __init__(self, aType, aName):
self.Type = aType
self.Name = aName
def Size(self, header):
if self.Name.IsPointer() or self.Name.IsMethodPointer():
return header.PointerSize*self.Name.ArraySize()
return header.PointerSize * self.Name.ArraySize()
else:
return self.Type.Size*self.Name.ArraySize()
return self.Type.Size * self.Name.ArraySize()
def DecodeField(self, header, handle, path):
if path == "":
if self.Name.IsPointer():
return Read('pointer', handle, header)
if self.Type.Name=="int":
if self.Type.Name == "int":
return Read('int', handle, header)
if self.Type.Name=="short":
if self.Type.Name == "short":
return Read('short', handle, header)
if self.Type.Name=="float":
if self.Type.Name == "float":
return Read('float', handle, header)
if self.Type.Name=="char":
if self.Type.Name == "char":
return ReadString(handle, self.Name.ArraySize())
else:
return self.Type.Structure.GetField(header, handle, path)
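
BlendFileHeader above decodes the 12-byte header at the start of a .blend file: a 7-byte magic ("BLENDER"), one byte for pointer size ('_' = 4 bytes, '-' = 8 bytes), one byte for endianness ('v' = little, 'V' = big, which selects the '<' or '>' struct prefix), and a 3-digit version. A minimal sketch of the same parsing in binary mode, assuming an uncompressed .blend (the module's openBlendFile() transparently decompresses gzipped files first):

def read_blend_header(handle):
    # e.g. b'BLENDER-v279' -> magic, pointer-size char, endianness char, version digits
    magic = handle.read(7)                             # b'BLENDER'
    pointer_size = 8 if handle.read(1) == b'-' else 4  # '-' -> 8-byte, '_' -> 4-byte pointers
    little_endian = handle.read(1) == b'v'             # 'v' -> little endian, 'V' -> big endian
    struct_pre = '<' if little_endian else '>'         # byte-order prefix for struct.unpack()
    version = int(handle.read(3))                      # b'279' -> 279
    return magic, pointer_size, struct_pre, version

# hypothetical usage:
# with open("untitled.blend", "rb") as f:
#     print(read_blend_header(f))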


@@ -42,6 +42,7 @@ def man_format(data):
data = data.replace("\t", " ")
return data
if len(sys.argv) != 3:
import getopt
raise getopt.GetoptError("Usage: %s <path-to-blender> <output-filename>" % sys.argv[0])


@@ -73,6 +73,8 @@ def rna_info_BuildRNAInfo_cache():
if rna_info_BuildRNAInfo_cache.ret is None:
rna_info_BuildRNAInfo_cache.ret = rna_info.BuildRNAInfo()
return rna_info_BuildRNAInfo_cache.ret
rna_info_BuildRNAInfo_cache.ret = None
# --- end rna_info cache
@@ -434,7 +436,7 @@ else:
BLENDER_VERSION_DOTS = ".".join(blender_version_strings)
if BLENDER_REVISION != "Unknown":
# '2.62a SHA1' (release) or '2.62.1 SHA1' (non-release)
BLENDER_VERSION_DOTS += " " + BLENDER_REVISION
BLENDER_VERSION_DOTS += " " + BLENDER_REVISION
if is_release:
# '2_62a_release'
@@ -516,6 +518,8 @@ def escape_rst(text):
""" Escape plain text which may contain characters used by RST.
"""
return text.translate(escape_rst.trans)
escape_rst.trans = str.maketrans({
"`": "\\`",
"|": "\\|",
@@ -1018,6 +1022,7 @@ def pymodule2sphinx(basepath, module_name, module, title):
file.close()
# Changes in Blender will force errors here
context_type_map = {
"active_base": ("ObjectBase", False),


@@ -209,6 +209,8 @@ def modules(module_cache=addons_fake_modules, *, refresh=True):
)
)
return mod_list
modules._is_first = True


@@ -172,6 +172,7 @@ def ui_draw_filter_register(
class Wrapper(cls_real):
__slots__ = ()
def __getattribute__(self, attr):
if attr == "layout":
return UILayout_Fake(self_real.layout)


@@ -43,14 +43,17 @@ else:
# XXX This is a quick hack to make it work with new I18n... objects! To be reworked!
def main():
import argparse
parser = argparse.ArgumentParser(description=
"Merge one or more .po files into the first dest one.\n"
"If a msgkey (msgctxt, msgid) is present in more than one merged po, the one in the first file "
"wins, unless its marked as fuzzy and one later is not.\n"
"The fuzzy flag is removed if necessary.\n"
"All other comments are never modified.\n"
"Commented messages in dst will always remain commented, and commented messages are never merged "
"from sources.")
parser = argparse.ArgumentParser(
description=(
"Merge one or more .po files into the first dest one.\n"
"If a msgkey (msgctxt, msgid) is present in more than one merged po, the one in the first file "
"wins, unless its marked as fuzzy and one later is not.\n"
"The fuzzy flag is removed if necessary.\n"
"All other comments are never modified.\n"
"Commented messages in dst will always remain commented, and commented messages are never merged "
"from sources."
),
)
parser.add_argument('-s', '--stats', action="store_true", help="Show statistics info.")
parser.add_argument('-r', '--replace', action="store_true",
help="Replace existing messages of same \"level\" already in dest po.")


@@ -442,7 +442,7 @@ def do_previews(do_objects, do_collections, do_scenes, do_data_intern):
if not has_camera:
# We had to add a temp camera, now we need to place it to see interesting objects!
objects = tuple((ob.name, ob.library.filepath if ob.library else None) for ob in scene.objects
if (not ob.hide_render) and (ob.type in OBJECT_TYPES_RENDER))
if (not ob.hide_render) and (ob.type in OBJECT_TYPES_RENDER))
preview_render_do(render_context, 'scenes', scene.name, objects)


@@ -98,5 +98,6 @@ def main():
for value in read_blend_rend_chunk(arg):
print("%d %d %s" % value)
if __name__ == '__main__':
main()


@@ -228,4 +228,5 @@ class BPyOpsSubModOp:
return ("<function bpy.ops.%s.%s at 0x%x'>" %
(self._module, self._func, id(self)))
ops_fake_module = BPyOps()


@@ -183,6 +183,8 @@ def clean_name(name, replace="_"):
trans = maketrans_init()
return name.translate(trans)
clean_name._trans_cache = {}
@@ -223,6 +225,7 @@ def display_name(name):
name = _clean_utf8(name)
return name
def display_name_to_filepath(name):
"""
Performs the reverse of display_name using literal versions of characters


@@ -31,4 +31,4 @@ __all__ = (
"mesh_utils",
"node_utils",
"view3d_utils",
)
)


@@ -174,7 +174,7 @@ def bake_action_iter(
# Bendy Bones
if pbone.bone.bbone_segments > 1:
bbones[name] = {bb_prop : getattr(pbone, bb_prop) for bb_prop in BBONE_PROPS}
bbones[name] = {bb_prop: getattr(pbone, bb_prop) for bb_prop in BBONE_PROPS}
return matrix, bbones
if do_parents_clear:


@@ -20,7 +20,7 @@
__all__ = (
"load_image",
)
)
# limited replacement for BPyImage.comprehensiveImageLoad


@@ -32,7 +32,7 @@ __all__ = (
"path_reference_copy",
"path_reference_mode",
"unique_name"
)
)
import bpy
from bpy.props import (
@@ -53,23 +53,23 @@ def _check_axis_conversion(op):
class ExportHelper:
filepath = StringProperty(
name="File Path",
description="Filepath used for exporting the file",
maxlen=1024,
subtype='FILE_PATH',
)
name="File Path",
description="Filepath used for exporting the file",
maxlen=1024,
subtype='FILE_PATH',
)
check_existing = BoolProperty(
name="Check Existing",
description="Check and warn on overwriting existing files",
default=True,
options={'HIDDEN'},
)
name="Check Existing",
description="Check and warn on overwriting existing files",
default=True,
options={'HIDDEN'},
)
# needed for mix-ins
order = [
"filepath",
"check_existing",
]
]
# subclasses can override with decorator
# True == use ext, False == no ext, None == do nothing.
@@ -113,16 +113,16 @@ class ExportHelper:
class ImportHelper:
filepath = StringProperty(
name="File Path",
description="Filepath used for importing the file",
maxlen=1024,
subtype='FILE_PATH',
)
name="File Path",
description="Filepath used for importing the file",
maxlen=1024,
subtype='FILE_PATH',
)
# needed for mix-ins
order = [
"filepath",
]
]
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
@@ -138,43 +138,45 @@ def orientation_helper_factory(name, axis_forward='Y', axis_up='Z'):
def _update_axis_forward(self, context):
if self.axis_forward[-1] == self.axis_up[-1]:
self.axis_up = (self.axis_up[0:-1] +
'XYZ'[('XYZ'.index(self.axis_up[-1]) + 1) % 3])
'XYZ'[('XYZ'.index(self.axis_up[-1]) + 1) % 3])
members['axis_forward'] = EnumProperty(
name="Forward",
items=(('X', "X Forward", ""),
('Y', "Y Forward", ""),
('Z', "Z Forward", ""),
('-X', "-X Forward", ""),
('-Y', "-Y Forward", ""),
('-Z', "-Z Forward", ""),
),
default=axis_forward,
update=_update_axis_forward,
)
name="Forward",
items=(
('X', "X Forward", ""),
('Y', "Y Forward", ""),
('Z', "Z Forward", ""),
('-X', "-X Forward", ""),
('-Y', "-Y Forward", ""),
('-Z', "-Z Forward", ""),
),
default=axis_forward,
update=_update_axis_forward,
)
def _update_axis_up(self, context):
if self.axis_up[-1] == self.axis_forward[-1]:
self.axis_forward = (self.axis_forward[0:-1] +
'XYZ'[('XYZ'.index(self.axis_forward[-1]) + 1) % 3])
'XYZ'[('XYZ'.index(self.axis_forward[-1]) + 1) % 3])
members['axis_up'] = EnumProperty(
name="Up",
items=(('X', "X Up", ""),
('Y', "Y Up", ""),
('Z', "Z Up", ""),
('-X', "-X Up", ""),
('-Y', "-Y Up", ""),
('-Z', "-Z Up", ""),
),
default=axis_up,
update=_update_axis_up,
)
name="Up",
items=(
('X', "X Up", ""),
('Y', "Y Up", ""),
('Z', "Z Up", ""),
('-X', "-X Up", ""),
('-Y', "-Y Up", ""),
('-Z', "-Z Up", ""),
),
default=axis_up,
update=_update_axis_up,
)
members["order"] = [
"axis_forward",
"axis_up",
]
]
return type(name, (object,), members)
@@ -205,7 +207,7 @@ _axis_convert_matrix = (
((1.0, 0.0, 0.0), (0.0, -1.0, 0.0), (0.0, 0.0, -1.0)),
((1.0, 0.0, 0.0), (0.0, 0.0, 1.0), (0.0, -1.0, 0.0)),
((1.0, 0.0, 0.0), (0.0, 0.0, -1.0), (0.0, 1.0, 0.0)),
)
)
# store args as a single int
# (X Y Z -X -Y -Z) --> (0, 1, 2, 3, 4, 5)
@@ -282,7 +284,7 @@ _axis_convert_lut = (
{0x408, 0x810, 0xA20, 0x228, 0x081, 0x891, 0x699, 0x2A9, 0x102, 0x50A,
0x71A, 0xB22, 0x4CB, 0x8D3, 0xAE3, 0x2EB, 0x144, 0x954, 0x75C, 0x36C,
0x045, 0x44D, 0x65D, 0xA65},
)
)
_axis_convert_num = {'X': 0, 'Y': 1, 'Z': 2, '-X': 3, '-Y': 4, '-Z': 5}
@@ -303,11 +305,11 @@ def axis_conversion(from_forward='Y', from_up='Z', to_forward='Y', to_up='Z'):
"can't use up/forward on the same axis")
value = reduce(int.__or__, (_axis_convert_num[a] << (i * 3)
for i, a in enumerate((from_forward,
from_up,
to_forward,
to_up,
))))
for i, a in enumerate((from_forward,
from_up,
to_forward,
to_up,
))))
for i, axis_lut in enumerate(_axis_convert_lut):
if value in axis_lut:
@@ -392,20 +394,21 @@ def unpack_face_list(list_of_tuples):
path_reference_mode = EnumProperty(
name="Path Mode",
description="Method used to reference paths",
items=(('AUTO', "Auto", "Use Relative paths with subdirectories only"),
('ABSOLUTE', "Absolute", "Always write absolute paths"),
('RELATIVE', "Relative", "Always write relative paths "
"(where possible)"),
('MATCH', "Match", "Match Absolute/Relative "
"setting with input path"),
('STRIP', "Strip Path", "Filename only"),
('COPY', "Copy", "Copy the file to the destination path "
"(or subdirectory)"),
),
default='AUTO',
)
name="Path Mode",
description="Method used to reference paths",
items=(
('AUTO', "Auto", "Use Relative paths with subdirectories only"),
('ABSOLUTE', "Absolute", "Always write absolute paths"),
('RELATIVE', "Relative", "Always write relative paths "
"(where possible)"),
('MATCH', "Match", "Match Absolute/Relative "
"setting with input path"),
('STRIP', "Strip Path", "Filename only"),
('COPY', "Copy", "Copy the file to the destination path "
"(or subdirectory)"),
),
default='AUTO',
)
def path_reference(filepath,
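
ExportHelper, orientation_helper_factory() and axis_conversion() from this module are designed to be mixed into import/export operators. A rough usage sketch to run inside Blender; the operator id, label and output format are hypothetical, only the mix-in pattern follows the module above:

import bpy
from bpy_extras.io_utils import ExportHelper, orientation_helper_factory, axis_conversion

IOOrientationHelper = orientation_helper_factory("IOOrientationHelper", axis_forward='-Z', axis_up='Y')

class EXPORT_OT_example(bpy.types.Operator, ExportHelper, IOOrientationHelper):
    """Hypothetical exporter built on the io_utils mix-ins"""
    bl_idname = "export_scene.example"
    bl_label = "Export Example"
    filename_ext = ".txt"  # ExportHelper checks/appends this extension

    def execute(self, context):
        # build a conversion matrix from the axes chosen in the file selector sidebar
        matrix = axis_conversion(to_forward=self.axis_forward, to_up=self.axis_up).to_4x4()
        with open(self.filepath, 'w', encoding='utf-8') as fh:
            fh.write("%r\n" % matrix)
        return {'FINISHED'}

Registering the class with bpy.utils.register_class(EXPORT_OT_example) and invoking it opens the file selector that ExportHelper provides.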


@@ -176,8 +176,8 @@ def addon_keymap_register(wm, keymaps_description):
for km_info, km_items in keymaps_description:
km_name, km_sptype, km_regtype, km_ismodal = km_info
kmap = [k for k in kconf.keymaps
if k.name == km_name and k.region_type == km_regtype and
k.space_type == km_sptype and k.is_modal == km_ismodal]
if k.name == km_name and k.region_type == km_regtype and
k.space_type == km_sptype and k.is_modal == km_ismodal]
if kmap:
kmap = kmap[0]
else:
@@ -201,8 +201,8 @@ def addon_keymap_unregister(wm, keymaps_description):
for km_info, km_items in keymaps_description:
km_name, km_sptype, km_regtype, km_ismodal = km_info
kmaps = (k for k in kconf.keymaps
if k.name == km_name and k.region_type == km_regtype and
k.space_type == km_sptype and k.is_modal == km_ismodal)
if k.name == km_name and k.region_type == km_regtype and
k.space_type == km_sptype and k.is_modal == km_ismodal)
for kmap in kmaps:
for kmi_kwargs, props in km_items:
idname = kmi_kwargs["idname"]


@@ -27,7 +27,7 @@ __all__ = (
"edge_loops_from_edges",
"ngon_tessellate",
"face_random_points",
)
)
def mesh_linked_uv_islands(mesh):
@@ -286,7 +286,7 @@ def edge_loops_from_edges(mesh, edges=None):
ok = True
while ok:
ok = False
#for i, ed in enumerate(edges):
# for i, ed in enumerate(edges):
i = len(edges)
while i:
i -= 1
@@ -303,7 +303,7 @@ def edge_loops_from_edges(mesh, edges=None):
vert_end = line_poly[-1]
ok = 1
del edges[i]
#break
# break
elif v1 == vert_start:
line_poly.insert(0, v2)
vert_start = line_poly[0]
@@ -315,7 +315,7 @@ def edge_loops_from_edges(mesh, edges=None):
vert_start = line_poly[0]
ok = 1
del edges[i]
#break
# break
line_polys.append(line_poly)
return line_polys
@@ -481,7 +481,7 @@ def ngon_tessellate(from_data, indices, fix_loops=True):
ii += len(verts)
fill = tessellate_polygon([[v[0] for v in loop] for loop in loop_list])
#draw_loops(loop_list)
# draw_loops(loop_list)
#raise Exception("done loop")
# map to original indices
fill = [[vert_map[i] for i in reversed(f)] for f in fill]


@@ -26,7 +26,7 @@ __all__ = (
"object_add_grid_scale_apply_operator",
"object_image_guess",
"world_to_camera_view",
)
)
import bpy


@@ -153,6 +153,7 @@ def topretty_py(py_data, indent=" "):
return "\n".join(lines)
if __name__ == "__main__":
# testing code.


@@ -19,12 +19,12 @@
# ##### END GPL LICENSE BLOCK #####
# Original copyright (see docstring):
#*****************************************************************************
# ****************************************************************************
# Copyright (C) 2001-2006 Fernando Perez <fperez@colorado.edu>
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#*****************************************************************************
# ****************************************************************************
# <pep8 compliant>


@@ -224,6 +224,7 @@ def execute(context, is_interactive):
return {'FINISHED'}
execute.hooks = []


@@ -40,6 +40,7 @@ def shell_run(text):
add_scrollback(output, style)
PROMPT = "$ "
@@ -64,7 +65,7 @@ def execute(context, is_interactive):
def autocomplete(context):
#~ sc = context.space_data
# sc = context.space_data
# TODO
return {'CANCELLED'}


@@ -140,7 +140,7 @@ def graph_armature(obj, filepath, FAKE_PARENT=True, CONSTRAINTS=True, DRIVERS=Tr
return None
#rna_path_bone = rna_path[:rna_path.index("]") + 1]
#return obj.path_resolve(rna_path_bone)
# return obj.path_resolve(rna_path_bone)
bone_name = rna_path.split("[")[1].split("]")[0]
return obj.pose.bones[bone_name[1:-1]]
@@ -179,6 +179,7 @@ def graph_armature(obj, filepath, FAKE_PARENT=True, CONSTRAINTS=True, DRIVERS=Tr
print("\nSaved:", filepath)
return True
if __name__ == "__main__":
import os
tmppath = "/tmp/test.dot"


@@ -36,7 +36,7 @@ __all__ = (
"RKS_GEN_rotation",
"RKS_GEN_scaling",
"RKS_GEN_bendy_bones",
)
)
import bpy
@@ -221,6 +221,7 @@ def RKS_GEN_scaling(ksi, context, ks, data):
# ------
# Property identifiers for Bendy Bones
bbone_property_ids = (
"bbone_curveinx",


@@ -121,7 +121,7 @@ def register_node_categories(identifier, cat_list):
"category": cat,
"poll": cat.poll,
"draw": draw_node_item,
})
})
panel_type = type("NODE_PT_category_" + cat.identifier, (bpy.types.Panel,), {
"bl_space_type": 'NODE_EDITOR',
"bl_region_type": 'TOOLS',
@@ -130,7 +130,7 @@ def register_node_categories(identifier, cat_list):
"category": cat,
"poll": cat.poll,
"draw": draw_node_item,
})
})
menu_types.append(menu_type)
panel_types.append(panel_type)


@@ -25,7 +25,7 @@ __all__ = (
"draw_filtered",
"draw_hierarchy",
"draw_keymaps",
)
)
import bpy
@@ -174,7 +174,7 @@ def draw_kmi(display_keymaps, kc, km, kmi, layout, level):
sub.prop(kmi, "propvalue", text="")
else:
# One day...
#~ sub.prop_search(kmi, "idname", bpy.context.window_manager, "operators_all", text="")
# sub.prop_search(kmi, "idname", bpy.context.window_manager, "operators_all", text="")
sub.prop(kmi, "idname", text="")
if map_type not in {'TEXTINPUT', 'TIMER'}:
@@ -207,6 +207,7 @@ def draw_kmi(display_keymaps, kc, km, kmi, layout, level):
draw_km(display_keymaps, kc, kmm, None, layout, level + 1)
layout.context_pointer_set("keymap", km)
_EVENT_TYPES = set()
_EVENT_TYPE_MAP = {}
_EVENT_TYPE_MAP_EXTRA = {}
@@ -234,10 +235,10 @@ def draw_filtered(display_keymaps, filter_type, filter_text, layout):
"RMB": 'RIGHTMOUSE',
"LMB": 'LEFTMOUSE',
"MMB": 'MIDDLEMOUSE',
})
})
_EVENT_TYPE_MAP_EXTRA.update({
"%d" % i: "NUMPAD_%d" % i for i in range(10)
})
})
# done with once off init
filter_text_split = filter_text.strip()
@@ -251,7 +252,7 @@ def draw_filtered(display_keymaps, filter_type, filter_text, layout):
"cmd": "oskey",
"oskey": "oskey",
"any": "any",
}
}
# KeyMapItem like dict, use for comparing against
# attr: {states, ...}
kmi_test_dict = {}
@@ -260,8 +261,8 @@ def draw_filtered(display_keymaps, filter_type, filter_text, layout):
kmi_test_type = []
# initialize? - so if a if a kmi has a MOD assigned it wont show up.
#~ for kv in key_mod.values():
#~ kmi_test_dict[kv] = {False}
# for kv in key_mod.values():
# kmi_test_dict[kv] = {False}
# altname: attr
for kk, kv in key_mod.items():
@@ -374,7 +375,7 @@ def draw_keymaps(context, layout):
row = subcol.row(align=True)
#~ row.prop_search(wm.keyconfigs, "active", wm, "keyconfigs", text="Key Config")
# row.prop_search(wm.keyconfigs, "active", wm, "keyconfigs", text="Key Config")
text = bpy.path.display_name(wm.keyconfigs.active.name)
if not text:
text = "Blender (default)"
@@ -382,8 +383,8 @@ def draw_keymaps(context, layout):
row.operator("wm.keyconfig_preset_add", text="", icon='ZOOMIN')
row.operator("wm.keyconfig_preset_add", text="", icon='ZOOMOUT').remove_active = True
#~ layout.context_pointer_set("keyconfig", wm.keyconfigs.active)
#~ row.operator("wm.keyconfig_remove", text="", icon='X')
# layout.context_pointer_set("keyconfig", wm.keyconfigs.active)
# row.operator("wm.keyconfig_remove", text="", icon='X')
row.separator()
rowsub = row.split(align=True, percentage=0.33)
# postpone drawing into rowsub, so we can set alert!


@@ -26,7 +26,7 @@ import bmesh
__all__ = (
"select_prev",
"select_next",
)
)
def other_edges_over_face(e):
@@ -303,8 +303,9 @@ def select_next(bm, report):
for fn in (pass_fn, set, sum_set, len):
uuid_cmp_test = fn(uuid_cmp)
ele_pair_next_uuid_test = [
(ele, uuid) for (ele, uuid) in ele_pair_next_uuid
if uuid_cmp_test == fn(uuid)]
(ele, uuid) for (ele, uuid) in ele_pair_next_uuid
if uuid_cmp_test == fn(uuid)
]
if len(ele_pair_next_uuid_test) > 1:
ele_pair_next_uuid = ele_pair_next_uuid_test
elif len(ele_pair_next_uuid_test) == 1:


@@ -527,6 +527,8 @@ class BUILTIN_KSI_WholeCharacter(KeyingSetInfo):
ksi.addProp(ks, bone, prop)
# All properties that are likely to get animated in a character rig, only selected bones.
class BUILTIN_KSI_WholeCharacterSelected(KeyingSetInfo):
"""Insert a keyframe for all properties that are likely to get animated in a character rig """
"""(only selected bones)"""
@@ -557,6 +559,8 @@ class BUILTIN_KSI_WholeCharacterSelected(KeyingSetInfo):
###############################
# Delta Location
class BUILTIN_KSI_DeltaLocation(KeyingSetInfo):
"""Insert keyframes for additional location offset"""
bl_label = "Delta Location"
@@ -643,6 +647,7 @@ class BUILTIN_KSI_DeltaScale(KeyingSetInfo):
###############################
# Note that this controls order of options in 'insert keyframe' menu.
# Better try to keep some logical order here beyond mere alphabetical one, also because of menu entries shortcut.
# See also T51867.


@@ -61,12 +61,13 @@ def group_tools_draw(self, layout, context):
layout.operator("node.group_ungroup")
layout.separator()
# maps node tree type to group node type
node_tree_group_type = {
'CompositorNodeTree': 'CompositorNodeGroup',
'ShaderNodeTree': 'ShaderNodeGroup',
'TextureNodeTree': 'TextureNodeGroup',
}
}
# generic node group items generator for shader, compositor and texture node groups
@@ -185,14 +186,14 @@ shader_node_categories = [
NodeItem("ShaderNodeUVMap"),
NodeItem("ShaderNodeUVAlongStroke", poll=line_style_shader_nodes_poll),
NodeItem("NodeGroupInput", poll=group_input_output_item_poll),
]),
]),
ShaderNodeCategory("SH_NEW_OUTPUT", "Output", items=[
NodeItem("ShaderNodeOutputMaterial", poll=object_eevee_cycles_shader_nodes_poll),
NodeItem("ShaderNodeOutputLamp", poll=object_cycles_shader_nodes_poll),
NodeItem("ShaderNodeOutputWorld", poll=world_shader_nodes_poll),
NodeItem("ShaderNodeOutputLineStyle", poll=line_style_shader_nodes_poll),
NodeItem("NodeGroupOutput", poll=group_input_output_item_poll),
]),
]),
ShaderNodeCategory("SH_NEW_SHADER", "Shader", items=[
NodeItem("ShaderNodeMixShader", poll=eevee_cycles_shader_nodes_poll),
NodeItem("ShaderNodeAddShader", poll=eevee_cycles_shader_nodes_poll),
@@ -215,7 +216,7 @@ shader_node_categories = [
NodeItem("ShaderNodeVolumeScatter", poll=eevee_cycles_shader_nodes_poll),
NodeItem("ShaderNodeVolumePrincipled"),
NodeItem("ShaderNodeEeveeSpecular", poll=object_eevee_shader_nodes_poll),
]),
]),
ShaderNodeCategory("SH_NEW_TEXTURE", "Texture", items=[
NodeItem("ShaderNodeTexImage"),
NodeItem("ShaderNodeTexEnvironment"),
@@ -230,7 +231,7 @@ shader_node_categories = [
NodeItem("ShaderNodeTexBrick"),
NodeItem("ShaderNodeTexPointDensity"),
NodeItem("ShaderNodeTexIES"),
]),
]),
ShaderNodeCategory("SH_NEW_OP_COLOR", "Color", items=[
NodeItem("ShaderNodeMixRGB"),
NodeItem("ShaderNodeRGBCurve"),
@@ -239,7 +240,7 @@ shader_node_categories = [
NodeItem("ShaderNodeHueSaturation"),
NodeItem("ShaderNodeGamma"),
NodeItem("ShaderNodeBrightContrast"),
]),
]),
ShaderNodeCategory("SH_NEW_OP_VECTOR", "Vector", items=[
NodeItem("ShaderNodeMapping"),
NodeItem("ShaderNodeBump"),
@@ -249,7 +250,7 @@ shader_node_categories = [
NodeItem("ShaderNodeNormal"),
NodeItem("ShaderNodeVectorCurve"),
NodeItem("ShaderNodeVectorTransform"),
]),
]),
ShaderNodeCategory("SH_NEW_CONVERTOR", "Converter", items=[
NodeItem("ShaderNodeMath"),
NodeItem("ShaderNodeValToRGB"),
@@ -264,16 +265,16 @@ shader_node_categories = [
NodeItem("ShaderNodeCombineHSV"),
NodeItem("ShaderNodeWavelength"),
NodeItem("ShaderNodeBlackbody"),
]),
]),
ShaderNodeCategory("SH_NEW_SCRIPT", "Script", items=[
NodeItem("ShaderNodeScript"),
]),
]),
ShaderNodeCategory("SH_NEW_GROUP", "Group", items=node_group_items),
ShaderNodeCategory("SH_NEW_LAYOUT", "Layout", items=[
NodeItem("NodeFrame"),
NodeItem("NodeReroute"),
]),
]
]),
]
compositor_node_categories = [
# Compositor Nodes
@@ -289,7 +290,7 @@ compositor_node_categories = [
NodeItem("CompositorNodeTime"),
NodeItem("CompositorNodeTrackPos"),
NodeItem("NodeGroupInput", poll=group_input_output_item_poll),
]),
]),
CompositorNodeCategory("CMP_OUTPUT", "Output", items=[
NodeItem("CompositorNodeComposite"),
NodeItem("CompositorNodeViewer"),
@@ -297,7 +298,7 @@ compositor_node_categories = [
NodeItem("CompositorNodeOutputFile"),
NodeItem("CompositorNodeLevels"),
NodeItem("NodeGroupOutput", poll=group_input_output_item_poll),
]),
]),
CompositorNodeCategory("CMP_OP_COLOR", "Color", items=[
NodeItem("CompositorNodeMixRGB"),
NodeItem("CompositorNodeAlphaOver"),
@@ -311,7 +312,7 @@ compositor_node_categories = [
NodeItem("CompositorNodeColorCorrection"),
NodeItem("CompositorNodeTonemap"),
NodeItem("CompositorNodeZcombine"),
]),
]),
CompositorNodeCategory("CMP_CONVERTOR", "Converter", items=[
NodeItem("CompositorNodeMath"),
NodeItem("CompositorNodeValToRGB"),
@@ -328,7 +329,7 @@ compositor_node_categories = [
NodeItem("CompositorNodeSepYCCA"),
NodeItem("CompositorNodeCombYCCA"),
NodeItem("CompositorNodeSwitchView"),
]),
]),
CompositorNodeCategory("CMP_OP_FILTER", "Filter", items=[
NodeItem("CompositorNodeBlur"),
NodeItem("CompositorNodeBilateralblur"),
@@ -343,14 +344,14 @@ compositor_node_categories = [
NodeItem("CompositorNodeDBlur"),
NodeItem("CompositorNodePixelate"),
NodeItem("CompositorNodeSunBeams"),
]),
]),
CompositorNodeCategory("CMP_OP_VECTOR", "Vector", items=[
NodeItem("CompositorNodeNormal"),
NodeItem("CompositorNodeMapValue"),
NodeItem("CompositorNodeMapRange"),
NodeItem("CompositorNodeNormalize"),
NodeItem("CompositorNodeCurveVec"),
]),
]),
CompositorNodeCategory("CMP_MATTE", "Matte", items=[
NodeItem("CompositorNodeKeying"),
NodeItem("CompositorNodeKeyingScreen"),
@@ -364,7 +365,7 @@ compositor_node_categories = [
NodeItem("CompositorNodeChromaMatte"),
NodeItem("CompositorNodeColorMatte"),
NodeItem("CompositorNodeDoubleEdgeMask"),
]),
]),
CompositorNodeCategory("CMP_DISTORT", "Distort", items=[
NodeItem("CompositorNodeScale"),
NodeItem("CompositorNodeLensdist"),
@@ -379,14 +380,14 @@ compositor_node_categories = [
NodeItem("CompositorNodeStabilize"),
NodeItem("CompositorNodePlaneTrackDeform"),
NodeItem("CompositorNodeCornerPin"),
]),
]),
CompositorNodeCategory("CMP_GROUP", "Group", items=node_group_items),
CompositorNodeCategory("CMP_LAYOUT", "Layout", items=[
NodeItem("NodeFrame"),
NodeItem("NodeReroute"),
NodeItem("CompositorNodeSwitch"),
]),
]
]),
]
texture_node_categories = [
# Texture Nodes
@@ -396,12 +397,12 @@ texture_node_categories = [
NodeItem("TextureNodeTexture"),
NodeItem("TextureNodeImage"),
NodeItem("NodeGroupInput", poll=group_input_output_item_poll),
]),
]),
TextureNodeCategory("TEX_OUTPUT", "Output", items=[
NodeItem("TextureNodeOutput"),
NodeItem("TextureNodeViewer"),
NodeItem("NodeGroupOutput", poll=group_input_output_item_poll),
]),
]),
TextureNodeCategory("TEX_OP_COLOR", "Color", items=[
NodeItem("TextureNodeMixRGB"),
NodeItem("TextureNodeCurveRGB"),
@@ -409,11 +410,11 @@ texture_node_categories = [
NodeItem("TextureNodeHueSaturation"),
NodeItem("TextureNodeCompose"),
NodeItem("TextureNodeDecompose"),
]),
]),
TextureNodeCategory("TEX_PATTERN", "Pattern", items=[
NodeItem("TextureNodeChecker"),
NodeItem("TextureNodeBricks"),
]),
]),
TextureNodeCategory("TEX_TEXTURE", "Textures", items=[
NodeItem("TextureNodeTexNoise"),
NodeItem("TextureNodeTexDistNoise"),
@@ -425,26 +426,26 @@ texture_node_categories = [
NodeItem("TextureNodeTexWood"),
NodeItem("TextureNodeTexMusgrave"),
NodeItem("TextureNodeTexStucci"),
]),
]),
TextureNodeCategory("TEX_CONVERTOR", "Converter", items=[
NodeItem("TextureNodeMath"),
NodeItem("TextureNodeValToRGB"),
NodeItem("TextureNodeRGBToBW"),
NodeItem("TextureNodeValToNor"),
NodeItem("TextureNodeDistance"),
]),
]),
TextureNodeCategory("TEX_DISTORT", "Distort", items=[
NodeItem("TextureNodeScale"),
NodeItem("TextureNodeTranslate"),
NodeItem("TextureNodeRotate"),
NodeItem("TextureNodeAt"),
]),
]),
TextureNodeCategory("TEX_GROUP", "Group", items=node_group_items),
TextureNodeCategory("TEX_LAYOUT", "Layout", items=[
NodeItem("NodeFrame"),
NodeItem("NodeReroute"),
]),
]
]),
]
def register():
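
register_node_categories() from nodeitems_utils (diffed further above) is the same entry point add-ons use to contribute their own node menu categories. A small sketch under assumed names; the identifier, category and items are made up, only the registration calls follow the module:

import nodeitems_utils
from nodeitems_utils import NodeCategory, NodeItem

class MyShaderNodeCategory(NodeCategory):
    @classmethod
    def poll(cls, context):
        # only show these entries in shader node trees
        return context.space_data.tree_type == 'ShaderNodeTree'

my_node_categories = [
    MyShaderNodeCategory("MY_SHADER_EXTRAS", "My Extras", items=[
        NodeItem("ShaderNodeMixRGB"),
        NodeItem("ShaderNodeMath"),
    ]),
]

def register():
    nodeitems_utils.register_node_categories("MY_SHADER_EXTRAS", my_node_categories)

def unregister():
    nodeitems_utils.unregister_node_categories("MY_SHADER_EXTRAS")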


@@ -68,7 +68,7 @@ def image_from_file(filepath):
if bpy is not None:
pixels, pixel_w, pixel_h = image_from_file__bpy(filepath)
#else:
# else:
# pixels, pixel_w, pixel_h = image_from_file__py(filepath)
return pixels, pixel_w, pixel_h
@@ -95,12 +95,14 @@ def write_subimage(sub_x, sub_y, sub_w, sub_h,
with open(filepath, 'wb') as f:
f.write(struct.pack('<6I',
f.write(
struct.pack(
'<6I',
sub_w, sub_h,
sub_x, sub_y,
# redundant but including to maintain consistency
pixel_w, pixel_h,
))
pixel_w, pixel_h,
))
for y in range(sub_h):
for x in range(sub_w):
@@ -113,8 +115,9 @@ def write_subimage(sub_x, sub_y, sub_w, sub_h,
_dice_icon_name_cache = {}
def dice_icon_name(x, y, parts_x, parts_y,
name_style=None, prefix=""):
def dice_icon_name(
x, y, parts_x, parts_y,
name_style=None, prefix=""):
"""
How to name icons, this is mainly for what name we get in git,
the actual names don't really matter, its just nice to have the
@@ -143,7 +146,7 @@ def dice_icon_name(x, y, parts_x, parts_y,
icon_name = _dice_icon_name_cache[index]
# for debugging its handy to sort by number
#~ id_str = "%03d_%s%s.dat" % (index, prefix, icon_name)
# ~ id_str = "%03d_%s%s.dat" % (index, prefix, icon_name)
id_str = "%s%s.dat" % (prefix, icon_name)
@@ -158,16 +161,18 @@ def dice_icon_name(x, y, parts_x, parts_y,
return id_str
def dice(filepath, output, output_prefix, name_style,
parts_x, parts_y,
minx, miny, maxx, maxy,
minx_icon, miny_icon, maxx_icon, maxy_icon,
spacex_icon, spacey_icon,
):
def dice(
filepath, output, output_prefix, name_style,
parts_x, parts_y,
minx, miny, maxx, maxy,
minx_icon, miny_icon, maxx_icon, maxy_icon,
spacex_icon, spacey_icon,
):
is_simple = (max(minx, miny, maxx, maxy,
minx_icon, miny_icon, maxx_icon, maxy_icon,
spacex_icon, spacey_icon) == 0)
is_simple = (max(
minx, miny, maxx, maxy,
minx_icon, miny_icon, maxx_icon, maxy_icon,
spacex_icon, spacey_icon) == 0)
pixels, pixel_w, pixel_h = image_from_file(filepath)
@@ -199,9 +204,11 @@ def dice(filepath, output, output_prefix, name_style,
for x in range(parts_x):
for y in range(parts_y):
id_str = dice_icon_name(x, y,
parts_x, parts_y,
name_style=name_style, prefix=output_prefix)
id_str = dice_icon_name(
x, y,
parts_x, parts_y,
name_style=name_style, prefix=output_prefix
)
filepath = os.path.join(output, id_str)
if VERBOSE:
print(" writing:", filepath)
@@ -235,25 +242,35 @@ def main():
parser = argparse.ArgumentParser(description=__doc__, epilog=epilog)
# File path options
parser.add_argument("--image", dest="image", metavar='FILE',
help="Image file")
parser.add_argument("--output", dest="output", metavar='DIR',
help="Output directory")
parser.add_argument("--output_prefix", dest="output_prefix", metavar='STRING',
help="Output prefix")
parser.add_argument(
"--image", dest="image", metavar='FILE',
help="Image file",
)
parser.add_argument(
"--output", dest="output", metavar='DIR',
help="Output directory",
)
parser.add_argument(
"--output_prefix", dest="output_prefix", metavar='STRING',
help="Output prefix",
)
# Icon naming option
parser.add_argument("--name_style", dest="name_style", metavar='ENUM', type=str,
choices=('', 'UI_ICONS'),
help="The metod used for naming output data")
parser.add_argument(
"--name_style", dest="name_style", metavar='ENUM', type=str,
choices=('', 'UI_ICONS'),
help="The metod used for naming output data",
)
# Options for dicing up the image
parser.add_argument("--parts_x", dest="parts_x", metavar='INT', type=int,
help="Grid X parts")
parser.add_argument("--parts_y", dest="parts_y", metavar='INT', type=int,
help="Grid Y parts")
parser.add_argument(
"--parts_x", dest="parts_x", metavar='INT', type=int,
help="Grid X parts",
)
parser.add_argument(
"--parts_y", dest="parts_y", metavar='INT', type=int,
help="Grid Y parts",
)
_help = "Inset from the outer edge (in pixels)"
parser.add_argument("--minx", dest="minx", metavar='INT', type=int, help=_help)
@@ -287,5 +304,6 @@ def main():
args.spacex_icon, args.spacey_icon,
)
if __name__ == "__main__":
main()


@@ -67,5 +67,6 @@ def main():
icondata_to_png(file_src, file_dst)
if __name__ == "__main__":
main()


@@ -56,19 +56,19 @@ def check_commandline():
"""
import sys
# Usage
if len(sys.argv)==1 or len(sys.argv)>3:
if len(sys.argv) == 1 or len(sys.argv) > 3:
usage()
if sys.argv[1] == '-h':
help()
elif not sys.argv[1].endswith((".txt", ".py")):
print ('\nBad input file extension... exiting.')
print('\nBad input file extension... exiting.')
usage()
else:
inputfile = sys.argv[1]
if len(sys.argv) == 2:
sort_priority = default_sort_choice
print ('\nSecond parameter missing: choosing to order by %s.' % font_bold(sort_priority))
elif len(sys.argv)==3:
print('\nSecond parameter missing: choosing to order by %s.' % font_bold(sort_priority))
elif len(sys.argv) == 3:
sort_priority = sys.argv[2]
if sort_priority not in sort_choices:
print('\nWrong sort_priority... exiting.')
@@ -93,9 +93,11 @@ def check_prefix(prop, btype):
return ""
def check_if_changed(a,b):
if a != b: return 'changed'
else: return 'same'
def check_if_changed(a, b):
if a != b:
return 'changed'
else:
return 'same'
def get_props_from_txt(input_filename):
@@ -103,12 +105,12 @@ def get_props_from_txt(input_filename):
If the file is *.txt, the script assumes it is formatted as outlined in this script docstring
"""
file=open(input_filename,'r')
file_lines=file.readlines()
file = open(input_filename, 'r')
file_lines = file.readlines()
file.close()
props_list=[]
props_length_max=[0,0,0,0,0,0,0,0]
props_list = []
props_length_max = [0, 0, 0, 0, 0, 0, 0, 0]
done_text = "+"
done = 0
@@ -117,7 +119,7 @@ def get_props_from_txt(input_filename):
for iii, line in enumerate(file_lines):
# debug
#print(line)
# print(line)
line_strip = line.strip()
# empty line or comment
if not line_strip:
@@ -136,7 +138,7 @@ def get_props_from_txt(input_filename):
if '*' in bclass:
comment, bclass = [x.strip() for x in bclass.split('*', 1)]
else:
comment= ''
comment = ''
# skipping the header if we have one.
# the header is assumed to be "NOTE * CLASS.FROM -> TO: TYPE DESCRIPTION"
@@ -155,7 +157,7 @@ def get_props_from_txt(input_filename):
# make life easy and strip quotes
description = description.replace("'", "").replace('"', "").replace("\\", "").strip()
except ValueError:
btype, description = [tail,'NO DESCRIPTION']
btype, description = [tail, 'NO DESCRIPTION']
# keyword-check
kwcheck = check_prefix(bto, btype)
@@ -164,17 +166,17 @@ def get_props_from_txt(input_filename):
changed = check_if_changed(bfrom, bto)
# lists formatting
props=[comment, changed, bclass, bfrom, bto, kwcheck, btype, description]
props = [comment, changed, bclass, bfrom, bto, kwcheck, btype, description]
props_list.append(props)
props_length_max=list(map(max,zip(props_length_max,list(map(len,props)))))
props_length_max = list(map(max, zip(props_length_max, list(map(len, props)))))
if done_text in comment:
done += 1
tot += 1
print("Total done %.2f" % (done / tot * 100.0) )
print("Total done %.2f" % (done / tot * 100.0))
return (props_list,props_length_max)
return (props_list, props_length_max)
def get_props_from_py(input_filename):
@@ -185,25 +187,25 @@ def get_props_from_py(input_filename):
# adds the list "rna_api" to this function's scope
rna_api = __import__(input_filename[:-3]).rna_api
props_length_max = [0 for i in rna_api[0]] # this way if the vector will take more elements we are safe
for index,props in enumerate(rna_api):
props_length_max = [0 for i in rna_api[0]] # this way if the vector will take more elements we are safe
for index, props in enumerate(rna_api):
comment, changed, bclass, bfrom, bto, kwcheck, btype, description = props
kwcheck = check_prefix(bto, btype) # keyword-check
changed = check_if_changed(bfrom, bto) # changed?
description = repr(description)
description = description.replace("'", "").replace('"', "").replace("\\", "").strip()
rna_api[index] = [comment, changed, bclass, bfrom, bto, kwcheck, btype, description]
props_length = list(map(len,props)) # lengths
props_length_max = list(map(max,zip(props_length_max,props_length))) # max lengths
return (rna_api,props_length_max)
props_length = list(map(len, props)) # lengths
props_length_max = list(map(max, zip(props_length_max, props_length))) # max lengths
return (rna_api, props_length_max)
def get_props(input_filename):
if input_filename.endswith(".txt"):
props_list,props_length_max = get_props_from_txt(input_filename)
props_list, props_length_max = get_props_from_txt(input_filename)
elif input_filename.endswith(".py"):
props_list,props_length_max = get_props_from_py(input_filename)
return (props_list,props_length_max)
props_list, props_length_max = get_props_from_py(input_filename)
return (props_list, props_length_max)
def sort(props_list, sort_priority):
@@ -222,7 +224,7 @@ def sort(props_list, sort_priority):
else:
props_list = sorted(props_list, key=lambda p: p[i])
print ('\nSorted by %s.' % font_bold(sort_priority))
print('\nSorted by %s.' % font_bold(sort_priority))
return props_list
@@ -250,30 +252,35 @@ def write_files(basename, props_list, props_length_max):
* rna_api.py: unformatted, just as final output
"""
f_rna = open("rna_api.py",'w')
f_txt = open(basename + '_work.txt','w')
f_py = open(basename + '_work.py','w')
f_rna = open("rna_api.py", 'w')
f_txt = open(basename + '_work.txt', 'w')
f_py = open(basename + '_work.py', 'w')
# reminder: props=[comment, changed, bclass, bfrom, bto, kwcheck, btype, description]
# [comment *] ToolSettings.snap_align_rotation -> use_snap_align_rotation: boolean [Align rotation with the snapping target]
rna = py = txt = ''
props_list = [['NOTE', 'CHANGED', 'CLASS', 'FROM', 'TO', 'KEYWORD-CHECK', 'TYPE', 'DESCRIPTION']] + props_list
for props in props_list:
#txt
# txt
# quick way we can tell if it changed
if props[3] == props[4]: txt += "#"
else: txt += " "
if props[3] == props[4]:
txt += "#"
else:
txt += " "
if props[0] != '': txt += '%s * ' % props[0] # comment
txt += '%s.%s -> %s: %s "%s"\n' % tuple(props[2:5] + props[6:]) # skipping keyword-check
if props[0] != '':
txt += '%s * ' % props[0] # comment
txt += '%s.%s -> %s: %s "%s"\n' % tuple(props[2:5] + props[6:]) # skipping keyword-check
# rna_api
if props[0] == 'NOTE': indent = '# '
else: indent = ' '
rna += indent + '("%s", "%s", "%s", "%s", "%s"),\n' % tuple(props[2:5] + props[6:]) # description is already string formatted
if props[0] == 'NOTE':
indent = '# '
else:
indent = ' '
rna += indent + '("%s", "%s", "%s", "%s", "%s"),\n' % tuple(props[2:5] + props[6:]) # description is already string formatted
# py
blanks = [' '* (x[0]-x[1]) for x in zip(props_length_max,list(map(len,props)))]
props = [('"%s"%s' if props[-1] != x[0] else "%s%s") % (x[0],x[1]) for x in zip(props,blanks)]
blanks = [' ' * (x[0] - x[1]) for x in zip(props_length_max, list(map(len, props)))]
props = [('"%s"%s' if props[-1] != x[0] else "%s%s") % (x[0], x[1]) for x in zip(props, blanks)]
py += indent + '(%s, %s, %s, %s, %s, %s, %s, "%s"),\n' % tuple(props)
f_txt.write(txt)
@@ -290,7 +297,7 @@ def write_files(basename, props_list, props_length_max):
f_py.close()
f_rna.close()
print ('\nSaved %s, %s and %s.\n' % (font_bold(f_txt.name), font_bold(f_py.name), font_bold(f_rna.name) ) )
print('\nSaved %s, %s and %s.\n' % (font_bold(f_txt.name), font_bold(f_py.name), font_bold(f_rna.name)))
def main():
@@ -298,21 +305,21 @@ def main():
global sort_choices, default_sort_choice
global kw_prefixes, kw
sort_choices = ['note','changed','class','from','to','kw', 'class.to']
sort_choices = ['note', 'changed', 'class', 'from', 'to', 'kw', 'class.to']
default_sort_choice = sort_choices[-1]
kw_prefixes = [ 'active','apply','bl','exclude','has','invert','is','lock', \
'pressed','show','show_only','use','use_only','layers','states', 'select']
kw = ['active','hide','invert','select','layers','mute','states','use','lock']
kw_prefixes = ['active', 'apply', 'bl', 'exclude', 'has', 'invert', 'is', 'lock',
'pressed', 'show', 'show_only', 'use', 'use_only', 'layers', 'states', 'select']
kw = ['active', 'hide', 'invert', 'select', 'layers', 'mute', 'states', 'use', 'lock']
input_filename, sort_priority = check_commandline()
props_list,props_length_max = get_props(input_filename)
props_list = sort(props_list,sort_priority)
props_list, props_length_max = get_props(input_filename)
props_list = sort(props_list, sort_priority)
output_basename = file_basename(input_filename)
write_files(output_basename, props_list,props_length_max)
write_files(output_basename, props_list, props_length_max)
if __name__=='__main__':
if __name__ == '__main__':
import sys
if not sys.version.startswith("3"):
print("Incorrect python version, use python 3!")


@@ -6,56 +6,58 @@ import sys
Example usage:
python3 rna_cleaner_merge.py out_work.py rna_booleans_work.py
'''
def main():
def work_line_id(line):
return line[2].split("|")[-1], line[3] # class/from
def work_line_id(line):
return line[2].split("|")[-1], line[3] # class/from
if not (sys.argv[-1].endswith(".py") and sys.argv[-2].endswith(".py")):
print("Only accepts 2 py files as arguments.")
if not (sys.argv[-1].endswith(".py") and sys.argv[-2].endswith(".py")):
print("Only accepts 2 py files as arguments.")
sys.path.insert(0, ".")
sys.path.insert(0, ".")
mod_from = __import__(sys.argv[-1][:-3])
mod_to = __import__(sys.argv[-2][:-3])
mod_from = __import__(sys.argv[-1][:-3])
mod_to = __import__(sys.argv[-2][:-3])
mod_to_dict = dict([(work_line_id(line), line) for line in mod_to.rna_api])
mod_from_dict = dict([(work_line_id(line), line) for line in mod_from.rna_api])
mod_to_dict = dict([(work_line_id(line), line) for line in mod_to.rna_api])
mod_from_dict = dict([(work_line_id(line), line) for line in mod_from.rna_api])
rna_api_new = []
rna_api_new = []
for key, val_orig in mod_to_dict.items():
try:
val_new = mod_from_dict.pop(key)
except:
# print("not found", key)
val_new = val_orig
for key, val_orig in mod_to_dict.items():
try:
val_new = mod_from_dict.pop(key)
except:
# print("not found", key)
val_new = val_orig
# always take the class from the base
val = list(val_orig)
val[0] = val_new[0] # comment
val[4] = val_new[4] # -> to
val = tuple(val)
rna_api_new.append(val)
# always take the class from the base
val = list(val_orig)
val[0] = val_new[0] # comment
val[4] = val_new[4] # -> to
val = tuple(val)
rna_api_new.append(val)
def write_work_file(file_path, rna_api):
rna_api = list(rna_api)
rna_api.sort(key=work_line_id)
file_out = open(file_path, "w")
file_out.write("rna_api = [\n")
for line in rna_api:
file_out.write(" %s,\n" % (repr(line)))
file_out.write("]\n")
file_out.close()
def write_work_file(file_path, rna_api):
rna_api = list(rna_api)
rna_api.sort(key=work_line_id)
file_out = open(file_path, "w")
file_out.write("rna_api = [\n")
for line in rna_api:
file_out.write(" %s,\n" % (repr(line)))
file_out.write("]\n")
file_out.close()
file_path = sys.argv[-2][:-3] + "_merged.py"
write_work_file(file_path, rna_api_new)
file_path = sys.argv[-2][:-3] + "_merged.py"
write_work_file(file_path, rna_api_new)
if mod_from_dict:
file_path = sys.argv[-2][:-3] + "_lost.py"
write_work_file(file_path, list(mod_from_dict.values()))
print("Warning '%s' contains lost %d items from module %s.py" % (file_path, len(mod_from_dict), mod_from.__name__))
if mod_from_dict:
file_path = sys.argv[-2][:-3] + "_lost.py"
write_work_file(file_path, list(mod_from_dict.values()))
print("Warning '%s' contains lost %d items from module %s.py" % (file_path, len(mod_from_dict), mod_from.__name__))
if __name__ == "__main__":
main()
main()
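The merge in main() above boils down to keying every work-file entry by its class/from pair and preferring the comment and "to" name from the newer file. A self-contained sketch of that step, using hypothetical entries laid out like rna_api tuples (comment, changed, class|kw, from, to), could look like this:

# Standalone sketch of the merge-by-key idea used in main() above.
# The tuples are hypothetical stand-ins for rna_api entries.
to_entries = [
    ("old comment", False, "Object|use", "use_shadow", "use_shadow"),
]
from_entries = [
    ("updated comment", True, "Object|use", "use_shadow", "use_shadows"),
]

def line_id(line):
    # Same key as work_line_id() above: the class part and the 'from' name.
    return line[2].split("|")[-1], line[3]

to_dict = {line_id(line): line for line in to_entries}
from_dict = {line_id(line): line for line in from_entries}

merged = []
for key, val_orig in to_dict.items():
    val_new = from_dict.pop(key, val_orig)
    val = list(val_orig)
    val[0] = val_new[0]  # take the newer comment
    val[4] = val_new[4]  # take the newer 'to' name
    merged.append(tuple(val))

print(merged)  # anything left in from_dict would end up in the "_lost" file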

View File

@@ -129,6 +129,7 @@ def seek(r, txt, recurs):
newtxt = txt + '[' + str(i) + ']'
seek(r[i], newtxt, recurs + 1)
seek(bpy.data, 'bpy.data', 0)
# seek(bpy.types, 'bpy.types', 0)
'''
@@ -140,8 +141,8 @@ for d in dir(bpy.types):
seek(r, 'bpy.types.' + d + '.bl_rna', 0)
'''
#print dir(bpy)
# print dir(bpy)
#import sys
#sys.exit()
# sys.exit()
print("iter over ", seek_count, "rna items")

View File

@@ -50,8 +50,10 @@ for d in defs.split('\n'):
if not w:
continue
try: w.remove("#define")
except: pass
try:
w.remove("#define")
except:
pass
# print w

View File

@@ -142,5 +142,6 @@ def main():
else:
print("\nnone found!")
if __name__ == '__main__':
main()

View File

@@ -33,7 +33,6 @@ from modules.test_utils import (with_tempdir,
)
class AbcPropError(Exception):
"""Raised when AbstractAlembicTest.abcprop() finds an error."""

View File

@@ -109,7 +109,6 @@ class SimpleImportTest(AbstractAlembicTest):
self.assertAlmostEqual(0, y)
self.assertAlmostEqual(2, z)
def test_select_after_import(self):
# Add a sphere, so that there is something in the scene, selected, and active,
# before we do the Alembic import.

View File

@@ -33,7 +33,7 @@ import imp
BLACKLIST_DIRS = (
os.path.join(bpy.utils.resource_path('USER'), "scripts"),
) + tuple(addon_utils.paths()[1:])
) + tuple(addon_utils.paths()[1:])
BLACKLIST_ADDONS = set()
@@ -54,9 +54,10 @@ def _init_addon_blacklist():
def addon_modules_sorted():
modules = addon_utils.modules({})
modules[:] = [
mod for mod in modules
if not (mod.__file__.startswith(BLACKLIST_DIRS))
if not (mod.__name__ in BLACKLIST_ADDONS)]
mod for mod in modules
if not (mod.__file__.startswith(BLACKLIST_DIRS))
if not (mod.__name__ in BLACKLIST_ADDONS)
]
modules.sort(key=lambda mod: mod.__name__)
return modules

View File

@@ -39,20 +39,20 @@ BLACKLIST = {
# The unpacked wheel is only loaded when actually used, not directly on import:
os.path.join("io_blend_utils", "blender_bam-unpacked.whl"),
}
}
# Some modules need to add to the `sys.path`.
MODULE_SYS_PATHS = {
# Runs in a Python subprocess, so its expected its basedir can be imported.
"io_blend_utils.blendfile_pack": ".",
}
}
if not bpy.app.build_options.freestyle:
BLACKLIST.add("render_freestyle_svg")
BLACKLIST_DIRS = (
os.path.join(bpy.utils.resource_path('USER'), "scripts"),
) + tuple(addon_utils.paths()[1:])
) + tuple(addon_utils.paths()[1:])
def module_names_recursive(mod_dir, *, parent=None):
@@ -168,7 +168,7 @@ def load_modules():
os.sep + "presets" + os.sep,
os.sep + "templates" + os.sep,
] + ([(os.sep + f + os.sep) for f in BLACKLIST] +
[(os.sep + f + ".py") for f in BLACKLIST])
[(os.sep + f + ".py") for f in BLACKLIST])
#
# now submodules
@@ -185,7 +185,7 @@ def load_modules():
sys.path.extend([
os.path.normpath(os.path.join(mod_dir, f))
for f in MODULE_SYS_PATHS.get(mod_name_full, ())
])
])
try:
__import__(mod_name_full)
@@ -248,6 +248,7 @@ def main():
load_addons()
load_modules()
if __name__ == "__main__":
# So a python error exits(1)
try:

View File

@@ -61,11 +61,11 @@ def render_gl(context, filepath, shade):
ctx_shading_type(context, shade)
#~ # stop to inspect!
#~ if filepath == "test_cube_shell_solidify_subsurf_wp_wire":
#~ assert(0)
#~ else:
#~ return
# stop to inspect!
# if filepath == "test_cube_shell_solidify_subsurf_wp_wire":
# assert(0)
# else:
# return
bpy.ops.render.opengl(write_still=True,
view_context=True)
@@ -219,6 +219,7 @@ def mesh_bmesh_poly_elems(poly, elems):
vert_total = poly.loop_total
return elems[vert_start:vert_start + vert_total]
def mesh_bmesh_poly_vertices(poly):
return [loop.vertex_index
for loop in mesh_bmesh_poly_elems(poly, poly.id_data.loops)]
@@ -505,7 +506,7 @@ cube_like_vertices = (
(-1, 1, 3),
(0, 1, 3),
(0, 0, 3),
)
)
cube_like_faces = (
@@ -547,7 +548,7 @@ cube_like_faces = (
(31, 30, 36, 33),
(32, 31, 33, 34),
(35, 34, 33, 36),
)
)
# useful since its a shell for solidify and it can be mirrored
@@ -564,7 +565,7 @@ cube_shell_vertices = (
(0, -1, 0),
(0, 0, -1),
(0, 1, -1),
)
)
cube_shell_face = (
@@ -577,7 +578,7 @@ cube_shell_face = (
(6, 5, 11),
(7, 4, 9, 8),
(10, 7, 6, 11),
)
)
def make_cube(scene):
@@ -678,59 +679,77 @@ def make_monkey_extra(scene):
global_tests = []
global_tests.append(("none",
(),
))
global_tests.append(
("none",
(),
)
)
# single
global_tests.append(("subsurf_single",
((modifier_subsurf_add, dict(levels=2)), ),
))
global_tests.append(
("subsurf_single",
((modifier_subsurf_add, dict(levels=2)), ),
)
)
global_tests.append(
("armature_single",
((modifier_armature_add, dict()), ),
)
)
global_tests.append(("armature_single",
((modifier_armature_add, dict()), ),
))
global_tests.append(
("mirror_single",
((modifier_mirror_add, dict()), ),
)
)
global_tests.append(
("hook_single",
((modifier_hook_add, dict()), ),
)
)
global_tests.append(("mirror_single",
((modifier_mirror_add, dict()), ),
))
global_tests.append(
("decimate_single",
((modifier_decimate_add, dict()), ),
)
)
global_tests.append(("hook_single",
((modifier_hook_add, dict()), ),
))
global_tests.append(
("build_single",
((modifier_build_add, dict()), ),
)
)
global_tests.append(("decimate_single",
((modifier_decimate_add, dict()), ),
))
global_tests.append(("build_single",
((modifier_build_add, dict()), ),
))
global_tests.append(("mask_single",
((modifier_mask_add, dict()), ),
))
global_tests.append(
("mask_single",
((modifier_mask_add, dict()), ),
)
)
# combinations
global_tests.append(("mirror_subsurf",
((modifier_mirror_add, dict()),
(modifier_subsurf_add, dict(levels=2))),
))
global_tests.append(
("mirror_subsurf",
((modifier_mirror_add, dict()),
(modifier_subsurf_add, dict(levels=2))),
)
)
global_tests.append(("solidify_subsurf",
((modifier_solidify_add, dict()),
(modifier_subsurf_add, dict(levels=2))),
))
global_tests.append(
("solidify_subsurf",
((modifier_solidify_add, dict()),
(modifier_subsurf_add, dict(levels=2))),
)
)
def apply_test(test, scene, obj,
render_func=None,
render_args=None,
render_kwargs=None,
):
def apply_test(
test, scene, obj,
render_func=None,
render_args=None,
render_kwargs=None,
):
test_name, test_funcs = test
@@ -756,10 +775,12 @@ def test_cube(context, test):
obj = make_cube_extra(scene)
ctx_camera_setup(context, location=(3, 3, 3))
apply_test(test, scene, obj,
render_func=render_gl_all_modes,
render_args=(context, obj),
render_kwargs=dict(filepath=whoami()))
apply_test(
test, scene, obj,
render_func=render_gl_all_modes,
render_args=(context, obj),
render_kwargs=dict(filepath=whoami())
)
def test_cube_like(context, test):
@@ -767,10 +788,12 @@ def test_cube_like(context, test):
obj = make_cube_like_extra(scene)
ctx_camera_setup(context, location=(5, 5, 5))
apply_test(test, scene, obj,
render_func=render_gl_all_modes,
render_args=(context, obj),
render_kwargs=dict(filepath=whoami()))
apply_test(
test, scene, obj,
render_func=render_gl_all_modes,
render_args=(context, obj),
render_kwargs=dict(filepath=whoami())
)
def test_cube_shell(context, test):
@@ -778,10 +801,12 @@ def test_cube_shell(context, test):
obj = make_cube_shell_extra(scene)
ctx_camera_setup(context, location=(4, 4, 4))
apply_test(test, scene, obj,
render_func=render_gl_all_modes,
render_args=(context, obj),
render_kwargs=dict(filepath=whoami()))
apply_test(
test, scene, obj,
render_func=render_gl_all_modes,
render_args=(context, obj),
render_kwargs=dict(filepath=whoami())
)
# -----------------------------------------------------------------------------

View File

@@ -45,20 +45,20 @@ MESHES = {
( # Edges
),
( # Loops
0, 1, 4, 3,
3, 4, 6,
1, 2, 5, 4,
3, 4, 6,
4, 7, 6,
4, 5, 9, 4, 8, 7,
0, 1, 4, 3,
3, 4, 6,
1, 2, 5, 4,
3, 4, 6,
4, 7, 6,
4, 5, 9, 4, 8, 7,
),
( # Polygons
(0, 4),
(4, 3),
(7, 4),
(11, 3),
(14, 3),
(16, 6),
(0, 4),
(4, 3),
(7, 4),
(11, 3),
(14, 3),
(16, 6),
),
),
),
@@ -66,17 +66,17 @@ MESHES = {
BUILTINS = (
"primitive_plane_add",
"primitive_cube_add",
"primitive_circle_add",
"primitive_uv_sphere_add",
"primitive_ico_sphere_add",
"primitive_cylinder_add",
"primitive_cone_add",
"primitive_grid_add",
"primitive_monkey_add",
"primitive_torus_add",
)
"primitive_plane_add",
"primitive_cube_add",
"primitive_circle_add",
"primitive_uv_sphere_add",
"primitive_ico_sphere_add",
"primitive_cylinder_add",
"primitive_cone_add",
"primitive_grid_add",
"primitive_monkey_add",
"primitive_torus_add",
)
BUILTINS_NBR = 4
BUILTINS_NBRCHANGES = 5

View File

@@ -5,25 +5,26 @@ import unittest
from bpy.utils import units
class UnitsTesting(unittest.TestCase):
# From user typing to 'internal' Blender value.
INPUT_TESTS = (
# system, type, ref, input, value
##### LENGTH
# LENGTH
('IMPERIAL', 'LENGTH', "", "1ft", 0.3048),
('IMPERIAL', 'LENGTH', "", "(1+1)ft", 0.3048 * 2),
('IMPERIAL', 'LENGTH', "", "1mi4\"", 1609.344 + 0.0254 * 4),
('METRIC', 'LENGTH', "", "0.005µm", 0.000001 * 0.005),
('METRIC', 'LENGTH', "", "1e6km", 1000.0 * 1e6),
('METRIC', 'LENGTH', "", "0.005µm", 0.000001 * 0.005),
('METRIC', 'LENGTH', "", "1e6km", 1000.0 * 1e6),
('IMPERIAL', 'LENGTH', "", "1ft5cm", 0.3048 + 0.01 * 5),
('METRIC', 'LENGTH', "", "1ft5cm", 0.3048 + 0.01 * 5),
('METRIC', 'LENGTH', "", "1ft5cm", 0.3048 + 0.01 * 5),
# Using reference string to find a unit when none is given.
('IMPERIAL', 'LENGTH', "33.3ft", "1", 0.3048),
('METRIC', 'LENGTH', "33.3dm", "1", 0.1),
('METRIC', 'LENGTH', "33.3dm", "1", 0.1),
('IMPERIAL', 'LENGTH', "33.3cm", "1", 0.3048), # ref unit is not in IMPERIAL system, default to feet...
('IMPERIAL', 'LENGTH', "33.3ft", "1\"", 0.0254), # unused ref unit, since one is given already!
('IMPERIAL', 'LENGTH', "", "1+1ft", 0.3048 * 2), # default unit taken from current string (feet).
('METRIC', 'LENGTH', "", "1+1ft", 1.3048), # no metric units, we default to meters.
('METRIC', 'LENGTH', "", "1+1ft", 1.3048), # no metric units, we default to meters.
('IMPERIAL', 'LENGTH', "", "3+1in+1ft", 0.3048 * 4 + 0.0254), # bigger unit becomes default one!
('IMPERIAL', 'LENGTH', "", "(3+1)in+1ft", 0.3048 + 0.0254 * 4),
)
@@ -31,18 +32,18 @@ class UnitsTesting(unittest.TestCase):
# From 'internal' Blender value to user-friendly printing
OUTPUT_TESTS = (
# system, type, prec, sep, compat, value, output
##### LENGTH
# LENGTH
# Note: precision handling is a bit complicated when using multi-units...
('IMPERIAL', 'LENGTH', 3, False, False, 0.3048, "1'"),
('IMPERIAL', 'LENGTH', 3, False, True, 0.3048, "1ft"),
('IMPERIAL', 'LENGTH', 4, True, False, 0.3048 * 2 + 0.0254 * 5.5, "2' 5.5\""),
('IMPERIAL', 'LENGTH', 3, False, True, 0.3048, "1ft"),
('IMPERIAL', 'LENGTH', 4, True, False, 0.3048 * 2 + 0.0254 * 5.5, "2' 5.5\""),
('IMPERIAL', 'LENGTH', 3, False, False, 1609.344 * 1e6, "1000000mi"),
('IMPERIAL', 'LENGTH', 6, False, False, 1609.344 * 1e6, "1000000mi"),
('METRIC', 'LENGTH', 3, True, False, 1000 * 2 + 0.001 * 15, "2km 2cm"),
('METRIC', 'LENGTH', 5, True, False, 1234.56789, "1km 234.6m"),
('METRIC', 'LENGTH', 6, True, False, 1234.56789, "1km 234.57m"),
('METRIC', 'LENGTH', 9, False, False, 1234.56789, "1.234568km"),
('METRIC', 'LENGTH', 9, True, False, 1000.000123456789, "1km 0.123mm"),
('METRIC', 'LENGTH', 3, True, False, 1000 * 2 + 0.001 * 15, "2km 2cm"),
('METRIC', 'LENGTH', 5, True, False, 1234.56789, "1km 234.6m"),
('METRIC', 'LENGTH', 6, True, False, 1234.56789, "1km 234.57m"),
('METRIC', 'LENGTH', 9, False, False, 1234.56789, "1.234568km"),
('METRIC', 'LENGTH', 9, True, False, 1000.000123456789, "1km 0.123mm"),
)
def test_units_inputs(self):
@@ -63,9 +64,13 @@ class UnitsTesting(unittest.TestCase):
def test_units_outputs(self):
for usys, utype, prec, sep, compat, val, output in self.OUTPUT_TESTS:
opt_str = units.to_string(usys, utype, val, prec, sep, compat)
self.assertEqual(opt_str, output,
msg="%s, %s: %f (precision: %d, separate units: %d, compat units: %d) => "
"\"%s\", expected \"%s\"" % (usys, utype, val, prec, sep, compat, opt_str, output))
self.assertEqual(
opt_str, output,
msg=(
"%s, %s: %f (precision: %d, separate units: %d, compat units: %d) => "
"\"%s\", expected \"%s\""
) % (usys, utype, val, prec, sep, compat, opt_str, output)
)
if __name__ == '__main__':
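The tables above drive Blender's bpy.utils.units helpers; as a rough illustration of the two calls involved (to_value for parsing user input, to_string for formatting, both usable only inside Blender's Python), one row of each table translates to:

# Illustration of the two unit helpers the tests above exercise; only runs
# inside Blender, where bpy.utils.units is available.
from bpy.utils import units

# "1ft" parsed under the imperial length system -> 0.3048 (meters internally).
value = units.to_value('IMPERIAL', 'LENGTH', "1ft")

# 0.3048 formatted back, precision 3, no unit splitting, compatible names -> "1ft".
text = units.to_string('IMPERIAL', 'LENGTH', 0.3048, 3, False, True)

print(value, text)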

View File

@@ -198,6 +198,7 @@ class TestBufferProtocol(TestHelper, unittest.TestCase):
self.assertEqual(list(view1), list(view2))
self.assertEqual(view1.tobytes(), view2.tobytes())
if __name__ == '__main__':
import sys
sys.argv = [__file__] + (sys.argv[sys.argv.index("--") + 1:] if "--" in sys.argv else [])

View File

@@ -61,8 +61,10 @@ class TestClass(bpy.types.PropertyGroup):
def get_scene(lib_name, sce_name):
for s in bpy.data.scenes:
if s.name == sce_name:
if (s.library and s.library.name == lib_name) or \
(lib_name == None and s.library == None):
if (
(s.library and s.library.name == lib_name) or
(lib_name is None and s.library is None)
):
return s
@@ -309,6 +311,7 @@ def test_restrictions2():
class TestUIList(UIList):
test = bpy.props.PointerProperty(type=bpy.types.Object)
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
layout.prop(item, "name", text="", emboss=False, icon_value=icon)

View File

@@ -21,22 +21,22 @@ vector_data = (
(-0.854645, 0.518036, 0.033936),
(0.42514, -0.437866, -0.792114),
(-0.358948, 0.597046, 0.717377),
(-0.985413,0.144714, 0.089294),
)
(-0.985413, 0.144714, 0.089294),
)
# get data at different scales
vector_data = sum(
(tuple(tuple(a * scale for a in v) for v in vector_data)
for scale in (s * sign for s in (0.0001, 0.1, 1.0, 10.0, 1000.0, 100000.0)
for sign in (1.0, -1.0))), ()) + ((0.0, 0.0, 0.0),)
for scale in (s * sign for s in (0.0001, 0.1, 1.0, 10.0, 1000.0, 100000.0)
for sign in (1.0, -1.0))), ()) + ((0.0, 0.0, 0.0),)
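The reassignment above packs the scaling of the test data into a single nested-generator expression; an equivalent, more explicit construction (a sketch that binds a throwaway name instead of rebinding vector_data) is:

# Explicit equivalent of the scaled-data expression above (sketch).
scaled = ()
for s in (0.0001, 0.1, 1.0, 10.0, 1000.0, 100000.0):
    for sign in (1.0, -1.0):
        scale = s * sign
        scaled += tuple(tuple(a * scale for a in v) for v in vector_data)
scaled += ((0.0, 0.0, 0.0),)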
class MatrixTesting(unittest.TestCase):
def test_matrix_column_access(self):
#mat =
#[ 1 2 3 4 ]
#[ 1 2 3 4 ]
#[ 1 2 3 4 ]
# mat =
# [ 1 2 3 4 ]
# [ 1 2 3 4 ]
# [ 1 2 3 4 ]
mat = Matrix(((1, 11, 111),
(2, 22, 222),
(3, 33, 333),
@@ -81,11 +81,11 @@ class MatrixTesting(unittest.TestCase):
self.assertIn(item, indices)
def test_matrix_to_3x3(self):
#mat =
#[ 1 2 3 4 ]
#[ 2 4 6 8 ]
#[ 3 6 9 12 ]
#[ 4 8 12 16 ]
# mat =
# [ 1 2 3 4 ]
# [ 2 4 6 8 ]
# [ 3 6 9 12 ]
# [ 4 8 12 16 ]
mat = Matrix(tuple((i, 2 * i, 3 * i, 4 * i) for i in range(1, 5)))
mat_correct = Matrix(((1, 2, 3), (2, 4, 6), (3, 6, 9)))
self.assertEqual(mat.to_3x3(), mat_correct)
@@ -372,7 +372,6 @@ class KDTreeTesting(unittest.TestCase):
ret_filter = k_all.find(co, lambda i: (i % 2) == 0)
self.assertAlmostEqualVector(ret_regular, ret_filter)
# filter out all values (search odd tree for even values and the reverse)
co = (0,) * 3
ret_filter = k_odd.find(co, lambda i: (i % 2) == 0)
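The find() calls above pass a filter callback to mathutils' KD-tree; a minimal standalone usage sketch of that API (runnable wherever the mathutils module can be imported) is:

# Minimal mathutils.kdtree usage sketch, mirroring the filtered find() above.
from mathutils import kdtree

points = [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (2.0, 0.0, 0.0)]

tree = kdtree.KDTree(len(points))
for i, co in enumerate(points):
    tree.insert(co, i)
tree.balance()

# Nearest point to the query, considering only even indices.
co, index, dist = tree.find((0.1, 0.0, 0.0), lambda i: (i % 2) == 0)
print(co, index, dist)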

View File

@@ -8,7 +8,7 @@ DUMMY_NAME = "Untitled"
DUMMY_PATH = __file__
GLOBALS = {
"error_num": 0,
}
}
def as_float_32(f):
@@ -142,5 +142,6 @@ def main():
print("Error (total): %d" % GLOBALS["error_num"])
if __name__ == "__main__":
main()

View File

@@ -155,5 +155,6 @@ def main():
test_language_coverage()
test_urls()
if __name__ == "__main__":
main()

View File

@@ -36,7 +36,7 @@ RANDOM_MULTIPLY = 10
STATE = {
"counter": 0,
}
}
op_blacklist = (
@@ -91,7 +91,7 @@ op_blacklist = (
"wm.keymap_restore", # another annoying one
"wm.addon_*", # harmless, but dont change state
"console.*", # just annoying - but harmless
)
)
def blend_list(mainpath):
@@ -114,6 +114,7 @@ def blend_list(mainpath):
return list(sorted(file_list(mainpath, is_blend)))
if USE_FILES:
USE_FILES_LS = blend_list(USE_FILES)
# print(USE_FILES_LS)
@@ -176,7 +177,7 @@ if USE_ATTRSET:
CLS_BLACKLIST = (
bpy.types.BrushTextureSlot,
bpy.types.Brush,
)
)
property_typemap = build_property_typemap(CLS_BLACKLIST)
bpy_struct_type = bpy.types.Struct.__base__
@@ -228,7 +229,7 @@ if USE_ATTRSET:
{0: "", 1: "hello", 2: "test"}, {"": 0, "hello": 1, "test": 2},
set(), {"", "test", "."}, {None, ..., type},
range(10), (" " * i for i in range(10)),
)
)
def attrset_data():
for attr in dir(bpy.data):
@@ -480,8 +481,9 @@ def main():
print("Finished %r" % __file__)
if __name__ == "__main__":
#~ for i in range(200):
#~ RANDOM_SEED[0] += 1
# ~ for i in range(200):
# ~ RANDOM_SEED[0] += 1
#~ main()
main()

View File

@@ -31,6 +31,7 @@ import difflib
import pathlib
from pathlib import Path
def with_tempdir(wrapped):
"""Creates a temporary directory for the function, cleaning up after it returns normally.
@@ -56,8 +57,10 @@ def with_tempdir(wrapped):
return decorator
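Only the docstring of with_tempdir is visible in this hunk; a minimal sketch of a decorator with the documented behaviour (hand a fresh temporary directory to the wrapped test as its last positional argument, remove it again on normal return) could look like:

# Sketch of a with_tempdir-style decorator matching the docstring above;
# details of the real helper in modules/test_utils may differ.
import functools
import pathlib
import shutil
import tempfile

def with_tempdir(wrapped):
    @functools.wraps(wrapped)
    def decorator(*args, **kwargs):
        dirpath = pathlib.Path(tempfile.mkdtemp(prefix='blender-test-'))
        try:
            retval = wrapped(*args, dirpath, **kwargs)
        except Exception:
            # Leave the directory in place so a failing test can be inspected.
            print('Exception in %s, not cleaning up %s' % (wrapped.__name__, dirpath))
            raise
        else:
            shutil.rmtree(str(dirpath))
        return retval
    return decorator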
LINE = "+----------------------------------------------------------------"
class AbstractColladaTest(unittest.TestCase):
@classmethod
@@ -71,33 +74,33 @@ class AbstractColladaTest(unittest.TestCase):
ref = open(reference)
exp = open(export)
diff=difflib.unified_diff(ref.readlines(), exp.readlines(), lineterm='', n=0)
diff = difflib.unified_diff(ref.readlines(), exp.readlines(), lineterm='', n=0)
ref.close()
exp.close()
diff_count = 0;
diff_count = 0
for line in diff:
error = True
for prefix in ('---', '+++', '@@'):
# Ignore diff metadata
if line.startswith(prefix):
error=False
error = False
break
else:
# Ignore time stamps
for ignore in ('<created>', '<modified>', '<authoring_tool>'):
if line[1:].strip().startswith(ignore):
error=False
error = False
break
if error:
diff_count +=1
diff_count += 1
pline = line.strip()
if diff_count == 1:
print("\n%s" % LINE)
print("|Test has errors:")
print(LINE)
pre = "reference" if pline[0] == "-" else "generated"
print ("| %s:%s"% (pre, pline[1:]))
print("| %s:%s" % (pre, pline[1:]))
if diff_count > 0:
print(LINE)
@@ -107,14 +110,16 @@ class AbstractColladaTest(unittest.TestCase):
return diff_count == 0
class MeshExportTest4(AbstractColladaTest):
@with_tempdir
def test_export_animation_suzannes_sample_matrix(self, tempdir: pathlib.Path):
test = "suzannes_parent_inverse_sample_10_matrix"
reference_dae = self.testdir / Path("%s.dae" % test)
outfile = tempdir / Path("%s_out.dae" % test)
outfile = tempdir / Path("%s_out.dae" % test)
bpy.ops.wm.collada_export(filepath="%s" % str(outfile),
bpy.ops.wm.collada_export(
filepath="%s" % str(outfile),
check_existing=True,
filemode=8,
display_type='DEFAULT',
@@ -142,20 +147,23 @@ class MeshExportTest4(AbstractColladaTest):
export_texture_type_selection='mat',
open_sim=False,
limit_precision=True,
keep_bind_info=False)
keep_bind_info=False,
)
# Now check the resulting Collada file.
if not self.checkdae(reference_dae, outfile):
self.fail()
class MeshExportTest3(AbstractColladaTest):
@with_tempdir
def test_export_animation_suzannes_sample_locrotscale(self, tempdir: pathlib.Path):
test = "suzannes_parent_inverse_sample_10_channels"
reference_dae = self.testdir / Path("%s.dae" % test)
outfile = tempdir / Path("%s_out.dae" % test)
outfile = tempdir / Path("%s_out.dae" % test)
bpy.ops.wm.collada_export(filepath="%s" % str(outfile),
bpy.ops.wm.collada_export(
filepath="%s" % str(outfile),
check_existing=True,
filemode=8,
display_type='DEFAULT',
@@ -183,20 +191,23 @@ class MeshExportTest3(AbstractColladaTest):
export_texture_type_selection='mat',
open_sim=False,
limit_precision=True,
keep_bind_info=False)
keep_bind_info=False,
)
# Now check the resulting Collada file.
if not self.checkdae(reference_dae, outfile):
self.fail()
class MeshExportTest2(AbstractColladaTest):
@with_tempdir
def test_export_animation_suzannes_keyframe_matrix(self, tempdir: pathlib.Path):
test = "suzannes_parent_inverse_keyframes_matrix"
reference_dae = self.testdir / Path("%s.dae" % test)
outfile = tempdir / Path("%s_out.dae" % test)
outfile = tempdir / Path("%s_out.dae" % test)
bpy.ops.wm.collada_export(filepath="%s" % str(outfile),
bpy.ops.wm.collada_export(
filepath="%s" % str(outfile),
check_existing=True,
filemode=8,
display_type='DEFAULT',
@@ -224,20 +235,23 @@ class MeshExportTest2(AbstractColladaTest):
export_texture_type_selection='mat',
open_sim=False,
limit_precision=True,
keep_bind_info=False)
keep_bind_info=False,
)
# Now check the resulting Collada file.
if not self.checkdae(reference_dae, outfile):
self.fail()
class MeshExportTest1(AbstractColladaTest):
@with_tempdir
def test_export_animation_suzannes_keyframe_locrotscale(self, tempdir: pathlib.Path):
test = "suzannes_parent_inverse_keyframes_channels"
reference_dae = self.testdir / Path("%s.dae" % test)
outfile = tempdir / Path("%s_out.dae" % test)
outfile = tempdir / Path("%s_out.dae" % test)
bpy.ops.wm.collada_export(filepath="%s" % str(outfile),
bpy.ops.wm.collada_export(
filepath="%s" % str(outfile),
check_existing=True,
filemode=8,
display_type='DEFAULT',
@@ -265,7 +279,8 @@ class MeshExportTest1(AbstractColladaTest):
export_texture_type_selection='mat',
open_sim=False,
limit_precision=True,
keep_bind_info=False)
keep_bind_info=False,
)
# Now check the resulting Collada file.
if not self.checkdae(reference_dae, outfile):
@@ -277,4 +292,4 @@ if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--testdir', required=True)
args, remaining = parser.parse_known_args()
unittest.main(argv=sys.argv[0:1]+remaining)
unittest.main(argv=sys.argv[0:1] + remaining)

View File

@@ -31,6 +31,7 @@ import difflib
import pathlib
from pathlib import Path
def with_tempdir(wrapped):
"""Creates a temporary directory for the function, cleaning up after it returns normally.
@@ -56,8 +57,10 @@ def with_tempdir(wrapped):
return decorator
LINE = "+----------------------------------------------------------------"
class AbstractColladaTest(unittest.TestCase):
@classmethod
@@ -71,33 +74,33 @@ class AbstractColladaTest(unittest.TestCase):
ref = open(reference)
exp = open(export)
diff=difflib.unified_diff(ref.readlines(), exp.readlines(), lineterm='', n=0)
diff = difflib.unified_diff(ref.readlines(), exp.readlines(), lineterm='', n=0)
ref.close()
exp.close()
diff_count = 0;
diff_count = 0
for line in diff:
error = True
for prefix in ('---', '+++', '@@'):
# Ignore diff metadata
if line.startswith(prefix):
error=False
error = False
break
else:
# Ignore time stamps
for ignore in ('<created>', '<modified>', '<authoring_tool>'):
if line[1:].strip().startswith(ignore):
error=False
error = False
break
if error:
diff_count +=1
diff_count += 1
pline = line.strip()
if diff_count == 1:
print("\n%s" % LINE)
print("|Test has errors:")
print(LINE)
pre = "reference" if pline[0] == "-" else "generated"
print ("| %s:%s"% (pre, pline[1:]))
print("| %s:%s" % (pre, pline[1:]))
if diff_count > 0:
print(LINE)
@@ -107,14 +110,16 @@ class AbstractColladaTest(unittest.TestCase):
return diff_count == 0
class MeshExportTest(AbstractColladaTest):
@with_tempdir
def test_export_single_mesh(self, tempdir: pathlib.Path):
test = "mesh_simple_001"
reference_dae = self.testdir / Path("%s.dae" % test)
outfile = tempdir / Path("%s_out.dae" % test)
outfile = tempdir / Path("%s_out.dae" % test)
bpy.ops.wm.collada_export(filepath="%s" % str(outfile),
bpy.ops.wm.collada_export(
filepath="%s" % str(outfile),
check_existing=True,
filemode=8,
display_type="DEFAULT",
@@ -140,15 +145,17 @@ class MeshExportTest(AbstractColladaTest):
export_texture_type_selection="mat",
open_sim=False,
limit_precision=False,
keep_bind_info=False)
keep_bind_info=False,
)
# Now check the resulting Collada file.
if not self.checkdae(reference_dae, outfile):
self.fail()
if __name__ == '__main__':
sys.argv = [__file__] + (sys.argv[sys.argv.index("--") + 1:] if "--" in sys.argv else [])
parser = argparse.ArgumentParser()
parser.add_argument('--testdir', required=True)
args, remaining = parser.parse_known_args()
unittest.main(argv=sys.argv[0:1]+remaining)
unittest.main(argv=sys.argv[0:1] + remaining)

View File

@@ -23,6 +23,7 @@ class COLORS_DUMMY:
GREEN = ''
ENDC = ''
COLORS = COLORS_DUMMY
@@ -55,10 +56,12 @@ def blend_list(dirpath):
filepath = os.path.join(dirpath, filename)
yield filepath
def test_get_name(filepath):
filename = os.path.basename(filepath)
return os.path.splitext(filename)[0]
def test_get_images(output_dir, filepath, reference_dir):
testname = test_get_name(filepath)
dirpath = os.path.dirname(filepath)
@@ -99,7 +102,7 @@ class Report:
'passed_tests',
'compare_tests',
'compare_engines'
)
)
def __init__(self, title, output_dir, idiff):
self.title = title
@@ -158,7 +161,7 @@ class Report:
filepath = os.path.join(outdir, "compare.data")
pathlib.Path(filepath).write_text(self.compare_tests)
def _write_html(self, comparison = False):
def _write_html(self, comparison=False):
# Gather intermediate data for all tests.
if comparison:
failed_data = []
@@ -307,7 +310,6 @@ class Report:
self.compare_tests += test_html
def _diff_output(self, filepath, tmp_filepath):
old_img, ref_img, new_img, diff_img = test_get_images(self.output_dir, filepath, self.reference_dir)
@@ -330,7 +332,7 @@ class Report:
"-failpercent", "1",
ref_img,
tmp_filepath,
)
)
try:
subprocess.check_output(command)
failed = False
@@ -357,7 +359,7 @@ class Report:
"-abs", "-scale", "16",
ref_img,
tmp_filepath
)
)
try:
subprocess.check_output(command)
@@ -367,7 +369,6 @@ class Report:
return not failed
def _run_test(self, filepath, render_cb):
testname = test_get_name(filepath)
print_message(testname, 'SUCCESS', 'RUN')
@@ -394,7 +395,7 @@ class Report:
return error
elif error == "NO_START":
print_message('Can not perform tests because blender fails to start.',
'Make sure INSTALL target was run.')
'Make sure INSTALL target was run.')
return error
elif error == 'VERIFY':
print_message("Rendered result is different from reference image")
@@ -404,7 +405,6 @@ class Report:
'FAILURE', 'FAILED')
return error
def _run_all_tests(self, dirname, dirpath, render_cb):
passed_tests = []
failed_tests = []
@@ -437,8 +437,8 @@ class Report:
'SUCCESS', 'PASSED')
if failed_tests:
print_message("{} tests, listed below:" .
format(len(failed_tests)),
'FAILURE', 'FAILED')
format(len(failed_tests)),
'FAILURE', 'FAILED')
failed_tests.sort()
for test in failed_tests:
print_message("{}" . format(test), 'FAILURE', "FAILED")

View File

@@ -62,7 +62,6 @@ class AbstractBlenderRunnerTest(unittest.TestCase):
blender: pathlib.Path = None
testdir: pathlib.Path = None
def run_blender(self, filepath: str, python_script: str, timeout: int=300) -> str:
"""Runs Blender by opening a blendfile and executing a script.
@@ -92,7 +91,7 @@ class AbstractBlenderRunnerTest(unittest.TestCase):
'-E', 'CYCLES',
'--python-exit-code', '47',
'--python-expr', python_script,
]
]
)
proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,

View File

@@ -8,6 +8,7 @@ import shutil
import subprocess
import sys
def screenshot():
import bpy
@@ -19,6 +20,7 @@ def screenshot():
bpy.ops.wm.quit_blender()
# When run from inside Blender, take screenshot and exit.
try:
import bpy
@@ -93,5 +95,6 @@ def main():
sys.exit(not ok)
if __name__ == "__main__":
main()

View File

@@ -93,7 +93,7 @@ def check_files_flake8(files):
# times types are compared,
# I rather keep them specific
"E721",
)
)
for f, pep8_type in files:
@@ -129,10 +129,10 @@ def check_files_pylint(files):
"C0413," # import should be placed at the top
"W0613," # unused argument, may add this back
# but happens a lot for 'context' for eg.
"W0232," # class has no __init__, Operator/Panel/Menu etc
"W0232," # class has no __init__, Operator/Panel/Menu etc
"W0142," # Used * or ** magic
# even needed in some cases
"R0902," # Too many instance attributes
"R0902," # Too many instance attributes
"R0903," # Too many statements
"R0911," # Too many return statements
"R0912," # Too many branches
@@ -204,6 +204,5 @@ def main():
print("Skipping pylint checks (command not found)")
if __name__ == "__main__":
main()

View File

@@ -3,7 +3,7 @@
import unittest
import random
test= bpy.data.test
test = bpy.data.test
# farr - 1-dimensional array of float
# fdarr - dynamic 1-dimensional array of float
@@ -12,6 +12,7 @@ test= bpy.data.test
# same as above for other types except that the first letter is "i" for int and "b" for bool
class TestArray(unittest.TestCase):
# test that assignment works by: assign -> test value
# - rvalue = list of float
@@ -20,14 +21,14 @@ class TestArray(unittest.TestCase):
# bpy.data.test.farr[3], iarr[3], barr[...], fmarr, imarr, bmarr
def setUp(self):
test.farr= (1.0, 2.0, 3.0)
test.iarr= (7, 8, 9)
test.barr= (False, True, False)
test.farr = (1.0, 2.0, 3.0)
test.iarr = (7, 8, 9)
test.barr = (False, True, False)
# test access
# test slice access, negative indices
def test_access(self):
rvals= ([1.0, 2.0, 3.0], [7, 8, 9], [False, True, False])
rvals = ([1.0, 2.0, 3.0], [7, 8, 9], [False, True, False])
for arr, rval in zip((test.farr, test.iarr, test.barr), rvals):
self.assertEqual(prop_to_list(arr), rval)
self.assertEqual(arr[0:3], rval)
@@ -39,12 +40,12 @@ class TestArray(unittest.TestCase):
# fail when index out of bounds
def test_access_fail(self):
for arr in (test.farr, test.iarr, test.barr):
self.assertRaises(IndexError, lambda : arr[4])
self.assertRaises(IndexError, lambda: arr[4])
# test assignment of a whole array
def test_assign_array(self):
# should accept int as float
test.farr= (1, 2, 3)
test.farr = (1, 2, 3)
# fail when: unexpected no. of items, invalid item type
def test_assign_array_fail(self):
@@ -55,20 +56,20 @@ class TestArray(unittest.TestCase):
self.assertRaises(ValueError, assign_empty_list, arr)
def assign_invalid_float():
test.farr= (1.0, 2.0, "3.0")
test.farr = (1.0, 2.0, "3.0")
def assign_invalid_int():
test.iarr= ("1", 2, 3)
test.iarr = ("1", 2, 3)
def assign_invalid_bool():
test.barr= (True, 0.123, False)
test.barr = (True, 0.123, False)
for func in [assign_invalid_float, assign_invalid_int, assign_invalid_bool]:
self.assertRaises(TypeError, func)
# shouldn't accept float as int
def assign_float_as_int():
test.iarr= (1, 2, 3.0)
test.iarr = (1, 2, 3.0)
self.assertRaises(TypeError, assign_float_as_int)
# non-dynamic arrays cannot change size
@@ -81,14 +82,14 @@ class TestArray(unittest.TestCase):
def test_assign_item(self):
for arr, rand_func in zip((test.farr, test.iarr, test.barr), (rand_float, rand_int, rand_bool)):
for i in range(len(arr)):
val= rand_func()
val = rand_func()
arr[i] = val
self.assertEqual(arr[i], val)
# float prop should accept also int
for i in range(len(test.farr)):
val= rand_int()
val = rand_int()
test.farr[i] = val
self.assertEqual(test.farr[i], float(val))
@@ -112,7 +113,7 @@ class TestArray(unittest.TestCase):
# test various lengths here
for arr, rand_func in zip(("fdarr", "idarr", "bdarr"), (rand_float, rand_int, rand_bool)):
for length in range(1, 64):
rval= make_random_array(length, rand_func)
rval = make_random_array(length, rand_func)
setattr(test, arr, rval)
self.assertEqual(prop_to_list(getattr(test, arr)), rval)
@@ -136,7 +137,7 @@ class TestMArray(unittest.TestCase):
def test_assign_array(self):
for arr, func in zip(("fmarr", "imarr", "bmarr"), (rand_float, rand_int, rand_bool)):
# assignment of [3][4][5]
rval= make_random_3d_array((3, 4, 5), func)
rval = make_random_3d_array((3, 4, 5), func)
setattr(test, arr, rval)
self.assertEqual(prop_to_list(getattr(test, arr)), rval)
@@ -144,7 +145,7 @@ class TestMArray(unittest.TestCase):
def test_assign_array_fail(self):
def assign_empty_array():
test.fmarr= ()
test.fmarr = ()
self.assertRaises(ValueError, assign_empty_array)
def assign_invalid_size(arr, rval):
@@ -152,19 +153,19 @@ class TestMArray(unittest.TestCase):
# assignment of 3,4,4 or 3,3,5 should raise ex
for arr, func in zip(("fmarr", "imarr", "bmarr"), (rand_float, rand_int, rand_bool)):
rval= make_random_3d_array((3, 4, 4), func)
rval = make_random_3d_array((3, 4, 4), func)
self.assertRaises(ValueError, assign_invalid_size, arr, rval)
rval= make_random_3d_array((3, 3, 5), func)
rval = make_random_3d_array((3, 3, 5), func)
self.assertRaises(ValueError, assign_invalid_size, arr, rval)
rval= make_random_3d_array((3, 3, 3), func)
rval = make_random_3d_array((3, 3, 3), func)
self.assertRaises(ValueError, assign_invalid_size, arr, rval)
def test_assign_item(self):
# arr[i] = x
for arr, func in zip(("fmarr", "imarr", "bmarr", "fdmarr", "idmarr", "bdmarr"), (rand_float, rand_int, rand_bool) * 2):
rval= make_random_2d_array((4, 5), func)
rval = make_random_2d_array((4, 5), func)
for i in range(3):
getattr(test, arr)[i] = rval
@@ -173,23 +174,22 @@ class TestMArray(unittest.TestCase):
# arr[i][j] = x
for arr, func in zip(("fmarr", "imarr", "bmarr", "fdmarr", "idmarr", "bdmarr"), (rand_float, rand_int, rand_bool) * 2):
arr= getattr(test, arr)
rval= make_random_array(5, func)
arr = getattr(test, arr)
rval = make_random_array(5, func)
for i in range(3):
for j in range(4):
arr[i][j] = rval
self.assertEqual(prop_to_list(arr[i][j]), rval)
def test_assign_item_fail(self):
def assign_wrong_size(arr, i, rval):
getattr(test, arr)[i] = rval
# assign wrong size at level 2
for arr, func in zip(("fmarr", "imarr", "bmarr"), (rand_float, rand_int, rand_bool)):
rval1= make_random_2d_array((3, 5), func)
rval2= make_random_2d_array((4, 3), func)
rval1 = make_random_2d_array((3, 5), func)
rval2 = make_random_2d_array((4, 3), func)
for i in range(3):
self.assertRaises(ValueError, assign_wrong_size, arr, i, rval1)
@@ -198,22 +198,22 @@ class TestMArray(unittest.TestCase):
def test_dynamic_assign_array(self):
for arr, func in zip(("fdmarr", "idmarr", "bdmarr"), (rand_float, rand_int, rand_bool)):
# assignment of [3][4][5]
rval= make_random_3d_array((3, 4, 5), func)
rval = make_random_3d_array((3, 4, 5), func)
setattr(test, arr, rval)
self.assertEqual(prop_to_list(getattr(test, arr)), rval)
# [2][4][5]
rval= make_random_3d_array((2, 4, 5), func)
rval = make_random_3d_array((2, 4, 5), func)
setattr(test, arr, rval)
self.assertEqual(prop_to_list(getattr(test, arr)), rval)
# [1][4][5]
rval= make_random_3d_array((1, 4, 5), func)
rval = make_random_3d_array((1, 4, 5), func)
setattr(test, arr, rval)
self.assertEqual(prop_to_list(getattr(test, arr)), rval)
# test access
def test_access(self):
pass
@@ -221,26 +221,32 @@ class TestMArray(unittest.TestCase):
def test_access_fail(self):
pass
random.seed()
def rand_int():
return random.randint(-1000, 1000)
def rand_float():
return float(rand_int())
def rand_bool():
return bool(random.randint(0, 1))
def make_random_array(len, rand_func):
arr= []
arr = []
for i in range(len):
arr.append(rand_func())
return arr
def make_random_2d_array(dimsize, rand_func):
marr= []
marr = []
for i in range(dimsize[0]):
marr.append([])
@@ -249,8 +255,9 @@ def make_random_2d_array(dimsize, rand_func):
return marr
def make_random_3d_array(dimsize, rand_func):
marr= []
marr = []
for i in range(dimsize[0]):
marr.append([])
@@ -262,8 +269,9 @@ def make_random_3d_array(dimsize, rand_func):
return marr
def prop_to_list(prop):
ret= []
ret = []
for x in prop:
if type(x) not in {bool, int, float}:
@@ -273,8 +281,10 @@ def prop_to_list(prop):
return ret
def suite():
return unittest.TestSuite([unittest.TestLoader().loadTestsFromTestCase(TestArray), unittest.TestLoader().loadTestsFromTestCase(TestMArray)])
if __name__ == "__main__":
unittest.TextTestRunner(verbosity=2).run(suite())