correct spelling errors in comments

2011-10-17 06:58:07 +00:00
parent 61389bba41
commit fda2045150
39 changed files with 181 additions and 182 deletions

View File

@@ -209,7 +209,7 @@ def enable(module_name, default_set=True):
:arg module_name: The name of the addon and module.
:type module_name: string
:return: the loaded module or None on failier.
:return: the loaded module or None on failure.
:rtype: module
"""
@@ -262,7 +262,7 @@ def enable(module_name, default_set=True):
# * OK loaded successfully! *
if default_set:
# just incase its enabled alredy
# just in case its enabled already
ext = _bpy.context.user_preferences.addons.get(module_name)
if not ext:
ext = _bpy.context.user_preferences.addons.new()
@@ -286,7 +286,7 @@ def disable(module_name, default_set=True):
import sys
mod = sys.modules.get(module_name)
# possible this addon is from a previous session and didnt load a
# possible this addon is from a previous session and didn't load a
# module this time. So even if the module is not found, still disable
# the addon in the user prefs.
if mod:
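
For context, a minimal usage sketch of these two helpers as called from Blender's Python console (bpy 2.5x-era API assumed; the addon name is only an example):

    import addon_utils

    mod = addon_utils.enable("io_mesh_stl")   # returns the loaded module, or None on failure
    if mod is not None:
        # unload again without touching the saved user preferences
        addon_utils.disable("io_mesh_stl", default_set=False)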

View File

@@ -73,7 +73,7 @@ class DataPathBuilder(object):
try:
print("base." + item_new)
base_new = eval("base." + item_new)
break # found, dont keep looking
break # found, don't keep looking
except:
pass

View File

@@ -69,7 +69,7 @@ def read_blend_rend_chunk(path):
struct.unpack('>i' if is_big_endian else '<i', blendfile.read(4))[0]
sizeof_bhead_left -= 4
# We dont care about the rest of the bhead struct
# We don't care about the rest of the bhead struct
blendfile.read(sizeof_bhead_left)
# Now we want the scene name, start and end frame. this is 32bites long
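
The endianness-aware unpack above is plain struct usage; a self-contained sketch of the same pattern (made-up bytes, not a real BHEAD):

    import struct

    data = b'\x00\x00\x00\x10'                 # four bytes holding the value 16
    is_big_endian = True
    value = struct.unpack('>i' if is_big_endian else '<i', data)[0]
    print(value)                               # 16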

View File

@@ -56,7 +56,7 @@ def main():
# from bpy.types import Panel
sys.modules["bpy.types"] = types
# if "-d" in sys.argv: # Enable this to measure startup speed
#~ if "-d" in sys.argv: # Enable this to measure start up speed
if 0:
import cProfile
cProfile.run('import bpy; bpy.utils.load_scripts()', 'blender.prof')
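
The disabled block above profiles script loading; a hedged sketch of the same cProfile pattern outside Blender, profiling an arbitrary statement instead of bpy:

    import cProfile
    import pstats

    cProfile.run('sum(i * i for i in range(100000))', 'example.prof')
    pstats.Stats('example.prof').sort_stats('cumulative').print_stats(5)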

View File

@@ -138,7 +138,7 @@ class BPyOpsSubModOp(object):
@staticmethod
def _scene_update(context):
scene = context.scene
if scene: # None in backgroud mode
if scene: # None in background mode
scene.update()
else:
import bpy

View File

@@ -142,9 +142,9 @@ def load_scripts(reload_scripts=False, refresh_scripts=False):
if reload_scripts:
_bpy_types.TypeMap.clear()
# just unload, dont change user defaults, this means we can sync
# just unload, don't change user defaults, this means we can sync
# to reload. note that they will only actually reload of the
# modification time changes. This `wont` work for packages so...
# modification time changes. This `won't` work for packages so...
# its not perfect.
for module_name in [ext.module for ext in prefs.addons]:
_addon_utils.disable(module_name, default_set=False)
@@ -220,7 +220,7 @@ def load_scripts(reload_scripts=False, refresh_scripts=False):
if _os.path.isdir(path):
_sys_path_ensure(path)
# only add this to sys.modules, dont run
# only add this to sys.modules, don't run
if path_subdir == "modules":
continue
@@ -374,7 +374,7 @@ def smpte_from_seconds(time, fps=None):
if time >= 3600.0: # hours
hours = int(time / 3600.0)
time = time % 3600.0
if time >= 60.0: # mins
if time >= 60.0: # minutes
minutes = int(time / 60.0)
time = time % 60.0
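
The hours/minutes split above is straightforward arithmetic; a pure-Python restatement (not the exact bpy.utils implementation, the frame rounding is an assumption):

    def smpte_sketch(time, fps=24):
        hours = minutes = 0
        if time >= 3600.0:                     # hours
            hours = int(time // 3600.0)
            time %= 3600.0
        if time >= 60.0:                       # minutes
            minutes = int(time // 60.0)
            time %= 60.0
        frames = int(round((time - int(time)) * fps))
        return "%02d:%02d:%02d:%02d" % (hours, minutes, int(time), frames)

    print(smpte_sketch(3723.5))                # 01:02:03:12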

View File

@@ -43,10 +43,10 @@ def load_image(imagepath,
the end will be ignored.
:type dirname: string
:arg place_holder: if True a new place holder image will be created.
this is usefull so later you can relink the image to its original data.
this is useful so later you can relink the image to its original data.
:type place_holder: bool
:arg recursive: If True, directories will be recursivly searched.
Be carefull with this if you have files in your root directory because
:arg recursive: If True, directories will be recursively searched.
Be careful with this if you have files in your root directory because
it may take a long time.
:type recursive: bool
:arg ncase_cmp: on non windows systems, find the correct case for the file.
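
A hedged usage sketch of load_image with the arguments documented above (runs inside Blender; the paths are hypothetical):

    from bpy_extras.image_utils import load_image

    image = load_image("wood.png", dirname="//textures",
                       place_holder=True,      # make a placeholder if the file is missing
                       recursive=False,        # True also searches sub-directories (can be slow)
                       ncase_cmp=True)         # case-insensitive lookup on non-Windows systems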

View File

@@ -114,7 +114,7 @@ class ImportHelper:
# Axis conversion function, not pretty LUT
# use lookup tabes to convert between any axis
# use lookup table to convert between any axis
_axis_convert_matrix = (
((-1.0, 0.0, 0.0), (0.0, -1.0, 0.0), (0.0, 0.0, 1.0)),
((-1.0, 0.0, 0.0), (0.0, 0.0, -1.0), (0.0, -1.0, 0.0)),
@@ -302,7 +302,7 @@ def free_derived_objects(ob):
def unpack_list(list_of_tuples):
flat_list = []
flat_list_extend = flat_list.extend # a tich faster
flat_list_extend = flat_list.extend # a tiny bit faster
for t in list_of_tuples:
flat_list_extend(t)
return flat_list
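
unpack_list simply flattens index tuples; a tiny pure-Python illustration of the same behavior:

    list_of_tuples = [(0, 1, 2), (2, 3, 0)]
    flat_list = []
    flat_list_extend = flat_list.extend        # bound method, a tiny bit faster
    for t in list_of_tuples:
        flat_list_extend(t)
    print(flat_list)                           # [0, 1, 2, 2, 3, 0]
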
@@ -318,7 +318,7 @@ def unpack_face_list(list_of_tuples):
if len(t) == 3:
if t[2] == 0:
t = t[1], t[2], t[0]
else: # assuem quad
else: # assume quad
if t[3] == 0 or t[2] == 0:
t = t[2], t[3], t[0], t[1]
@@ -371,7 +371,7 @@ def path_reference(filepath,
:arg copy_subdir: the subdirectory of *base_dst* to use when mode='COPY'.
:type copy_subdir: string
:arg copy_set: collect from/to pairs when mode='COPY',
pass to *path_reference_copy* when exportign is done.
pass to *path_reference_copy* when exporting is done.
:type copy_set: set
:arg library: The library this path is relative to.
:type library: :class:`bpy.types.Library` or None
@@ -450,7 +450,7 @@ def unique_name(key, name, name_dict, name_max=-1, clean_func=None, sep="."):
:arg key: unique item this name belongs to, name_dict[key] will be reused
when available.
This can be the object, mesh, material, etc instance its self.
:type key: any hashable object assosiated with the *name*.
:type key: any hashable object associated with the *name*.
:arg name: The name used to create a unique value in *name_dict*.
:type name: string
:arg name_dict: This is used to cache namespace to ensure no collisions

View File

@@ -21,7 +21,7 @@
KM_HIERARCHY = [
('Window', 'EMPTY', 'WINDOW', []), # file save, window change, exit
('Screen', 'EMPTY', 'WINDOW', [ # full screen, undo, screenshot
('Screen Editing', 'EMPTY', 'WINDOW', []), # resizing, action corners
('Screen Editing', 'EMPTY', 'WINDOW', []), # re-sizing, action corners
]),
('View2D', 'EMPTY', 'WINDOW', []), # view 2d navigation (per region)
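
KM_HIERARCHY is a nested (name, space_type, region_type, children) structure; a small sketch that walks such a tree and prints the keymap names (only the entries shown above are reproduced, the full table is longer):

    KM_SKETCH = [
        ('Window', 'EMPTY', 'WINDOW', []),
        ('Screen', 'EMPTY', 'WINDOW', [
            ('Screen Editing', 'EMPTY', 'WINDOW', []),
        ]),
        ('View2D', 'EMPTY', 'WINDOW', []),
    ]

    def walk(entries, depth=0):
        for name, space_type, region_type, children in entries:
            print("  " * depth + name)
            walk(children, depth + 1)

    walk(KM_SKETCH)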

View File

@@ -50,7 +50,7 @@ def mesh_linked_faces(mesh):
face_groups = [[f] for f in mesh.faces]
face_mapping = list(range(len(mesh.faces))) # map old, new face location
# Now clump faces iterativly
# Now clump faces iteratively
ok = True
while ok:
ok = False

View File

@@ -119,7 +119,7 @@ def object_data_add(context, obdata, operator=None):
obj_act = scene.objects.active
# XXX
# caused because entering editmodedoes not add a empty undo slot!
# caused because entering edit-mode does not add a empty undo slot!
if context.user_preferences.edit.use_enter_edit_mode:
if not (obj_act and
obj_act.mode == 'EDIT' and

View File

@@ -27,7 +27,7 @@ __all__ = (
def region_2d_to_vector_3d(region, rv3d, coord):
"""
Return a direction vector from the viewport at the spesific 2d region
Return a direction vector from the viewport at the specific 2d region
coordinate.
:arg region: region of the 3D viewport, typically bpy.context.region.
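
A hedged sketch of calling this helper from a running Blender session with an active 3D viewport (the context attributes are assumptions about where the call is made):

    import bpy
    from bpy_extras import view3d_utils

    region = bpy.context.region                # the 3D viewport region
    rv3d = bpy.context.region_data             # its RegionView3D
    coord = (region.width / 2.0, region.height / 2.0)   # 2D point, here the region center
    view_vector = view3d_utils.region_2d_to_vector_3d(region, rv3d, coord)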

View File

@@ -220,9 +220,9 @@ class _GenericBone:
@property
def children_recursive_basename(self):
"""
Returns a chain of children with the same base name as this bone
Only direct chains are supported, forks caused by multiple children with matching basenames will
terminate the function and not be returned.
Returns a chain of children with the same base name as this bone.
Only direct chains are supported, forks caused by multiple children
with matching base names will terminate the function and not be returned.
"""
basename = self.basename
chain = []
@@ -284,11 +284,11 @@ class EditBone(StructRNA, _GenericBone, metaclass=StructMetaPropGroup):
def transform(self, matrix, scale=True, roll=True):
"""
Transform the the bones head, tail, roll and envalope (when the matrix has a scale component).
Transform the the bones head, tail, roll and envelope (when the matrix has a scale component).
:arg matrix: 3x3 or 4x4 transformation matrix.
:type matrix: :class:`mathutils.Matrix`
:arg scale: Scale the bone envalope by the matrix.
:arg scale: Scale the bone envelope by the matrix.
:type scale: bool
:arg roll: Correct the roll to point in the same relative direction to the head and tail.
:type roll: bool
@@ -318,7 +318,7 @@ class Mesh(bpy_types.ID):
def from_pydata(self, vertices, edges, faces):
"""
Make a mesh from a list of verts/edges/faces
Make a mesh from a list of vertices/edges/faces
Until we have a nicer way to make geometry, use this.
:arg vertices: float triplets each representing (X, Y, Z) eg: [(0.0, 1.0, 0.5), ...].
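
A minimal sketch of from_pydata building one quad (2.5x/2.6x-era API assumed for linking the object into the scene):

    import bpy

    verts = [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (1.0, 1.0, 0.0), (0.0, 1.0, 0.0)]
    faces = [(0, 1, 2, 3)]

    me = bpy.data.meshes.new("QuadMesh")
    me.from_pydata(verts, [], faces)           # vertices, edges, faces
    me.update()

    ob = bpy.data.objects.new("Quad", me)
    bpy.context.scene.objects.link(ob)
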
@@ -553,7 +553,7 @@ class _GenericUI:
operator_context_default = self.layout.operator_context
for func in draw_ls._draw_funcs:
# so bad menu functions dont stop the entire menu from drawing.
# so bad menu functions don't stop the entire menu from drawing
try:
func(self, context)
except:

View File

@@ -65,7 +65,7 @@ def get_console(console_id):
consoles = get_console.consoles = {}
get_console.consoles_namespace_hash = hash_next
else:
# check if clearning the namespace is needed to avoid a memory leak.
# check if clearing the namespace is needed to avoid a memory leak.
# the window manager is normally loaded with new blend files
# so this is a reasonable way to deal with namespace clearing.
# bpy.data hashing is reset by undo so cant be used.
@@ -135,7 +135,7 @@ def execute(context):
sys.stdout = stdout
sys.stderr = stderr
# dont allow the stdin to be used, can lock blender.
# don't allow the stdin to be used, can lock blender.
stdin_backup = sys.stdin
sys.stdin = None
@@ -144,7 +144,7 @@ def execute(context):
sys.modules["__main__"] = console._bpy_main_mod
# in case exception happens
line = "" # incase of encodingf error
line = "" # in case of encoding error
is_multiline = False
try:
@@ -222,8 +222,8 @@ def autocomplete(context):
if not console:
return {'CANCELLED'}
# dont allow the stdin to be used, can lock blender.
# note: unlikely stdin would be used for autocomp. but its possible.
# don't allow the stdin to be used, can lock blender.
# note: unlikely stdin would be used for autocomplete. but its possible.
stdin_backup = sys.stdin
sys.stdin = None
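
The stdin/stdout handling above follows a simple backup-and-restore pattern; a self-contained sketch of the same idea outside Blender:

    import io
    import sys

    stdout_backup, stdin_backup = sys.stdout, sys.stdin
    sys.stdout = io.StringIO()                 # capture whatever the executed code prints
    sys.stdin = None                           # block reads so nothing can wait on the terminal
    try:
        exec("print('hello console')")
        captured = sys.stdout.getvalue()
    finally:
        sys.stdout, sys.stdin = stdout_backup, stdin_backup
    print(captured, end="")                    # hello console
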
@@ -238,8 +238,8 @@ def autocomplete(context):
current_line = sc.history[-1]
line = current_line.body
# This function isnt aware of the text editor or being an operator
# just does the autocomp then copy its results back
# This function isn't aware of the text editor or being an operator
# just does the autocomplete then copy its results back
result = intellisense.expand(
line=line,
cursor=current_line.current_character,
@@ -250,7 +250,7 @@ def autocomplete(context):
current_line.body, current_line.current_character, scrollback = result
del result
# update sel. setting body should really do this!
# update selection. setting body should really do this!
ofs = len(line_new) - len(line)
sc.select_start += ofs
sc.select_end += ofs
@@ -263,12 +263,12 @@ def autocomplete(context):
if _BPY_MAIN_OWN:
sys.modules["__main__"] = main_mod_back
# Separate automplete output by command prompts
# Separate autocomplete output by command prompts
if scrollback != '':
bpy.ops.console.scrollback_append(text=sc.prompt + current_line.body, type='INPUT')
# Now we need to copy back the line from blender back into the
# text editor. This will change when we dont use the text editor
# text editor. This will change when we don't use the text editor
# anymore
if scrollback:
add_scrollback(scrollback, 'INFO')

View File

@@ -64,7 +64,7 @@ def execute(context):
def autocomplete(context):
# sc = context.space_data
#~ sc = context.space_data
# TODO
return {'CANCELLED'}

View File

@@ -292,7 +292,7 @@ class InfoPropertyRNA:
elif as_arg:
if not self.is_required:
type_info.append("optional")
else: # readonly is only useful for selfs, not args
else: # readonly is only useful for self's, not args
if self.is_readonly:
type_info.append("readonly")
@@ -519,7 +519,7 @@ def BuildRNAInfo():
# Done ordering structs
# precalc vars to avoid a lot of looping
# precalculate vars to avoid a lot of looping
for (rna_base, identifier, rna_struct) in structs:
# rna_struct_path = full_rna_struct_path(rna_struct)
@@ -634,7 +634,7 @@ if __name__ == "__main__":
struct = rna_info.BuildRNAInfo()[0]
data = []
for struct_id, v in sorted(struct.items()):
struct_id_str = v.identifier # "".join(sid for sid in struct_id if struct_id)
struct_id_str = v.identifier #~ "".join(sid for sid in struct_id if struct_id)
for base in v.get_bases():
struct_id_str = base.identifier + "|" + struct_id_str

View File

@@ -87,7 +87,7 @@ class ANIM_OT_keying_set_export(Operator):
# --------------------------------------------------------
# generate and write set of lookups for id's used in paths
# cache for syncing ID-blocks to bpy paths + shorthands
# cache for syncing ID-blocks to bpy paths + shorthand's
id_to_paths_cache = {}
for ksp in ks.paths:

View File

@@ -62,7 +62,7 @@ class ConsoleAutocomplete(Operator):
class ConsoleBanner(Operator):
'''Print a message whem the terminal initializes'''
'''Print a message when the terminal initializes'''
bl_idname = "console.banner"
bl_label = "Console Banner"
@@ -97,7 +97,7 @@ class ConsoleLanguage(Operator):
def execute(self, context):
sc = context.space_data
# defailt to python
# default to python
sc.language = self.language
bpy.ops.console.banner()

View File

@@ -174,7 +174,7 @@ class SelectHierarchy(Operator):
select_new.sort(key=lambda obj_iter: obj_iter.name)
act_new = select_new[0]
# dont edit any object settings above this
# don't edit any object settings above this
if select_new:
if not self.extend:
bpy.ops.object.select_all(action='DESELECT')
@@ -331,7 +331,7 @@ class ShapeTransfer(Operator):
orig_shape_coords = me_cos(ob_act.active_shape_key.data)
orig_normals = me_nos(me.vertices)
# the actual mverts location isnt as relyable as the base shape :S
# the actual mverts location isn't as reliable as the base shape :S
# orig_coords = me_cos(me.vertices)
orig_coords = me_cos(me.shape_keys.key_blocks[0].data)
@@ -721,8 +721,8 @@ class TransformsToDeltasAnim(Operator):
fcu.data_path = "delta_rotation_quaternion"
obj.rotation_quaternion.identity()
# XXX: currently not implemented
# elif fcu.data_path == "rotation_axis_angle":
# fcu.data_path = "delta_rotation_axis_angle"
#~ elif fcu.data_path == "rotation_axis_angle":
#~ fcu.data_path = "delta_rotation_axis_angle"
elif fcu.data_path == "scale":
fcu.data_path = "delta_scale"
obj.scale = 1.0, 1.0, 1.0

View File

@@ -104,13 +104,13 @@ def extend(obj, operator, EXTEND_MODE):
uvs_vhash_target[edgepair_inner_target[1]][:] = uvs_vhash_source[edgepair_inner_source[iB]]
# Set the 2 UV's on the target face that are not touching
# for this we need to do basic expaning on the source faces UV's
# for this we need to do basic expanding on the source faces UV's
if EXTEND_MODE == 'LENGTH':
try: # divide by zero is possible
'''
measure the length of each face from the middle of each edge to the opposite
allong the axis we are copying, use this
along the axis we are copying, use this
'''
i1a = edgepair_outer_target[iB]
i2a = edgepair_inner_target[iA]
@@ -158,11 +158,11 @@ def extend(obj, operator, EXTEND_MODE):
# Modes
# 0 unsearched
# 1:mapped, use search from this face. - removed!!
# 2:all siblings have been searched. dont search again.
# 2:all siblings have been searched. don't search again.
face_modes = [0] * len(face_sel)
face_modes[face_act_local_index] = 1 # extend UV's from this face.
# Edge connectivty
# Edge connectivity
edge_faces = {}
for i, f in enumerate(face_sel):
for edkey in f.edge_keys:
@@ -181,7 +181,7 @@ def extend(obj, operator, EXTEND_MODE):
looplen[0] += (me_verts[ed[0]].co - me_verts[ed[1]].co).length
looplen[0] = looplen[0] / len(loop)
# remove seams, so we dont map accross seams.
# remove seams, so we don't map across seams.
for ed in me.edges:
if ed.use_seam:
# remove the edge pair if we can
@@ -213,7 +213,7 @@ def extend(obj, operator, EXTEND_MODE):
face_modes[i] = 1 # we can map from this one now.
ok = True # keep searching
face_modes[i] = 2 # dont search again
face_modes[i] = 2 # don't search again
if is_editmode:
bpy.ops.object.mode_set(mode='EDIT')

View File

@@ -36,7 +36,7 @@ class prettyface(object):
def __init__(self, data):
self.has_parent = False
self.rot = False # only used for triables
self.rot = False # only used for triangles
self.xoff = 0
self.yoff = 0
@@ -157,7 +157,7 @@ class prettyface(object):
angles_co.sort()
I = [i for a, i in angles_co]
# fuv = f.uv
#~ fuv = f.uv
fuv = f.id_data.uv_textures.active.data[f.index].uv # XXX25
if self.rot:
@@ -200,8 +200,8 @@ def lightmap_uvpack(meshes,
'''
BOX_DIV if the maximum division of the UV map that
a box may be consolidated into.
Basicly, a lower value will be slower but waist less space
and a higher value will have more clumpy boxes but more waisted space
Basically, a lower value will be slower but waist less space
and a higher value will have more clumpy boxes but more wasted space
'''
import time
from math import sqrt
@@ -321,7 +321,7 @@ def lightmap_uvpack(meshes,
lengths.append(curr_len)
curr_len = curr_len / 2.0
# Dont allow boxes smaller then the margin
# Don't allow boxes smaller then the margin
# since we contract on the margin, boxes that are smaller will create errors
# print(curr_len, side_len/MARGIN_DIV)
if curr_len / 4.0 < side_len / PREF_MARGIN_DIV:
@@ -371,9 +371,9 @@ def lightmap_uvpack(meshes,
print("...done")
# Since the boxes are sized in powers of 2, we can neatly group them into bigger squares
# this is done hierarchily, so that we may avoid running the pack function
# this is done hierarchically, so that we may avoid running the pack function
# on many thousands of boxes, (under 1k is best) because it would get slow.
# Using an off and even dict us usefull because they are packed differently
# Using an off and even dict us useful because they are packed differently
# where w/h are the same, their packed in groups of 4
# where they are different they are packed in pairs
#
@@ -393,14 +393,14 @@ def lightmap_uvpack(meshes,
# Count the number of boxes consolidated, only used for stats.
c = 0
# This is tricky. the total area of all packed boxes, then squt that to get an estimated size
# This is tricky. the total area of all packed boxes, then sqrt() that to get an estimated size
# this is used then converted into out INT space so we can compare it with
# the ints assigned to the boxes size
# and divided by BOX_DIV, basicly if BOX_DIV is 8
# ...then the maximum box consolidataion (recursive grouping) will have a max width & height
# and divided by BOX_DIV, basically if BOX_DIV is 8
# ...then the maximum box consolidation (recursive grouping) will have a max width & height
# ...1/8th of the UV size.
# ...limiting this is needed or you end up with bug unused texture spaces
# ...however if its too high, boxpacking is way too slow for high poly meshes.
# ...however if its too high, box-packing is way too slow for high poly meshes.
float_to_int_factor = lengths_to_ints[0][0]
if float_to_int_factor > 0:
max_int_dimension = int(((side_len / float_to_int_factor)) / PREF_BOX_DIV)
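
A rough, hedged restatement of the sizing arithmetic described above, with made-up numbers (the real values come from the packed boxes):

    from math import sqrt

    total_packed_area = 4.0                    # sum of the areas of all packed boxes
    side_len = sqrt(total_packed_area)         # estimated square side length: 2.0
    float_to_int_factor = 0.125                # float length represented by one int unit
    PREF_BOX_DIV = 8
    max_int_dimension = int((side_len / float_to_int_factor) / PREF_BOX_DIV)
    print(max_int_dimension)                   # 2 -> consolidated groups stay under 1/8 of the side
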
@@ -456,7 +456,7 @@ def lightmap_uvpack(meshes,
pretty_faces = [pf for pf in pretty_faces if not pf.has_parent]
# spin every second prettyface
# spin every second pretty-face
# if there all vertical you get less efficiently used texture space
i = len(pretty_faces)
d = 0

View File

@@ -34,7 +34,7 @@ USER_FILL_HOLES_QUALITY = None
def pointInTri2D(v, v1, v2, v3):
key = v1.x, v1.y, v2.x, v2.y, v3.x, v3.y
# Commented because its slower to do the bounds check, we should realy cache the bounds info for each face.
# Commented because its slower to do the bounds check, we should really cache the bounds info for each face.
'''
# BOUNDS CHECK
xmin= 1000000
@@ -119,7 +119,7 @@ def boundsEdgeLoop(edges):
"""
# Turns the islands into a list of unpordered edges (Non internal)
# Onlt for UV's
# Only for UV's
# only returns outline edges for intersection tests. and unique points.
def island2Edge(island):
@@ -141,7 +141,7 @@ def island2Edge(island):
else:
i1= vIdx; i2= vIdx-1
try: edges[ f_uvkey[i1], f_uvkey[i2] ] *= 0 # sets eny edge with more then 1 user to 0 are not returned.
try: edges[ f_uvkey[i1], f_uvkey[i2] ] *= 0 # sets any edge with more then 1 user to 0 are not returned.
except: edges[ f_uvkey[i1], f_uvkey[i2] ] = (f.uv[i1] - f.uv[i2]).length,
# If 2 are the same then they will be together, but full [a,b] order is not correct.
@@ -162,10 +162,10 @@ def island2Edge(island):
return length_sorted_edges, [v.to_3d() for v in unique_points.values()]
# ========================= NOT WORKING????
# Find if a points inside an edge loop, un-orderd.
# Find if a points inside an edge loop, un-ordered.
# pt is and x/y
# edges are a non ordered loop of edges.
# #offsets are the edge x and y offset.
# offsets are the edge x and y offset.
"""
def pointInEdges(pt, edges):
#
@@ -223,7 +223,7 @@ def islandIntersectUvIsland(source, target, SourceOffset):
if pointInIsland(pv+SourceOffset, target[0]):
return 2 # SOURCE INSIDE TARGET
# 2 test for a part of the target being totaly inside the source.
# 2 test for a part of the target being totally inside the source.
for pv in target[7]:
if pointInIsland(pv-SourceOffset, source[0]):
return 3 # PART OF TARGET INSIDE SOURCE.
@@ -242,7 +242,7 @@ def testNewVecLs2DRotIsBetter(vecs, mat=-1, bestAreaSoFar = -1):
for i, v in enumerate(vecs):
# Do this allong the way
# Do this along the way
if mat != -1:
v = vecs[i] = mat * v
x= v.x
@@ -252,7 +252,7 @@ def testNewVecLs2DRotIsBetter(vecs, mat=-1, bestAreaSoFar = -1):
if x>maxx: maxx= x
if y>maxy: maxy= y
# Spesific to this algo, bail out if we get bigger then the current area
# Specific to this algo, bail out if we get bigger then the current area
if bestAreaSoFar != -1 and (maxx-minx) * (maxy-miny) > bestAreaSoFar:
return (BIG_NUM, None), None
w = maxx-minx
@@ -262,7 +262,7 @@ def testNewVecLs2DRotIsBetter(vecs, mat=-1, bestAreaSoFar = -1):
def optiRotateUvIsland(faces):
global currentArea
# Bestfit Rotation
# Best-fit Rotation
def best2dRotation(uvVecs, MAT1, MAT2):
global currentArea
@@ -318,7 +318,7 @@ def optiRotateUvIsland(faces):
currentArea = newAreaPos
# 45d done
# Testcase different rotations and find the onfe that best fits in a square
# Testcase different rotations and find the one that best fits in a square
for ROTMAT in RotMatStepRotation:
uvVecs = best2dRotation(uvVecs, ROTMAT[0], ROTMAT[1])
@@ -409,7 +409,7 @@ def mergeUvIslands(islandList):
BREAK= False
while areaIslandIdx < len(decoratedIslandListAreaSort) and not BREAK:
sourceIsland = decoratedIslandListAreaSort[areaIslandIdx]
# Alredy packed?
# Already packed?
if not sourceIsland[0]:
areaIslandIdx+=1
else:
@@ -420,7 +420,7 @@ def mergeUvIslands(islandList):
BREAK= True
break
# Now we have 2 islands, is the efficience of the islands lowers theres an
# Now we have 2 islands, if the efficiency of the islands lowers theres an
# increasing likely hood that we can fit merge into the bigger UV island.
# this ensures a tight fit.
@@ -435,12 +435,12 @@ def mergeUvIslands(islandList):
pass
else:
# ([island, totFaceArea, efficiency, islandArea, w,h])
# Waisted space on target is greater then UV bounding island area.
#~ ([island, totFaceArea, efficiency, islandArea, w,h])
# Wasted space on target is greater then UV bounding island area.
# if targetIsland[3] > (sourceIsland[2]) and\ #
# print USER_FREE_SPACE_TO_TEST_QUALITY
#~ if targetIsland[3] > (sourceIsland[2]) and\ #
#~ print USER_FREE_SPACE_TO_TEST_QUALITY
if targetIsland[2] > (sourceIsland[1] * USER_FREE_SPACE_TO_TEST_QUALITY) and\
targetIsland[4] > sourceIsland[4] and\
targetIsland[5] > sourceIsland[5]:
@@ -456,7 +456,7 @@ def mergeUvIslands(islandList):
boxLeft = 0
# Distllllance we can move between whilst staying inside the targets bounds.
# Distance we can move between whilst staying inside the targets bounds.
testWidth = targetIsland[4] - sourceIsland[4]
testHeight = targetIsland[5] - sourceIsland[5]
@@ -474,25 +474,25 @@ def mergeUvIslands(islandList):
boxLeft = 0 # Start 1 back so we can jump into the loop.
boxBottom= 0 #-yIncrement
##testcount= 0
#~ testcount= 0
while boxBottom <= testHeight:
# Should we use this? - not needed for now.
#if Window.GetKeyQualifiers() & ctrl:
# BREAK= True
# break
#~ if Window.GetKeyQualifiers() & ctrl:
#~ BREAK= True
#~ break
##testcount+=1
#print 'Testing intersect'
Intersect = islandIntersectUvIsland(sourceIsland, targetIsland, Vector((boxLeft, boxBottom)))
#print 'Done', Intersect
if Intersect == 1: # Line intersect, dont bother with this any more
if Intersect == 1: # Line intersect, don't bother with this any more
pass
if Intersect == 2: # Source inside target
'''
We have an intersection, if we are inside the target
then move us 1 whole width accross,
then move us 1 whole width across,
Its possible this is a bad idea since 2 skinny Angular faces
could join without 1 whole move, but its a lot more optimal to speed this up
since we have already tested for it.
@@ -500,7 +500,7 @@ def mergeUvIslands(islandList):
It gives about 10% speedup with minimal errors.
'''
#print 'ass'
# Move the test allong its width + SMALL_NUM
# Move the test along its width + SMALL_NUM
#boxLeft += sourceIsland[4] + SMALL_NUM
boxLeft += sourceIsland[4]
elif Intersect == 0: # No intersection?? Place it.
@@ -551,7 +551,7 @@ def mergeUvIslands(islandList):
break
# INCREMENR NEXT LOCATION
# INCREMENT NEXT LOCATION
if boxLeft > testWidth:
boxBottom += yIncrement
boxLeft = 0.0
@@ -572,8 +572,8 @@ def mergeUvIslands(islandList):
# Takes groups of faces. assumes face groups are UV groups.
def getUvIslands(faceGroups, me):
# Get seams so we dont cross over seams
edge_seams = {} # shoudl be a set
# Get seams so we don't cross over seams
edge_seams = {} # should be a set
for ed in me.edges:
if ed.use_seam:
edge_seams[ed.key] = None # dummy var- use sets!
@@ -609,7 +609,7 @@ def getUvIslands(faceGroups, me):
# Modes
# 0 - face not yet touched.
# 1 - added to island list, and need to search
# 2 - touched and searched - dont touch again.
# 2 - touched and searched - don't touch again.
face_modes = [0] * len(faces) # initialize zero - untested.
face_modes[0] = 1 # start the search with face 1
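
The 0/1/2 face_modes convention above (0 = untouched, 1 = added and still to be searched, 2 = searched, never touched again) is a simple flood fill; a generic, hedged sketch with a hypothetical neighbour table:

    def grow_island(face_count, neighbours):   # neighbours: face index -> adjacent face indices
        face_modes = [0] * face_count
        face_modes[0] = 1                      # start the search with face 0
        island = [0]
        ok = True
        while ok:
            ok = False
            for i, mode in enumerate(face_modes):
                if mode != 1:
                    continue
                for j in neighbours[i]:
                    if face_modes[j] == 0:
                        face_modes[j] = 1      # added, still needs searching
                        island.append(j)
                        ok = True
                face_modes[i] = 2              # searched, don't look again
        return island

    print(grow_island(4, {0: [1], 1: [0, 2], 2: [1], 3: []}))   # [0, 1, 2]
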
@@ -633,7 +633,7 @@ def getUvIslands(faceGroups, me):
face_modes[ii] = ok = 1 # mark as searched
newIsland.append(faces[ii])
# mark as searched, dont look again.
# mark as searched, don't look again.
face_modes[i] = 2
islandList.append(newIsland)
@@ -664,8 +664,8 @@ def packIslands(islandList):
# Now we have UV islands, we need to pack them.
# Make a synchronised list with the islands
# so we can box pak the islands.
# Make a synchronized list with the islands
# so we can box pack the islands.
packBoxes = []
# Keep a list of X/Y offset so we can save time by writing the
@@ -716,14 +716,14 @@ def packIslands(islandList):
# print 'Box Packing Time:', time.time() - time1
#if len(packedLs) != len(islandList):
# raise "Error packed boxes differes from original length"
# raise "Error packed boxes differs from original length"
#print '\tWriting Packed Data to faces'
#XXX Window.DrawProgressBar(0.8, 'Writing Packed Data to faces')
# Sort by ID, so there in sync again
islandIdx = len(islandList)
# Having these here avoids devide by 0
# Having these here avoids divide by 0
if islandIdx:
if USER_STRETCH_ASPECT:
@@ -845,9 +845,9 @@ def main(context,
time.sleep(10)
'''
#XXX if not Draw.PupBlock(ob % len(obList), pup_block):
#XXX return
#XXX del ob
#~ XXX if not Draw.PupBlock(ob % len(obList), pup_block):
#~ XXX return
#~ XXX del ob
# Convert from being button types
@@ -859,10 +859,10 @@ def main(context,
is_editmode = (context.active_object.mode == 'EDIT')
if is_editmode:
bpy.ops.object.mode_set(mode='OBJECT')
# Assume face select mode! an annoying hack to toggle face select mode because Mesh dosent like faceSelectMode.
# Assume face select mode! an annoying hack to toggle face select mode because Mesh doesn't like faceSelectMode.
if USER_SHARE_SPACE:
# Sort by data name so we get consistant results
# Sort by data name so we get consistent results
obList.sort(key = lambda ob: ob.data.name)
collected_islandList= []
@@ -870,7 +870,7 @@ def main(context,
time1 = time.time()
# Tag as False se we dont operate on the same mesh twice.
# Tag as False so we don't operate on the same mesh twice.
#XXX bpy.data.meshes.tag = False
for me in bpy.data.meshes:
me.tag = False
@@ -885,7 +885,7 @@ def main(context,
# Tag as used
me.tag = True
if not me.uv_textures: # Mesh has no UV Coords, dont bother.
if not me.uv_textures: # Mesh has no UV Coords, don't bother.
me.uv_textures.new()
uv_layer = me.uv_textures.active.data
@@ -902,7 +902,7 @@ def main(context,
#XXX Window.DrawProgressBar(0.1, 'SmartProj UV Unwrapper, mapping "%s", %i faces.' % (me.name, len(meshFaces)))
# =======
# Generate a projection list from face normals, this is ment to be smart :)
# Generate a projection list from face normals, this is meant to be smart :)
# make a list of face props that are in sync with meshFaces
# Make a Face List that is sorted by area.
@@ -928,7 +928,7 @@ def main(context,
# Initialize projectVecs
if USER_VIEW_INIT:
# Generate Projection
projectVecs = [Vector(Window.GetViewVector()) * ob.matrix_world.inverted().to_3x3()] # We add to this allong the way
projectVecs = [Vector(Window.GetViewVector()) * ob.matrix_world.inverted().to_3x3()] # We add to this along the way
else:
projectVecs = []
@@ -936,7 +936,7 @@ def main(context,
newProjectMeshFaces = [] # Popping stuffs it up.
# Predent that the most unique angke is ages away to start the loop off
# Pretend that the most unique angle is ages away to start the loop off
mostUniqueAngle = -1.0
# This is popped
@@ -950,7 +950,7 @@ def main(context,
# add all the faces that are close.
for fIdx in range(len(tempMeshFaces)-1, -1, -1):
# Use half the angle limit so we dont overweight faces towards this
# Use half the angle limit so we don't overweight faces towards this
# normal and hog all the faces.
if newProjectVec.dot(tempMeshFaces[fIdx].no) > USER_PROJECTION_LIMIT_HALF_CONVERTED:
newProjectMeshFaces.append(tempMeshFaces.pop(fIdx))
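
The dot-product test above compares face normals against a cosine threshold; a hedged sketch of how an angle limit becomes such a threshold (the 66-degree value is only an example):

    from math import cos, radians

    angle_limit = 66.0                                   # degrees
    limit_converted = cos(radians(angle_limit))          # ~0.407
    limit_half_converted = cos(radians(angle_limit / 2)) # ~0.839

    # a face joins the current projection group when its unit normal `no`
    # satisfies new_project_vec.dot(no) > limit_half_converted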

View File

@@ -16,7 +16,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ***** END GPL LICENCE BLOCK *****
# ***** END GPL LICENSE BLOCK *****
# --------------------------------------------------------------------------
# <pep8 compliant>
@@ -69,7 +69,7 @@ def applyVertexDirt(me, blur_iterations, blur_strength, clamp_dirt, clamp_clean,
vec /= tot_con
# angle is the acos of the dot product between vert and connected verts normals
# angle is the acos() of the dot product between vert and connected verts normals
ang = acos(no.dot(vec))
# enforce min/max
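
The angle computation above is the standard acos-of-dot-product; a pure-Python check with two unit vectors:

    from math import acos, degrees

    no = (0.0, 0.0, 1.0)                                    # vertex normal
    vec = (0.0, 0.7071067811865476, 0.7071067811865476)     # averaged normal of connected verts
    dot = sum(a * b for a, b in zip(no, vec))
    ang = acos(dot)                                         # ~0.785 radians
    print(round(degrees(ang), 1))                           # 45.0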

View File

@@ -33,7 +33,7 @@ class VIEW3D_OT_edit_mesh_extrude_individual_move(Operator):
totface = mesh.total_face_sel
totedge = mesh.total_edge_sel
# totvert = mesh.total_vert_sel
#~ totvert = mesh.total_vert_sel
if select_mode[2] and totface == 1:
bpy.ops.mesh.extrude_region_move('INVOKE_REGION_WIN',
@@ -65,7 +65,7 @@ class VIEW3D_OT_edit_mesh_extrude_move(Operator):
totface = mesh.total_face_sel
totedge = mesh.total_edge_sel
# totvert = mesh.total_vert_sel
#~ totvert = mesh.total_vert_sel
if totface >= 1:
bpy.ops.mesh.extrude_region_move('INVOKE_REGION_WIN',

View File

@@ -70,7 +70,7 @@ def context_path_validate(context, data_path):
# One of the items in the rna path is None, just ignore this
value = Ellipsis
else:
# We have a real error in the rna path, dont ignore that
# We have a real error in the rna path, don't ignore that
raise
return value
@@ -103,7 +103,7 @@ def operator_value_is_undo(value):
def operator_path_is_undo(context, data_path):
# note that if we have data paths that use strings this could fail
# luckily we dont do this!
# luckily we don't do this!
#
# When we cant find the data owner assume no undo is needed.
data_path_head, data_path_sep, data_path_tail = data_path.rpartition(".")
@@ -425,7 +425,7 @@ class WM_OT_context_cycle_enum(Operator):
rna_struct_str, rna_prop_str = data_path.rsplit('.', 1)
i = rna_prop_str.find('[')
# just incse we get "context.foo.bar[0]"
# just in case we get "context.foo.bar[0]"
if i != -1:
rna_prop_str = rna_prop_str[0:i]
@@ -820,8 +820,7 @@ class WM_OT_doc_view(Operator):
class_name = rna_parent.identifier
rna_parent = rna_parent.base
# It so happens that epydoc nests these, not sphinx
# class_name_full = self._nested_class_string(class_name)
#~ class_name_full = self._nested_class_string(class_name)
url = ("%s/bpy.types.%s.html#bpy.types.%s.%s" %
(self._prefix, class_name, class_name, class_prop))
@@ -1014,7 +1013,7 @@ class WM_OT_properties_edit(Operator):
item = eval("context.%s" % data_path)
# setup defaults
prop_ui = rna_idprop_ui_prop_get(item, self.property, False) # dont create
prop_ui = rna_idprop_ui_prop_get(item, self.property, False) # don't create
if prop_ui:
self.min = prop_ui.get("min", -1000000000)
self.max = prop_ui.get("max", 1000000000)
@@ -1171,7 +1170,7 @@ class WM_OT_copy_prev_settings(Operator):
shutil.rmtree(os.path.join(path_dst, 'scripts'))
shutil.rmtree(os.path.join(path_dst, 'plugins'))
# dont loose users work if they open the splash later.
# don't loose users work if they open the splash later.
if bpy.data.is_saved is bpy.data.is_dirty is False:
bpy.ops.wm.read_homefile()
else:
@@ -1372,9 +1371,9 @@ class WM_OT_keyitem_add(Operator):
km = context.keymap
if km.is_modal:
km.keymap_items.new_modal("", 'A', 'PRESS') # kmi
km.keymap_items.new_modal("", 'A', 'PRESS') #~ kmi
else:
km.keymap_items.new("none", 'A', 'PRESS') # kmi
km.keymap_items.new("none", 'A', 'PRESS') #~ kmi
# clear filter and expand keymap so we can see the newly added item
if context.space_data.filter_text != "":
@@ -1556,7 +1555,7 @@ class WM_OT_addon_install(Operator):
pyfile = self.filepath
if self.target == 'DEFAULT':
# dont use bpy.utils.script_paths("addons") because we may not be able to write to it.
# don't use bpy.utils.script_paths("addons") because we may not be able to write to it.
path_addons = bpy.utils.user_resource('SCRIPTS', "addons", create=True)
else:
path_addons = bpy.context.user_preferences.filepaths.script_directory
@@ -1656,7 +1655,7 @@ class WM_OT_addon_install(Operator):
bpy.utils.refresh_script_paths()
# TODO, should not be a warning.
# self.report({'WARNING'}, "File installed to '%s'\n" % path_dest)
#~ self.report({'WARNING'}, "File installed to '%s'\n" % path_dest)
return {'FINISHED'}
def invoke(self, context, event):

View File

@@ -21,8 +21,8 @@
# Generic Panels (Independent of DataType)
# NOTE:
# The specialised panel types are derived in their respective UI modules
# dont register these classes since they are only helpers.
# The specialized panel types are derived in their respective UI modules
# don't register these classes since they are only helpers.
class MotionPathButtonsPanel():

View File

@@ -214,7 +214,7 @@ class DATA_PT_pose_library(ArmatureButtonsPanel, Panel):
layout.prop(pose_marker_active, "name")
# TODO: this panel will soon be depreceated too
# TODO: this panel will soon be deprecated too
class DATA_PT_ghost(ArmatureButtonsPanel, Panel):
bl_label = "Ghost"
@@ -301,7 +301,7 @@ class DATA_PT_motion_paths(MotionPathButtonsPanel, Panel):
@classmethod
def poll(cls, context):
# XXX: include posemode check?
# XXX: include pose-mode check?
return (context.object) and (context.armature)
def draw(self, context):
@@ -324,7 +324,7 @@ class DATA_PT_onion_skinning(OnionSkinButtonsPanel): # , Panel): # inherit from
@classmethod
def poll(cls, context):
# XXX: include posemode check?
# XXX: include pose-mode check?
return (context.object) and (context.armature)
def draw(self, context):

View File

@@ -189,7 +189,7 @@ class BONE_PT_display(BoneButtonsPanel, Panel):
return context.bone
def draw(self, context):
# note. this works ok in editmode but isnt
# note. this works ok in edit-mode but isn't
# all that useful so disabling for now.
layout = self.layout

View File

@@ -87,7 +87,7 @@ class MATERIAL_PT_context_material(MaterialButtonsPanel, Panel):
@classmethod
def poll(cls, context):
# An exception, dont call the parent poll func because
# An exception, don't call the parent poll func because
# this manages materials for all engine types
engine = context.scene.render.engine
@@ -537,7 +537,7 @@ class MATERIAL_PT_halo(MaterialButtonsPanel, Panel):
def draw(self, context):
layout = self.layout
mat = context.material # dont use node material
mat = context.material # don't use node material
halo = mat.halo
def number_but(layout, toggle, number, name, color):
@@ -595,7 +595,7 @@ class MATERIAL_PT_flare(MaterialButtonsPanel, Panel):
def draw(self, context):
layout = self.layout
mat = context.material # dont use node material
mat = context.material # don't use node material
halo = mat.halo
layout.active = halo.use_flare_mode
@@ -622,7 +622,7 @@ class MATERIAL_PT_game_settings(MaterialButtonsPanel, bpy.types.Panel):
def draw(self, context):
layout = self.layout
game = context.material.game_settings # dont use node material
game = context.material.game_settings # don't use node material
row = layout.row()
row.prop(game, "use_backface_culling")
@@ -653,7 +653,7 @@ class MATERIAL_PT_physics(MaterialButtonsPanel, bpy.types.Panel):
layout = self.layout
layout.active = context.material.game_settings.physics
phys = context.material.physics # dont use node material
phys = context.material.physics # don't use node material
split = layout.split()
row = split.row()
@@ -686,7 +686,7 @@ class MATERIAL_PT_strand(MaterialButtonsPanel, Panel):
def draw(self, context):
layout = self.layout
mat = context.material # dont use node material
mat = context.material # don't use node material
tan = mat.strand
split = layout.split()
@@ -862,7 +862,7 @@ class MATERIAL_PT_volume_density(VolumeButtonsPanel, Panel):
def draw(self, context):
layout = self.layout
vol = context.material.volume # dont use node material
vol = context.material.volume # don't use node material
row = layout.row()
row.prop(vol, "density")
@@ -876,7 +876,7 @@ class MATERIAL_PT_volume_shading(VolumeButtonsPanel, Panel):
def draw(self, context):
layout = self.layout
vol = context.material.volume # dont use node material
vol = context.material.volume # don't use node material
split = layout.split()
@@ -901,7 +901,7 @@ class MATERIAL_PT_volume_lighting(VolumeButtonsPanel, Panel):
def draw(self, context):
layout = self.layout
vol = context.material.volume # dont use node material
vol = context.material.volume # don't use node material
split = layout.split()
@@ -942,7 +942,7 @@ class MATERIAL_PT_volume_transp(VolumeButtonsPanel, Panel):
def draw(self, context):
layout = self.layout
mat = context.material # dont use node material
mat = context.material # don't use node material
layout.prop(mat, "transparency_method", expand=True)
@@ -954,7 +954,7 @@ class MATERIAL_PT_volume_integration(VolumeButtonsPanel, Panel):
def draw(self, context):
layout = self.layout
vol = context.material.volume # dont use node material
vol = context.material.volume # don't use node material
split = layout.split()

View File

@@ -658,7 +658,7 @@ class ConstraintButtonsPanel():
row.label(text="Source to Destination Mapping:")
# note: chr(187) is the ASCII arrow ( >> ). Blender Text Editor can't
# open it. Thus we are using the hardcoded value instead.
# open it. Thus we are using the hard-coded value instead.
row = col.row()
row.prop(con, "map_to_x_from", expand=False, text="")
row.label(text=" %s X" % chr(187))

View File

@@ -76,7 +76,7 @@ class PHYSICS_PT_add(PhysicButtonsPanel, Panel):
physics_add(self, col, context.smoke, "Smoke", 'SMOKE', 'MOD_SMOKE', True)
#cachetype can be 'PSYS' 'HAIR' 'SMOKE' etc
# cache-type can be 'PSYS' 'HAIR' 'SMOKE' etc
def point_cache_ui(self, context, cache, enabled, cachetype):
layout = self.layout

View File

@@ -927,9 +927,9 @@ class TEXTURE_PT_influence(TextureSlotPanel, Panel):
factor_but(col, "use_map_warp", "warp_factor", "Warp")
factor_but(col, "use_map_displacement", "displacement_factor", "Displace")
#sub = col.column()
#sub.active = tex.use_map_translucency or tex.map_emit or tex.map_alpha or tex.map_raymir or tex.map_hardness or tex.map_ambient or tex.map_specularity or tex.map_reflection or tex.map_mirror
#sub.prop(tex, "default_value", text="Amount", slider=True)
#~ sub = col.column()
#~ sub.active = tex.use_map_translucency or tex.map_emit or tex.map_alpha or tex.map_raymir or tex.map_hardness or tex.map_ambient or tex.map_specularity or tex.map_reflection or tex.map_mirror
#~ sub.prop(tex, "default_value", text="Amount", slider=True)
elif idblock.type == 'HALO':
layout.label(text="Halo:")
@@ -1014,7 +1014,7 @@ class TEXTURE_PT_influence(TextureSlotPanel, Panel):
col = split.column()
col.prop(tex, "blend_type", text="Blend")
col.prop(tex, "use_rgb_to_intensity")
# color is used on grayscale textures even when use_rgb_to_intensity is disabled.
# color is used on gray-scale textures even when use_rgb_to_intensity is disabled.
col.prop(tex, "color", text="")
col = split.column()
@@ -1027,14 +1027,14 @@ class TEXTURE_PT_influence(TextureSlotPanel, Panel):
if isinstance(idblock, bpy.types.Material):
layout.label(text="Bump Mapping:")
# only show bump settings if activated but not for normalmap images
# only show bump settings if activated but not for normal-map images
row = layout.row()
sub = row.row()
sub.active = (tex.use_map_normal or tex.use_map_warp) and not (tex.texture.type == 'IMAGE' and (tex.texture.use_normal_map or tex.texture.use_derivative_map))
sub.prop(tex, "bump_method", text="Method")
# the space setting is supported for: derivmaps + bumpmaps (DEFAULT,BEST_QUALITY), not for normalmaps
# the space setting is supported for: derivative-maps + bump-maps (DEFAULT,BEST_QUALITY), not for normal-maps
sub = row.row()
sub.active = (tex.use_map_normal or tex.use_map_warp) and not (tex.texture.type == 'IMAGE' and tex.texture.use_normal_map) and ((tex.bump_method in {'BUMP_DEFAULT', 'BUMP_BEST_QUALITY'}) or (tex.texture.type == 'IMAGE' and tex.texture.use_derivative_map))
sub.prop(tex, "bump_objectspace", text="Space")

View File

@@ -45,7 +45,7 @@ class LOGIC_PT_properties(Panel):
row = box.row()
row.prop(prop, "name", text="")
row.prop(prop, "type", text="")
row.prop(prop, "value", text="", toggle=True) # we dont care about the type. rna will display correctly
row.prop(prop, "value", text="", toggle=True) # we don't care about the type. rna will display correctly
row.prop(prop, "show_debug", text="", toggle=True, icon='INFO')
row.operator("object.game_property_remove", text="", icon='X', emboss=False).index = i

View File

@@ -426,8 +426,8 @@ class USERPREF_PT_system(Panel):
col.label(text="Anisotropic Filtering")
col.prop(system, "anisotropic_filter", text="")
col.prop(system, "use_vertex_buffer_objects")
#Anti-aliasing is disabled as it breaks broder/lasso select
#col.prop(system, "use_antialiasing")
# Anti-aliasing is disabled as it breaks border/lasso select
#~ col.prop(system, "use_antialiasing")
col.label(text="Window Draw Method:")
col.prop(system, "window_draw_method", text="")
col.label(text="Text Draw Options:")

View File

@@ -262,7 +262,7 @@ class InputKeyMapPanel:
row = subcol.row(align=True)
#row.prop_search(wm.keyconfigs, "active", wm, "keyconfigs", text="Key Config:")
#~ row.prop_search(wm.keyconfigs, "active", wm, "keyconfigs", text="Key Config:")
text = bpy.path.display_name(context.window_manager.keyconfigs.active.name)
if not text:
text = "Blender (default)"
@@ -270,8 +270,8 @@ class InputKeyMapPanel:
row.operator("wm.keyconfig_preset_add", text="", icon="ZOOMIN")
row.operator("wm.keyconfig_preset_add", text="", icon="ZOOMOUT").remove_active = True
# layout.context_pointer_set("keyconfig", wm.keyconfigs.active)
# row.operator("wm.keyconfig_remove", text="", icon='X')
#~ layout.context_pointer_set("keyconfig", wm.keyconfigs.active)
#~ row.operator("wm.keyconfig_remove", text="", icon='X')
row.prop(context.space_data, "filter_text", icon="VIEWZOOM")

View File

@@ -1142,7 +1142,7 @@ class VIEW3D_MT_sculpt(Menu):
layout.prop(sculpt, "use_threaded", text="Threaded Sculpt")
layout.prop(sculpt, "show_brush")
# TODO, make availabel from paint menu!
# TODO, make available from paint menu!
layout.prop(tool_settings, "sculpt_paint_use_unified_size", text="Unify Size")
layout.prop(tool_settings, "sculpt_paint_use_unified_strength", text="Unify Strength")
@@ -1413,7 +1413,7 @@ class BoneOptions:
data_path_iter = "selected_bones"
opt_suffix = ""
options.append("lock")
else: # posemode
else: # pose-mode
bone_props = bpy.types.Bone.bl_rna.properties
data_path_iter = "selected_pose_bones"
opt_suffix = "bone."
@@ -2172,7 +2172,7 @@ class VIEW3D_PT_view3d_meshdisplay(Panel):
@classmethod
def poll(cls, context):
# The active object check is needed because of localmode
# The active object check is needed because of local-mode
return (context.active_object and (context.mode == 'EDIT_MESH'))
def draw(self, context):
@@ -2233,7 +2233,7 @@ class VIEW3D_PT_background_image(Panel):
@classmethod
def poll(cls, context):
view = context.space_data
# bg = context.space_data.background_image
#~ bg = context.space_data.background_image
return (view)
def draw_header(self, context):

View File

@@ -63,7 +63,7 @@ def draw_gpencil_tools(context, layout):
row.prop(context.tool_settings, "use_grease_pencil_sessions")
# ********** default tools for objectmode ****************
# ********** default tools for object-mode ****************
class VIEW3D_PT_tools_objectmode(View3DPanel, Panel):
bl_context = "objectmode"
@@ -385,7 +385,7 @@ class VIEW3D_PT_tools_latticeedit(View3DPanel, Panel):
draw_gpencil_tools(context, layout)
# ********** default tools for posemode ****************
# ********** default tools for pose-mode ****************
class VIEW3D_PT_tools_posemode(View3DPanel, Panel):
@@ -1043,7 +1043,7 @@ class VIEW3D_PT_tools_brush_appearance(PaintPanel, Panel):
row = col.row(align=True)
row.prop(brush, "icon_filepath", text="")
# ********** default tools for weightpaint ****************
# ********** default tools for weight-paint ****************
class VIEW3D_PT_tools_weightpaint(View3DPanel, Panel):
@@ -1097,9 +1097,9 @@ class VIEW3D_PT_tools_weightpaint_options(View3DPanel, Panel):
# col.prop(wpaint, "mul", text="")
# Also missing now:
# Soft, Vgroup, X-Mirror and "Clear" Operator.
# Soft, Vertex-Group, X-Mirror and "Clear" Operator.
# ********** default tools for vertexpaint ****************
# ********** default tools for vertex-paint ****************
class VIEW3D_PT_tools_vertexpaint(View3DPanel, Panel):
@@ -1128,7 +1128,7 @@ class VIEW3D_PT_tools_vertexpaint(View3DPanel, Panel):
# col.label(text="Multiply:")
# col.prop(vpaint, "mul", text="")
# ********** default tools for texturepaint ****************
# ********** default tools for texture-paint ****************
class VIEW3D_PT_tools_projectpaint(View3DPanel, Panel):

View File

@@ -8,7 +8,7 @@
import bge
# variables defined here will only be set once when the
# module is first imported. Set object spesific vars
# module is first imported. Set object specific vars
# inside the function if you intend to use the module
# with multiple objects.