Merge branch 'master' into blender2.8

This commit is contained in:
2017-10-29 15:14:04 +01:00
26 changed files with 617 additions and 328 deletions

View File

@@ -324,7 +324,7 @@ static double knot_remove_error_value(
/* Avoid having to re-calculate again */
double r_handle_factors[2], uint *r_error_index)
{
double error_sq = FLT_MAX;
double error_sq = DBL_MAX;
#ifdef USE_VLA
double handle_factor_l[dims];
@@ -340,7 +340,7 @@ static double knot_remove_error_value(
handle_factor_l, handle_factor_r,
&error_sq, r_error_index);
assert(error_sq != FLT_MAX);
assert(error_sq != DBL_MAX);
isub_vnvn(handle_factor_l, points_offset, dims);
r_handle_factors[0] = dot_vnvn(tan_l, handle_factor_l, dims);
@@ -465,7 +465,6 @@ static void knot_remove_error_recalculate(
struct KnotRemoveState *r;
if (k->heap_node) {
r = HEAP_node_ptr(k->heap_node);
HEAP_remove(p->heap, k->heap_node);
}
else {
#ifdef USE_TPOOL
@@ -473,14 +472,13 @@ static void knot_remove_error_recalculate(
#else
r = malloc(sizeof(*r));
#endif
r->index = k->index;
}
r->handles[0] = handles[0];
r->handles[1] = handles[1];
k->heap_node = HEAP_insert(p->heap, cost_sq, r);
HEAP_insert_or_update(p->heap, &k->heap_node, cost_sq, r);
}
else {
if (k->heap_node) {
@@ -624,7 +622,6 @@ static void knot_refit_error_recalculate(
struct KnotRefitState *r;
if (k->heap_node) {
r = HEAP_node_ptr(k->heap_node);
HEAP_remove(p->heap, k->heap_node);
}
else {
#ifdef USE_TPOOL
@@ -645,7 +642,7 @@ static void knot_refit_error_recalculate(
r->error_sq[0] = r->error_sq[1] = cost_sq;
/* Always perform removal before refitting, (make a negative number) */
k->heap_node = HEAP_insert(p->heap, cost_sq - error_sq_max, r);
HEAP_insert_or_update(p->heap, &k->heap_node, cost_sq - error_sq_max, r);
return;
}
@@ -689,7 +686,6 @@ static void knot_refit_error_recalculate(
struct KnotRefitState *r;
if (k->heap_node) {
r = HEAP_node_ptr(k->heap_node);
HEAP_remove(p->heap, k->heap_node);
}
else {
#ifdef USE_TPOOL
@@ -716,7 +712,7 @@ static void knot_refit_error_recalculate(
assert(cost_sq_dst_max < cost_sq_src_max);
/* Weight for the greatest improvement */
k->heap_node = HEAP_insert(p->heap, cost_sq_src_max - cost_sq_dst_max, r);
HEAP_insert_or_update(p->heap, &k->heap_node, cost_sq_src_max - cost_sq_dst_max, r);
}
}
else {
@@ -895,7 +891,6 @@ static void knot_corner_error_recalculate(
struct KnotCornerState *c;
if (k_split->heap_node) {
c = HEAP_node_ptr(k_split->heap_node);
HEAP_remove(p->heap, k_split->heap_node);
}
else {
#ifdef USE_TPOOL
@@ -920,7 +915,7 @@ static void knot_corner_error_recalculate(
c->error_sq[1] = cost_sq_dst[1];
const double cost_max_sq = MAX2(cost_sq_dst[0], cost_sq_dst[1]);
k_split->heap_node = HEAP_insert(p->heap, cost_max_sq, c);
HEAP_insert_or_update(p->heap, &k_split->heap_node, cost_max_sq, c);
}
else {
if (k_split->heap_node) {

View File

@@ -48,13 +48,14 @@
# define UNLIKELY(x) (x)
#endif
typedef unsigned int uint;
/***/
struct HeapNode {
void *ptr;
double value;
unsigned int index;
void *ptr;
double value;
uint index;
};
/* heap_* pool allocator */
@@ -67,8 +68,8 @@ struct HeapNode {
#undef TPOOL_STRUCT
struct Heap {
unsigned int size;
unsigned int bufsize;
uint size;
uint bufsize;
HeapNode **tree;
struct HeapMemPool pool;
@@ -86,32 +87,32 @@ struct Heap {
#define HEAP_EQUALS(a, b) ((a)->value == (b)->value)
#endif
static void heap_swap(Heap *heap, const unsigned int i, const unsigned int j)
static void heap_swap(Heap *heap, const uint i, const uint j)
{
#if 0
SWAP(unsigned int, heap->tree[i]->index, heap->tree[j]->index);
SWAP(HeapNode *, heap->tree[i], heap->tree[j]);
SWAP(uint, heap->tree[i]->index, heap->tree[j]->index);
SWAP(HeapNode *, heap->tree[i], heap->tree[j]);
#else
HeapNode **tree = heap->tree;
union {
unsigned int index;
HeapNode *node;
uint index;
HeapNode *node;
} tmp;
SWAP_TVAL(tmp.index, tree[i]->index, tree[j]->index);
SWAP_TVAL(tmp.node, tree[i], tree[j]);
#endif
}
static void heap_down(Heap *heap, unsigned int i)
static void heap_down(Heap *heap, uint i)
{
/* size won't change in the loop */
const unsigned int size = heap->size;
const uint size = heap->size;
while (1) {
const unsigned int l = HEAP_LEFT(i);
const unsigned int r = HEAP_RIGHT(i);
unsigned int smallest;
const uint l = HEAP_LEFT(i);
const uint r = HEAP_RIGHT(i);
uint smallest;
smallest = ((l < size) && HEAP_COMPARE(heap->tree[l], heap->tree[i])) ? l : i;
@@ -128,10 +129,10 @@ static void heap_down(Heap *heap, unsigned int i)
}
}
static void heap_up(Heap *heap, unsigned int i)
static void heap_up(Heap *heap, uint i)
{
while (i > 0) {
const unsigned int p = HEAP_PARENT(i);
const uint p = HEAP_PARENT(i);
if (HEAP_COMPARE(heap->tree[p], heap->tree[i])) {
break;
@@ -148,7 +149,7 @@ static void heap_up(Heap *heap, unsigned int i)
* \{ */
/* use when the size of the heap is known in advance */
Heap *HEAP_new(unsigned int tot_reserve)
Heap *HEAP_new(uint tot_reserve)
{
Heap *heap = malloc(sizeof(Heap));
/* ensure we have at least one so we can keep doubling it */
@@ -164,7 +165,7 @@ Heap *HEAP_new(unsigned int tot_reserve)
void HEAP_free(Heap *heap, HeapFreeFP ptrfreefp)
{
if (ptrfreefp) {
unsigned int i;
uint i;
for (i = 0; i < heap->size; i++) {
ptrfreefp(heap->tree[i]->ptr);
@@ -180,7 +181,7 @@ void HEAP_free(Heap *heap, HeapFreeFP ptrfreefp)
void HEAP_clear(Heap *heap, HeapFreeFP ptrfreefp)
{
if (ptrfreefp) {
unsigned int i;
uint i;
for (i = 0; i < heap->size; i++) {
ptrfreefp(heap->tree[i]->ptr);
@@ -215,12 +216,22 @@ HeapNode *HEAP_insert(Heap *heap, double value, void *ptr)
return node;
}
bool HEAP_is_empty(Heap *heap)
/**
 * Insert a new node when `*node_p` is NULL, otherwise update the existing
 * node's value and payload pointer in place.
 *
 * Convenience for the common "insert or update" pattern: `*node_p` is
 * written on first insertion so callers can track the node handle.
 */
void HEAP_insert_or_update(Heap *heap, HeapNode **node_p, double value, void *ptr)
{
	if (*node_p == NULL) {
		*node_p = HEAP_insert(heap, value, ptr);
	}
	else {
		HEAP_node_value_update_ptr(heap, *node_p, value, ptr);
	}
}
/** \return true when the heap contains no nodes. */
bool HEAP_is_empty(const Heap *heap)
{
	return (heap->size == 0);
}
unsigned int HEAP_size(Heap *heap)
uint HEAP_size(const Heap *heap)
{
return heap->size;
}
@@ -230,7 +241,7 @@ HeapNode *HEAP_top(Heap *heap)
return heap->tree[0];
}
double HEAP_top_value(Heap *heap)
/**
 * \return the value of the top (minimum) node.
 *
 * \note The caller must ensure the heap is not empty
 * (`tree[0]` is dereferenced unconditionally here).
 */
double HEAP_top_value(const Heap *heap)
{
	return heap->tree[0]->value;
}
@@ -253,12 +264,12 @@ void *HEAP_popmin(Heap *heap)
void HEAP_remove(Heap *heap, HeapNode *node)
{
unsigned int i = node->index;
uint i = node->index;
assert(heap->size != 0);
while (i > 0) {
unsigned int p = HEAP_PARENT(i);
uint p = HEAP_PARENT(i);
heap_swap(heap, p, i);
i = p;
@@ -267,7 +278,25 @@ void HEAP_remove(Heap *heap, HeapNode *node)
HEAP_popmin(heap);
}
double HEAP_node_value(HeapNode *node)
/**
 * Update the sort value of \a node and restore the heap invariant.
 * A no-op when the value is unchanged.
 */
void HEAP_node_value_update(Heap *heap, HeapNode *node, double value)
{
	assert(heap->size != 0);
	if (node->value == value) {
		return;
	}
	node->value = value;
	/* The node may need to move toward the root or the leaves;
	 * at most one of these sifts will move it.
	 * Can be called in either order, makes no difference. */
	heap_up(heap, node->index);
	heap_down(heap, node->index);
}
/**
 * Version of #HEAP_node_value_update that also replaces the node's
 * payload pointer before rebalancing.
 */
void HEAP_node_value_update_ptr(Heap *heap, HeapNode *node, double value, void *ptr)
{
	node->ptr = ptr;
	HEAP_node_value_update(heap, node, value);
}
double HEAP_node_value(const HeapNode *node)
{
return node->value;
}

View File

@@ -39,16 +39,19 @@ typedef struct HeapNode HeapNode;
typedef void (*HeapFreeFP)(void *ptr);
Heap *HEAP_new(unsigned int tot_reserve);
bool HEAP_is_empty(Heap *heap);
bool HEAP_is_empty(const Heap *heap);
void HEAP_free(Heap *heap, HeapFreeFP ptrfreefp);
void *HEAP_node_ptr(HeapNode *node);
void HEAP_remove(Heap *heap, HeapNode *node);
HeapNode *HEAP_insert(Heap *heap, double value, void *ptr);
void HEAP_insert_or_update(Heap *heap, HeapNode **node_p, double value, void *ptr);
void *HEAP_popmin(Heap *heap);
void HEAP_clear(Heap *heap, HeapFreeFP ptrfreefp);
unsigned int HEAP_size(Heap *heap);
unsigned int HEAP_size(const Heap *heap);
HeapNode *HEAP_top(Heap *heap);
double HEAP_top_value(Heap *heap);
double HEAP_node_value(HeapNode *node);
double HEAP_top_value(const Heap *heap);
void HEAP_node_value_update(Heap *heap, HeapNode *node, double value);
void HEAP_node_value_update_ptr(Heap *heap, HeapNode *node, double value, void *ptr);
double HEAP_node_value(const HeapNode *node);
#endif /* __GENERIC_HEAP_IMPL_H__ */

View File

@@ -1076,7 +1076,10 @@ DerivedMesh *AbcMeshReader::read_derivedmesh(DerivedMesh *dm,
ImportSettings settings;
settings.read_flag |= read_flag;
if (dm->getNumVerts(dm) != positions->size()) {
bool topology_changed = positions->size() != dm->getNumVerts(dm) ||
face_counts->size() != dm->getNumPolys(dm) ||
face_indices->size() != dm->getNumLoops(dm);
if (topology_changed) {
new_dm = CDDM_from_template(dm,
positions->size(),
0,

View File

@@ -23,7 +23,7 @@
/** \file BLI_heap.h
* \ingroup bli
* \brief A heap / priority queue ADT
* \brief A min-heap / priority queue ADT
*/
struct Heap;
@@ -33,34 +33,24 @@ typedef struct HeapNode HeapNode;
typedef void (*HeapFreeFP)(void *ptr);
/* Creates a new heap. BLI_memarena is used for allocating nodes. Removed nodes
* are recycled, so memory usage will not shrink. */
Heap *BLI_heap_new_ex(unsigned int tot_reserve) ATTR_WARN_UNUSED_RESULT;
Heap *BLI_heap_new(void) ATTR_WARN_UNUSED_RESULT;
void BLI_heap_clear(Heap *heap, HeapFreeFP ptrfreefp) ATTR_NONNULL(1);
void BLI_heap_free(Heap *heap, HeapFreeFP ptrfreefp) ATTR_NONNULL(1);
/* Insert heap node with a value (often a 'cost') and pointer into the heap,
* duplicate values are allowed. */
HeapNode *BLI_heap_insert(Heap *heap, float value, void *ptr) ATTR_NONNULL(1);
/* Remove a heap node. */
void BLI_heap_insert_or_update(Heap *heap, HeapNode **node_p, float value, void *ptr) ATTR_NONNULL(1, 2);
void BLI_heap_remove(Heap *heap, HeapNode *node) ATTR_NONNULL(1, 2);
/* Return 0 if the heap is empty, 1 otherwise. */
bool BLI_heap_is_empty(Heap *heap) ATTR_NONNULL(1);
/* Return the size of the heap. */
unsigned int BLI_heap_size(Heap *heap) ATTR_WARN_UNUSED_RESULT ATTR_NONNULL(1);
/* Return the top node of the heap. This is the node with the lowest value. */
HeapNode *BLI_heap_top(Heap *heap) ATTR_WARN_UNUSED_RESULT ATTR_NONNULL(1);
/* Pop the top node off the heap and return it's pointer. */
bool BLI_heap_is_empty(const Heap *heap) ATTR_NONNULL(1);
unsigned int BLI_heap_size(const Heap *heap) ATTR_WARN_UNUSED_RESULT ATTR_NONNULL(1);
HeapNode *BLI_heap_top(const Heap *heap) ATTR_WARN_UNUSED_RESULT ATTR_NONNULL(1);
void *BLI_heap_popmin(Heap *heap) ATTR_NONNULL(1);
void BLI_heap_node_value_update(Heap *heap, HeapNode *node, float value) ATTR_NONNULL(1, 2);
void BLI_heap_node_value_update_ptr(Heap *heap, HeapNode *node, float value, void *ptr) ATTR_NONNULL(1, 2);
/* Return the value or pointer of a heap node. */
float BLI_heap_node_value(HeapNode *heap) ATTR_WARN_UNUSED_RESULT ATTR_NONNULL(1);
void *BLI_heap_node_ptr(HeapNode *heap) ATTR_WARN_UNUSED_RESULT ATTR_NONNULL(1);
float BLI_heap_node_value(const HeapNode *heap) ATTR_WARN_UNUSED_RESULT ATTR_NONNULL(1);
void *BLI_heap_node_ptr(const HeapNode *heap) ATTR_WARN_UNUSED_RESULT ATTR_NONNULL(1);
/* only for gtest */
bool BLI_heap_is_valid(const Heap *heap);
#endif /* __BLI_HEAP_H__ */

View File

@@ -73,10 +73,10 @@ struct bArgs {
int *passes;
};
static unsigned int case_strhash(const void *ptr)
static uint case_strhash(const void *ptr)
{
const char *s = ptr;
unsigned int i = 0;
uint i = 0;
unsigned char c;
while ( (c = tolower(*s++)) )
@@ -85,7 +85,7 @@ static unsigned int case_strhash(const void *ptr)
return i;
}
static unsigned int keyhash(const void *ptr)
static uint keyhash(const void *ptr)
{
const bAKey *k = ptr;
return case_strhash(k->arg); /* ^ BLI_ghashutil_inthash((void *)k->pass); */

View File

@@ -23,7 +23,7 @@
/** \file blender/blenlib/intern/BLI_heap.c
* \ingroup bli
*
* A heap / priority queue ADT.
* A min-heap / priority queue ADT.
*/
#include <stdlib.h>
@@ -38,15 +38,15 @@
/***/
struct HeapNode {
void *ptr;
float value;
unsigned int index;
void *ptr;
float value;
uint index;
};
struct HeapNode_Chunk {
struct HeapNode_Chunk *prev;
unsigned int size;
unsigned int bufsize;
uint size;
uint bufsize;
struct HeapNode buf[0];
};
@@ -58,11 +58,11 @@ struct HeapNode_Chunk {
* \note keep type in sync with tot_nodes in heap_node_alloc_chunk.
*/
#define HEAP_CHUNK_DEFAULT_NUM \
((unsigned int)((MEM_SIZE_OPTIMAL((1 << 16) - sizeof(struct HeapNode_Chunk))) / sizeof(HeapNode)))
((uint)((MEM_SIZE_OPTIMAL((1 << 16) - sizeof(struct HeapNode_Chunk))) / sizeof(HeapNode)))
struct Heap {
unsigned int size;
unsigned int bufsize;
uint size;
uint bufsize;
HeapNode **tree;
struct {
@@ -85,16 +85,16 @@ struct Heap {
#define HEAP_EQUALS(a, b) ((a)->value == (b)->value)
#endif
BLI_INLINE void heap_swap(Heap *heap, const unsigned int i, const unsigned int j)
BLI_INLINE void heap_swap(Heap *heap, const uint i, const uint j)
{
#if 0
SWAP(unsigned int, heap->tree[i]->index, heap->tree[j]->index);
SWAP(uint, heap->tree[i]->index, heap->tree[j]->index);
SWAP(HeapNode *, heap->tree[i], heap->tree[j]);
#else
HeapNode **tree = heap->tree;
union {
unsigned int index;
uint index;
HeapNode *node;
} tmp;
SWAP_TVAL(tmp.index, tree[i]->index, tree[j]->index);
@@ -102,18 +102,19 @@ BLI_INLINE void heap_swap(Heap *heap, const unsigned int i, const unsigned int j
#endif
}
static void heap_down(Heap *heap, unsigned int i)
static void heap_down(Heap *heap, uint i)
{
/* size won't change in the loop */
const unsigned int size = heap->size;
const uint size = heap->size;
while (1) {
const unsigned int l = HEAP_LEFT(i);
const unsigned int r = HEAP_RIGHT(i);
unsigned int smallest;
smallest = ((l < size) && HEAP_COMPARE(heap->tree[l], heap->tree[i])) ? l : i;
const uint l = HEAP_LEFT(i);
const uint r = HEAP_RIGHT(i);
uint smallest = i;
if ((l < size) && HEAP_COMPARE(heap->tree[l], heap->tree[smallest])) {
smallest = l;
}
if ((r < size) && HEAP_COMPARE(heap->tree[r], heap->tree[smallest])) {
smallest = r;
}
@@ -127,10 +128,10 @@ static void heap_down(Heap *heap, unsigned int i)
}
}
static void heap_up(Heap *heap, unsigned int i)
static void heap_up(Heap *heap, uint i)
{
while (i > 0) {
const unsigned int p = HEAP_PARENT(i);
const uint p = HEAP_PARENT(i);
if (HEAP_COMPARE(heap->tree[p], heap->tree[i])) {
break;
@@ -147,7 +148,7 @@ static void heap_up(Heap *heap, unsigned int i)
* \{ */
static struct HeapNode_Chunk *heap_node_alloc_chunk(
unsigned int tot_nodes, struct HeapNode_Chunk *chunk_prev)
uint tot_nodes, struct HeapNode_Chunk *chunk_prev)
{
struct HeapNode_Chunk *chunk = MEM_mallocN(
sizeof(struct HeapNode_Chunk) + (sizeof(HeapNode) * tot_nodes), __func__);
@@ -188,8 +189,12 @@ static void heap_node_free(Heap *heap, HeapNode *node)
/** \name Public Heap API
* \{ */
/* use when the size of the heap is known in advance */
Heap *BLI_heap_new_ex(unsigned int tot_reserve)
/**
* Creates a new heap. Removed nodes are recycled, so memory usage will not shrink.
*
* \note Use when the size of the heap is known in advance.
*/
Heap *BLI_heap_new_ex(uint tot_reserve)
{
Heap *heap = MEM_mallocN(sizeof(Heap), __func__);
/* ensure we have at least one so we can keep doubling it */
@@ -211,7 +216,7 @@ Heap *BLI_heap_new(void)
void BLI_heap_free(Heap *heap, HeapFreeFP ptrfreefp)
{
if (ptrfreefp) {
unsigned int i;
uint i;
for (i = 0; i < heap->size; i++) {
ptrfreefp(heap->tree[i]->ptr);
@@ -233,7 +238,7 @@ void BLI_heap_free(Heap *heap, HeapFreeFP ptrfreefp)
void BLI_heap_clear(Heap *heap, HeapFreeFP ptrfreefp)
{
if (ptrfreefp) {
unsigned int i;
uint i;
for (i = 0; i < heap->size; i++) {
ptrfreefp(heap->tree[i]->ptr);
@@ -251,6 +256,10 @@ void BLI_heap_clear(Heap *heap, HeapFreeFP ptrfreefp)
heap->nodes.free = NULL;
}
/**
* Insert heap node with a value (often a 'cost') and pointer into the heap,
* duplicate values are allowed.
*/
HeapNode *BLI_heap_insert(Heap *heap, float value, void *ptr)
{
HeapNode *node;
@@ -275,27 +284,48 @@ HeapNode *BLI_heap_insert(Heap *heap, float value, void *ptr)
return node;
}
bool BLI_heap_is_empty(Heap *heap)
/**
 * Insert a node when none exists at \a node_p, otherwise update the
 * existing node's value and pointer in place.
 *
 * Convenience function since this is a common pattern.
 */
void BLI_heap_insert_or_update(Heap *heap, HeapNode **node_p, float value, void *ptr)
{
	HeapNode *node = *node_p;
	if (node) {
		BLI_heap_node_value_update_ptr(heap, node, value, ptr);
	}
	else {
		*node_p = BLI_heap_insert(heap, value, ptr);
	}
}
bool BLI_heap_is_empty(const Heap *heap)
{
return (heap->size == 0);
}
unsigned int BLI_heap_size(Heap *heap)
uint BLI_heap_size(const Heap *heap)
{
return heap->size;
}
HeapNode *BLI_heap_top(Heap *heap)
/**
* Return the top node of the heap.
* This is the node with the lowest value.
*/
HeapNode *BLI_heap_top(const Heap *heap)
{
return heap->tree[0];
}
/**
* Pop the top node off the heap and return it's pointer.
*/
void *BLI_heap_popmin(Heap *heap)
{
void *ptr = heap->tree[0]->ptr;
BLI_assert(heap->size != 0);
void *ptr = heap->tree[0]->ptr;
heap_node_free(heap, heap->tree[0]);
if (--heap->size) {
@@ -308,13 +338,12 @@ void *BLI_heap_popmin(Heap *heap)
void BLI_heap_remove(Heap *heap, HeapNode *node)
{
unsigned int i = node->index;
BLI_assert(heap->size != 0);
while (i > 0) {
unsigned int p = HEAP_PARENT(i);
uint i = node->index;
while (i > 0) {
uint p = HEAP_PARENT(i);
heap_swap(heap, p, i);
i = p;
}
@@ -322,14 +351,71 @@ void BLI_heap_remove(Heap *heap, HeapNode *node)
BLI_heap_popmin(heap);
}
float BLI_heap_node_value(HeapNode *node)
/**
 * Update the sort value of \a node, rebalancing the heap.
 *
 * Can be used to avoid #BLI_heap_remove, #BLI_heap_insert calls,
 * balancing the tree still has a performance cost,
 * but is often much less than remove/insert, difference is most noticable with large heaps.
 */
void BLI_heap_node_value_update(Heap *heap, HeapNode *node, float value)
{
	/* Only one direction of the invariant can be violated,
	 * so a single sift (up or down) suffices. */
	if (value < node->value) {
		node->value = value;
		heap_up(heap, node->index);
	}
	else if (value > node->value) {
		node->value = value;
		heap_down(heap, node->index);
	}
}
/**
 * Version of #BLI_heap_node_value_update that also replaces the node's
 * payload pointer before rebalancing.
 *
 * Delegates instead of duplicating the sift-up/sift-down logic,
 * keeping the two update paths in sync.
 */
void BLI_heap_node_value_update_ptr(Heap *heap, HeapNode *node, float value, void *ptr)
{
	node->ptr = ptr; /* only difference from BLI_heap_node_value_update */
	BLI_heap_node_value_update(heap, node, value);
}
float BLI_heap_node_value(const HeapNode *node)
{
return node->value;
}
void *BLI_heap_node_ptr(HeapNode *node)
void *BLI_heap_node_ptr(const HeapNode *node)
{
return node->ptr;
}
/**
 * Recursively verify the min-heap property for the subtree rooted at \a root:
 * neither child may compare ahead of its parent.
 */
static bool heap_is_minheap(const Heap *heap, uint root)
{
	if (root >= heap->size) {
		return true;
	}
	const uint l = HEAP_LEFT(root);
	if (l < heap->size) {
		if (HEAP_COMPARE(heap->tree[l], heap->tree[root]) || !heap_is_minheap(heap, l)) {
			return false;
		}
	}
	const uint r = HEAP_RIGHT(root);
	if (r < heap->size) {
		if (HEAP_COMPARE(heap->tree[r], heap->tree[root]) || !heap_is_minheap(heap, r)) {
			return false;
		}
	}
	return true;
}
/**
 * Only for checking internal errors (gtest).
 *
 * \return true when the whole tree satisfies the min-heap property.
 */
bool BLI_heap_is_valid(const Heap *heap)
{
	return heap_is_minheap(heap, 0);
}
/** \} */

View File

@@ -1294,7 +1294,7 @@ static void bvhtree_overlap_task_cb(void *userdata, const int j)
}
BVHTreeOverlap *BLI_bvhtree_overlap(
const BVHTree *tree1, const BVHTree *tree2, unsigned int *r_overlap_tot,
const BVHTree *tree1, const BVHTree *tree2, uint *r_overlap_tot,
/* optional callback to test the overlap before adding (must be thread-safe!) */
BVHTree_OverlapCallback callback, void *userdata)
{
@@ -1351,13 +1351,13 @@ BVHTreeOverlap *BLI_bvhtree_overlap(
to = overlap = MEM_mallocN(sizeof(BVHTreeOverlap) * total, "BVHTreeOverlap");
for (j = 0; j < thread_num; j++) {
unsigned int count = (unsigned int)BLI_stack_count(data[j].overlap);
uint count = (uint)BLI_stack_count(data[j].overlap);
BLI_stack_pop_n(data[j].overlap, to, count);
BLI_stack_free(data[j].overlap);
to += count;
}
*r_overlap_tot = (unsigned int)total;
*r_overlap_tot = (uint)total;
return overlap;
}

View File

@@ -33,25 +33,25 @@
#include "BLI_strict_flags.h"
typedef struct KDTreeNode_head {
unsigned int left, right;
uint left, right;
float co[3];
int index;
} KDTreeNode_head;
typedef struct KDTreeNode {
unsigned int left, right;
uint left, right;
float co[3];
int index;
unsigned int d; /* range is only (0-2) */
uint d; /* range is only (0-2) */
} KDTreeNode;
struct KDTree {
KDTreeNode *nodes;
unsigned int totnode;
unsigned int root;
uint totnode;
uint root;
#ifdef DEBUG
bool is_balanced; /* ensure we call balance first */
unsigned int maxsize; /* max size of the tree */
uint maxsize; /* max size of the tree */
#endif
};
@@ -59,12 +59,12 @@ struct KDTree {
#define KD_NEAR_ALLOC_INC 100 /* alloc increment for collecting nearest */
#define KD_FOUND_ALLOC_INC 50 /* alloc increment for collecting nearest */
#define KD_NODE_UNSET ((unsigned int)-1)
#define KD_NODE_UNSET ((uint)-1)
/**
* Creates or free a kdtree
*/
KDTree *BLI_kdtree_new(unsigned int maxsize)
KDTree *BLI_kdtree_new(uint maxsize)
{
KDTree *tree;
@@ -113,11 +113,11 @@ void BLI_kdtree_insert(KDTree *tree, int index, const float co[3])
#endif
}
static unsigned int kdtree_balance(KDTreeNode *nodes, unsigned int totnode, unsigned int axis, const unsigned int ofs)
static uint kdtree_balance(KDTreeNode *nodes, uint totnode, uint axis, const uint ofs)
{
KDTreeNode *node;
float co;
unsigned int left, right, median, i, j;
uint left, right, median, i, j;
if (totnode <= 0)
return KD_NODE_UNSET;
@@ -188,11 +188,11 @@ static float squared_distance(const float v2[3], const float v1[3], const float
return dist;
}
static unsigned int *realloc_nodes(unsigned int *stack, unsigned int *totstack, const bool is_alloc)
static uint *realloc_nodes(uint *stack, uint *totstack, const bool is_alloc)
{
unsigned int *stack_new = MEM_mallocN((*totstack + KD_NEAR_ALLOC_INC) * sizeof(unsigned int), "KDTree.treestack");
memcpy(stack_new, stack, *totstack * sizeof(unsigned int));
// memset(stack_new + *totstack, 0, sizeof(unsigned int) * KD_NEAR_ALLOC_INC);
uint *stack_new = MEM_mallocN((*totstack + KD_NEAR_ALLOC_INC) * sizeof(uint), "KDTree.treestack");
memcpy(stack_new, stack, *totstack * sizeof(uint));
// memset(stack_new + *totstack, 0, sizeof(uint) * KD_NEAR_ALLOC_INC);
if (is_alloc)
MEM_freeN(stack);
*totstack += KD_NEAR_ALLOC_INC;
@@ -208,9 +208,9 @@ int BLI_kdtree_find_nearest(
{
const KDTreeNode *nodes = tree->nodes;
const KDTreeNode *root, *min_node;
unsigned int *stack, defaultstack[KD_STACK_INIT];
uint *stack, defaultstack[KD_STACK_INIT];
float min_dist, cur_dist;
unsigned int totstack, cur = 0;
uint totstack, cur = 0;
#ifdef DEBUG
BLI_assert(tree->is_balanced == true);
@@ -307,9 +307,9 @@ int BLI_kdtree_find_nearest_cb(
const KDTreeNode *nodes = tree->nodes;
const KDTreeNode *min_node = NULL;
unsigned int *stack, defaultstack[KD_STACK_INIT];
uint *stack, defaultstack[KD_STACK_INIT];
float min_dist = FLT_MAX, cur_dist;
unsigned int totstack, cur = 0;
uint totstack, cur = 0;
#ifdef DEBUG
BLI_assert(tree->is_balanced == true);
@@ -397,10 +397,10 @@ finally:
}
}
static void add_nearest(KDTreeNearest *ptn, unsigned int *found, unsigned int n, int index,
static void add_nearest(KDTreeNearest *ptn, uint *found, uint n, int index,
float dist, const float *co)
{
unsigned int i;
uint i;
if (*found < n) (*found)++;
@@ -425,14 +425,14 @@ static void add_nearest(KDTreeNearest *ptn, unsigned int *found, unsigned int n,
int BLI_kdtree_find_nearest_n__normal(
const KDTree *tree, const float co[3], const float nor[3],
KDTreeNearest r_nearest[],
unsigned int n)
uint n)
{
const KDTreeNode *nodes = tree->nodes;
const KDTreeNode *root;
unsigned int *stack, defaultstack[KD_STACK_INIT];
uint *stack, defaultstack[KD_STACK_INIT];
float cur_dist;
unsigned int totstack, cur = 0;
unsigned int i, found = 0;
uint totstack, cur = 0;
uint i, found = 0;
#ifdef DEBUG
BLI_assert(tree->is_balanced == true);
@@ -524,8 +524,8 @@ static int range_compare(const void *a, const void *b)
}
static void add_in_range(
KDTreeNearest **r_foundstack,
unsigned int *r_foundstack_tot_alloc,
unsigned int found,
uint *r_foundstack_tot_alloc,
uint found,
const int index, const float dist, const float *co)
{
KDTreeNearest *to;
@@ -554,10 +554,10 @@ int BLI_kdtree_range_search__normal(
KDTreeNearest **r_nearest, float range)
{
const KDTreeNode *nodes = tree->nodes;
unsigned int *stack, defaultstack[KD_STACK_INIT];
uint *stack, defaultstack[KD_STACK_INIT];
KDTreeNearest *foundstack = NULL;
float range_sq = range * range, dist_sq;
unsigned int totstack, cur = 0, found = 0, totfoundstack = 0;
uint totstack, cur = 0, found = 0, totfoundstack = 0;
#ifdef DEBUG
BLI_assert(tree->is_balanced == true);
@@ -624,9 +624,9 @@ void BLI_kdtree_range_search_cb(
{
const KDTreeNode *nodes = tree->nodes;
unsigned int *stack, defaultstack[KD_STACK_INIT];
uint *stack, defaultstack[KD_STACK_INIT];
float range_sq = range * range, dist_sq;
unsigned int totstack, cur = 0;
uint totstack, cur = 0;
#ifdef DEBUG
BLI_assert(tree->is_balanced == true);

View File

@@ -127,17 +127,17 @@ struct BLI_mempool {
* this is needed for iteration so we can loop over chunks in the order added */
BLI_mempool_chunk *chunk_tail;
unsigned int esize; /* element size in bytes */
unsigned int csize; /* chunk size in bytes */
unsigned int pchunk; /* number of elements per chunk */
unsigned int flag;
uint esize; /* element size in bytes */
uint csize; /* chunk size in bytes */
uint pchunk; /* number of elements per chunk */
uint flag;
/* keeps aligned to 16 bits */
BLI_freenode *free; /* free element list. Interleaved into chunk datas. */
unsigned int maxchunks; /* use to know how many chunks to keep for BLI_mempool_clear */
unsigned int totused; /* number of elements currently in use */
uint maxchunks; /* use to know how many chunks to keep for BLI_mempool_clear */
uint totused; /* number of elements currently in use */
#ifdef USE_TOTALLOC
unsigned int totalloc; /* number of elements allocated in total */
uint totalloc; /* number of elements allocated in total */
#endif
};
@@ -154,13 +154,13 @@ struct BLI_mempool {
/* extra bytes implicitly used for every chunk alloc */
#ifdef USE_DATA_PTR
# define CHUNK_OVERHEAD (unsigned int)(MEM_SIZE_OVERHEAD + sizeof(BLI_mempool_chunk))
# define CHUNK_OVERHEAD (uint)(MEM_SIZE_OVERHEAD + sizeof(BLI_mempool_chunk))
#else
# define CHUNK_OVERHEAD (unsigned int)(MEM_SIZE_OVERHEAD)
# define CHUNK_OVERHEAD (uint)(MEM_SIZE_OVERHEAD)
#endif
#ifdef USE_CHUNK_POW2
static unsigned int power_of_2_max_u(unsigned int x)
static uint power_of_2_max_u(uint x)
{
x -= 1;
x = x | (x >> 1);
@@ -172,7 +172,7 @@ static unsigned int power_of_2_max_u(unsigned int x)
}
#endif
BLI_INLINE BLI_mempool_chunk *mempool_chunk_find(BLI_mempool_chunk *head, unsigned int index)
BLI_INLINE BLI_mempool_chunk *mempool_chunk_find(BLI_mempool_chunk *head, uint index)
{
while (index-- && head) {
head = head->next;
@@ -187,7 +187,7 @@ BLI_INLINE BLI_mempool_chunk *mempool_chunk_find(BLI_mempool_chunk *head, unsign
* adding overhead on creation which is redundant if they aren't used.
*
*/
BLI_INLINE unsigned int mempool_maxchunks(const unsigned int totelem, const unsigned int pchunk)
BLI_INLINE uint mempool_maxchunks(const uint totelem, const uint pchunk)
{
return (totelem <= pchunk) ? 1 : ((totelem / pchunk) + 1);
}
@@ -217,9 +217,9 @@ static BLI_mempool_chunk *mempool_chunk_alloc(BLI_mempool *pool)
static BLI_freenode *mempool_chunk_add(BLI_mempool *pool, BLI_mempool_chunk *mpchunk,
BLI_freenode *lasttail)
{
const unsigned int esize = pool->esize;
const uint esize = pool->esize;
BLI_freenode *curnode = CHUNK_DATA(mpchunk);
unsigned int j;
uint j;
/* append */
if (pool->chunk_tail) {
@@ -289,12 +289,12 @@ static void mempool_chunk_free_all(BLI_mempool_chunk *mpchunk)
}
}
BLI_mempool *BLI_mempool_create(unsigned int esize, unsigned int totelem,
unsigned int pchunk, unsigned int flag)
BLI_mempool *BLI_mempool_create(uint esize, uint totelem,
uint pchunk, uint flag)
{
BLI_mempool *pool;
BLI_freenode *lasttail = NULL;
unsigned int i, maxchunks;
uint i, maxchunks;
/* allocate the pool structure */
pool = MEM_mallocN(sizeof(BLI_mempool), "memory pool");
@@ -305,7 +305,7 @@ BLI_mempool *BLI_mempool_create(unsigned int esize, unsigned int totelem,
}
if (flag & BLI_MEMPOOL_ALLOW_ITER) {
esize = MAX2(esize, (unsigned int)sizeof(BLI_freenode));
esize = MAX2(esize, (uint)sizeof(BLI_freenode));
}
maxchunks = mempool_maxchunks(totelem, pchunk);
@@ -436,9 +436,9 @@ void BLI_mempool_free(BLI_mempool *pool, void *addr)
if (UNLIKELY(pool->totused == 0) &&
(pool->chunks->next))
{
const unsigned int esize = pool->esize;
const uint esize = pool->esize;
BLI_freenode *curnode;
unsigned int j;
uint j;
BLI_mempool_chunk *first;
first = pool->chunks;
@@ -477,7 +477,7 @@ int BLI_mempool_count(BLI_mempool *pool)
return (int)pool->totused;
}
void *BLI_mempool_findelem(BLI_mempool *pool, unsigned int index)
void *BLI_mempool_findelem(BLI_mempool *pool, uint index)
{
BLI_assert(pool->flag & BLI_MEMPOOL_ALLOW_ITER);
@@ -512,7 +512,7 @@ void BLI_mempool_as_table(BLI_mempool *pool, void **data)
while ((elem = BLI_mempool_iterstep(&iter))) {
*p++ = elem;
}
BLI_assert((unsigned int)(p - data) == pool->totused);
BLI_assert((uint)(p - data) == pool->totused);
}
/**
@@ -530,7 +530,7 @@ void **BLI_mempool_as_tableN(BLI_mempool *pool, const char *allocstr)
*/
void BLI_mempool_as_array(BLI_mempool *pool, void *data)
{
const unsigned int esize = pool->esize;
const uint esize = pool->esize;
BLI_mempool_iter iter;
char *elem, *p = data;
BLI_assert(pool->flag & BLI_MEMPOOL_ALLOW_ITER);
@@ -539,7 +539,7 @@ void BLI_mempool_as_array(BLI_mempool *pool, void *data)
memcpy(p, elem, (size_t)esize);
p = NODE_STEP_NEXT(p);
}
BLI_assert((unsigned int)(p - (char *)data) == pool->totused * esize);
BLI_assert((uint)(p - (char *)data) == pool->totused * esize);
}
/**
@@ -609,7 +609,7 @@ void *BLI_mempool_iterstep(BLI_mempool_iter *iter)
return NULL;
}
const unsigned int esize = iter->pool->esize;
const uint esize = iter->pool->esize;
BLI_freenode *curnode = POINTER_OFFSET(CHUNK_DATA(iter->curchunk), (esize * iter->curindex));
BLI_freenode *ret;
do {
@@ -643,7 +643,7 @@ void BLI_mempool_clear_ex(BLI_mempool *pool, const int totelem_reserve)
{
BLI_mempool_chunk *mpchunk;
BLI_mempool_chunk *mpchunk_next;
unsigned int maxchunks;
uint maxchunks;
BLI_mempool_chunk *chunks_temp;
BLI_freenode *lasttail = NULL;
@@ -657,7 +657,7 @@ void BLI_mempool_clear_ex(BLI_mempool *pool, const int totelem_reserve)
maxchunks = pool->maxchunks;
}
else {
maxchunks = mempool_maxchunks((unsigned int)totelem_reserve, pool->pchunk);
maxchunks = mempool_maxchunks((uint)totelem_reserve, pool->pchunk);
}
/* free all after pool->maxchunks */

View File

@@ -763,7 +763,7 @@ static void bchunk_list_fill_from_array(
BLI_INLINE uint hash_data_single(const uchar p)
{
return (HASH_INIT << 5) + HASH_INIT + (unsigned int)p;
return ((HASH_INIT << 5) + HASH_INIT) + (unsigned int)(*((signed char *)&p));
}
/* hash bytes, from BLI_ghashutil_strhash_n */
@@ -773,7 +773,7 @@ static uint hash_data(const uchar *key, size_t n)
unsigned int h = HASH_INIT;
for (p = (const signed char *)key; n--; p++) {
h = (h << 5) + h + (unsigned int)*p;
h = ((h << 5) + h) + (unsigned int)*p;
}
return h;

View File

@@ -56,10 +56,10 @@ typedef struct BoxVert {
float x;
float y;
int free : 8; /* vert status */
unsigned int used : 1;
unsigned int _pad : 23;
unsigned int index;
int free : 8; /* vert status */
uint used : 1;
uint _pad : 23;
uint index;
struct BoxPack *trb; /* top right box */
struct BoxPack *blb; /* bottom left box */
@@ -92,7 +92,7 @@ typedef struct BoxVert {
#define BRF 8
#define CORNERFLAGS (BLF | TRF | TLF | BRF)
BLI_INLINE int quad_flag(unsigned int q)
BLI_INLINE int quad_flag(uint q)
{
BLI_assert(q < 4);
return (1 << q);
@@ -241,8 +241,8 @@ static int vertex_sort(const void *p1, const void *p2, void *vs_ctx_p)
const BoxVert *v1, *v2;
float a1, a2;
v1 = &vs_ctx->vertarray[*((const unsigned int *)p1)];
v2 = &vs_ctx->vertarray[*((const unsigned int *)p2)];
v1 = &vs_ctx->vertarray[*((const uint *)p1)];
v2 = &vs_ctx->vertarray[*((const uint *)p2)];
#ifdef USE_FREE_STRIP
/* push free verts to the end so we can strip */
@@ -280,10 +280,10 @@ static int vertex_sort(const void *p1, const void *p2, void *vs_ctx_p)
* \param len: the number of boxes in the array.
* \param r_tot_x, r_tot_y: set so you can normalize the data.
* */
void BLI_box_pack_2d(BoxPack *boxarray, const unsigned int len, float *r_tot_x, float *r_tot_y)
void BLI_box_pack_2d(BoxPack *boxarray, const uint len, float *r_tot_x, float *r_tot_y)
{
unsigned int box_index, verts_pack_len, i, j, k;
unsigned int *vertex_pack_indices; /* an array of indices used for sorting verts */
uint box_index, verts_pack_len, i, j, k;
uint *vertex_pack_indices; /* an array of indices used for sorting verts */
bool isect;
float tot_x = 0.0f, tot_y = 0.0f;

View File

@@ -42,7 +42,7 @@
#include "BLI_strict_flags.h"
/**************inlined code************/
static const unsigned int _ehash_hashsizes[] = {
static const uint _ehash_hashsizes[] = {
1, 3, 5, 11, 17, 37, 67, 131, 257, 521, 1031, 2053, 4099, 8209,
16411, 32771, 65537, 131101, 262147, 524309, 1048583, 2097169,
4194319, 8388617, 16777259, 33554467, 67108879, 134217757,
@@ -62,22 +62,22 @@ static const unsigned int _ehash_hashsizes[] = {
/* ensure v0 is smaller */
#define EDGE_ORD(v0, v1) \
if (v0 > v1) { \
SWAP(unsigned int, v0, v1); \
SWAP(uint, v0, v1); \
} (void)0
/***/
typedef struct EdgeEntry {
struct EdgeEntry *next;
unsigned int v0, v1;
uint v0, v1;
void *val;
} EdgeEntry;
struct EdgeHash {
EdgeEntry **buckets;
BLI_mempool *epool;
unsigned int nbuckets, nentries;
unsigned int cursize, flag;
uint nbuckets, nentries;
uint cursize, flag;
};
@@ -90,7 +90,7 @@ struct EdgeHash {
/**
* Compute the hash and get the bucket-index.
*/
BLI_INLINE unsigned int edgehash_bucket_index(EdgeHash *eh, unsigned int v0, unsigned int v1)
BLI_INLINE uint edgehash_bucket_index(EdgeHash *eh, uint v0, uint v1)
{
BLI_assert(v0 < v1);
@@ -100,7 +100,7 @@ BLI_INLINE unsigned int edgehash_bucket_index(EdgeHash *eh, unsigned int v0, uns
/**
* Check if the number of items in the GHash is large enough to require more buckets.
*/
BLI_INLINE bool edgehash_test_expand_buckets(const unsigned int nentries, const unsigned int nbuckets)
BLI_INLINE bool edgehash_test_expand_buckets(const uint nentries, const uint nbuckets)
{
return (nentries > nbuckets * 3);
}
@@ -108,12 +108,12 @@ BLI_INLINE bool edgehash_test_expand_buckets(const unsigned int nentries, const
/**
* Expand buckets to the next size up.
*/
BLI_INLINE void edgehash_resize_buckets(EdgeHash *eh, const unsigned int nbuckets)
BLI_INLINE void edgehash_resize_buckets(EdgeHash *eh, const uint nbuckets)
{
EdgeEntry **buckets_old = eh->buckets;
EdgeEntry **buckets_new;
const unsigned int nbuckets_old = eh->nbuckets;
unsigned int i;
const uint nbuckets_old = eh->nbuckets;
uint i;
BLI_assert(eh->nbuckets != nbuckets);
@@ -136,7 +136,7 @@ BLI_INLINE void edgehash_resize_buckets(EdgeHash *eh, const unsigned int nbucket
/**
* Increase initial bucket size to match a reserved amount.
*/
BLI_INLINE void edgehash_buckets_reserve(EdgeHash *eh, const unsigned int nentries_reserve)
BLI_INLINE void edgehash_buckets_reserve(EdgeHash *eh, const uint nentries_reserve)
{
while (edgehash_test_expand_buckets(nentries_reserve, eh->nbuckets)) {
eh->nbuckets = _ehash_hashsizes[++eh->cursize];
@@ -148,8 +148,8 @@ BLI_INLINE void edgehash_buckets_reserve(EdgeHash *eh, const unsigned int nentri
* Takes a \a bucket_index argument to avoid calling #edgehash_bucket_index multiple times.
*/
BLI_INLINE EdgeEntry *edgehash_lookup_entry_ex(
EdgeHash *eh, unsigned int v0, unsigned int v1,
const unsigned int bucket_index)
EdgeHash *eh, uint v0, uint v1,
const uint bucket_index)
{
EdgeEntry *e;
BLI_assert(v0 < v1);
@@ -167,8 +167,8 @@ BLI_INLINE EdgeEntry *edgehash_lookup_entry_ex(
* Useful when modifying buckets somehow (like removing an entry...).
*/
BLI_INLINE EdgeEntry *edgehash_lookup_entry_prev_ex(
EdgeHash *eh, unsigned int v0, unsigned int v1,
EdgeEntry **r_e_prev, const unsigned int bucket_index)
EdgeHash *eh, uint v0, uint v1,
EdgeEntry **r_e_prev, const uint bucket_index)
{
BLI_assert(v0 < v1);
for (EdgeEntry *e_prev = NULL, *e = eh->buckets[bucket_index]; e; e_prev = e, e = e->next) {
@@ -185,17 +185,17 @@ BLI_INLINE EdgeEntry *edgehash_lookup_entry_prev_ex(
/**
* Internal lookup function. Only wraps #edgehash_lookup_entry_ex
*/
BLI_INLINE EdgeEntry *edgehash_lookup_entry(EdgeHash *eh, unsigned int v0, unsigned int v1)
BLI_INLINE EdgeEntry *edgehash_lookup_entry(EdgeHash *eh, uint v0, uint v1)
{
EDGE_ORD(v0, v1);
const unsigned int bucket_index = edgehash_bucket_index(eh, v0, v1);
const uint bucket_index = edgehash_bucket_index(eh, v0, v1);
return edgehash_lookup_entry_ex(eh, v0, v1, bucket_index);
}
static EdgeHash *edgehash_new(const char *info,
const unsigned int nentries_reserve,
const unsigned int entry_size)
const uint nentries_reserve,
const uint entry_size)
{
EdgeHash *eh = MEM_mallocN(sizeof(*eh), info);
@@ -220,8 +220,8 @@ static EdgeHash *edgehash_new(const char *info,
* Takes a \a bucket_index argument to avoid calling #edgehash_bucket_index multiple times.
*/
BLI_INLINE void edgehash_insert_ex(
EdgeHash *eh, unsigned int v0, unsigned int v1, void *val,
const unsigned int bucket_index)
EdgeHash *eh, uint v0, uint v1, void *val,
const uint bucket_index)
{
EdgeEntry *e = BLI_mempool_alloc(eh->epool);
@@ -247,8 +247,8 @@ BLI_INLINE void edgehash_insert_ex(
* Insert function that doesn't set the value (use for EdgeSet)
*/
BLI_INLINE void edgehash_insert_ex_keyonly(
EdgeHash *eh, unsigned int v0, unsigned int v1,
const unsigned int bucket_index)
EdgeHash *eh, uint v0, uint v1,
const uint bucket_index)
{
EdgeEntry *e = BLI_mempool_alloc(eh->epool);
@@ -272,8 +272,8 @@ BLI_INLINE void edgehash_insert_ex_keyonly(
* Insert function that doesn't set the value (use for EdgeSet)
*/
BLI_INLINE void edgehash_insert_ex_keyonly_entry(
EdgeHash *eh, unsigned int v0, unsigned int v1,
const unsigned int bucket_index,
EdgeHash *eh, uint v0, uint v1,
const uint bucket_index,
EdgeEntry *e)
{
BLI_assert((eh->flag & EDGEHASH_FLAG_ALLOW_DUPES) || (BLI_edgehash_haskey(eh, v0, v1) == 0));
@@ -293,10 +293,10 @@ BLI_INLINE void edgehash_insert_ex_keyonly_entry(
}
}
BLI_INLINE void edgehash_insert(EdgeHash *eh, unsigned int v0, unsigned int v1, void *val)
BLI_INLINE void edgehash_insert(EdgeHash *eh, uint v0, uint v1, void *val)
{
EDGE_ORD(v0, v1);
const unsigned int bucket_index = edgehash_bucket_index(eh, v0, v1);
const uint bucket_index = edgehash_bucket_index(eh, v0, v1);
edgehash_insert_ex(eh, v0, v1, val, bucket_index);
}
@@ -304,9 +304,9 @@ BLI_INLINE void edgehash_insert(EdgeHash *eh, unsigned int v0, unsigned int v1,
* Remove the entry and return it, caller must free from eh->epool.
*/
BLI_INLINE EdgeEntry *edgehash_remove_ex(
EdgeHash *eh, unsigned int v0, unsigned int v1,
EdgeHash *eh, uint v0, uint v1,
EdgeHashFreeFP valfreefp,
const unsigned int bucket_index)
const uint bucket_index)
{
EdgeEntry *e_prev;
EdgeEntry *e = edgehash_lookup_entry_prev_ex(eh, v0, v1, &e_prev, bucket_index);
@@ -339,7 +339,7 @@ BLI_INLINE EdgeEntry *edgehash_remove_ex(
*/
static void edgehash_free_cb(EdgeHash *eh, EdgeHashFreeFP valfreefp)
{
unsigned int i;
uint i;
BLI_assert(valfreefp);
@@ -365,7 +365,7 @@ static void edgehash_free_cb(EdgeHash *eh, EdgeHashFreeFP valfreefp)
/* Public API */
EdgeHash *BLI_edgehash_new_ex(const char *info,
const unsigned int nentries_reserve)
const uint nentries_reserve)
{
return edgehash_new(info,
nentries_reserve,
@@ -381,7 +381,7 @@ EdgeHash *BLI_edgehash_new(const char *info)
* Insert edge (\a v0, \a v1) into hash with given value, does
* not check for duplicates.
*/
void BLI_edgehash_insert(EdgeHash *eh, unsigned int v0, unsigned int v1, void *val)
void BLI_edgehash_insert(EdgeHash *eh, uint v0, uint v1, void *val)
{
edgehash_insert(eh, v0, v1, val);
}
@@ -389,12 +389,12 @@ void BLI_edgehash_insert(EdgeHash *eh, unsigned int v0, unsigned int v1, void *v
/**
* Assign a new value to a key that may already be in edgehash.
*/
bool BLI_edgehash_reinsert(EdgeHash *eh, unsigned int v0, unsigned int v1, void *val)
bool BLI_edgehash_reinsert(EdgeHash *eh, uint v0, uint v1, void *val)
{
IS_EDGEHASH_ASSERT(eh);
EDGE_ORD(v0, v1);
const unsigned int bucket_index = edgehash_bucket_index(eh, v0, v1);
const uint bucket_index = edgehash_bucket_index(eh, v0, v1);
EdgeEntry *e = edgehash_lookup_entry_ex(eh, v0, v1, bucket_index);
if (e) {
@@ -411,7 +411,7 @@ bool BLI_edgehash_reinsert(EdgeHash *eh, unsigned int v0, unsigned int v1, void
* Return pointer to value for given edge (\a v0, \a v1),
* or NULL if key does not exist in hash.
*/
void **BLI_edgehash_lookup_p(EdgeHash *eh, unsigned int v0, unsigned int v1)
void **BLI_edgehash_lookup_p(EdgeHash *eh, uint v0, uint v1)
{
EdgeEntry *e = edgehash_lookup_entry(eh, v0, v1);
IS_EDGEHASH_ASSERT(eh);
@@ -432,10 +432,10 @@ void **BLI_edgehash_lookup_p(EdgeHash *eh, unsigned int v0, unsigned int v1)
* \returns true when the value didn't need to be added.
* (when false, the caller _must_ initialize the value).
*/
bool BLI_edgehash_ensure_p(EdgeHash *eh, unsigned int v0, unsigned int v1, void ***r_val)
bool BLI_edgehash_ensure_p(EdgeHash *eh, uint v0, uint v1, void ***r_val)
{
EDGE_ORD(v0, v1);
const unsigned int bucket_index = edgehash_bucket_index(eh, v0, v1);
const uint bucket_index = edgehash_bucket_index(eh, v0, v1);
EdgeEntry *e = edgehash_lookup_entry_ex(eh, v0, v1, bucket_index);
const bool haskey = (e != NULL);
@@ -454,7 +454,7 @@ bool BLI_edgehash_ensure_p(EdgeHash *eh, unsigned int v0, unsigned int v1, void
* to differentiate between key-value being NULL and
* lack of key then see BLI_edgehash_lookup_p().
*/
void *BLI_edgehash_lookup(EdgeHash *eh, unsigned int v0, unsigned int v1)
void *BLI_edgehash_lookup(EdgeHash *eh, uint v0, uint v1)
{
EdgeEntry *e = edgehash_lookup_entry(eh, v0, v1);
IS_EDGEHASH_ASSERT(eh);
@@ -464,7 +464,7 @@ void *BLI_edgehash_lookup(EdgeHash *eh, unsigned int v0, unsigned int v1)
/**
* A version of #BLI_edgehash_lookup which accepts a fallback argument.
*/
void *BLI_edgehash_lookup_default(EdgeHash *eh, unsigned int v0, unsigned int v1, void *val_default)
void *BLI_edgehash_lookup_default(EdgeHash *eh, uint v0, uint v1, void *val_default)
{
EdgeEntry *e = edgehash_lookup_entry(eh, v0, v1);
IS_EDGEHASH_ASSERT(eh);
@@ -478,10 +478,10 @@ void *BLI_edgehash_lookup_default(EdgeHash *eh, unsigned int v0, unsigned int v1
* \param valfreefp Optional callback to free the value.
* \return true if \a key was removed from \a eh.
*/
bool BLI_edgehash_remove(EdgeHash *eh, unsigned int v0, unsigned int v1, EdgeHashFreeFP valfreefp)
bool BLI_edgehash_remove(EdgeHash *eh, uint v0, uint v1, EdgeHashFreeFP valfreefp)
{
EDGE_ORD(v0, v1);
const unsigned int bucket_index = edgehash_bucket_index(eh, v0, v1);
const uint bucket_index = edgehash_bucket_index(eh, v0, v1);
EdgeEntry *e = edgehash_remove_ex(eh, v0, v1, valfreefp, bucket_index);
if (e) {
BLI_mempool_free(eh->epool, e);
@@ -500,10 +500,10 @@ bool BLI_edgehash_remove(EdgeHash *eh, unsigned int v0, unsigned int v1, EdgeHas
* \param v0, v1: The key to remove.
* \return the value of \a key int \a eh or NULL.
*/
void *BLI_edgehash_popkey(EdgeHash *eh, unsigned int v0, unsigned int v1)
void *BLI_edgehash_popkey(EdgeHash *eh, uint v0, uint v1)
{
EDGE_ORD(v0, v1);
const unsigned int bucket_index = edgehash_bucket_index(eh, v0, v1);
const uint bucket_index = edgehash_bucket_index(eh, v0, v1);
EdgeEntry *e = edgehash_remove_ex(eh, v0, v1, NULL, bucket_index);
IS_EDGEHASH_ASSERT(eh);
if (e) {
@@ -519,7 +519,7 @@ void *BLI_edgehash_popkey(EdgeHash *eh, unsigned int v0, unsigned int v1)
/**
* Return boolean true/false if edge (v0,v1) in hash.
*/
bool BLI_edgehash_haskey(EdgeHash *eh, unsigned int v0, unsigned int v1)
bool BLI_edgehash_haskey(EdgeHash *eh, uint v0, uint v1)
{
return (edgehash_lookup_entry(eh, v0, v1) != NULL);
}
@@ -536,7 +536,7 @@ int BLI_edgehash_size(EdgeHash *eh)
* Remove all edges from hash.
*/
void BLI_edgehash_clear_ex(EdgeHash *eh, EdgeHashFreeFP valfreefp,
const unsigned int nentries_reserve)
const uint nentries_reserve)
{
if (valfreefp)
edgehash_free_cb(eh, valfreefp);
@@ -577,12 +577,12 @@ void BLI_edgehash_free(EdgeHash *eh, EdgeHashFreeFP valfreefp)
}
void BLI_edgehash_flag_set(EdgeHash *eh, unsigned int flag)
void BLI_edgehash_flag_set(EdgeHash *eh, uint flag)
{
eh->flag |= flag;
}
void BLI_edgehash_flag_clear(EdgeHash *eh, unsigned int flag)
void BLI_edgehash_flag_clear(EdgeHash *eh, uint flag)
{
eh->flag &= ~flag;
}
@@ -664,7 +664,7 @@ void BLI_edgehashIterator_free(EdgeHashIterator *ehi)
/**
* Retrieve the key from an iterator.
*/
void BLI_edgehashIterator_getKey(EdgeHashIterator *ehi, unsigned int *r_v0, unsigned int *r_v1)
void BLI_edgehashIterator_getKey(EdgeHashIterator *ehi, uint *r_v0, uint *r_v1)
{
*r_v0 = ehi->curEntry->v0;
*r_v1 = ehi->curEntry->v1;
@@ -713,7 +713,7 @@ bool BLI_edgehashIterator_isDone(EdgeHashIterator *ehi)
/** \name EdgeSet Functions
* \{ */
EdgeSet *BLI_edgeset_new_ex(const char *info,
const unsigned int nentries_reserve)
const uint nentries_reserve)
{
EdgeSet *es = (EdgeSet *)edgehash_new(info,
nentries_reserve,
@@ -738,10 +738,10 @@ int BLI_edgeset_size(EdgeSet *es)
* Adds the key to the set (no checks for unique keys!).
* Matching #BLI_edgehash_insert
*/
void BLI_edgeset_insert(EdgeSet *es, unsigned int v0, unsigned int v1)
void BLI_edgeset_insert(EdgeSet *es, uint v0, uint v1)
{
EDGE_ORD(v0, v1);
const unsigned int bucket_index = edgehash_bucket_index((EdgeHash *)es, v0, v1);
const uint bucket_index = edgehash_bucket_index((EdgeHash *)es, v0, v1);
edgehash_insert_ex_keyonly((EdgeHash *)es, v0, v1, bucket_index);
}
@@ -751,10 +751,10 @@ void BLI_edgeset_insert(EdgeSet *es, unsigned int v0, unsigned int v1)
*
* \note EdgeHash has no equivalent to this because typically the value would be different.
*/
bool BLI_edgeset_add(EdgeSet *es, unsigned int v0, unsigned int v1)
bool BLI_edgeset_add(EdgeSet *es, uint v0, uint v1)
{
EDGE_ORD(v0, v1);
const unsigned int bucket_index = edgehash_bucket_index((EdgeHash *)es, v0, v1);
const uint bucket_index = edgehash_bucket_index((EdgeHash *)es, v0, v1);
EdgeEntry *e = edgehash_lookup_entry_ex((EdgeHash *)es, v0, v1, bucket_index);
if (e) {
@@ -766,7 +766,7 @@ bool BLI_edgeset_add(EdgeSet *es, unsigned int v0, unsigned int v1)
}
}
bool BLI_edgeset_haskey(EdgeSet *es, unsigned int v0, unsigned int v1)
bool BLI_edgeset_haskey(EdgeSet *es, uint v0, uint v1)
{
return (edgehash_lookup_entry((EdgeHash *)es, v0, v1) != NULL);
}
@@ -777,12 +777,12 @@ void BLI_edgeset_free(EdgeSet *es)
BLI_edgehash_free((EdgeHash *)es, NULL);
}
void BLI_edgeset_flag_set(EdgeSet *es, unsigned int flag)
void BLI_edgeset_flag_set(EdgeSet *es, uint flag)
{
((EdgeHash *)es)->flag |= flag;
}
void BLI_edgeset_flag_clear(EdgeSet *es, unsigned int flag)
void BLI_edgeset_flag_clear(EdgeSet *es, uint flag)
{
((EdgeHash *)es)->flag &= ~flag;
}
@@ -800,7 +800,7 @@ void BLI_edgeset_flag_clear(EdgeSet *es, unsigned int flag)
double BLI_edgehash_calc_quality(EdgeHash *eh)
{
uint64_t sum = 0;
unsigned int i;
uint i;
if (eh->nentries == 0)
return -1.0;

View File

@@ -317,7 +317,7 @@ static bool delete_recursive(const char *dir)
{
struct direntry *filelist, *fl;
bool err = false;
unsigned int nbr, i;
uint nbr, i;
i = nbr = BLI_filelist_dir_contents(dir, &filelist);
fl = filelist;

View File

@@ -85,13 +85,13 @@ static bool BLI_path_is_abs(const char *name);
*/
int BLI_stringdec(const char *string, char *head, char *tail, unsigned short *numlen)
{
unsigned int nums = 0, nume = 0;
uint nums = 0, nume = 0;
int i;
bool found_digit = false;
const char * const lslash = BLI_last_slash(string);
const unsigned int string_len = strlen(string);
const unsigned int lslash_len = lslash != NULL ? (int)(lslash - string) : 0;
unsigned int name_end = string_len;
const uint string_len = strlen(string);
const uint lslash_len = lslash != NULL ? (int)(lslash - string) : 0;
uint name_end = string_len;
while (name_end > lslash_len && string[--name_end] != '.') {} /* name ends at dot if present */
if (name_end == lslash_len && string[name_end] != '.') name_end = string_len;
@@ -700,7 +700,7 @@ bool BLI_parent_dir(char *path)
*/
static bool stringframe_chars(const char *path, int *char_start, int *char_end)
{
unsigned int ch_sta, ch_end, i;
uint ch_sta, ch_end, i;
/* Insert current frame: file### -> file001 */
ch_sta = ch_end = 0;
for (i = 0; path[i] != '\0'; i++) {

View File

@@ -219,23 +219,18 @@ static void polyedge_beauty_cost_update_single(
Heap *eheap, HeapNode **eheap_table)
{
const uint i = e->base_index;
if (eheap_table[i]) {
BLI_heap_remove(eheap, eheap_table[i]);
eheap_table[i] = NULL;
/* recalculate edge */
const float cost = polyedge_rotate_beauty_calc(coords, edges, e);
/* We can get cases where both choices generate very small negative costs, which leads to infinite loop.
* Anyway, costs above that are not worth recomputing, maybe we could even optimize it to a smaller limit?
* Actually, FLT_EPSILON is too small in some cases, 1e-6f seems to work OK hopefully?
* See T43578, T49478. */
if (cost < -1e-6f) {
BLI_heap_insert_or_update(eheap, &eheap_table[i], cost, e);
}
{
/* recalculate edge */
const float cost = polyedge_rotate_beauty_calc(coords, edges, e);
/* We can get cases where both choices generate very small negative costs, which leads to infinite loop.
* Anyway, costs above that are not worth recomputing, maybe we could even optimize it to a smaller limit?
* Actually, FLT_EPSILON is too small in some cases, 1e-6f seems to work OK hopefully?
* See T43578, T49478. */
if (cost < -1e-6f) {
eheap_table[i] = BLI_heap_insert(eheap, cost, e);
}
else {
else {
if (eheap_table[i]) {
BLI_heap_remove(eheap, eheap_table[i]);
eheap_table[i] = NULL;
}
}

View File

@@ -69,8 +69,8 @@
/* typically this re-assigns 'h' */
#define SMHASH_NEXT(h, hoff) ( \
CHECK_TYPE_INLINE(&(h), unsigned int *), \
CHECK_TYPE_INLINE(&(hoff), unsigned int *), \
CHECK_TYPE_INLINE(&(h), uint *), \
CHECK_TYPE_INLINE(&(hoff), uint *), \
((h) + (((hoff) = ((hoff) * 2) + 1), (hoff))) \
)
@@ -87,17 +87,17 @@ BLI_INLINE bool smallhash_val_is_used(const void *val)
#endif
}
extern const unsigned int hashsizes[];
extern const uint hashsizes[];
BLI_INLINE unsigned int smallhash_key(const uintptr_t key)
BLI_INLINE uint smallhash_key(const uintptr_t key)
{
return (unsigned int)key;
return (uint)key;
}
/**
* Check if the number of items in the smallhash is large enough to require more buckets.
*/
BLI_INLINE bool smallhash_test_expand_buckets(const unsigned int nentries, const unsigned int nbuckets)
BLI_INLINE bool smallhash_test_expand_buckets(const uint nentries, const uint nbuckets)
{
/* (approx * 1.5) */
return (nentries + (nentries >> 1)) > nbuckets;
@@ -105,7 +105,7 @@ BLI_INLINE bool smallhash_test_expand_buckets(const unsigned int nentries, const
BLI_INLINE void smallhash_init_empty(SmallHash *sh)
{
unsigned int i;
uint i;
for (i = 0; i < sh->nbuckets; i++) {
sh->buckets[i].key = SMHASH_KEY_UNUSED;
@@ -116,7 +116,7 @@ BLI_INLINE void smallhash_init_empty(SmallHash *sh)
/**
* Increase initial bucket size to match a reserved amount.
*/
BLI_INLINE void smallhash_buckets_reserve(SmallHash *sh, const unsigned int nentries_reserve)
BLI_INLINE void smallhash_buckets_reserve(SmallHash *sh, const uint nentries_reserve)
{
while (smallhash_test_expand_buckets(nentries_reserve, sh->nbuckets)) {
sh->nbuckets = hashsizes[++sh->cursize];
@@ -126,8 +126,8 @@ BLI_INLINE void smallhash_buckets_reserve(SmallHash *sh, const unsigned int nent
BLI_INLINE SmallHashEntry *smallhash_lookup(const SmallHash *sh, const uintptr_t key)
{
SmallHashEntry *e;
unsigned int h = smallhash_key(key);
unsigned int hoff = 1;
uint h = smallhash_key(key);
uint hoff = 1;
BLI_assert(key != SMHASH_KEY_UNUSED);
@@ -150,8 +150,8 @@ BLI_INLINE SmallHashEntry *smallhash_lookup(const SmallHash *sh, const uintptr_t
BLI_INLINE SmallHashEntry *smallhash_lookup_first_free(SmallHash *sh, const uintptr_t key)
{
SmallHashEntry *e;
unsigned int h = smallhash_key(key);
unsigned int hoff = 1;
uint h = smallhash_key(key);
uint hoff = 1;
for (e = &sh->buckets[h % sh->nbuckets];
smallhash_val_is_used(e->val);
@@ -163,12 +163,12 @@ BLI_INLINE SmallHashEntry *smallhash_lookup_first_free(SmallHash *sh, const uint
return e;
}
BLI_INLINE void smallhash_resize_buckets(SmallHash *sh, const unsigned int nbuckets)
BLI_INLINE void smallhash_resize_buckets(SmallHash *sh, const uint nbuckets)
{
SmallHashEntry *buckets_old = sh->buckets;
const unsigned int nbuckets_old = sh->nbuckets;
const uint nbuckets_old = sh->nbuckets;
const bool was_alloc = (buckets_old != sh->buckets_stack);
unsigned int i = 0;
uint i = 0;
BLI_assert(sh->nbuckets != nbuckets);
if (nbuckets <= SMSTACKSIZE) {
@@ -200,7 +200,7 @@ BLI_INLINE void smallhash_resize_buckets(SmallHash *sh, const unsigned int nbuck
}
void BLI_smallhash_init_ex(SmallHash *sh,
const unsigned int nentries_reserve)
const uint nentries_reserve)
{
/* assume 'sh' is uninitialized */
@@ -371,7 +371,7 @@ void **BLI_smallhash_iternew_p(const SmallHash *sh, SmallHashIter *iter, uintptr
#if 0
void BLI_smallhash_print(SmallHash *sh)
{
unsigned int i, linecol = 79, c = 0;
uint i, linecol = 79, c = 0;
printf("{");
for (i = 0; i < sh->nbuckets; i++) {
@@ -382,7 +382,7 @@ void BLI_smallhash_print(SmallHash *sh)
printf("--f-");
}
else {
printf("%2x", (unsigned int)sh->buckets[i].key);
printf("%2x", (uint)sh->buckets[i].key);
}
if (i != sh->nbuckets - 1)
@@ -410,7 +410,7 @@ void BLI_smallhash_print(SmallHash *sh)
double BLI_smallhash_calc_quality(SmallHash *sh)
{
uint64_t sum = 0;
unsigned int i;
uint i;
if (sh->nentries == 0)
return -1.0;
@@ -419,8 +419,8 @@ double BLI_smallhash_calc_quality(SmallHash *sh)
if (sh->buckets[i].key != SMHASH_KEY_UNUSED) {
uint64_t count = 0;
SmallHashEntry *e, *e_final = &sh->buckets[i];
unsigned int h = smallhash_key(e_final->key);
unsigned int hoff = 1;
uint h = smallhash_key(e_final->key);
uint hoff = 1;
for (e = &sh->buckets[h % sh->nbuckets];
e != e_final;

View File

@@ -51,7 +51,7 @@ typedef enum eStrCursorDelimType {
STRCUR_DELIM_OTHER
} eStrCursorDelimType;
static eStrCursorDelimType cursor_delim_type_unicode(const unsigned int uch)
static eStrCursorDelimType cursor_delim_type_unicode(const uint uch)
{
switch (uch) {
case ',':
@@ -112,7 +112,7 @@ static eStrCursorDelimType cursor_delim_type_utf8(const char *ch_utf8)
{
/* for full unicode support we really need to have large lookup tables to figure
* out whats what in every possible char set - and python, glib both have these. */
unsigned int uch = BLI_str_utf8_as_unicode(ch_utf8);
uint uch = BLI_str_utf8_as_unicode(ch_utf8);
return cursor_delim_type_unicode(uch);
}
@@ -259,14 +259,14 @@ void BLI_str_cursor_step_wchar(
if (jump != STRCUR_JUMP_NONE) {
const eStrCursorDelimType delim_type =
(*pos) < maxlen ? cursor_delim_type_unicode((unsigned int)str[*pos]) : STRCUR_DELIM_NONE;
(*pos) < maxlen ? cursor_delim_type_unicode((uint)str[*pos]) : STRCUR_DELIM_NONE;
/* jump between special characters (/,\,_,-, etc.),
* look at function cursor_delim_type_unicode() for complete
* list of special character, ctr -> */
while ((*pos) < maxlen) {
if (wchar_t_step_next(str, maxlen, pos)) {
if ((jump != STRCUR_JUMP_ALL) &&
(delim_type != cursor_delim_type_unicode((unsigned int)str[*pos])))
(delim_type != cursor_delim_type_unicode((uint)str[*pos])))
{
break;
}
@@ -287,7 +287,7 @@ void BLI_str_cursor_step_wchar(
if (jump != STRCUR_JUMP_NONE) {
const eStrCursorDelimType delim_type =
(*pos) > 0 ? cursor_delim_type_unicode((unsigned int)str[(*pos) - 1]) : STRCUR_DELIM_NONE;
(*pos) > 0 ? cursor_delim_type_unicode((uint)str[(*pos) - 1]) : STRCUR_DELIM_NONE;
/* jump between special characters (/,\,_,-, etc.),
* look at function cursor_delim_type() for complete
* list of special character, ctr -> */
@@ -295,7 +295,7 @@ void BLI_str_cursor_step_wchar(
const int pos_prev = *pos;
if (wchar_t_step_prev(str, maxlen, pos)) {
if ((jump != STRCUR_JUMP_ALL) &&
(delim_type != cursor_delim_type_unicode((unsigned int)str[*pos])))
(delim_type != cursor_delim_type_unicode((uint)str[*pos])))
{
/* left only: compensate for index/change in direction */
if ((pos_orig - (*pos)) >= 1) {
@@ -314,4 +314,3 @@ void BLI_str_cursor_step_wchar(
BLI_assert(0);
}
}

View File

@@ -282,14 +282,14 @@ size_t BLI_strncpy_wchar_as_utf8(char *__restrict dst, const wchar_t *__restrict
#endif
while (*src && len <= maxlen_secured) {
len += BLI_str_utf8_from_unicode((unsigned int)*src++, dst + len);
len += BLI_str_utf8_from_unicode((uint)*src++, dst + len);
}
/* We have to be more careful for the last six bytes, to avoid buffer overflow in case utf8-encoded char
* would be too long for our dst buffer. */
while (*src) {
char t[6];
size_t l = BLI_str_utf8_from_unicode((unsigned int)*src++, t);
size_t l = BLI_str_utf8_from_unicode((uint)*src++, t);
BLI_assert(l <= 6);
if (len + l > maxlen) {
break;
@@ -309,7 +309,7 @@ size_t BLI_wstrlen_utf8(const wchar_t *src)
size_t len = 0;
while (*src) {
len += BLI_str_utf8_from_unicode((unsigned int)*src++, NULL);
len += BLI_str_utf8_from_unicode((uint)*src++, NULL);
}
return len;
@@ -381,7 +381,7 @@ size_t BLI_strncpy_wchar_from_utf8(wchar_t *__restrict dst_w, const char *__rest
while (*src_c && len != maxlen) {
size_t step = 0;
unsigned int unicode = BLI_str_utf8_as_unicode_and_size(src_c, &step);
uint unicode = BLI_str_utf8_as_unicode_and_size(src_c, &step);
if (unicode != BLI_UTF8_ERR) {
*dst_w = (wchar_t)unicode;
src_c += step;
@@ -416,7 +416,7 @@ int BLI_wcswidth(const wchar_t *pwcs, size_t n)
int BLI_str_utf8_char_width(const char *p)
{
unsigned int unicode = BLI_str_utf8_as_unicode(p);
uint unicode = BLI_str_utf8_as_unicode(p);
if (unicode == BLI_UTF8_ERR)
return -1;
@@ -427,7 +427,7 @@ int BLI_str_utf8_char_width_safe(const char *p)
{
int columns;
unsigned int unicode = BLI_str_utf8_as_unicode(p);
uint unicode = BLI_str_utf8_as_unicode(p);
if (unicode == BLI_UTF8_ERR)
return 1;
@@ -440,7 +440,7 @@ int BLI_str_utf8_char_width_safe(const char *p)
/* copied from glib's gutf8.c, added 'Err' arg */
/* note, glib uses unsigned int for unicode, best we do the same,
/* note, glib uses uint for unicode, best we do the same,
* though we don't typedef it - campbell */
#define UTF8_COMPUTE(Char, Mask, Len, Err) \
@@ -525,11 +525,11 @@ int BLI_str_utf8_size_safe(const char *p)
*
* Return value: the resulting character
**/
unsigned int BLI_str_utf8_as_unicode(const char *p)
uint BLI_str_utf8_as_unicode(const char *p)
{
int i, len;
unsigned int mask = 0;
unsigned int result;
uint mask = 0;
uint result;
const unsigned char c = (unsigned char) *p;
UTF8_COMPUTE(c, mask, len, -1);
@@ -541,11 +541,11 @@ unsigned int BLI_str_utf8_as_unicode(const char *p)
}
/* variant that increments the length */
unsigned int BLI_str_utf8_as_unicode_and_size(const char *__restrict p, size_t *__restrict index)
uint BLI_str_utf8_as_unicode_and_size(const char *__restrict p, size_t *__restrict index)
{
int i, len;
unsigned mask = 0;
unsigned int result;
uint result;
const unsigned char c = (unsigned char) *p;
UTF8_COMPUTE(c, mask, len, -1);
@@ -556,11 +556,11 @@ unsigned int BLI_str_utf8_as_unicode_and_size(const char *__restrict p, size_t *
return result;
}
unsigned int BLI_str_utf8_as_unicode_and_size_safe(const char *__restrict p, size_t *__restrict index)
uint BLI_str_utf8_as_unicode_and_size_safe(const char *__restrict p, size_t *__restrict index)
{
int i, len;
unsigned int mask = 0;
unsigned int result;
uint mask = 0;
uint result;
const unsigned char c = (unsigned char) *p;
UTF8_COMPUTE(c, mask, len, -1);
@@ -575,11 +575,11 @@ unsigned int BLI_str_utf8_as_unicode_and_size_safe(const char *__restrict p, siz
/* another variant that steps over the index,
* note, currently this also falls back to latin1 for text drawing. */
unsigned int BLI_str_utf8_as_unicode_step(const char *__restrict p, size_t *__restrict index)
uint BLI_str_utf8_as_unicode_step(const char *__restrict p, size_t *__restrict index)
{
int i, len;
unsigned int mask = 0;
unsigned int result;
uint mask = 0;
uint result;
unsigned char c;
p += *index;
@@ -631,12 +631,12 @@ unsigned int BLI_str_utf8_as_unicode_step(const char *__restrict p, size_t *__re
*
* \return number of bytes written
**/
size_t BLI_str_utf8_from_unicode(unsigned int c, char *outbuf)
size_t BLI_str_utf8_from_unicode(uint c, char *outbuf)
{
/* If this gets modified, also update the copy in g_string_insert_unichar() */
unsigned int len = 0;
unsigned int first;
unsigned int i;
uint len = 0;
uint first;
uint i;
if (c < 0x80) {
first = 0;
@@ -757,20 +757,20 @@ char *BLI_str_prev_char_utf8(const char *p)
}
/* end glib copy */
size_t BLI_str_partition_utf8(const char *str, const unsigned int delim[], const char **sep, const char **suf)
size_t BLI_str_partition_utf8(const char *str, const uint delim[], const char **sep, const char **suf)
{
return BLI_str_partition_ex_utf8(str, NULL, delim, sep, suf, false);
}
size_t BLI_str_rpartition_utf8(const char *str, const unsigned int delim[], const char **sep, const char **suf)
size_t BLI_str_rpartition_utf8(const char *str, const uint delim[], const char **sep, const char **suf)
{
return BLI_str_partition_ex_utf8(str, NULL, delim, sep, suf, true);
}
size_t BLI_str_partition_ex_utf8(
const char *str, const char *end, const unsigned int delim[], const char **sep, const char **suf, const bool from_right)
const char *str, const char *end, const uint delim[], const char **sep, const char **suf, const bool from_right)
{
const unsigned int *d;
const uint *d;
const size_t str_len = end ? (size_t)(end - str) : strlen(str);
size_t index;
@@ -783,7 +783,7 @@ size_t BLI_str_partition_ex_utf8(
*sep >= str && (!end || *sep < end) && **sep != '\0';
*sep = (char *)(from_right ? BLI_str_find_prev_char_utf8(str, *sep) : str + index))
{
const unsigned int c = BLI_str_utf8_as_unicode_and_size(*sep, &index);
const uint c = BLI_str_utf8_as_unicode_and_size(*sep, &index);
if (c == BLI_UTF8_ERR) {
*suf = *sep = NULL;

View File

@@ -256,10 +256,6 @@ static void bm_decim_build_edge_cost_single(
{
float cost;
if (eheap_table[BM_elem_index_get(e)]) {
BLI_heap_remove(eheap, eheap_table[BM_elem_index_get(e)]);
}
if (UNLIKELY(vweights &&
((vweights[BM_elem_index_get(e->v1)] == 0.0f) ||
(vweights[BM_elem_index_get(e->v2)] == 0.0f))))
@@ -341,10 +337,13 @@ static void bm_decim_build_edge_cost_single(
}
}
eheap_table[BM_elem_index_get(e)] = BLI_heap_insert(eheap, cost, e);
BLI_heap_insert_or_update(eheap, &eheap_table[BM_elem_index_get(e)], cost, e);
return;
clear:
if (eheap_table[BM_elem_index_get(e)]) {
BLI_heap_remove(eheap, eheap_table[BM_elem_index_get(e)]);
}
eheap_table[BM_elem_index_get(e)] = NULL;
}

View File

@@ -342,8 +342,7 @@ void BM_mesh_decimate_dissolve_ex(
const int j = BM_elem_index_get(l_iter->e);
if (j != -1 && eheap_table[j]) {
const float cost = bm_edge_calc_dissolve_error(l_iter->e, delimit, &delimit_data);
BLI_heap_remove(eheap, eheap_table[j]);
eheap_table[j] = BLI_heap_insert(eheap, cost, l_iter->e);
BLI_heap_node_value_update(eheap, eheap_table[j], cost);
}
} while ((l_iter = l_iter->next) != l_first);
}
@@ -353,8 +352,7 @@ void BM_mesh_decimate_dissolve_ex(
}
if (UNLIKELY(f_new == NULL)) {
BLI_heap_remove(eheap, enode_top);
eheap_table[i] = BLI_heap_insert(eheap, COST_INVALID, e);
BLI_heap_node_value_update(eheap, enode_top, COST_INVALID);
}
}
@@ -475,8 +473,7 @@ void BM_mesh_decimate_dissolve_ex(
const int j = BM_elem_index_get(v_iter);
if (j != -1 && vheap_table[j]) {
const float cost = bm_vert_edge_face_angle(v_iter);
BLI_heap_remove(vheap, vheap_table[j]);
vheap_table[j] = BLI_heap_insert(vheap, cost, v_iter);
BLI_heap_node_value_update(vheap, vheap_table[j], cost);
}
}
@@ -497,8 +494,7 @@ void BM_mesh_decimate_dissolve_ex(
(BLI_heap_node_value(vheap_table[j]) == COST_INVALID))
{
const float cost = bm_vert_edge_face_angle(l_cycle_iter->v);
BLI_heap_remove(vheap, vheap_table[j]);
vheap_table[j] = BLI_heap_insert(vheap, cost, l_cycle_iter->v);
BLI_heap_node_value_update(vheap, vheap_table[j], cost);
}
} while ((l_cycle_iter = l_cycle_iter->next) != l_cycle_first);
@@ -510,8 +506,7 @@ void BM_mesh_decimate_dissolve_ex(
}
if (UNLIKELY(e_new == NULL)) {
BLI_heap_remove(vheap, vnode_top);
vheap_table[i] = BLI_heap_insert(vheap, COST_INVALID, v);
BLI_heap_node_value_update(vheap, vnode_top, COST_INVALID);
}
}

View File

@@ -1034,9 +1034,11 @@ void RENDER_OT_render(wmOperatorType *ot)
/*ot->poll = ED_operator_screenactive;*/ /* this isn't needed, causes failer in background mode */
RNA_def_boolean(ot->srna, "animation", 0, "Animation", "Render files from the animation range of this scene");
prop = RNA_def_boolean(ot->srna, "animation", 0, "Animation", "Render files from the animation range of this scene");
RNA_def_property_flag(prop, PROP_SKIP_SAVE);
RNA_def_boolean(ot->srna, "write_still", 0, "Write Image", "Save rendered the image to the output path (used only when animation is disabled)");
RNA_def_boolean(ot->srna, "use_viewport", 0, "Use 3D Viewport", "When inside a 3D viewport, use layers and camera of the viewport");
prop = RNA_def_boolean(ot->srna, "use_viewport", 0, "Use 3D Viewport", "When inside a 3D viewport, use layers and camera of the viewport");
RNA_def_property_flag(prop, PROP_SKIP_SAVE);
prop = RNA_def_string(ot->srna, "layer", NULL, RE_MAXNAME, "Render Layer", "Single render layer to re-render (used only when animation is disabled)");
RNA_def_property_flag(prop, PROP_SKIP_SAVE);
prop = RNA_def_string(ot->srna, "scene", NULL, MAX_ID_NAME - 2, "Scene", "Scene to render, current scene if not specified");

View File

@@ -32,6 +32,7 @@
#ifndef __DNA_META_TYPES_H__
#define __DNA_META_TYPES_H__
#include "DNA_defs.h"
#include "DNA_listBase.h"
#include "DNA_ID.h"

View File

@@ -391,7 +391,7 @@ static PyObject *py_bvhtree_ray_cast(PyBVHTree *self, PyObject *args)
PyDoc_STRVAR(py_bvhtree_find_nearest_doc,
".. method:: find_nearest(origin, distance=" PYBVH_MAX_DIST_STR ")\n"
"\n"
" Find the nearest element to a point.\n"
" Find the nearest element (typically face index) to a point.\n"
"\n"
" :arg co: Find nearest element to this point.\n"
" :type co: :class:`Vector`\n"
@@ -476,7 +476,7 @@ static void py_bvhtree_nearest_point_range_cb(void *userdata, int index, const f
PyDoc_STRVAR(py_bvhtree_find_nearest_range_doc,
".. method:: find_nearest_range(origin, distance=" PYBVH_MAX_DIST_STR ")\n"
"\n"
" Find the nearest elements to a point in the distance range.\n"
" Find the nearest elements (typically face index) to a point in the distance range.\n"
"\n"
" :arg co: Find nearest elements to this point.\n"
" :type co: :class:`Vector`\n"

View File

@@ -0,0 +1,191 @@
/* Apache License, Version 2.0 */
#include "testing/testing.h"
#include <string.h>
extern "C" {
#include "BLI_compiler_attrs.h"
#include "BLI_heap.h"
#include "BLI_utildefines.h"
#include "BLI_rand.h"
#include "MEM_guardedalloc.h"
};
#define SIZE 1024
/* Fill \a array_tar with the ascending sequence 0..(size - 1) as floats. */
static void range_fl(float *array_tar, const int size)
{
	for (int i = 0; i < size; i++) {
		array_tar[i] = (float)i;
	}
}
/* A freshly created heap must report empty with a size of zero. */
TEST(heap, Empty)
{
	Heap *heap = BLI_heap_new();
	EXPECT_TRUE(BLI_heap_is_empty(heap));
	EXPECT_EQ(BLI_heap_size(heap), 0);
	BLI_heap_free(heap, NULL);
}
/* Insert a single item, then pop it back out: size/emptiness must track. */
TEST(heap, One)
{
	const char *item = "test";
	Heap *heap = BLI_heap_new();

	BLI_heap_insert(heap, 0.0f, (void *)item);
	EXPECT_FALSE(BLI_heap_is_empty(heap));
	EXPECT_EQ(BLI_heap_size(heap), 1);

	EXPECT_EQ(item, BLI_heap_popmin(heap));
	EXPECT_TRUE(BLI_heap_is_empty(heap));
	EXPECT_EQ(BLI_heap_size(heap), 0);
	BLI_heap_free(heap, NULL);
}
/* Keys inserted in ascending order pop back out in the same order. */
TEST(heap, Range)
{
	const int items_total = SIZE;
	Heap *heap = BLI_heap_new();
	for (int i = 0; i < items_total; i++) {
		BLI_heap_insert(heap, (float)i, SET_INT_IN_POINTER(i));
	}
	for (int expected = 0; expected < items_total; expected++) {
		EXPECT_EQ(expected, GET_INT_FROM_POINTER(BLI_heap_popmin(heap)));
	}
	EXPECT_TRUE(BLI_heap_is_empty(heap));
	BLI_heap_free(heap, NULL);
}
/* Keys inserted in descending order still pop out smallest-first. */
TEST(heap, RangeReverse)
{
	const int items_total = SIZE;
	Heap *heap = BLI_heap_new();
	for (int i = 0; i < items_total; i++) {
		BLI_heap_insert(heap, (float)-i, SET_INT_IN_POINTER(-i));
	}
	for (int expected = items_total - 1; expected >= 0; expected--) {
		EXPECT_EQ(-expected, GET_INT_FROM_POINTER(BLI_heap_popmin(heap)));
	}
	EXPECT_TRUE(BLI_heap_is_empty(heap));
	BLI_heap_free(heap, NULL);
}
/* Remove every even-keyed node; only the odd keys remain to pop. */
TEST(heap, RangeRemove)
{
	const int items_total = SIZE;
	Heap *heap = BLI_heap_new();
	HeapNode **nodes = (HeapNode **)MEM_mallocN(sizeof(HeapNode *) * items_total, __func__);
	for (int i = 0; i < items_total; i++) {
		nodes[i] = BLI_heap_insert(heap, (float)i, SET_INT_IN_POINTER(i));
	}
	/* Drop the even half of the entries. */
	for (int i = 0; i < items_total; i += 2) {
		BLI_heap_remove(heap, nodes[i]);
		nodes[i] = NULL;
	}
	for (int expected = 1; expected < items_total; expected += 2) {
		EXPECT_EQ(expected, GET_INT_FROM_POINTER(BLI_heap_popmin(heap)));
	}
	EXPECT_TRUE(BLI_heap_is_empty(heap));
	BLI_heap_free(heap, NULL);
	MEM_freeN(nodes);
}
/* Many entries sharing a single key must all pop cleanly. */
TEST(heap, Duplicates)
{
	const int items_total = SIZE;
	Heap *heap = BLI_heap_new();
	for (int i = 0; i < items_total; i++) {
		BLI_heap_insert(heap, 1.0f, 0);
	}
	int remaining = items_total;
	while (remaining--) {
		EXPECT_EQ(0, GET_INT_FROM_POINTER(BLI_heap_popmin(heap)));
	}
	EXPECT_TRUE(BLI_heap_is_empty(heap));
	BLI_heap_free(heap, NULL);
}
/* Insert a shuffled 0..(items_total - 1) range, then verify the heap
 * pops the values back in sorted (ascending) order. */
static void random_heap_helper(
        const int items_total,
        const int random_seed)
{
	Heap *heap = BLI_heap_new();
	float *values = (float *)MEM_mallocN(sizeof(float) * items_total, __func__);
	range_fl(values, items_total);
	BLI_array_randomize(values, sizeof(float), items_total, random_seed);
	for (int i = 0; i < items_total; i++) {
		BLI_heap_insert(heap, values[i], SET_INT_IN_POINTER((int)values[i]));
	}
	for (int expected = 0; expected < items_total; expected++) {
		EXPECT_EQ(expected, GET_INT_FROM_POINTER(BLI_heap_popmin(heap)));
	}
	EXPECT_TRUE(BLI_heap_is_empty(heap));
	BLI_heap_free(heap, NULL);
	MEM_freeN(values);
}
/* Randomized insertion order at a few sizes/seeds. */
TEST(heap, Rand1)
{
	random_heap_helper(1, 1234);
}
TEST(heap, Rand2)
{
	random_heap_helper(2, 1234);
}
TEST(heap, Rand100)
{
	random_heap_helper(100, 4321);
}
/* Offset every node's key by items_total via value-update; the relative
 * order (and so the pop order of the payloads) must be unchanged. */
TEST(heap, ReInsertSimple)
{
	const int items_total = SIZE;
	Heap *heap = BLI_heap_new();
	HeapNode **nodes = (HeapNode **)MEM_mallocN(sizeof(HeapNode *) * items_total, __func__);
	for (int i = 0; i < items_total; i++) {
		nodes[i] = BLI_heap_insert(heap, (float)i, SET_INT_IN_POINTER(i));
	}
	for (int i = 0; i < items_total; i++) {
		BLI_heap_node_value_update(heap, nodes[i], (float)(items_total + i));
	}
	for (int expected = 0; expected < items_total; expected++) {
		EXPECT_EQ(expected, GET_INT_FROM_POINTER(BLI_heap_popmin(heap)));
	}
	EXPECT_TRUE(BLI_heap_is_empty(heap));
	BLI_heap_free(heap, NULL);
	MEM_freeN(nodes);
}
static void random_heap_reinsert_helper(
const int items_total,
const int random_seed)
{
Heap *heap = BLI_heap_new();
HeapNode **nodes = (HeapNode **)MEM_mallocN(sizeof(HeapNode *) * items_total, __func__);
for (int in = 0; in < items_total; in++) {
nodes[in] = BLI_heap_insert(heap, (float)in, SET_INT_IN_POINTER(in));
}
BLI_array_randomize(nodes, sizeof(HeapNode *), items_total, random_seed);
for (int i = 0; i < items_total; i++) {
BLI_heap_node_value_update(heap, nodes[i], (float)i);
}
EXPECT_TRUE(BLI_heap_is_valid(heap));
for (int out_test = 0; out_test < items_total; out_test++) {
HeapNode *node_top = BLI_heap_top(heap);
float out = BLI_heap_node_value(node_top);
EXPECT_EQ((float)out_test, out);
BLI_heap_popmin(heap);
}
EXPECT_TRUE(BLI_heap_is_empty(heap));
BLI_heap_free(heap, NULL);
MEM_freeN(nodes);
}
/* Randomized key re-assignment at several sizes/seeds. */
TEST(heap, ReInsertRandom1)
{
	random_heap_reinsert_helper(1, 1234);
}
TEST(heap, ReInsertRandom2)
{
	random_heap_reinsert_helper(2, 1234);
}
TEST(heap, ReInsertRandom100)
{
	random_heap_reinsert_helper(100, 4321);
}
TEST(heap, ReInsertRandom1024)
{
	random_heap_reinsert_helper(1024, 9876);
}
TEST(heap, ReInsertRandom2048)
{
	random_heap_reinsert_helper(2048, 5321);
}

View File

@@ -44,6 +44,7 @@ BLENDER_TEST(BLI_array_store "bf_blenlib")
BLENDER_TEST(BLI_array_utils "bf_blenlib")
BLENDER_TEST(BLI_ghash "bf_blenlib")
BLENDER_TEST(BLI_hash_mm2a "bf_blenlib")
BLENDER_TEST(BLI_heap "bf_blenlib")
BLENDER_TEST(BLI_kdopbvh "bf_blenlib")
BLENDER_TEST(BLI_listbase "bf_blenlib")
BLENDER_TEST(BLI_math_base "bf_blenlib")