1
1

Compare commits

...

11 Commits

Author SHA1 Message Date
58d6cbba6d Merge branch 'master' into temp-ghash-setops 2015-06-29 17:10:42 +02:00
94549adec4 Merge branch 'master' into temp-ghash-setops 2015-03-21 09:07:10 +01:00
77a4d705c7 Merge branch 'master' into temp-ghash-setops 2015-03-20 16:56:12 +01:00
f808f36c12 Get rid of varargs in union & co functions (using variadic macros instead). 2015-03-20 16:55:10 +01:00
b6f0c9e25a Some improvements/tweaks from review. 2015-03-20 15:15:26 +01:00
ecc9fe9945 Merge branch 'master' into temp-ghash-setops 2015-03-20 12:54:41 +01:00
4e3bc21492 Remove public GHash API for union/inter/diff/symmdiff.
This is dead easy to add back when we want (or use other naming if we really
like them better), core code remains exactly the same anyway.

Note we keep checks (isdisjoint, isequal, etc.) for ghashes though, those are
pure-keys non-modifying operations anyway, so here set ops for ghash are
definitively valid (equivalent of set ops on dict.keys() views in py).
2015-03-20 12:04:03 +01:00
88ae44a363 Fix for updates in master... 2015-03-20 12:04:03 +01:00
df587b03c3 Revert "Remove gtests."
This reverts commit 2b8fac2a153e135d0f6cc7621a3755901477fd3d.
2015-03-20 12:04:03 +01:00
1df3713365 Remove gtests. 2015-03-20 12:04:03 +01:00
23b42a5e69 Squashed commit of temp-ghash-experiments, minus the 'hash storage' part. 2015-03-20 12:04:03 +01:00
3 changed files with 837 additions and 2 deletions

View File

@@ -34,6 +34,7 @@
#include "BLI_sys_types.h" /* for bool */
#include "BLI_compiler_attrs.h"
#include "BLI_utildefines.h"
#ifdef __cplusplus
extern "C" {
@@ -91,6 +92,11 @@ unsigned int BLI_ghash_size(GHash *gh) ATTR_WARN_UNUSED_RESULT;
void BLI_ghash_flag_set(GHash *gh, unsigned int flag);
void BLI_ghash_flag_clear(GHash *gh, unsigned int flag);
/* Set-like predicates on GHash keys (values are ignored).
 * Both hashes must use the same hash and comparison callbacks. */
bool BLI_ghash_isdisjoint(GHash *gh1, GHash *gh2);  /* no key of gh1 exists in gh2 */
bool BLI_ghash_isequal(GHash *gh1, GHash *gh2);     /* exactly the same key sets */
bool BLI_ghash_issubset(GHash *gh1, GHash *gh2);    /* gh2's keys are a subset of gh1's (gh1 >= gh2) */
bool BLI_ghash_issuperset(GHash *gh1, GHash *gh2);  /* gh1's keys are a subset of gh2's (gh1 <= gh2) */
/* *** */
GHashIterator *BLI_ghashIterator_new(GHash *gh) ATTR_MALLOC ATTR_WARN_UNUSED_RESULT;
@@ -234,6 +240,43 @@ void BLI_gset_clear_ex(GSet *gs, GSetKeyFreeFP keyfreefp,
const unsigned int nentries_reserve);
void BLI_gset_clear(GSet *gs, GSetKeyFreeFP keyfreefp);
/* Set-like predicates on GSets (see the matching BLI_ghash_* functions for semantics). */
bool BLI_gset_isdisjoint(GSet *gs1, GSet *gs2);
bool BLI_gset_isequal(GSet *gs1, GSet *gs2);
bool BLI_gset_issubset(GSet *gs1, GSet *gs2);
bool BLI_gset_issuperset(GSet *gs1, GSet *gs2);
/* Variadic set operations: each macro gathers its trailing GSet * arguments into a
 * temporary array and forwards it to the matching `_bli_gset_*` implementation.
 * If the first given GSet is NULL, a new GSet is returned, otherwise the first one is
 * modified in place (and returned).
 * NOTE(review): `({ ... })` is a GCC/Clang statement-expression extension, not standard C
 * (will not build with MSVC) -- confirm the set of supported compilers for this branch. */
GSet *_bli_gset_union(GSetKeyCopyFP keycopyfp, GSet **gset_arr, const size_t nbr_gset_arr);
#define BLI_gset_union(keycopyfp, ...) ( \
{ \
GSet *gset_arr[] = {__VA_ARGS__}; \
_bli_gset_union((keycopyfp), gset_arr, ARRAY_SIZE(gset_arr)); \
})
/* Intersection: keep only keys present in all given sets. */
GSet *_bli_gset_intersection(
GSetKeyCopyFP keycopyfp, GSetKeyFreeFP keyfreefp, GSet **gset_arr, const size_t nbr_gset_arr);
#define BLI_gset_intersection(keycopyfp, keyfree, ...) ( \
{ \
GSet *gset_arr[] = {__VA_ARGS__}; \
_bli_gset_intersection((keycopyfp), (keyfree), gset_arr, ARRAY_SIZE(gset_arr)); \
})
/* Difference: remove from the first set all keys present in any other given set. */
GSet *_bli_gset_difference(
GSetKeyCopyFP keycopyfp, GSetKeyFreeFP keyfreefp, GSet **gset_arr, const size_t nbr_gset_arr);
#define BLI_gset_difference(keycopyfp, keyfree, ...) ( \
{ \
GSet *gset_arr[] = {__VA_ARGS__}; \
_bli_gset_difference((keycopyfp), (keyfree), gset_arr, ARRAY_SIZE(gset_arr)); \
})
/* Symmetric difference: keep only keys present in exactly one of the given sets. */
GSet *_bli_gset_symmetric_difference(
GSetKeyCopyFP keycopyfp, GSetKeyFreeFP keyfreefp, GSet **gset_arr, const size_t nbr_gset_arr);
#define BLI_gset_symmetric_difference(keycopyfp, keyfree, ...) ( \
{ \
GSet *gset_arr[] = {__VA_ARGS__}; \
_bli_gset_symmetric_difference((keycopyfp), (keyfree), gset_arr, ARRAY_SIZE(gset_arr)); \
})
GSet *BLI_gset_ptr_new_ex(const char *info,
const unsigned int nentries_reserve) ATTR_MALLOC ATTR_WARN_UNUSED_RESULT;
GSet *BLI_gset_ptr_new(const char *info);

View File

@@ -614,6 +614,350 @@ static GHash *ghash_copy(GHash *gh, GHashKeyCopyFP keycopyfp, GHashValCopyFP val
return gh_new;
}
/**
 * Merge \a gh2 into \a gh1 (keeping entries already in \a gh1 unchanged), and then each subsequent given GHash.
 * If \a gh1 is NULL, a new GHash will be created first (avoids modifying \a gh1 in place).
 * If \a reverse is true, entries present in latest GHash will override those in former GHash.
 *
 * \return \a gh1 (or the newly created hash when \a gh1 was NULL).
 */
static GHash *ghash_merge(
const bool reverse,
GHashKeyCopyFP keycopyfp, GHashValCopyFP valcopyfp,
GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp,
GHash **ghash_arr, const size_t nbr_ghash_arr)
{
GHash *gh1;
GHash **gh_iter = ghash_arr;
size_t gh_count = nbr_ghash_arr;
/* Need at least one source hash (two when the destination slot is NULL, since the
 * first source is then consumed to bootstrap the result). */
BLI_assert(nbr_ghash_arr > (*ghash_arr ? 1 : 2));
gh1 = *gh_iter++;
gh_count--;
if (!gh1) {
/* No destination given: start from a copy of the first source. */
gh1 = ghash_copy(*gh_iter++, keycopyfp, valcopyfp);
gh_count--;
}
/* GSets store no values, so value callbacks make no sense for them. */
BLI_assert(!(valfreefp || valcopyfp) || !(gh1->flag & GHASH_FLAG_IS_GSET));
while (gh_count--) {
GHash *ghn = *gh_iter++;
unsigned int i;
/* All hashes must hash/compare keys identically, otherwise merging is meaningless. */
BLI_assert(gh1->cmpfp == ghn->cmpfp);
BLI_assert(gh1->hashfp == ghn->hashfp);
BLI_assert((gh1->flag & GHASH_FLAG_IS_GSET) == (ghn->flag & GHASH_FLAG_IS_GSET));
for (i = 0; i < ghn->nbuckets; i++) {
Entry *e;
for (e = ghn->buckets[i]; e; e = e->next) {
Entry *e_gh1;
const unsigned int hash = ghash_entryhash(gh1, e);
const unsigned int gh1_bucket_index = ghash_bucket_index(gh1, hash);
if ((e_gh1 = ghash_lookup_entry_ex(gh1, e->key, gh1_bucket_index)) == NULL) {
/* Key not yet in gh1: insert a copied entry at the head of its bucket. */
Entry *e_new = BLI_mempool_alloc(gh1->entrypool);
ghash_entry_copy(gh1, e_new, ghn, e, keycopyfp, valcopyfp);
/* As with copy, this does not preserve order (but this would be even less meaningful here). */
e_new->next = gh1->buckets[gh1_bucket_index];
gh1->buckets[gh1_bucket_index] = e_new;
ghash_buckets_expand(gh1, ++gh1->nentries, false);
}
else if (reverse) {
/* Key already present and later hashes take precedence: free the old
 * key/value and overwrite the existing entry in place. */
if (keyfreefp) keyfreefp(e_gh1->key);
if (valfreefp) valfreefp(((GHashEntry *)e_gh1)->val);
ghash_entry_copy(gh1, e_gh1, ghn, e, keycopyfp, valcopyfp);
}
}
}
}
return gh1;
}
/**
 * Remove all entries in \a gh1 which keys are not present in \a gh2 and all subsequent given GHash.
 * If \a gh1 is NULL, a new GHash will be created first (avoids modifying \a gh1 in place).
 *
 * \return \a gh1 (or the newly created hash when \a gh1 was NULL).
 */
static GHash *ghash_intersection(
GHashKeyCopyFP keycopyfp, GHashValCopyFP valcopyfp,
GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp,
GHash **ghash_arr, const size_t nbr_ghash_arr)
{
GHash *gh1;
GHash **gh_iter = ghash_arr;
size_t gh_count = nbr_ghash_arr;
/* Need at least one other hash to intersect with (two when the destination slot is NULL). */
BLI_assert(nbr_ghash_arr > (*ghash_arr ? 1 : 2));
gh1 = *gh_iter++;
gh_count--;
if (!gh1) {
/* No destination given: start from a copy of the first source. */
gh1 = ghash_copy(*gh_iter++, keycopyfp, valcopyfp);
gh_count--;
}
/* GSets store no values, so value callbacks make no sense for them. */
BLI_assert(!(valfreefp || valcopyfp) || !(gh1->flag & GHASH_FLAG_IS_GSET));
while (gh_count--) {
GHash *ghn = *gh_iter++;
/* Track the new entry count locally: gh1->nentries must stay untouched while
 * iterating gh1's buckets (see resize comment below). */
unsigned int new_gh1_nentries = gh1->nentries;
unsigned int i;
BLI_assert(gh1->cmpfp == ghn->cmpfp);
BLI_assert(gh1->hashfp == ghn->hashfp);
for (i = 0; i < gh1->nbuckets; i++) {
Entry *e, *e_prev = NULL, *e_next;
for (e = gh1->buckets[i]; e; e = e_next) {
const unsigned int hash = ghash_entryhash(gh1, e);
const unsigned int ghn_bucket_index = ghash_bucket_index(ghn, hash);
e_next = e->next;
/* Key absent from ghn: unlink and free this gh1 entry. */
if (ghash_lookup_entry_ex(ghn, e->key, ghn_bucket_index) == NULL) {
if (keyfreefp) keyfreefp(e->key);
if (valfreefp) valfreefp(((GHashEntry *)e)->val);
if (e_prev) e_prev->next = e_next;
else gh1->buckets[i] = e_next;
/* We cannot resize gh1 while we are looping on it!!! */
new_gh1_nentries--;
BLI_mempool_free(gh1->entrypool, e);
}
else {
e_prev = e;
}
}
}
gh1->nentries = new_gh1_nentries;
/* We force shrinking here (if needed). */
ghash_buckets_expand(gh1, gh1->nentries, false);
ghash_buckets_contract(gh1, gh1->nentries, false, true);
}
return gh1;
}
/**
 * Remove all entries in \a gh1 which keys are present in \a gh2 or any subsequent given GHash.
 * If \a gh1 is NULL, a new GHash will be created first (avoids modifying \a gh1 in place).
 *
 * Note: this is the exact mirror of #ghash_intersection (only the lookup test is inverted).
 *
 * \return \a gh1 (or the newly created hash when \a gh1 was NULL).
 */
static GHash *ghash_difference(
GHashKeyCopyFP keycopyfp, GHashValCopyFP valcopyfp,
GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp,
GHash **ghash_arr, const size_t nbr_ghash_arr)
{
GHash *gh1;
GHash **gh_iter = ghash_arr;
size_t gh_count = nbr_ghash_arr;
/* Need at least one other hash to subtract (two when the destination slot is NULL). */
BLI_assert(nbr_ghash_arr > (*ghash_arr ? 1 : 2));
gh1 = *gh_iter++;
gh_count--;
if (!gh1) {
/* No destination given: start from a copy of the first source. */
gh1 = ghash_copy(*gh_iter++, keycopyfp, valcopyfp);
gh_count--;
}
/* GSets store no values, so value callbacks make no sense for them. */
BLI_assert(!(valfreefp || valcopyfp) || !(gh1->flag & GHASH_FLAG_IS_GSET));
while (gh_count--) {
GHash *ghn = *gh_iter++;
/* Track the new entry count locally: gh1->nentries must stay untouched while
 * iterating gh1's buckets (see resize comment below). */
unsigned int new_gh1_nentries = gh1->nentries;
unsigned int i;
BLI_assert(gh1->cmpfp == ghn->cmpfp);
BLI_assert(gh1->hashfp == ghn->hashfp);
for (i = 0; i < gh1->nbuckets; i++) {
Entry *e, *e_prev = NULL, *e_next;
for (e = gh1->buckets[i]; e; e = e_next) {
const unsigned int hash = ghash_entryhash(gh1, e);
const unsigned int ghn_bucket_index = ghash_bucket_index(ghn, hash);
e_next = e->next;
/* Key also present in ghn: unlink and free this gh1 entry. */
if (ghash_lookup_entry_ex(ghn, e->key, ghn_bucket_index) != NULL) {
if (keyfreefp) keyfreefp(e->key);
if (valfreefp) valfreefp(((GHashEntry *)e)->val);
if (e_prev) e_prev->next = e_next;
else gh1->buckets[i] = e_next;
/* We cannot resize gh1 while we are looping on it!!! */
new_gh1_nentries--;
BLI_mempool_free(gh1->entrypool, e);
}
else {
e_prev = e;
}
}
}
gh1->nentries = new_gh1_nentries;
/* We force shrinking here (if needed). */
ghash_buckets_expand(gh1, gh1->nentries, false);
ghash_buckets_contract(gh1, gh1->nentries, false, true);
}
return gh1;
}
/**
 * Set \a gh1 to only contain entries which keys are present in one and only one of all given ghash.
 * If \a gh1 is NULL, a new GHash will be created first (avoids modifying \a gh1 in place).
 *
 * \return \a gh1 (or the newly created hash when \a gh1 was NULL).
 */
static GHash *ghash_symmetric_difference(
        GHashKeyCopyFP keycopyfp, GHashValCopyFP valcopyfp,
        GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp,
        GHash **ghash_arr, const size_t nbr_ghash_arr)
{
	GHash *gh1;
	GHash **gh_iter = ghash_arr;
	size_t gh_count = nbr_ghash_arr;
	/* Temp storage, we never copy key/values here, just borrow them from real ghash. */
	/* Warning! rem_keys is used as gset (i.e. no val memory reserved). */
	GHash *keys, *rem_keys;
	unsigned int i;

	/* Need at least one other hash (two when the destination slot is NULL). */
	BLI_assert(nbr_ghash_arr > (*ghash_arr ? 1 : 2));

	gh1 = *gh_iter++;
	gh_count--;
	if (!gh1) {
		/* No destination given: start from a copy of the first source. */
		gh1 = ghash_copy(*gh_iter++, keycopyfp, valcopyfp);
		gh_count--;
	}

	/* GSets store no values, so value callbacks make no sense for them. */
	BLI_assert(!(valfreefp || valcopyfp) || !(gh1->flag & GHASH_FLAG_IS_GSET));

	keys = ghash_copy(gh1, NULL, NULL);
	rem_keys = ghash_new(gh1->hashfp, gh1->cmpfp, __func__, 64, GHASH_FLAG_IS_GSET);

	/* First pass: every key found at least once is in keys,
	 * every key found at least twice is in rem_keys (each one recorded exactly once!). */
	while (gh_count--) {
		GHash *ghn = *gh_iter++;

		BLI_assert(gh1->cmpfp == ghn->cmpfp);
		BLI_assert(gh1->hashfp == ghn->hashfp);
		BLI_assert((gh1->flag & GHASH_FLAG_IS_GSET) == (ghn->flag & GHASH_FLAG_IS_GSET));

		for (i = 0; i < ghn->nbuckets; i++) {
			Entry *e;

			for (e = ghn->buckets[i]; e; e = e->next) {
				const unsigned int hash = ghash_entryhash(ghn, e);
				const unsigned int keys_bucket_index = ghash_bucket_index(keys, hash);

				if (ghash_lookup_entry_ex(keys, e->key, keys_bucket_index) != NULL) {
					const unsigned int rem_keys_bucket_index = ghash_bucket_index(rem_keys, hash);

					/* Fix: a key present in three or more of the given ghash would previously be
					 * added to rem_keys several times (there was no duplicate check), making the
					 * removal pass below process it more than once -- failing the assert there,
					 * and dereferencing NULL in release builds. Record each duplicated key once. */
					if (ghash_lookup_entry_ex(rem_keys, e->key, rem_keys_bucket_index) == NULL) {
						Entry *e_new = BLI_mempool_alloc(rem_keys->entrypool);

						ghash_entry_copy(rem_keys, e_new, ghn, e, NULL, NULL);

						/* As with copy, this does not preserve order (but this would be even less meaningful here). */
						e_new->next = rem_keys->buckets[rem_keys_bucket_index];
						rem_keys->buckets[rem_keys_bucket_index] = e_new;

						ghash_buckets_expand(rem_keys, ++rem_keys->nentries, false);
					}
				}
				else {
					Entry *e_new = BLI_mempool_alloc(keys->entrypool);

					ghash_entry_copy(keys, e_new, ghn, e, NULL, NULL);

					/* As with copy, this does not preserve order (but this would be even less meaningful here). */
					e_new->next = keys->buckets[keys_bucket_index];
					keys->buckets[keys_bucket_index] = e_new;

					ghash_buckets_expand(keys, ++keys->nentries, false);
				}
			}
		}
	}

	/* Now, keys we actually want are (keys - rem_keys). */
	for (i = 0; i < rem_keys->nbuckets; i++) {
		Entry *e;

		for (e = rem_keys->buckets[i]; e; e = e->next) {
			Entry *e_prev, *e_curr;
			const unsigned int hash = ghash_entryhash(rem_keys, e);
			const unsigned int keys_bucket_index = ghash_bucket_index(keys, hash);
			const unsigned int gh1_bucket_index = ghash_bucket_index(gh1, hash);

			e_curr = ghash_lookup_entry_prev_ex(keys, e->key, &e_prev, keys_bucket_index);
			BLI_assert(e_curr != NULL);  /* All keys in rem_keys must exist in keys! */

			if (e_prev) e_prev->next = e_curr->next;
			else keys->buckets[keys_bucket_index] = e_curr->next;

			/* We do not care about shrinking keys' buckets here! */
			keys->nentries--;
			BLI_mempool_free(keys->entrypool, e_curr);

			/* Also remove keys from gh1 if possible, since we are at it...
			 * (a duplicated key may not be in gh1 at all, when it only came from later ghash). */
			e_curr = ghash_lookup_entry_prev_ex(gh1, e->key, &e_prev, gh1_bucket_index);
			if (e_curr) {
				if (e_prev) e_prev->next = e_curr->next;
				else gh1->buckets[gh1_bucket_index] = e_curr->next;

				/* Note: We can free key/value here, because we won't use them again (have been removed
				 * from keys already, and we won't use matching entry from rem_keys again either). */
				if (keyfreefp) keyfreefp(e_curr->key);
				if (valfreefp) valfreefp(((GHashEntry *)e_curr)->val);

				/* We do not care about shrinking gh1's buckets here for now! */
				gh1->nentries--;
				BLI_mempool_free(gh1->entrypool, e_curr);
			}
		}
	}
	BLI_ghash_free(rem_keys, NULL, NULL);

	/* Final step: add (copy) all entries from keys which are not already in gh1. */
	for (i = 0; i < keys->nbuckets; i++) {
		Entry *e;

		for (e = keys->buckets[i]; e; e = e->next) {
			const unsigned int hash = ghash_entryhash(keys, e);
			const unsigned int gh1_bucket_index = ghash_bucket_index(gh1, hash);

			if (ghash_lookup_entry_ex(gh1, e->key, gh1_bucket_index) == NULL) {
				Entry *e_new = BLI_mempool_alloc(gh1->entrypool);

				ghash_entry_copy(gh1, e_new, keys, e, keycopyfp, valcopyfp);

				/* As with copy, this does not preserve order (but this would be even less meaningful here). */
				e_new->next = gh1->buckets[gh1_bucket_index];
				gh1->buckets[gh1_bucket_index] = e_new;

				ghash_buckets_expand(gh1, ++gh1->nentries, false);
			}
		}
	}
	BLI_ghash_free(keys, NULL, NULL);

	/* We force shrinking here (if needed). */
	ghash_buckets_contract(gh1, gh1->nentries, false, true);

	return gh1;
}
/** \} */
@@ -902,6 +1246,110 @@ void BLI_ghash_flag_clear(GHash *gh, unsigned int flag)
gh->flag &= ~flag;
}
/**
 * Check whether no key from \a gh1 exists in \a gh2.
 */
bool BLI_ghash_isdisjoint(GHash *gh1, GHash *gh2)
{
	/* Note: For now, take a basic, brute force approach.
	 * If we switch from modulo to masking, we may have ways to optimize this, though. */
	const bool same_nbuckets = (gh1->nbuckets == gh2->nbuckets);
	unsigned int bucket_i;

	BLI_assert(gh1->cmpfp == gh2->cmpfp);
	BLI_assert(gh1->hashfp == gh2->hashfp);

	/* Walk the smaller hash and probe into the bigger one. */
	if (gh2->nentries < gh1->nentries) {
		SWAP(GHash *, gh1, gh2);
	}

	for (bucket_i = 0; bucket_i < gh1->nbuckets; bucket_i++) {
		Entry *e = gh1->buckets[bucket_i];
		while (e) {
			/* When both hashes have the same bucket count, a key lives at the same
			 * bucket index in both, so re-hashing can be skipped. */
			const unsigned int probe_bucket =
			        same_nbuckets ? bucket_i : ghash_bucket_index(gh2, ghash_entryhash(gh2, e));
			if (ghash_lookup_entry_ex(gh2, e->key, probe_bucket) != NULL) {
				return false;
			}
			e = e->next;
		}
	}
	return true;
}
/**
 * Check whether \a gh1 and \a gh2 contain exactly the same keys.
 */
bool BLI_ghash_isequal(GHash *gh1, GHash *gh2)
{
	const bool same_nbuckets = (gh1->nbuckets == gh2->nbuckets);
	unsigned int bucket_i;

	BLI_assert(gh1->cmpfp == gh2->cmpfp);
	BLI_assert(gh1->hashfp == gh2->hashfp);

	/* Equal key sets imply equal cardinality; checking it first also makes the
	 * one-way inclusion test below sufficient for full equality. */
	if (gh1->nentries != gh2->nentries) {
		return false;
	}

	for (bucket_i = 0; bucket_i < gh1->nbuckets; bucket_i++) {
		Entry *e = gh1->buckets[bucket_i];
		while (e) {
			/* Identical bucket counts mean identical bucket indices -- skip re-hashing then. */
			const unsigned int probe_bucket =
			        same_nbuckets ? bucket_i : ghash_bucket_index(gh2, ghash_entryhash(gh2, e));
			if (ghash_lookup_entry_ex(gh2, e->key, probe_bucket) == NULL) {
				return false;
			}
			e = e->next;
		}
	}
	return true;
}
/**
 * Check whether \a gh2 keys are a subset of \a gh1 keys.
 * gh1 >= gh2
 *
 * Note: Strict subset is (gh1 >= gh2) && (gh1->nentries != gh2->nentries).
 */
bool BLI_ghash_issubset(GHash *gh1, GHash *gh2)
{
	const bool same_nbuckets = (gh1->nbuckets == gh2->nbuckets);
	unsigned int bucket_i;

	BLI_assert(gh1->cmpfp == gh2->cmpfp);
	BLI_assert(gh1->hashfp == gh2->hashfp);

	/* A container cannot hold fewer entries than the candidate subset. */
	if (gh1->nentries < gh2->nentries) {
		return false;
	}

	/* Every key of gh2 must be found in gh1. */
	for (bucket_i = 0; bucket_i < gh2->nbuckets; bucket_i++) {
		Entry *e = gh2->buckets[bucket_i];
		while (e) {
			/* Identical bucket counts mean identical bucket indices -- skip re-hashing then. */
			const unsigned int probe_bucket =
			        same_nbuckets ? bucket_i : ghash_bucket_index(gh1, ghash_entryhash(gh1, e));
			if (ghash_lookup_entry_ex(gh1, e->key, probe_bucket) == NULL) {
				return false;
			}
			e = e->next;
		}
	}
	return true;
}
/**
 * Check whether \a gh2 keys are a superset of \a gh1 keys.
 * gh1 <= gh2
 *
 * Note: Strict superset is (gh1 <= gh2) && (gh1->nentries != gh2->nentries).
 */
bool BLI_ghash_issuperset(GHash *gh1, GHash *gh2)
{
/* Superset is simply subset with swapped arguments. */
return BLI_ghash_issubset(gh2, gh1);
}
/** \} */
@@ -1341,6 +1789,66 @@ void BLI_gset_flag_clear(GSet *gs, unsigned int flag)
((GHash *)gs)->flag &= ~flag;
}
/* GSet set-like predicates: thin wrappers over the GHash ones
 * (a GSet shares GHash's layout, just without values). */
bool BLI_gset_isdisjoint(GSet *gs1, GSet *gs2)
{
return BLI_ghash_isdisjoint((GHash *)gs1, (GHash *)gs2);
}
bool BLI_gset_isequal(GSet *gs1, GSet *gs2)
{
return BLI_ghash_isequal((GHash *)gs1, (GHash *)gs2);
}
bool BLI_gset_issubset(GSet *gs1, GSet *gs2)
{
return BLI_ghash_issubset((GHash *)gs1, (GHash *)gs2);
}
bool BLI_gset_issuperset(GSet *gs1, GSet *gs2)
{
/* Superset is subset with swapped arguments. */
return BLI_ghash_issubset((GHash *)gs2, (GHash *)gs1);
}
/**
 * Union (no left to right/right to left here, this makes no sense in set context (i.e. no value)).
 * If \a gs1 is NULL, a new GSet is returned, otherwise \a gs1 is modified in place.
 *
 * NOTE(review): file-scope identifiers starting with an underscore (`_bli_...`) are
 * reserved by the C standard (7.1.3) -- consider renaming; confirm project convention.
 */
GSet *_bli_gset_union(GSetKeyCopyFP keycopyfp, GSet **gset_arr, const size_t nbr_gset_arr)
{
/* GSets have no values: pass NULL for all value callbacks. */
return (GSet *)ghash_merge(false, keycopyfp, NULL, NULL, NULL, (GHash **)gset_arr, nbr_gset_arr);
}
/**
 * Intersection (i.e. entries which keys exist in all gs1, gs2, ...).
 * If \a gs1 is NULL, a new GSet is returned, otherwise \a gs1 is modified in place.
 */
GSet *_bli_gset_intersection(
GSetKeyCopyFP keycopyfp, GSetKeyFreeFP keyfreefp, GSet **gset_arr, const size_t nbr_gset_arr)
{
return (GSet *)ghash_intersection(keycopyfp, NULL, keyfreefp, NULL, (GHash **)gset_arr, nbr_gset_arr);
}
/**
 * Difference, i.e. remove all entries in \a gs1 which keys are present in \a gs2 or any subsequent given GSet.
 * If \a gs1 is NULL, a new GSet is returned, otherwise \a gs1 is modified in place.
 */
GSet *_bli_gset_difference(
GSetKeyCopyFP keycopyfp, GSetKeyFreeFP keyfreefp, GSet **gset_arr, const size_t nbr_gset_arr)
{
return (GSet *)ghash_difference(keycopyfp, NULL, keyfreefp, NULL, (GHash **)gset_arr, nbr_gset_arr);
}
/**
 * Symmetric difference,
 * i.e. such as \a gs1 to only contain entries which keys are present in one and only one of all given gset.
 * If \a gs1 is NULL, a new GSet is returned, otherwise \a gs1 is modified in place.
 */
GSet *_bli_gset_symmetric_difference(
GSetKeyCopyFP keycopyfp, GSetKeyFreeFP keyfreefp, GSet **gset_arr, const size_t nbr_gset_arr)
{
return (GSet *)ghash_symmetric_difference(keycopyfp, NULL, keyfreefp, NULL, (GHash **)gset_arr, nbr_gset_arr);
}
/** \} */

View File

@@ -15,13 +15,14 @@ extern "C" {
/* Only keeping this in case here, for now. */
/* Print bucket-quality statistics of a GHash (or a GSet cast to GHash).
 * Fixes vs. previous version: removed leftover duplicated pre-edit lines from the diff,
 * renamed the reserved identifier `__gh` (double underscore is implementation-reserved),
 * and use the conventional `} (void)0` terminator instead of `} void (0)`. */
#define PRINTF_GHASH_STATS(_gh) \
{ \
	GHash *gh_stats = (GHash *)(_gh); \
	double q, lf, var, pempty, poverloaded; \
	int bigb; \
	q = BLI_ghash_calc_quality_ex(gh_stats, &lf, &var, &pempty, &poverloaded, &bigb); \
	printf("GHash stats (%d entries):\n\t" \
	       "Quality (the lower the better): %f\n\tVariance (the lower the better): %f\n\tLoad: %f\n\t" \
	       "Empty buckets: %.2f%%\n\tOverloaded buckets: %.2f%% (biggest bucket: %d)\n", \
	       BLI_ghash_size(gh_stats), q, var, lf, pempty * 100.0, poverloaded * 100.0, bigb); \
} (void)0
/* Note: for pure-ghash testing, nature of the keys and data have absolutely no importance! So here we just use mere
@@ -156,3 +157,286 @@ TEST(ghash, Copy)
BLI_ghash_free(ghash, NULL, NULL);
BLI_ghash_free(ghash_copy, NULL, NULL);
}
/* Check disjoint. */
TEST(ghash, Disjoint)
{
	GHash *ghash_1 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
	GHash *ghash_2 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
	unsigned int keys[TESTCASE_SIZE];
	const int half = (TESTCASE_SIZE - 2) / 2;
	int idx;

	init_keys(keys, 40);

	/* Fill the two hashes from two non-overlapping ranges of the keys. */
	for (idx = 0; idx < half; idx++) {
		BLI_ghash_insert(ghash_1, SET_UINT_IN_POINTER(keys[idx]), SET_UINT_IN_POINTER(keys[idx]));
	}
	for (idx = half; idx < 2 * half; idx++) {
		/* Because values should have no effect at all here. */
		BLI_ghash_insert(ghash_2, SET_UINT_IN_POINTER(keys[idx]), SET_UINT_IN_POINTER((unsigned int)(2 * half - 1 - idx)));
	}

	EXPECT_TRUE(BLI_ghash_isdisjoint(ghash_1, ghash_2));

	/* Sharing a single key breaks disjointness... */
	BLI_ghash_insert(ghash_2, SET_UINT_IN_POINTER(keys[0]), SET_UINT_IN_POINTER(keys[0]));
	EXPECT_FALSE(BLI_ghash_isdisjoint(ghash_1, ghash_2));

	/* ...and removing it restores it. */
	BLI_ghash_remove(ghash_2, SET_UINT_IN_POINTER(keys[0]), NULL, NULL);
	EXPECT_TRUE(BLI_ghash_isdisjoint(ghash_1, ghash_2));

	BLI_ghash_free(ghash_1, NULL, NULL);
	BLI_ghash_free(ghash_2, NULL, NULL);
}
/* Check equality. */
TEST(ghash, Equal)
{
	GHash *ghash_1 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
	GHash *ghash_2 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
	unsigned int keys[TESTCASE_SIZE];
	int idx;

	init_keys(keys, 50);

	/* Same keys in both hashes; values differ on purpose, they must not matter here. */
	for (idx = 0; idx < TESTCASE_SIZE; idx++) {
		BLI_ghash_insert(ghash_1, SET_UINT_IN_POINTER(keys[idx]), SET_UINT_IN_POINTER(keys[idx]));
	}
	for (idx = 0; idx < TESTCASE_SIZE; idx++) {
		BLI_ghash_insert(ghash_2, SET_UINT_IN_POINTER(keys[idx]), SET_INT_IN_POINTER(idx));
	}

	EXPECT_TRUE(BLI_ghash_isequal(ghash_1, ghash_2));

	/* Removing any single key must break equality... */
	BLI_ghash_remove(ghash_2, SET_UINT_IN_POINTER(keys[TESTCASE_SIZE / 2]), NULL, NULL);
	EXPECT_FALSE(BLI_ghash_isequal(ghash_1, ghash_2));

	/* ...and re-adding it must restore it. */
	BLI_ghash_insert(ghash_2, SET_UINT_IN_POINTER(keys[TESTCASE_SIZE / 2]), SET_UINT_IN_POINTER(keys[TESTCASE_SIZE / 2]));
	EXPECT_TRUE(BLI_ghash_isequal(ghash_1, ghash_2));

	BLI_ghash_free(ghash_1, NULL, NULL);
	BLI_ghash_free(ghash_2, NULL, NULL);
}
/* Check subset. */
TEST(ghash, Subset)
{
	GHash *ghash_1 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
	GHash *ghash_2 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
	unsigned int keys[TESTCASE_SIZE];
	int idx;

	init_keys(keys, 60);

	/* ghash_1 gets all keys, ghash_2 only the first half. */
	for (idx = 0; idx < TESTCASE_SIZE; idx++) {
		BLI_ghash_insert(ghash_1, SET_UINT_IN_POINTER(keys[idx]), SET_UINT_IN_POINTER(keys[idx]));
	}
	for (idx = 0; idx < TESTCASE_SIZE / 2; idx++) {
		/* Because values should have no effect at all here. */
		BLI_ghash_insert(ghash_2, SET_UINT_IN_POINTER(keys[idx]), SET_INT_IN_POINTER(idx));
	}

	EXPECT_TRUE(BLI_ghash_issubset(ghash_1, ghash_2));

	/* Removing one of ghash_2's keys from ghash_1 breaks the subset relation... */
	BLI_ghash_remove(ghash_1, SET_UINT_IN_POINTER(keys[0]), NULL, NULL);
	EXPECT_FALSE(BLI_ghash_issubset(ghash_1, ghash_2));

	/* ...and re-adding it restores it. */
	BLI_ghash_insert(ghash_1, SET_UINT_IN_POINTER(keys[0]), SET_UINT_IN_POINTER(keys[0]));
	EXPECT_TRUE(BLI_ghash_issubset(ghash_1, ghash_2));

	BLI_ghash_free(ghash_1, NULL, NULL);
	BLI_ghash_free(ghash_2, NULL, NULL);
}
/* Check Union (straight only since no ghash ops here). */
TEST(gset, Union)
{
	GSet *gset_1 = BLI_gset_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
	GSet *gset_2 = BLI_gset_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
	GSet *gset_U = NULL;
	unsigned int keys[TESTCASE_SIZE];
	int idx;

	init_keys(keys, 70);

	/* Both sets get the same first half of the keys. */
	for (idx = 0; idx < TESTCASE_SIZE / 2; idx++) {
		BLI_gset_insert(gset_1, SET_UINT_IN_POINTER(keys[idx]));
		BLI_gset_insert(gset_2, SET_UINT_IN_POINTER(keys[idx]));
	}
	EXPECT_TRUE(BLI_gset_isequal(gset_1, gset_2));

	/* Union of two equal sets is that same set. */
	gset_U = BLI_gset_union(NULL, NULL, gset_1, gset_2);
	EXPECT_TRUE(BLI_gset_isequal(gset_U, gset_1));

#if 0 /* Checking validity of values handling, not applicable to gset :/ */
	for (i = 0, k = keys; i < TESTCASE_SIZE / 2; i++, k++) {
		void *v = BLI_ghash_lookup(ghash_U, SET_UINT_IN_POINTER(*k));
		EXPECT_EQ(*k, GET_UINT_FROM_POINTER(v));
		v = BLI_ghash_lookup(ghash_U_rev, SET_UINT_IN_POINTER(*k));
		EXPECT_EQ(i, GET_INT_FROM_POINTER(v));
	}
#endif

	BLI_gset_free(gset_2, NULL);
	BLI_gset_free(gset_U, NULL);

	/* Rebuild gset_2 from the second half of the keys: disjoint from gset_1. */
	gset_2 = BLI_gset_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
	for (idx = TESTCASE_SIZE / 2; idx < TESTCASE_SIZE; idx++) {
		BLI_gset_insert(gset_2, SET_UINT_IN_POINTER(keys[idx]));
	}
	EXPECT_TRUE(BLI_gset_isdisjoint(gset_1, gset_2));

	/* Union of disjoint sets must contain both operands. */
	gset_U = BLI_gset_union(NULL, NULL, gset_1, gset_2);
	EXPECT_TRUE(BLI_gset_issubset(gset_U, gset_1));
	EXPECT_TRUE(BLI_gset_issubset(gset_U, gset_2));

	BLI_gset_free(gset_1, NULL);
	BLI_gset_free(gset_2, NULL);
	BLI_gset_free(gset_U, NULL);
}
/* Check Intersection. */
TEST(gset, Intersection)
{
	GSet *gset_1 = BLI_gset_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
	GSet *gset_2 = BLI_gset_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
	GSet *gset_I;
	unsigned int keys[TESTCASE_SIZE];
	int idx;

	init_keys(keys, 80);

	/* Both sets get the same first half of the keys. */
	for (idx = 0; idx < TESTCASE_SIZE / 2; idx++) {
		BLI_gset_insert(gset_1, SET_UINT_IN_POINTER(keys[idx]));
		BLI_gset_insert(gset_2, SET_UINT_IN_POINTER(keys[idx]));
	}
	EXPECT_TRUE(BLI_gset_isequal(gset_1, gset_2));

	/* Intersection of two equal sets is that same set. */
	gset_I = BLI_gset_intersection(NULL, NULL, NULL, gset_1, gset_2);
	EXPECT_TRUE(BLI_gset_isequal(gset_I, gset_1));

	BLI_gset_free(gset_2, NULL);
	BLI_gset_free(gset_I, NULL);

	/* Rebuild gset_2 from the second half of the keys: disjoint from gset_1. */
	gset_2 = BLI_gset_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
	for (idx = TESTCASE_SIZE / 2; idx < TESTCASE_SIZE; idx++) {
		BLI_gset_insert(gset_2, SET_UINT_IN_POINTER(keys[idx]));
	}
	EXPECT_TRUE(BLI_gset_isdisjoint(gset_1, gset_2));

	/* Intersection of disjoint sets is empty. */
	gset_I = BLI_gset_intersection(NULL, NULL, NULL, gset_1, gset_2);
	EXPECT_EQ(0, BLI_gset_size(gset_I));

	BLI_gset_free(gset_1, NULL);
	BLI_gset_free(gset_2, NULL);
	BLI_gset_free(gset_I, NULL);
}
/* Check Difference. */
TEST(gset, Difference)
{
	GSet *gset_1 = BLI_gset_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
	GSet *gset_2 = BLI_gset_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
	GSet *gset_D;
	unsigned int keys[TESTCASE_SIZE];
	int idx;

	init_keys(keys, 90);

	/* Both sets get the same first half of the keys. */
	for (idx = 0; idx < TESTCASE_SIZE / 2; idx++) {
		BLI_gset_insert(gset_1, SET_UINT_IN_POINTER(keys[idx]));
		BLI_gset_insert(gset_2, SET_UINT_IN_POINTER(keys[idx]));
	}
	EXPECT_TRUE(BLI_gset_isequal(gset_1, gset_2));

	/* Difference of two equal sets is empty. */
	gset_D = BLI_gset_difference(NULL, NULL, NULL, gset_1, gset_2);
	EXPECT_EQ(0, BLI_gset_size(gset_D));

	BLI_gset_free(gset_2, NULL);
	BLI_gset_free(gset_D, NULL);

	/* Rebuild gset_2 from the second half of the keys: disjoint from gset_1. */
	gset_2 = BLI_gset_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
	for (idx = TESTCASE_SIZE / 2; idx < TESTCASE_SIZE; idx++) {
		BLI_gset_insert(gset_2, SET_UINT_IN_POINTER(keys[idx]));
	}
	EXPECT_TRUE(BLI_gset_isdisjoint(gset_1, gset_2));

	/* Difference with a disjoint set leaves the first set unchanged. */
	gset_D = BLI_gset_difference(NULL, NULL, NULL, gset_1, gset_2);
	EXPECT_TRUE(BLI_gset_isequal(gset_D, gset_1));

	BLI_gset_free(gset_1, NULL);
	BLI_gset_free(gset_2, NULL);
	BLI_gset_free(gset_D, NULL);
}
/* Check Symmetric Difference. */
TEST(gset, SymmDiff)
{
	GSet *gset_1 = BLI_gset_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
	GSet *gset_2 = BLI_gset_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
	GSet *gset_SD;
	unsigned int keys[TESTCASE_SIZE];
	int idx;

	init_keys(keys, 100);

	/* Both sets get the same first half of the keys. */
	for (idx = 0; idx < TESTCASE_SIZE / 2; idx++) {
		BLI_gset_insert(gset_1, SET_UINT_IN_POINTER(keys[idx]));
		BLI_gset_insert(gset_2, SET_UINT_IN_POINTER(keys[idx]));
	}
	EXPECT_TRUE(BLI_gset_isequal(gset_1, gset_2));

	/* Symmetric difference of two equal sets is empty. */
	gset_SD = BLI_gset_symmetric_difference(NULL, NULL, NULL, gset_1, gset_2);
	EXPECT_EQ(0, BLI_gset_size(gset_SD));

	/* Symmetric difference with the empty set is the other set (in-place on gset_SD). */
	gset_SD = BLI_gset_symmetric_difference(NULL, NULL, gset_SD, gset_1);
	EXPECT_TRUE(BLI_gset_isequal(gset_SD, gset_1));

	BLI_gset_free(gset_2, NULL);
	BLI_gset_free(gset_SD, NULL);

	/* Rebuild gset_2 from the second half of the keys: disjoint from gset_1. */
	gset_2 = BLI_gset_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
	for (idx = TESTCASE_SIZE / 2; idx < TESTCASE_SIZE; idx++) {
		BLI_gset_insert(gset_2, SET_UINT_IN_POINTER(keys[idx]));
	}
	EXPECT_TRUE(BLI_gset_isdisjoint(gset_1, gset_2));

	/* Disjoint sets: symmetric difference is their union... */
	gset_SD = BLI_gset_symmetric_difference(NULL, NULL, NULL, gset_1, gset_2);
	EXPECT_EQ(TESTCASE_SIZE, BLI_gset_size(gset_SD));

	/* ...and applying both sets again cancels everything out. */
	gset_SD = BLI_gset_symmetric_difference(NULL, NULL, gset_SD, gset_1, gset_2);
	EXPECT_EQ(0, BLI_gset_size(gset_SD));

	BLI_gset_free(gset_1, NULL);
	BLI_gset_free(gset_2, NULL);
	BLI_gset_free(gset_SD, NULL);
}