[Bf-blender-cvs] [b860684] temp-ghash-basis: rename bucket_hash -> bucket_index

Campbell Barton noreply at git.blender.org
Thu Mar 19 17:30:08 CET 2015


Commit: b86068482db8373b85b43803d2e0582fec8bb3f6
Author: Campbell Barton
Date:   Fri Mar 20 03:29:31 2015 +1100
Branches: temp-ghash-basis
https://developer.blender.org/rBb86068482db8373b85b43803d2e0582fec8bb3f6

rename bucket_hash -> bucket_index

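For context, the renamed helper maps an already-computed full hash to a bucket index in one of
two ways, depending on whether GHASH_USE_MODULO_BUCKETS is defined (see the first hunk in the
diff below). The standalone sketch here only illustrates those two strategies; the function
names and plain integer parameters are illustrative stand-ins, not Blender's GHash API.

/* Illustrative sketch of the two bucket-index strategies used in BLI_ghash.c.
 * The plain parameters stand in for gh->nbuckets / gh->bucket_mask. */
static unsigned int bucket_index_modulo(unsigned int hash, unsigned int nbuckets)
{
	/* GHASH_USE_MODULO_BUCKETS path: the bucket count need not be a power of two. */
	return hash % nbuckets;
}

static unsigned int bucket_index_mask(unsigned int hash, unsigned int bucket_mask)
{
	/* Mask path: assumes nbuckets is a power of two and bucket_mask == nbuckets - 1. */
	return hash & bucket_mask;
}

With 8 buckets, for example, both strategies give the same index for hash 47:
47 % 8 == 7 and 47 & 7 == 7.
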
===================================================================

M	source/blender/blenlib/intern/BLI_ghash.c

===================================================================

diff --git a/source/blender/blenlib/intern/BLI_ghash.c b/source/blender/blenlib/intern/BLI_ghash.c
index e9d0061..a2233c3 100644
--- a/source/blender/blenlib/intern/BLI_ghash.c
+++ b/source/blender/blenlib/intern/BLI_ghash.c
@@ -157,10 +157,10 @@ BLI_INLINE unsigned int ghash_entryhash(GHash *gh, const Entry *e)
 /**
  * Get the bucket-hash for an already-computed full hash.
  */
-BLI_INLINE unsigned int ghash_bucket_hash(GHash *gh, const unsigned int full_hash)
+BLI_INLINE unsigned int ghash_bucket_index(GHash *gh, const unsigned int hash)
 {
 #ifdef GHASH_USE_MODULO_BUCKETS
-	return full_hash % gh->nbuckets;
+	return hash % gh->nbuckets;
 #else
 	return full_hash & gh->bucket_mask;
 #endif
@@ -194,10 +194,10 @@ static void ghash_buckets_resize(GHash *gh, const unsigned int nbuckets)
 				Entry *e_next;
 				for (e = buckets_old[i]; e; e = e_next) {
 					const unsigned hash = ghash_entryhash(gh, e);
-					const unsigned bucket_hash = ghash_bucket_hash(gh, hash);
+					const unsigned bucket_index = ghash_bucket_index(gh, hash);
 					e_next = e->next;
-					e->next = buckets_new[bucket_hash];
-					buckets_new[bucket_hash] = e;
+					e->next = buckets_new[bucket_index];
+					buckets_new[bucket_index] = e;
 				}
 			}
 		}
@@ -207,20 +207,20 @@ static void ghash_buckets_resize(GHash *gh, const unsigned int nbuckets)
 				Entry *e_next;
 				for (e = buckets_old[i]; e; e = e_next) {
 					const unsigned hash = ghash_entryhash(gh, e);
-					const unsigned bucket_hash = ghash_bucket_hash(gh, hash);
+					const unsigned bucket_index = ghash_bucket_index(gh, hash);
 					e_next = e->next;
-					e->next = buckets_new[bucket_hash];
-					buckets_new[bucket_hash] = e;
+					e->next = buckets_new[bucket_index];
+					buckets_new[bucket_index] = e;
 				}
 #else
 				/* No need to recompute hashes in this case, since our mask is just smaller, all items in old bucket i
 				 * will go in same new bucket (i & new_mask)! */
-				const unsigned bucket_hash = ghash_bucket_hash(gh, i);
-				BLI_assert(!buckets_old[i] || (bucket_hash == ghash_bucket_hash(gh, ghash_entryhash(gh, buckets_old[i]))));
+				const unsigned bucket_index = ghash_bucket_index(gh, i);
+				BLI_assert(!buckets_old[i] || (bucket_index == ghash_bucket_index(gh, ghash_entryhash(gh, buckets_old[i]))));
 				for (e = buckets_old[i]; e && e->next; e = e->next);
 				if (e) {
-					e->next = buckets_new[bucket_hash];
-					buckets_new[bucket_hash] = buckets_old[i];
+					e->next = buckets_new[bucket_index];
+					buckets_new[bucket_index] = buckets_old[i];
 				}
 #endif
 			}
@@ -357,15 +357,15 @@ BLI_INLINE void ghash_buckets_reset(GHash *gh, const unsigned int nentries)
 
 /**
  * Internal lookup function.
- * Takes hash and bucket_hash arguments to avoid calling #ghash_keyhash and #ghash_bucket_hash multiple times.
+ * Takes hash and bucket_index arguments to avoid calling #ghash_keyhash and #ghash_bucket_index multiple times.
  */
 BLI_INLINE Entry *ghash_lookup_entry_ex(
-        GHash *gh, const void *key, const unsigned int bucket_hash)
+        GHash *gh, const void *key, const unsigned int bucket_index)
 {
 	Entry *e;
 	/* If we do not store GHash, not worth computing it for each entry here!
 	 * Typically, comparison function will be quicker, and since it's needed in the end anyway... */
-	for (e = gh->buckets[bucket_hash]; e; e = e->next) {
+	for (e = gh->buckets[bucket_index]; e; e = e->next) {
 		if (UNLIKELY(gh->cmpfp(key, e->key) == false)) {
 			return e;
 		}
@@ -376,17 +376,17 @@ BLI_INLINE Entry *ghash_lookup_entry_ex(
 
 /**
  * Internal lookup function, returns previous entry of target one too.
- * Takes hash and bucket_hash arguments to avoid calling #ghash_keyhash and #ghash_bucket_hash multiple times.
+ * Takes hash and bucket_index arguments to avoid calling #ghash_keyhash and #ghash_bucket_index multiple times.
  * Useful when modifying buckets somehow (like removing an entry...).
  */
 BLI_INLINE Entry *ghash_lookup_entry_prev_ex(
-        GHash *gh, const void *key, Entry **r_e_prev, const unsigned int bucket_hash)
+        GHash *gh, const void *key, Entry **r_e_prev, const unsigned int bucket_index)
 {
 	Entry *e, *e_prev = NULL;
 
 	/* If we do not store GHash, not worth computing it for each entry here!
 	 * Typically, comparison function will be quicker, and since it's needed in the end anyway... */
-	for (e = gh->buckets[bucket_hash]; e; e_prev = e, e = e->next) {
+	for (e = gh->buckets[bucket_index]; e; e_prev = e, e = e->next) {
 		if (UNLIKELY(gh->cmpfp(key, e->key) == false)) {
 			*r_e_prev = e_prev;
 			return e;
@@ -403,8 +403,8 @@ BLI_INLINE Entry *ghash_lookup_entry_prev_ex(
 BLI_INLINE Entry *ghash_lookup_entry(GHash *gh, const void *key)
 {
 	const unsigned int hash = ghash_keyhash(gh, key);
-	const unsigned int bucket_hash = ghash_bucket_hash(gh, hash);
-	return ghash_lookup_entry_ex(gh, key, bucket_hash);
+	const unsigned int bucket_index = ghash_bucket_index(gh, hash);
+	return ghash_lookup_entry_ex(gh, key, bucket_index);
 }
 
 static GHash *ghash_new(GHashHashFP hashfp, GHashCmpFP cmpfp, const char *info,
@@ -426,20 +426,20 @@ static GHash *ghash_new(GHashHashFP hashfp, GHashCmpFP cmpfp, const char *info,
 
 /**
  * Internal insert function.
- * Takes hash and bucket_hash arguments to avoid calling #ghash_keyhash and #ghash_bucket_hash multiple times.
+ * Takes hash and bucket_index arguments to avoid calling #ghash_keyhash and #ghash_bucket_index multiple times.
  */
 BLI_INLINE void ghash_insert_ex(
-        GHash *gh, void *key, void *val, const unsigned int bucket_hash)
+        GHash *gh, void *key, void *val, const unsigned int bucket_index)
 {
 	GHashEntry *e = BLI_mempool_alloc(gh->entrypool);
 
 	BLI_assert((gh->flag & GHASH_FLAG_ALLOW_DUPES) || (BLI_ghash_haskey(gh, key) == 0));
 	BLI_assert(!(gh->flag & GHASH_FLAG_IS_GSET));
 
-	e->e.next = gh->buckets[bucket_hash];
+	e->e.next = gh->buckets[bucket_index];
 	e->e.key = key;
 	e->val = val;
-	gh->buckets[bucket_hash] = (Entry *)e;
+	gh->buckets[bucket_index] = (Entry *)e;
 
 	ghash_buckets_expand(gh, ++gh->nentries, false);
 }
@@ -448,16 +448,16 @@ BLI_INLINE void ghash_insert_ex(
  * Insert function that doesn't set the value (use for GSet)
  */
 BLI_INLINE void ghash_insert_ex_keyonly(
-        GHash *gh, void *key, const unsigned int bucket_hash)
+        GHash *gh, void *key, const unsigned int bucket_index)
 {
 	GSetEntry *e = BLI_mempool_alloc(gh->entrypool);
 
 	BLI_assert((gh->flag & GHASH_FLAG_ALLOW_DUPES) || (BLI_ghash_haskey(gh, key) == 0));
 	BLI_assert((gh->flag & GHASH_FLAG_IS_GSET) != 0);
 
-	e->next = gh->buckets[bucket_hash];
+	e->next = gh->buckets[bucket_index];
 	e->key = key;
-	gh->buckets[bucket_hash] = (Entry *)e;
+	gh->buckets[bucket_index] = (Entry *)e;
 
 	ghash_buckets_expand(gh, ++gh->nentries, false);
 }
@@ -465,17 +465,17 @@ BLI_INLINE void ghash_insert_ex_keyonly(
 BLI_INLINE void ghash_insert(GHash *gh, void *key, void *val)
 {
 	const unsigned int hash = ghash_keyhash(gh, key);
-	const unsigned int bucket_hash = ghash_bucket_hash(gh, hash);
+	const unsigned int bucket_index = ghash_bucket_index(gh, hash);
 
-	ghash_insert_ex(gh, key, val, bucket_hash);
+	ghash_insert_ex(gh, key, val, bucket_index);
 }
 
 BLI_INLINE bool ghash_insert_safe(
         GHash *gh, void *key, void *val, const bool override, GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp)
 {
 	const unsigned int hash = ghash_keyhash(gh, key);
-	const unsigned int bucket_hash = ghash_bucket_hash(gh, hash);
-	GHashEntry *e = (GHashEntry *)ghash_lookup_entry_ex(gh, key, bucket_hash);
+	const unsigned int bucket_index = ghash_bucket_index(gh, hash);
+	GHashEntry *e = (GHashEntry *)ghash_lookup_entry_ex(gh, key, bucket_index);
 
 	BLI_assert(!(gh->flag & GHASH_FLAG_IS_GSET));
 
@@ -489,7 +489,7 @@ BLI_INLINE bool ghash_insert_safe(
 		return false;
 	}
 	else {
-		ghash_insert_ex(gh, key, val, bucket_hash);
+		ghash_insert_ex(gh, key, val, bucket_index);
 		return true;
 	}
 }
@@ -497,8 +497,8 @@ BLI_INLINE bool ghash_insert_safe(
 BLI_INLINE bool ghash_insert_safe_keyonly(GHash *gh, void *key, const bool override, GHashKeyFreeFP keyfreefp)
 {
 	const unsigned int hash = ghash_keyhash(gh, key);
-	const unsigned int bucket_hash = ghash_bucket_hash(gh, hash);
-	GSetEntry *e = ghash_lookup_entry_ex(gh, key, bucket_hash);
+	const unsigned int bucket_index = ghash_bucket_index(gh, hash);
+	GSetEntry *e = ghash_lookup_entry_ex(gh, key, bucket_index);
 
 	BLI_assert((gh->flag & GHASH_FLAG_IS_GSET) != 0);
 
@@ -510,7 +510,7 @@ BLI_INLINE bool ghash_insert_safe_keyonly(GHash *gh, void *key, const bool overr
 		return false;
 	}
 	else {
-		ghash_insert_ex_keyonly(gh, key, bucket_hash);
+		ghash_insert_ex_keyonly(gh, key, bucket_index);
 		return true;
 	}
 }
@@ -520,10 +520,10 @@ BLI_INLINE bool ghash_insert_safe_keyonly(GHash *gh, void *key, const bool overr
  */
 static Entry *ghash_remove_ex(
         GHash *gh, void *key, GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp,
-        const unsigned int bucket_hash)
+        const unsigned int bucket_index)
 {
 	Entry *e_prev;
-	Entry *e = ghash_lookup_entry_prev_ex(gh, key, &e_prev, bucket_hash);
+	Entry *e = ghash_lookup_entry_prev_ex(gh, key, &e_prev, bucket_index);
 
 	BLI_assert(!valfreefp || !(gh->flag & GHASH_FLAG_IS_GSET));
 
@@ -532,7 +532,7 @@ static Entry *ghash_remove_ex(
 		if (valfreefp) valfreefp(((GHashEntry *)e)->val);
 
 		if (e_prev) e_prev->next = e->next;
-		else gh->buckets[bucket_hash] = e->next;
+		else gh->buckets[bucket_index] = e->next;
 
 		ghash_buckets_contract(gh, --gh->nentries, false, false);
 	}
@@ -731,8 +731,8 @@ void **BLI_ghash_lookup_p(GHash *gh, const void *key)
 bool BLI_ghash_remove(GHash *gh, void *key, GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp)
 {
 	const unsigned int hash = ghash_keyhash(gh, key);
-	const unsigned int bucket_hash = ghash_bucket_hash(gh, hash);
-	Entry *e = ghash_remove_ex(gh, key, keyfreefp, valfreefp, bucket_hash);
+	const unsigned int bucket_index = ghash_bucket_index(gh, hash);
+	Entry *e = ghash_remove_ex(gh, key, keyfreefp, valfreefp, bucket_index);
 	if (e) {
 		BLI_mempool_free(gh->entrypool, e);
 		return true;
@@ -754,8 +754,8 @@ bool BLI_ghash_remove(GHash *gh, void *key, 

@@ Diff output truncated at 10240 characters. @@
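
The doc comments in the hunks above note that the internal lookup, insert and remove helpers
take a precomputed bucket_index argument so that #ghash_keyhash and #ghash_bucket_index need
only be called once per operation. As a rough, self-contained sketch of that
hash -> bucket_index -> chain-walk pattern (the struct layout, toy hash and strcmp comparator
below are hypothetical stand-ins, not Blender's GHash API):

#include <stddef.h>
#include <string.h>

typedef struct DemoEntry {
	struct DemoEntry *next;
	const char *key;
} DemoEntry;

typedef struct DemoHash {
	DemoEntry **buckets;
	unsigned int nbuckets;  /* modulo-style bucketing, as with GHASH_USE_MODULO_BUCKETS */
} DemoHash;

static unsigned int demo_keyhash(const char *key)
{
	/* Toy djb2-style string hash, standing in for gh->hashfp(key). */
	unsigned int h = 5381;
	while (*key) {
		h = h * 33 + (unsigned char)*key++;
	}
	return h;
}

static DemoEntry *demo_lookup(DemoHash *dh, const char *key)
{
	/* Hash the key once and derive the bucket index once... */
	const unsigned int hash = demo_keyhash(key);
	const unsigned int bucket_index = hash % dh->nbuckets;

	/* ...then walk only that bucket's chain, comparing keys directly. */
	for (DemoEntry *e = dh->buckets[bucket_index]; e; e = e->next) {
		if (strcmp(key, e->key) == 0) {
			return e;
		}
	}
	return NULL;
}

int main(void)
{
	/* Two-bucket table with a single entry, just to exercise the chain walk. */
	DemoEntry e = {NULL, "suzanne"};
	DemoEntry *chains[2] = {NULL, NULL};
	DemoHash dh = {chains, 2};

	chains[demo_keyhash("suzanne") % dh.nbuckets] = &e;
	return (demo_lookup(&dh, "suzanne") == &e) ? 0 : 1;
}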
