about summary refs log tree commit diff stats homepage
path: root/src/hashtables.c
diff options
context:
space:
mode:
authorJason A. Donenfeld <Jason@zx2c4.com>2016-12-16 06:56:39 +0100
committerJason A. Donenfeld <Jason@zx2c4.com>2016-12-16 06:56:39 +0100
commit2b542fa8d05f465930532e58cc128496f9a53505 (patch)
tree3bc29fdacf87d3a34440496a4147177f1c70b05b /src/hashtables.c
parentmessages: increase header by 3 bytes for alignment (diff)
downloadwireguard-monolithic-historical-2b542fa8d05f465930532e58cc128496f9a53505.tar.xz
wireguard-monolithic-historical-2b542fa8d05f465930532e58cc128496f9a53505.zip
hashtables: use counter and int to ensure forward progress
Diffstat (limited to 'src/hashtables.c')
-rw-r--r--src/hashtables.c11
1 file changed, 2 insertions(+), 9 deletions(-)
diff --git a/src/hashtables.c b/src/hashtables.c
index 45c9737..534ad55 100644
--- a/src/hashtables.c
+++ b/src/hashtables.c
@@ -65,16 +65,10 @@ void index_hashtable_init(struct index_hashtable *table)
spin_lock_init(&table->lock);
}
-#if BITS_PER_LONG == 64
-#define get_random_u64() get_random_long()
-#else
-#define get_random_u64() (((u64)get_random_int() << 32) | get_random_int())
-#endif
-
__le32 index_hashtable_insert(struct index_hashtable *table, struct index_hashtable_entry *entry)
{
struct index_hashtable_entry *existing_entry;
- u64 rand;
+ u32 counter = get_random_int();
spin_lock(&table->lock);
hlist_del_init_rcu(&entry->index_hash);
@@ -84,8 +78,7 @@ __le32 index_hashtable_insert(struct index_hashtable *table, struct index_hashta
search_unused_slot:
/* First we try to find an unused slot, randomly, while unlocked. */
- rand = get_random_u64();
- entry->index = (__force __le32)siphash_1u64(rand, table->key);
+ entry->index = (__force __le32)siphash_2u32(get_random_int(), counter++, table->key);
hlist_for_each_entry_rcu(existing_entry, index_bucket(table, entry->index), index_hash) {
if (existing_entry->index == entry->index)
goto search_unused_slot; /* If it's already in use, we continue searching. */