/*
 * Per-variant accessor macros for the bihash allocation arena.
 *
 * In BIHASH_32_64_SVM builds the shared header is reached through a
 * pointer ((h)->sh)->field, since it lives in a shared-memory segment;
 * otherwise the header is embedded in the hash and accessed as
 * (h)->sh.field.  The #undefs allow this template header to be
 * instantiated more than once without redefinition warnings.
 */
#if BIHASH_32_64_SVM
#undef alloc_arena_next
#undef alloc_arena_size
#undef alloc_arena
#undef CLIB_BIHASH_READY_MAGIC
/* next free offset / total size / base address of the allocation arena */
#define alloc_arena_next(h) (((h)->sh)->alloc_arena_next)
#define alloc_arena_size(h) (((h)->sh)->alloc_arena_size)
#define alloc_arena(h) ((h)->alloc_arena)
/* non-zero sentinel: shared segment needs an "already initialized" marker */
#define CLIB_BIHASH_READY_MAGIC 0xFEEDFACE
#else
#undef alloc_arena_next
#undef alloc_arena_size
#undef alloc_arena
#undef CLIB_BIHASH_READY_MAGIC
#define alloc_arena_next(h) ((h)->sh.alloc_arena_next)
#define alloc_arena_size(h) ((h)->sh.alloc_arena_size)
#define alloc_arena(h) ((h)->alloc_arena)
/* no readiness handshake needed for process-private tables */
#define CLIB_BIHASH_READY_MAGIC 0
#endif
+
+static inline void BV (clib_bihash_alloc_lock) (BVT (clib_bihash) * h)
+{
+ while (__atomic_test_and_set (h->alloc_lock, __ATOMIC_ACQUIRE))
+ CLIB_PAUSE ();
+}
+
/*
 * Release the allocator lock taken by clib_bihash_alloc_lock.
 * __ATOMIC_RELEASE orders all prior allocator updates before the
 * lock word is cleared.
 */
static inline void BV (clib_bihash_alloc_unlock) (BVT (clib_bihash) * h)
{
  __atomic_clear (h->alloc_lock, __ATOMIC_RELEASE);
}
+
+static inline void BV (clib_bihash_lock_bucket) (BVT (clib_bihash_bucket) * b)
+{
+ BVT (clib_bihash_bucket) unlocked_bucket, locked_bucket;
+
+ do
+ {
+ locked_bucket.as_u64 = unlocked_bucket.as_u64 = b->as_u64;
+ unlocked_bucket.lock = 0;
+ locked_bucket.lock = 1;
+ CLIB_PAUSE ();
+ }
+ while (__atomic_compare_exchange_n (&b->as_u64, &unlocked_bucket.as_u64,
+ locked_bucket.as_u64, 1 /* weak */ ,
+ __ATOMIC_ACQUIRE,
+ __ATOMIC_ACQUIRE) == 0);
+}
+
/*
 * Release a bucket lock taken by clib_bihash_lock_bucket.
 * CLIB_MEMORY_BARRIER keeps preceding bucket updates from being
 * reordered past the plain store that clears the lock bit.
 */
static inline void BV (clib_bihash_unlock_bucket)
  (BVT (clib_bihash_bucket) * b)
{
  CLIB_MEMORY_BARRIER ();
  b->lock = 0;
}