*/
/*
 * Template instantiation parameters for the 40-byte-key / 8-byte-value
 * bihash variant.  These are intentionally (re)defined on every include,
 * before the guard, because bihash_template.h consumes them each time.
 * NOTE(review): unresolved diff markers were embedded here; the hunk
 * removes BIHASH_KVP_CACHE_SIZE (the KVP cache was dropped from the
 * template), so those lines are gone in the resolved version.
 */
#undef BIHASH_TYPE
#undef BIHASH_KVP_PER_PAGE
#define BIHASH_TYPE _40_8
#define BIHASH_KVP_PER_PAGE 4
#ifndef __included_bihash_40_8_h__
#define __included_bihash_40_8_h__
static inline int
clib_bihash_is_free_40_8 (const clib_bihash_kv_40_8_t * v)
{
- /* Free values are memset to 0xff, check a bit... */
+ /* Free values are clib_memset to 0xff, check a bit... */
if (v->key[0] == ~0ULL && v->value == ~0ULL)
return 1;
return 0;
v = u64x8_load_unaligned (a) ^ u64x8_load_unaligned (b);
return (u64x8_is_zero_mask (v) & 0x1f) == 0;
#elif defined (CLIB_HAVE_VEC256)
- u64x4 v;
- v = u64x4_load_unaligned (a) ^ u64x4_load_unaligned (b);
- v |= u64x4_load_unaligned (a + 1) ^ u64x4_load_unaligned (b + 1);
+ u64x4 v = { a[4] ^ b[4], 0, 0, 0 };
+ v |= u64x4_load_unaligned (a) ^ u64x4_load_unaligned (b);
return u64x4_is_all_zero (v);
#elif defined(CLIB_HAVE_VEC128) && defined(CLIB_HAVE_VEC128_UNALIGNED_LOAD_STORE)
- u64x2 v;
- v = u64x2_load_unaligned (a) ^ u64x2_load_unaligned (b);
+ u64x2 v = { a[4] ^ b[4], 0 };
+ v |= u64x2_load_unaligned (a) ^ u64x2_load_unaligned (b);
v |= u64x2_load_unaligned (a + 2) ^ u64x2_load_unaligned (b + 2);
- v |= u64x2_load_unaligned (a + 3) ^ u64x2_load_unaligned (b + 3);
return u64x2_is_all_zero (v);
#else
return ((a[0] ^ b[0]) | (a[1] ^ b[1]) | (a[2] ^ b[2]) | (a[3] ^ b[3])