#include <vppinfra/config.h>
+#ifdef __x86_64__
+#include <x86intrin.h>
+#endif
+
/* Standalone means to not assume we are running on a Unix box. */
#if ! defined (CLIB_STANDALONE) && ! defined (CLIB_LINUX_KERNEL)
#define CLIB_UNIX
#define CLIB_PACKED(x) x __attribute__ ((packed))
#define CLIB_UNUSED(x) x __attribute__ ((unused))
+/* similar to CLIB_CACHE_LINE_ALIGN_MARK() but with arbitrary alignment */
+#define CLIB_ALIGN_MARK(name, alignment) u8 name[0] __attribute__((aligned(alignment)))
+
/* Make a string from the macro's argument */
#define CLIB_STRING_MACRO(x) #x
#define PREDICT_FALSE(x) __builtin_expect((x),0)
#define PREDICT_TRUE(x) __builtin_expect((x),1)
+/*
+ * Compiler barrier
+ * Prevents the compiler from reordering memory accesses across this boundary
+ * and from caching values in registers (forces a reload).
+ * Not to be confused with the CPU memory barrier below.
+ */
+#define CLIB_COMPILER_BARRIER() asm volatile ("":::"memory")
+
/* Full memory barrier (read and write). */
#define CLIB_MEMORY_BARRIER() __sync_synchronize ()
return f * flt_round_nearest (x / f);
}
+/* Extract 'count' bits from 'x' starting at bit 'start'.
+ * Uses the BMI BEXTR intrinsic when available; note x86intrin.h is only
+ * included under __x86_64__ above, so the intrinsic path must also require
+ * __x86_64__ (32-bit x86 with -mbmi would otherwise hit an implicit
+ * declaration of _bextr_u64). */
+always_inline uword
+extract_bits (uword x, int start, int count)
+{
+#if defined(__BMI__) && defined(__x86_64__)
+  return _bextr_u64 (x, start, count);
+#endif
+  /* Portable fallback: shift down, then mask to the low 'count' bits. */
+  return (x >> start) & pow2_mask (count);
+}
+
#define clib_max(x,y) \
({ \
__typeof__ (x) _x = (x); \