#ifndef included_clib_h
#define included_clib_h
+#include <stddef.h>
+#include <vppinfra/config.h>
+
+#ifdef __x86_64__
+#include <x86intrin.h>
+#endif
+
/* Standalone means to not assume we are running on a Unix box. */
#if ! defined (CLIB_STANDALONE) && ! defined (CLIB_LINUX_KERNEL)
#define CLIB_UNIX
#endif
#include <vppinfra/types.h>
+#include <vppinfra/atomics.h>
/* Global DEBUG flag.  Set CLIB_DEBUG to 1 to enable, or 0 to disable,
   ASSERT (see vppinfra/error.h) and other debugging code. */
/* Number of elements in a true array (NOT valid on a pointer/parameter). */
#define ARRAY_LEN(x) (sizeof (x)/sizeof (x[0]))

/* Access a struct field through a null pointer purely for its type/size;
   only ever used inside sizeof/BITS, so it is never evaluated at run time. */
#define _STRUCT_FIELD(t,f) (((t *) 0)->f)
#define STRUCT_OFFSET_OF(t,f) offsetof(t, f)
/* Offset of field f within struct t, in bits rather than bytes. */
#define STRUCT_BIT_OFFSET_OF(t,f) (BITS(u8) * STRUCT_OFFSET_OF (t, f))
#define STRUCT_SIZE_OF(t,f) (sizeof (_STRUCT_FIELD (t, f)))
#define STRUCT_BITS_OF(t,f) (BITS (_STRUCT_FIELD (t, f)))
/* Element count of an array-typed struct field. */
#define STRUCT_ARRAY_LEN(t,f) ARRAY_LEN (_STRUCT_FIELD (t, f))
#define CLIB_PACKED(x) x __attribute__ ((packed))
#define CLIB_UNUSED(x) x __attribute__ ((unused))
/* similar to CLIB_CACHE_LINE_ALIGN_MARK() but with arbitrary alignment */
#define CLIB_ALIGN_MARK(name, alignment) u8 name[0] __attribute__((aligned(alignment)))

/* Make a string from the macro's argument */
#define CLIB_STRING_MACRO(x) #x

/* Shorthand spellings for commonly used GCC/Clang function and
   variable attributes. */
#define __clib_unused __attribute__ ((unused))
#define __clib_weak __attribute__ ((weak))
#define __clib_packed __attribute__ ((packed))
#define __clib_constructor __attribute__ ((constructor))
#define __clib_noinline __attribute__ ((noinline))
#define __clib_aligned(x) __attribute__ ((aligned(x)))
#define __clib_section(s) __attribute__ ((section(s)))
#define __clib_warn_unused_result __attribute__ ((warn_unused_result))
#define __clib_export __attribute__ ((visibility("default")))
+
#define never_inline __attribute__ ((__noinline__))
#if CLIB_DEBUG > 0
/* Hints to compiler about hot/cold code. */
#define PREDICT_FALSE(x) __builtin_expect((x),0)
#define PREDICT_TRUE(x) __builtin_expect((x),1)
+#define COMPILE_TIME_CONST(x) __builtin_constant_p (x)
+
/*
 * Compiler barrier:
 * prevents the compiler from reordering memory accesses across this
 * boundary and from caching values in registers (forces a reload).
 * Not to be confused with the CPU memory barrier below.
 */
+#define CLIB_COMPILER_BARRIER() asm volatile ("":::"memory")
/* Full memory barrier (read and write). */
#define CLIB_MEMORY_BARRIER() __sync_synchronize ()
/* Store-only barrier: on x86_64 an SFENCE suffices (cheaper than a full
   barrier); elsewhere fall back to a full memory barrier. */
#if __x86_64__
#define CLIB_MEMORY_STORE_BARRIER() __builtin_ia32_sfence ()
#else
#define CLIB_MEMORY_STORE_BARRIER() __sync_synchronize ()
#endif
+
/* Arranges for `decl` (a function declaration) to be called before main,
   via the GCC/Clang constructor attribute.  Expands to the attributed
   declaration followed by a plain re-declaration so a definition or
   further use of `decl` can follow the macro. */
#define INIT_FUNCTION(decl) \
  decl __attribute__ ((constructor)); \
  decl
/* Use __builtin_clz if available.
   NOTE: the result is undefined when x == 0 (contract of the
   __builtin_clz / __builtin_ctz families) -- callers must guard. */
#if uword_bits == 64
#define count_leading_zeros(x) __builtin_clzll (x)
#define count_trailing_zeros(x) __builtin_ctzll (x)
#else
#define count_leading_zeros(x) __builtin_clzl (x)
#define count_trailing_zeros(x) __builtin_ctzl (x)
#endif
-#ifndef count_leading_zeros
-
-/* Misc. integer arithmetic functions. */
-#if defined (i386)
-#define count_leading_zeros(count, x) \
- do { \
- word _clz; \
- __asm__ ("bsrl %1,%0" \
- : "=r" (_clz) : "rm" ((word) (x)));\
- (count) = _clz ^ 31; \
- } while (0)
-
-#define count_trailing_zeros(count, x) \
- __asm__ ("bsfl %1,%0" : "=r" (count) : "rm" ((word)(x)))
-#endif /* i386 */
-
-#if defined (__alpha__) && defined (HAVE_CIX)
-#define count_leading_zeros(count, x) \
- __asm__ ("ctlz %1,%0" \
- : "=r" ((word) (count)) \
- : "r" ((word) (x)))
-#define count_trailing_zeros(count, x) \
- __asm__ ("cttz %1,%0" \
- : "=r" ((word) (count)) \
- : "r" ((word) (x)))
-#endif /* alpha && HAVE_CIX */
-
-#if __mips >= 4
-
-/* Select between 32/64 opcodes. */
-#if uword_bits == 32
-#define count_leading_zeros(_count, _x) \
- __asm__ ("clz %[count],%[x]" \
- : [count] "=r" ((word) (_count)) \
- : [x] "r" ((word) (_x)))
+#if defined (count_leading_zeros)
+always_inline uword
+clear_lowest_set_bit (uword x)
+{
+#ifdef __BMI2__
+ return _blsr_u64 (x);
#else
-#define count_leading_zeros(_count, _x) \
- __asm__ ("dclz %[count],%[x]" \
- : [count] "=r" ((word) (_count)) \
- : [x] "r" ((word) (_x)))
+ return x ^ (1ULL << count_trailing_zeros (x));
#endif
+}
-#endif /* __mips >= 4 */
-
-#endif /* count_leading_zeros */
-
-#if defined (count_leading_zeros)
/* Index of the highest set bit of x, i.e. floor(log2(x)).
   Undefined for x == 0 (count_leading_zeros contract). */
always_inline uword
min_log2 (uword x)
{
  return BITS (uword) - count_leading_zeros (x) - 1;
}
#else
return 0 == (x & (x - 1));
}
+always_inline uword
+round_down_pow2 (uword x, uword pow2)
+{
+ return (x) & ~(pow2 - 1);
+}
+
always_inline uword
round_pow2 (uword x, uword pow2)
{
{
uword result;
#ifdef count_trailing_zeros
- count_trailing_zeros (result, x);
+ result = count_trailing_zeros (x);
#else
result = min_log2 (first_set (x));
#endif
return f * flt_round_nearest (x / f);
}
+always_inline uword
+extract_bits (uword x, int start, int count)
+{
+#ifdef __BMI__
+ return _bextr_u64 (x, start, count);
+#endif
+ return (x >> start) & pow2_mask (count);
+}
+
#define clib_max(x,y) \
({ \
__typeof__ (x) _x = (x); \
_x < _y ? _x : _y; \
})
/* Clamp x into [lo, hi]; when x is below lo, lo wins (checked first). */
#define clib_clamp(x,lo,hi) \
({ \
  __typeof__ (x) _cl_x = (x); \
  __typeof__ (lo) _cl_lo = (lo); \
  __typeof__ (hi) _cl_hi = (hi); \
  _cl_x < _cl_lo ? _cl_lo : (_cl_x > _cl_hi ? _cl_hi : _cl_x); \
})
+
#define clib_abs(x) \
({ \
__typeof__ (x) _x = (x); \
uword
clib_backtrace (uword * callers, uword max_callers, uword n_frames_to_skip);
+#include <vppinfra/byte_order.h>
#endif /* included_clib_h */
/*