#define included_clib_h
#include <stddef.h>
+
+#if __has_include(<vppinfra/config.h>)
#include <vppinfra/config.h>
+#endif
#ifdef __x86_64__
#include <x86intrin.h>
/* Hints to compiler about hot/cold code. */
#define PREDICT_FALSE(x) __builtin_expect((x),0)
#define PREDICT_TRUE(x) __builtin_expect((x),1)
/* Nonzero when the compiler can prove (x) is a compile-time constant
   (GCC/Clang __builtin_constant_p); 0 means "not provably constant". */
#define COMPILE_TIME_CONST(x) __builtin_constant_p (x)
/* Promise the optimizer that (x) always holds; if control ever reaches
   this point with (x) false, behavior is undefined.  Wrapped in
   do/while (0) so the macro behaves as a single statement. */
#define CLIB_ASSUME(x) \
  do \
    { \
      if (!(x)) \
	__builtin_unreachable (); \
    } \
  while (0)
/*
 * Compiler barrier
 * Prevents the compiler from reordering memory accesses across this
 * boundary and from caching values in registers (forces a reload).
 * Not to be confused with the CPU memory barrier below.
 */
#endif
#if defined (count_leading_zeros)
+always_inline uword
+clear_lowest_set_bit (uword x)
+{
+#ifdef __BMI2__
+ return _blsr_u64 (x);
+#else
+ return x ^ (1ULL << count_trailing_zeros (x));
+#endif
+}
+
always_inline uword
min_log2 (uword x)
{
/* Return a mask with the low `x` bits set, i.e. (1 << x) - 1.
   NOTE(review): on the non-BMI2 fallback path the shift is undefined
   for x >= 64 (uword bits), while the BZHI path saturates to all-ones
   for such x — confirm callers keep x < 64. */
always_inline uword
pow2_mask (uword x)
{
#ifdef __BMI2__
  /* BZHI zeroes all bits at index x and above; for an index >= the
     operand width it returns the source (all ones) unchanged. */
  return _bzhi_u64 (-1ULL, x);
#endif
  return ((uword) 1 << x) - (uword) 1;
}
_x < _y ? _x : _y; \
})
/* Clamp `x` to the inclusive range [lo, hi]: values below lo become
   lo, values above hi become hi, anything in range passes through.
   Statement-expression form evaluates each argument exactly once. */
#define clib_clamp(x,lo,hi) \
({ \
  __typeof__ (x) _val = (x); \
  __typeof__ (lo) _min = (lo); \
  __typeof__ (hi) _max = (hi); \
  _val < _min ? _min : (_val > _max ? _max : _val); \
})
+
#define clib_abs(x) \
({ \
__typeof__ (x) _x = (x); \
uword
clib_backtrace (uword * callers, uword max_callers, uword n_frames_to_skip);
+#include <vppinfra/byte_order.h>
#endif /* included_clib_h */
/*