X-Git-Url: https://gerrit.fd.io/r/gitweb?a=blobdiff_plain;f=src%2Fvppinfra%2Fclib.h;h=746cb511bbe79ff932c9cab443721ea484680671;hb=993c86f339fca3e382e65fc82627381255aaacec;hp=a6f88245d7956c011832970adc111409b95bfc17;hpb=5f21e1bd613b372ea6f8be6423894548dae59bdc;p=vpp.git

diff --git a/src/vppinfra/clib.h b/src/vppinfra/clib.h
index a6f88245d79..746cb511bbe 100644
--- a/src/vppinfra/clib.h
+++ b/src/vppinfra/clib.h
@@ -38,7 +38,15 @@
 #ifndef included_clib_h
 #define included_clib_h
 
+#include <stddef.h>
+
+#if __has_include(<vppinfra/config.h>)
 #include <vppinfra/config.h>
+#endif
+
+#ifdef __x86_64__
+#include <x86intrin.h>
+#endif
 
 /* Standalone means to not assume we are running on a Unix box. */
 #if ! defined (CLIB_STANDALONE) && ! defined (CLIB_LINUX_KERNEL)
@@ -46,6 +54,7 @@
 #endif
 
 #include <vppinfra/types.h>
+#include <vppinfra/atomics.h>
 
 /* Global DEBUG flag.  Setting this to 1 or 0 turns off
    ASSERT (see vppinfra/error.h) & other debugging code. */
@@ -61,8 +70,8 @@
 #define ARRAY_LEN(x) (sizeof (x)/sizeof (x[0]))
 
 #define _STRUCT_FIELD(t,f) (((t *) 0)->f)
-#define STRUCT_OFFSET_OF(t,f) ((uword) & _STRUCT_FIELD (t, f))
-#define STRUCT_BIT_OFFSET_OF(t,f) (BITS(u8) * (uword) & _STRUCT_FIELD (t, f))
+#define STRUCT_OFFSET_OF(t,f) offsetof(t, f)
+#define STRUCT_BIT_OFFSET_OF(t,f) (BITS(u8) * STRUCT_OFFSET_OF (t, f))
 #define STRUCT_SIZE_OF(t,f)   (sizeof (_STRUCT_FIELD (t, f)))
 #define STRUCT_BITS_OF(t,f)   (BITS (_STRUCT_FIELD (t, f)))
 #define STRUCT_ARRAY_LEN(t,f) ARRAY_LEN (_STRUCT_FIELD (t, f))
@@ -80,13 +89,51 @@
 #define CLIB_PACKED(x) x __attribute__ ((packed))
 #define CLIB_UNUSED(x) x __attribute__ ((unused))
 
+/* similar to CLIB_CACHE_LINE_ALIGN_MARK() but with arbitrary alignment */
+#define CLIB_ALIGN_MARK(name, alignment) u8 name[0] __attribute__((aligned(alignment)))
+
 /* Make a string from the macro's argument */
 #define CLIB_STRING_MACRO(x) #x
 
+#define CLIB_STRING_ARRAY(...) \
+  (char *[]) { __VA_ARGS__, 0 }
+
+/* sanitizers */
+#ifdef __has_feature
+#if __has_feature(address_sanitizer)
+#define CLIB_SANITIZE_ADDR 1
+#endif
+#elif defined(__SANITIZE_ADDRESS__)
+#define CLIB_SANITIZE_ADDR 1
+#endif
+
 #define __clib_unused __attribute__ ((unused))
 #define __clib_weak __attribute__ ((weak))
 #define __clib_packed __attribute__ ((packed))
+#define __clib_flatten __attribute__ ((flatten))
 #define __clib_constructor __attribute__ ((constructor))
+#define __clib_noinline __attribute__ ((noinline))
+#ifdef __clang__
+#define __clib_noclone
+#else
+#define __clib_noclone __attribute__ ((noclone))
+#endif
+#define __clib_aligned(x) __attribute__ ((aligned(x)))
+#define __clib_section(s) __attribute__ ((section(s)))
+#define __clib_warn_unused_result __attribute__ ((warn_unused_result))
+#define __clib_export __attribute__ ((visibility("default")))
+#ifdef __clang__
+#define __clib_no_tail_calls __attribute__ ((disable_tail_calls))
+#else
+#define __clib_no_tail_calls \
+  __attribute__ ((optimize ("no-optimize-sibling-calls")))
+#endif
+
+#ifdef CLIB_SANITIZE_ADDR
+#define __clib_nosanitize_addr __attribute__ ((no_sanitize_address))
+#else
+#define __clib_nosanitize_addr
+#endif
 
 #define never_inline __attribute__ ((__noinline__))
 
@@ -106,6 +153,22 @@
 /* Hints to compiler about hot/cold code. */
 #define PREDICT_FALSE(x) __builtin_expect((x),0)
 #define PREDICT_TRUE(x) __builtin_expect((x),1)
+#define COMPILE_TIME_CONST(x) __builtin_constant_p (x)
+#define CLIB_ASSUME(x) \
+  do \
+    { \
+      if (!(x)) \
+	__builtin_unreachable (); \
+    } \
+  while (0)
+
+/*
+ * Compiler barrier
+ *   prevent compiler to reorder memory access across this boundary
+ *   prevent compiler to cache values in register (force reload)
+ * Not to be confused with CPU memory barrier below
+ */
+#define CLIB_COMPILER_BARRIER() asm volatile ("":::"memory")
 
 /* Full memory barrier (read and write). */
 #define CLIB_MEMORY_BARRIER() __sync_synchronize ()
@@ -126,16 +189,8 @@
   decl __attribute ((destructor)); \
   decl
 
-/* Use __builtin_clz if available. */
-#if uword_bits == 64
-#define count_leading_zeros(x) __builtin_clzll (x)
-#define count_trailing_zeros(x) __builtin_ctzll (x)
-#else
-#define count_leading_zeros(x) __builtin_clzl (x)
-#define count_trailing_zeros(x) __builtin_ctzl (x)
-#endif
+#include <vppinfra/bitops.h>
 
-#if defined (count_leading_zeros)
 always_inline uword
 min_log2 (uword x)
 {
@@ -143,45 +198,6 @@ min_log2 (uword x)
   n = count_leading_zeros (x);
   return BITS (uword) - n - 1;
 }
-#else
-always_inline uword
-min_log2 (uword x)
-{
-  uword a = x, b = BITS (uword) / 2, c = 0, r = 0;
-
-  /* Reduce x to 4 bit result. */
-#define _ \
-{ \
-  c = a >> b; \
-  if (c) a = c; \
-  if (c) r += b; \
-  b /= 2; \
-}
-
-  if (BITS (uword) > 32)
-    _;
-  _;
-  _;
-  _;
-#undef _
-
-  /* Do table lookup on 4 bit partial. */
-  if (BITS (uword) > 32)
-    {
-      const u64 table = 0x3333333322221104LL;
-      uword t = (table >> (4 * a)) & 0xf;
-      r = t < 4 ? r + t : ~0;
-    }
-  else
-    {
-      const u32 table = 0x22221104;
-      uword t = (a & 8) ? 3 : ((table >> (4 * a)) & 0xf);
-      r = t < 4 ? r + t : ~0;
-    }
-
-  return r;
-}
-#endif
 
 always_inline uword
 max_log2 (uword x)
@@ -215,6 +231,9 @@ min_log2_u64 (u64 x)
 always_inline uword
 pow2_mask (uword x)
 {
+#ifdef __BMI2__
+  return _bzhi_u64 (-1ULL, x);
+#endif
   return ((uword) 1 << x) - (uword) 1;
 }
 
@@ -233,6 +252,12 @@ is_pow2 (uword x)
   return 0 == (x & (x - 1));
 }
 
+always_inline uword
+round_down_pow2 (uword x, uword pow2)
+{
+  return (x) & ~(pow2 - 1);
+}
+
 always_inline uword
 round_pow2 (uword x, uword pow2)
 {
@@ -251,18 +276,6 @@ first_set (uword x)
   return x & -x;
 }
 
-always_inline uword
-log2_first_set (uword x)
-{
-  uword result;
-#ifdef count_trailing_zeros
-  result = count_trailing_zeros (x);
-#else
-  result = min_log2 (first_set (x));
-#endif
-  return result;
-}
-
 always_inline f64
 flt_round_down (f64 x)
 {
@@ -281,6 +294,15 @@ flt_round_to_multiple (f64 x, f64 f)
   return f * flt_round_nearest (x / f);
 }
 
+always_inline uword
+extract_bits (uword x, int start, int count)
+{
+#ifdef __BMI__
+  return _bextr_u64 (x, start, count);
+#endif
+  return (x >> start) & pow2_mask (count);
+}
+
 #define clib_max(x,y) \
 ({ \
   __typeof__ (x) _x = (x); \
@@ -295,6 +317,14 @@ flt_round_to_multiple (f64 x, f64 f)
   _x < _y ? _x : _y; \
 })
 
+#define clib_clamp(x,lo,hi) \
+({ \
+  __typeof__ (x) _x = (x); \
+  __typeof__ (lo) _lo = (lo); \
+  __typeof__ (hi) _hi = (hi); \
+  _x < _lo ? _lo : (_x > _hi ? _hi : _x); \
+})
+
 #define clib_abs(x) \
 ({ \
   __typeof__ (x) _x = (x); \
@@ -313,6 +343,7 @@ void qsort (void *base, uword n, uword size,
 uword
 clib_backtrace (uword * callers, uword max_callers, uword n_frames_to_skip);
 
+#include
 #endif /* included_clib_h */
 
 /*
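
As a quick, hedged illustration of the helpers this patch touches (pow2_mask () gains a BMI2 fast path; extract_bits (), round_down_pow2 (), clib_clamp (), CLIB_STRING_ARRAY () and CLIB_ASSUME () are newly added), the sketch below shows how they compose. It is not part of the patch: it assumes a VPP source tree on the include path (roughly gcc -I <vpp>/src example.c, path illustrative) so that <vppinfra/clib.h> resolves, and the main () driver and sample values are invented for this example.

/* Minimal sketch, assuming <vppinfra/clib.h> is reachable via the vpp
 * include path; exercises a few helpers added or changed by this patch. */
#include <stdio.h>
#include <vppinfra/clib.h>

int
main (void)
{
  uword flags = 0x12f0;

  /* pow2_mask (n) yields n ones in the low bits: pow2_mask (8) == 0xff */
  uword low = flags & pow2_mask (8);

  /* extract_bits (x, start, count) pulls out a bit-field;
     bits [4..11] of 0x12f0 are 0x2f */
  uword field = extract_bits (flags, 4, 8);

  /* round_down_pow2 / round_pow2 align to a power-of-two boundary */
  uword down = round_down_pow2 (1000, 256); /* 768 */
  uword up = round_pow2 (1000, 256);        /* 1024 */

  /* clib_clamp (x, lo, hi) pins x into [lo, hi] */
  int limited = clib_clamp (5000, 0, 4096); /* 4096 */

  /* CLIB_STRING_ARRAY builds a NULL-terminated array of char * */
  char **names = CLIB_STRING_ARRAY ("red", "green", "blue");

  /* CLIB_ASSUME tells the optimizer a condition holds (undefined if it does not) */
  CLIB_ASSUME (up >= down);

  printf ("low=0x%lx field=0x%lx down=%lu up=%lu limited=%d first=%s\n",
	  (unsigned long) low, (unsigned long) field, (unsigned long) down,
	  (unsigned long) up, limited, names[0]);
  return 0;
}

The example deliberately avoids the BMI/BMI2 details: extract_bits () and pow2_mask () fall back to the portable shift-and-mask versions unless the compiler is invoked with the matching -mbmi/-mbmi2 flags, so the printed values are the same either way.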