path: root/src/vppinfra/clib.h
Diffstat (limited to 'src/vppinfra/clib.h')
-rw-r--r--  src/vppinfra/clib.h | 163
1 file changed, 90 insertions(+), 73 deletions(-)
diff --git a/src/vppinfra/clib.h b/src/vppinfra/clib.h
index ade7e5fa4bb..d14582492d6 100644
--- a/src/vppinfra/clib.h
+++ b/src/vppinfra/clib.h
@@ -53,6 +53,12 @@
#define CLIB_UNIX
#endif
+#ifdef __linux__
+#define CLIB_LINUX 1
+#else
+#define CLIB_LINUX 0
+#endif
+
#include <vppinfra/types.h>
#include <vppinfra/atomics.h>
@@ -68,6 +74,8 @@
#define BITS(x) (8*sizeof(x))
#define ARRAY_LEN(x) (sizeof (x)/sizeof (x[0]))
+#define FOREACH_ARRAY_ELT(a, b) \
+ for (typeof ((b)[0]) *(a) = (b); (a) - (b) < ARRAY_LEN (b); (a)++)
#define _STRUCT_FIELD(t,f) (((t *) 0)->f)
#define STRUCT_OFFSET_OF(t,f) offsetof(t, f)
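The new FOREACH_ARRAY_ELT macro iterates over a fixed-size array with a typed element pointer, so the loop needs no separate index or type declaration. A minimal usage sketch (the counts array and its contents are hypothetical):

u32 counts[4] = { 1, 2, 3, 4 };
u32 total = 0;

FOREACH_ARRAY_ELT (c, counts)
  total += c[0]; /* c is a u32 * stepping through counts */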
@@ -95,15 +103,45 @@
/* Make a string from the macro's argument */
#define CLIB_STRING_MACRO(x) #x
+#define CLIB_STRING_ARRAY(...) \
+ (char *[]) { __VA_ARGS__, 0 }
+
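CLIB_STRING_ARRAY expands to a NULL-terminated compound literal of string pointers, convenient for table-style iteration. A sketch, assuming nothing beyond the macro itself:

#include <stdio.h>

char **color = CLIB_STRING_ARRAY ("red", "green", "blue");

while (color[0])
  printf ("%s\n", *color++); /* stops at the trailing NULL */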
+/* sanitizers */
+#ifdef __has_feature
+#if __has_feature(address_sanitizer)
+#define CLIB_SANITIZE_ADDR 1
+#endif
+#elif defined(__SANITIZE_ADDRESS__)
+#define CLIB_SANITIZE_ADDR 1
+#endif
+
#define __clib_unused __attribute__ ((unused))
#define __clib_weak __attribute__ ((weak))
#define __clib_packed __attribute__ ((packed))
+#define __clib_flatten __attribute__ ((flatten))
#define __clib_constructor __attribute__ ((constructor))
#define __clib_noinline __attribute__ ((noinline))
+#ifdef __clang__
+#define __clib_noclone
+#else
+#define __clib_noclone __attribute__ ((noclone))
+#endif
#define __clib_aligned(x) __attribute__ ((aligned(x)))
#define __clib_section(s) __attribute__ ((section(s)))
#define __clib_warn_unused_result __attribute__ ((warn_unused_result))
#define __clib_export __attribute__ ((visibility("default")))
+#ifdef __clang__
+#define __clib_no_tail_calls __attribute__ ((disable_tail_calls))
+#else
+#define __clib_no_tail_calls \
+ __attribute__ ((optimize ("no-optimize-sibling-calls")))
+#endif
+
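__clib_no_tail_calls keeps calls made from the annotated function out of tail position (clang's disable_tail_calls, GCC's no-optimize-sibling-calls), so the function's stack frame stays visible. A sketch of the intended use, with clib_mem_free standing in for any callee that should see this wrapper in a backtrace:

/* sketch: without the attribute, the call below could become a jump
   and this wrapper would vanish from backtraces */
__clib_no_tail_calls static void
traced_free (void *p)
{
  clib_mem_free (p);
}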
+#ifdef CLIB_SANITIZE_ADDR
+#define __clib_nosanitize_addr __attribute__ ((no_sanitize_address))
+#else
+#define __clib_nosanitize_addr
+#endif
#define never_inline __attribute__ ((__noinline__))
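CLIB_SANITIZE_ADDR is detected via clang's __has_feature or GCC's __SANITIZE_ADDRESS__, and __clib_nosanitize_addr then exempts individual functions from instrumentation. A hedged sketch of where that matters, e.g. code that deliberately reads memory ASan considers poisoned:

/* sketch: readers of redzone-adjacent metadata (such as vector headers
   stored before the user pointer) must opt out of ASan checks */
static __clib_nosanitize_addr uword
raw_load (void *p)
{
  return *(uword *) p;
}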
@@ -124,10 +162,17 @@
#define PREDICT_FALSE(x) __builtin_expect((x),0)
#define PREDICT_TRUE(x) __builtin_expect((x),1)
#define COMPILE_TIME_CONST(x) __builtin_constant_p (x)
+#define CLIB_ASSUME(x) \
+ do \
+ { \
+ if (!(x)) \
+ __builtin_unreachable (); \
+ } \
+ while (0)
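CLIB_ASSUME is the optimizer-facing counterpart of an assert: the condition is promised rather than checked, and a false promise at runtime is undefined behavior. A sketch of the intended effect (function name and bound are illustrative):

static_always_inline u64
sum_small (u64 *e, u32 n_elts)
{
  u64 sum = 0;
  CLIB_ASSUME (n_elts <= 8); /* lets the compiler fully unroll */
  for (u32 i = 0; i < n_elts; i++)
    sum += e[i];
  return sum;
}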
/*
* Compiler barrier
- * prevent compiler to reorder memory access accross this boundary
+ * prevent compiler to reorder memory access across this boundary
* prevent compiler to cache values in register (force reload)
* Not to be confused with CPU memory barrier below
*/
@@ -136,7 +181,7 @@
/* Full memory barrier (read and write). */
#define CLIB_MEMORY_BARRIER() __sync_synchronize ()
-#if __x86_64__
+#if __SSE__
#define CLIB_MEMORY_STORE_BARRIER() __builtin_ia32_sfence ()
#else
#define CLIB_MEMORY_STORE_BARRIER() __sync_synchronize ()
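Switching the guard from __x86_64__ to __SSE__ fits the instruction set: sfence was introduced with SSE, so presumably this also enables the cheaper barrier on 32-bit x86 builds compiled with SSE. The usual pattern it serves, as a sketch (ring and its fields are hypothetical):

/* publish the payload before the index that makes it visible */
ring->data[slot] = value;
CLIB_MEMORY_STORE_BARRIER ();
ring->tail = slot + 1;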
@@ -152,26 +197,17 @@
decl __attribute ((destructor)); \
decl
-/* Use __builtin_clz if available. */
-#if uword_bits == 64
-#define count_leading_zeros(x) __builtin_clzll (x)
-#define count_trailing_zeros(x) __builtin_ctzll (x)
-#else
-#define count_leading_zeros(x) __builtin_clzl (x)
-#define count_trailing_zeros(x) __builtin_ctzl (x)
-#endif
-
-#if defined (count_leading_zeros)
always_inline uword
-clear_lowest_set_bit (uword x)
+pow2_mask (uword x)
{
#ifdef __BMI2__
- return _blsr_u64 (x);
-#else
- return x ^ (1ULL << count_trailing_zeros (x));
+ return _bzhi_u64 (-1ULL, x);
#endif
+ return ((uword) 1 << x) - (uword) 1;
}
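pow2_mask (x) now returns a mask of the x lowest bits, using BZHI (zero the high bits of an all-ones word) where BMI2 is available. Expected values, assuming a 64-bit uword:

ASSERT (pow2_mask (0) == 0);
ASSERT (pow2_mask (4) == 0xf);
ASSERT (pow2_mask (63) == 0x7fffffffffffffff);

Note the generic fall-through shifts by x, so x must stay below BITS (uword).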
+#include <vppinfra/bitops.h>
+
always_inline uword
min_log2 (uword x)
{
@@ -179,45 +215,6 @@ min_log2 (uword x)
n = count_leading_zeros (x);
return BITS (uword) - n - 1;
}
-#else
-always_inline uword
-min_log2 (uword x)
-{
- uword a = x, b = BITS (uword) / 2, c = 0, r = 0;
-
- /* Reduce x to 4 bit result. */
-#define _ \
-{ \
- c = a >> b; \
- if (c) a = c; \
- if (c) r += b; \
- b /= 2; \
-}
-
- if (BITS (uword) > 32)
- _;
- _;
- _;
- _;
-#undef _
-
- /* Do table lookup on 4 bit partial. */
- if (BITS (uword) > 32)
- {
- const u64 table = 0x3333333322221104LL;
- uword t = (table >> (4 * a)) & 0xf;
- r = t < 4 ? r + t : ~0;
- }
- else
- {
- const u32 table = 0x22221104;
- uword t = (a & 8) ? 3 : ((table >> (4 * a)) & 0xf);
- r = t < 4 ? r + t : ~0;
- }
-
- return r;
-}
-#endif
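The removed branch was the portable min_log2 for compilers without the clz builtins; with count_leading_zeros now coming unconditionally from <vppinfra/bitops.h>, only the builtin-based version survives. Its contract is floor (log2 (x)):

ASSERT (min_log2 (1) == 0);
ASSERT (min_log2 (4) == 2);
ASSERT (min_log2 (5) == 2); /* rounds down */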
always_inline uword
max_log2 (uword x)
@@ -249,12 +246,6 @@ min_log2_u64 (u64 x)
}
always_inline uword
-pow2_mask (uword x)
-{
- return ((uword) 1 << x) - (uword) 1;
-}
-
-always_inline uword
max_pow2 (uword x)
{
word y = (word) 1 << min_log2 (x);
@@ -293,18 +284,6 @@ first_set (uword x)
return x & -x;
}
-always_inline uword
-log2_first_set (uword x)
-{
- uword result;
-#ifdef count_trailing_zeros
- result = count_trailing_zeros (x);
-#else
- result = min_log2 (first_set (x));
-#endif
- return result;
-}
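log2_first_set is dropped here along with clear_lowest_set_bit and the count_*_zeros macros above; presumably they now live in <vppinfra/bitops.h>, which this file starts including, so existing callers keep compiling.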
-
always_inline f64
flt_round_down (f64 x)
{
@@ -360,6 +339,44 @@ extract_bits (uword x, int start, int count)
_x < 0 ? -_x : _x; \
})
+static_always_inline u64
+u64_add_with_carry (u64 *carry, u64 a, u64 b)
+{
+#if defined(__x86_64__)
+ unsigned long long v;
+ *carry = _addcarry_u64 (*carry, a, b, &v);
+ return (u64) v;
+#elif defined(__clang__)
+ unsigned long long c;
+ u64 rv = __builtin_addcll (a, b, *carry, &c);
+ *carry = c;
+ return rv;
+#else
+ u64 rv = a + b + *carry;
+ *carry = rv < a;
+ return rv;
+#endif
+}
+
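u64_add_with_carry chains through *carry, mapping to ADC via _addcarry_u64 on x86_64 and to __builtin_addcll on clang elsewhere. A sketch of a 128-bit addition built from two 64-bit limbs (a_lo/a_hi/b_lo/b_hi are hypothetical):

u64 carry = 0, r_lo, r_hi;
r_lo = u64_add_with_carry (&carry, a_lo, b_lo);
r_hi = u64_add_with_carry (&carry, a_hi, b_hi);
/* carry now holds the overflow out of the high limb */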
+static_always_inline u64
+u64_sub_with_borrow (u64 *borrow, u64 x, u64 y)
+{
+#if defined(__x86_64__)
+ unsigned long long v;
+ *borrow = _subborrow_u64 (*borrow, x, y, &v);
+ return (u64) v;
+#elif defined(__clang__)
+ unsigned long long b;
+ u64 rv = __builtin_subcll (x, y, *borrow, &b);
+ *borrow = b;
+ return rv;
+#else
+ unsigned long long rv = x - (y + *borrow);
+ *borrow = rv > x;
+ return rv;
+#endif
+}
+
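u64_sub_with_borrow mirrors the addition above (SBB via _subborrow_u64 on x86_64). The same limb pattern, as a sketch:

u64 borrow = 0, r_lo, r_hi;
r_lo = u64_sub_with_borrow (&borrow, a_lo, b_lo);
r_hi = u64_sub_with_borrow (&borrow, a_hi, b_hi);
/* borrow != 0 means the full 128-bit result wrapped below zero */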
/* Standard standalone-only function declarations. */
#ifndef CLIB_UNIX
void clib_standalone_init (void *memory, uword memory_bytes);