path: root/src/vppinfra/clib.h
author    Damjan Marion <damarion@cisco.com>    2016-12-19 23:05:39 +0100
committer Damjan Marion <damarion@cisco.com>    2016-12-28 12:25:14 +0100
commit    7cd468a3d7dee7d6c92f69a0bb7061ae208ec727 (patch)
tree      5de62f8dbd3a752f5a676ca600e43d2652d1ff1a /src/vppinfra/clib.h
parent    696f1adec0df3b8f161862566dd9c86174302658 (diff)
Reorganize source tree to use single autotools instance
Change-Id: I7b51f88292e057c6443b12224486f2d0c9f8ae23
Signed-off-by: Damjan Marion <damarion@cisco.com>
Diffstat (limited to 'src/vppinfra/clib.h')
-rw-r--r--  src/vppinfra/clib.h  |  359
1 file changed, 359 insertions, 0 deletions
diff --git a/src/vppinfra/clib.h b/src/vppinfra/clib.h
new file mode 100644
index 00000000000..0386c756833
--- /dev/null
+++ b/src/vppinfra/clib.h
@@ -0,0 +1,359 @@
+/*
+ * Copyright (c) 2015 Cisco and/or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/*
+ Copyright (c) 2001, 2002, 2003 Eliot Dresselhaus
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+#ifndef included_clib_h
+#define included_clib_h
+
+/* Standalone means to not assume we are running on a Unix box. */
+#if ! defined (CLIB_STANDALONE) && ! defined (CLIB_LINUX_KERNEL)
+#define CLIB_UNIX
+#endif
+
+#include <vppinfra/types.h>
+
+/* Global DEBUG flag.  Setting CLIB_DEBUG to 1 enables ASSERT
+   (see vppinfra/error.h) and other debugging code; 0 disables them. */
+#ifndef CLIB_DEBUG
+#define CLIB_DEBUG 0
+#endif
+
+#ifndef NULL
+#define NULL ((void *) 0)
+#endif
+
+#define BITS(x) (8*sizeof(x))
+#define ARRAY_LEN(x) (sizeof (x)/sizeof (x[0]))
+
+#define _STRUCT_FIELD(t,f) (((t *) 0)->f)
+#define STRUCT_OFFSET_OF(t,f) ((uword) & _STRUCT_FIELD (t, f))
+#define STRUCT_BIT_OFFSET_OF(t,f) (BITS(u8) * (uword) & _STRUCT_FIELD (t, f))
+#define STRUCT_SIZE_OF(t,f) (sizeof (_STRUCT_FIELD (t, f)))
+#define STRUCT_BITS_OF(t,f) (BITS (_STRUCT_FIELD (t, f)))
+#define STRUCT_ARRAY_LEN(t,f) ARRAY_LEN (_STRUCT_FIELD (t, f))
+#define STRUCT_MARK(mark) u8 mark[0]
+#define STRUCT_MARK_PTR(v, f) &(v)->f
+
+/* Stride in bytes between struct array elements. */
+#define STRUCT_STRIDE_OF(t,f) \
+ ( ((uword) & (((t *) 0)[1].f)) \
+ - ((uword) & (((t *) 0)[0].f)))
+
+#define STRUCT_OFFSET_OF_VAR(v,f) ((uword) (&(v)->f) - (uword) (v))
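+
+/* Illustration (example_t is a hypothetical type, typical ABI assumed):
+     typedef struct { u8 tag; u32 value; } example_t;
+     STRUCT_OFFSET_OF (example_t, value)  => 4  (byte offset of 'value')
+     STRUCT_SIZE_OF (example_t, value)    => 4  (sizeof (u32))
+     STRUCT_STRIDE_OF (example_t, value)  => 8  (sizeof (example_t))  */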
+
+/* Used to pack structure elements. */
+#define CLIB_PACKED(x) x __attribute__ ((packed))
+#define CLIB_UNUSED(x) x __attribute__ ((unused))
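+
+/* Illustration (example_hdr_t is a hypothetical name):
+     typedef CLIB_PACKED (struct { u8 type; u32 addr; }) example_hdr_t;
+   declares a struct with no padding, so sizeof (example_hdr_t) == 5;
+   CLIB_UNUSED (int rv) declares a variable that may go unused without
+   triggering a compiler warning.  */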
+
+#define never_inline __attribute__ ((__noinline__))
+
+#if CLIB_DEBUG > 0
+#define always_inline static inline
+#define static_always_inline static inline
+#else
+#define always_inline static inline __attribute__ ((__always_inline__))
+#define static_always_inline static inline __attribute__ ((__always_inline__))
+#endif
+
+
+/* Reserved (unused) structure element with address offset between
+ from and to. */
+#define CLIB_PAD_FROM_TO(from,to) u8 pad_##from[(to) - (from)]
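+
+/* Illustration: inside a struct definition,
+     CLIB_PAD_FROM_TO (0x14, 0x20);
+   expands to  u8 pad_0x14[0x20 - 0x14];  reserving 12 bytes.  */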
+
+/* Hints to compiler about hot/cold code. */
+#define PREDICT_FALSE(x) __builtin_expect((x),0)
+#define PREDICT_TRUE(x) __builtin_expect((x),1)
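+
+/* Illustration (handle_error is a hypothetical helper):
+     if (PREDICT_FALSE (rv != 0))
+       handle_error (rv);
+   tells the compiler this branch is rarely taken, keeping the common
+   path straight-line.  */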
+
+/* Full memory barrier (read and write). */
+#define CLIB_MEMORY_BARRIER() __sync_synchronize ()
+
+/* Arranges for function to be called before main. */
+#define INIT_FUNCTION(decl) \
+ decl __attribute ((constructor)); \
+ decl
+
+/* Arranges for function to be called before exit. */
+#define EXIT_FUNCTION(decl) \
+ decl __attribute ((destructor)); \
+ decl
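+
+/* Illustrative usage (my_init is a hypothetical function name):
+     INIT_FUNCTION (static void my_init (void));
+     static void my_init (void) { ... }
+   The expansion declares my_init with the constructor attribute, so it
+   runs before main ().  */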
+
+/* Use __builtin_clz if available. */
+#ifdef __GNUC__
+#include <features.h>
+#if __GNUC_PREREQ(3, 4)
+#if uword_bits == 64
+#define count_leading_zeros(count,x) count = __builtin_clzll (x)
+#define count_trailing_zeros(count,x) count = __builtin_ctzll (x)
+#else
+#define count_leading_zeros(count,x) count = __builtin_clzl (x)
+#define count_trailing_zeros(count,x) count = __builtin_ctzl (x)
+#endif
+#endif
+#endif
+
+#ifndef count_leading_zeros
+
+/* Misc. integer arithmetic functions. */
+#if defined (i386)
+#define count_leading_zeros(count, x) \
+ do { \
+ word _clz; \
+ __asm__ ("bsrl %1,%0" \
+ : "=r" (_clz) : "rm" ((word) (x)));\
+ (count) = _clz ^ 31; \
+ } while (0)
+
+#define count_trailing_zeros(count, x) \
+ __asm__ ("bsfl %1,%0" : "=r" (count) : "rm" ((word)(x)))
+#endif /* i386 */
+
+#if defined (__alpha__) && defined (HAVE_CIX)
+#define count_leading_zeros(count, x) \
+ __asm__ ("ctlz %1,%0" \
+ : "=r" ((word) (count)) \
+ : "r" ((word) (x)))
+#define count_trailing_zeros(count, x) \
+ __asm__ ("cttz %1,%0" \
+ : "=r" ((word) (count)) \
+ : "r" ((word) (x)))
+#endif /* alpha && HAVE_CIX */
+
+#if __mips >= 4
+
+/* Select between 32/64 opcodes. */
+#if uword_bits == 32
+#define count_leading_zeros(_count, _x) \
+ __asm__ ("clz %[count],%[x]" \
+ : [count] "=r" ((word) (_count)) \
+ : [x] "r" ((word) (_x)))
+#else
+#define count_leading_zeros(_count, _x) \
+ __asm__ ("dclz %[count],%[x]" \
+ : [count] "=r" ((word) (_count)) \
+ : [x] "r" ((word) (_x)))
+#endif
+
+#endif /* __mips >= 4 */
+
+#endif /* count_leading_zeros */
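+
+/* Illustration: count_leading_zeros (n, x) stores in n the number of
+   zero bits above the most significant set bit of x; e.g. with a
+   64-bit uword, x = 0x10 gives n = 59.  As with __builtin_clz, the
+   result for x = 0 is undefined.  */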
+
+#if defined (count_leading_zeros)
+always_inline uword
+min_log2 (uword x)
+{
+ uword n;
+ count_leading_zeros (n, x);
+ return BITS (uword) - n - 1;
+}
+#else
+always_inline uword
+min_log2 (uword x)
+{
+ uword a = x, b = BITS (uword) / 2, c = 0, r = 0;
+
+ /* Reduce x to 4 bit result. */
+#define _ \
+{ \
+ c = a >> b; \
+ if (c) a = c; \
+ if (c) r += b; \
+ b /= 2; \
+}
+
+ if (BITS (uword) > 32)
+ _;
+ _;
+ _;
+ _;
+#undef _
+
+ /* Do table lookup on 4 bit partial. */
+ if (BITS (uword) > 32)
+ {
+ const u64 table = 0x3333333322221104LL;
+ uword t = (table >> (4 * a)) & 0xf;
+ r = t < 4 ? r + t : ~0;
+ }
+ else
+ {
+ const u32 table = 0x22221104;
+ uword t = (a & 8) ? 3 : ((table >> (4 * a)) & 0xf);
+ r = t < 4 ? r + t : ~0;
+ }
+
+ return r;
+}
+#endif
+
+always_inline uword
+max_log2 (uword x)
+{
+ uword l = min_log2 (x);
+ if (x > ((uword) 1 << l))
+ l++;
+ return l;
+}
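+
+/* Illustration: min_log2 returns floor (log2 (x)) and max_log2 returns
+   ceil (log2 (x)); e.g. min_log2 (9) == 3, max_log2 (9) == 4, and for
+   an exact power of two both agree: min_log2 (8) == max_log2 (8) == 3.  */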
+
+always_inline u64
+min_log2_u64 (u64 x)
+{
+ if (BITS (uword) == 64)
+ return min_log2 (x);
+ else
+ {
+ uword l, y;
+ y = x;
+ l = 0;
+ if (y == 0)
+ {
+ l += 32;
+ x >>= 32;
+ }
+ l += min_log2 (x);
+ return l;
+ }
+}
+
+always_inline uword
+pow2_mask (uword x)
+{
+ return ((uword) 1 << x) - (uword) 1;
+}
+
+always_inline uword
+max_pow2 (uword x)
+{
+ word y = (word) 1 << min_log2 (x);
+ if (x > y)
+ y *= 2;
+ return y;
+}
+
+always_inline uword
+is_pow2 (uword x)
+{
+ return 0 == (x & (x - 1));
+}
+
+always_inline uword
+round_pow2 (uword x, uword pow2)
+{
+ return (x + pow2 - 1) & ~(pow2 - 1);
+}
+
+always_inline u64
+round_pow2_u64 (u64 x, u64 pow2)
+{
+ return (x + pow2 - 1) & ~(pow2 - 1);
+}
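+
+/* Illustration:
+     pow2_mask (4)      => 0xf
+     max_pow2 (9)       => 16  (smallest power of two >= 9)
+     is_pow2 (8)        => 1,  is_pow2 (12) => 0
+     round_pow2 (9, 8)  => 16  (round up to a multiple of 8)  */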
+
+always_inline uword
+first_set (uword x)
+{
+ return x & -x;
+}
+
+always_inline uword
+log2_first_set (uword x)
+{
+ uword result;
+#ifdef count_trailing_zeros
+ count_trailing_zeros (result, x);
+#else
+ result = min_log2 (first_set (x));
+#endif
+ return result;
+}
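+
+/* Illustration: first_set isolates the lowest set bit and
+   log2_first_set returns its position; e.g. first_set (12) == 4 and
+   log2_first_set (12) == 2.  */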
+
+always_inline f64
+flt_round_down (f64 x)
+{
+ return (int) x;
+}
+
+always_inline word
+flt_round_nearest (f64 x)
+{
+ return (word) (x + .5);
+}
+
+always_inline f64
+flt_round_to_multiple (f64 x, f64 f)
+{
+ return f * flt_round_nearest (x / f);
+}
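+
+/* Illustration:
+     flt_round_down (3.7)             => 3.0
+     flt_round_nearest (3.7)          => 4
+     flt_round_to_multiple (7.3, .5)  => 7.5  */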
+
+#define clib_max(x,y) \
+({ \
+ __typeof__ (x) _x = (x); \
+ __typeof__ (y) _y = (y); \
+ _x > _y ? _x : _y; \
+})
+
+#define clib_min(x,y) \
+({ \
+ __typeof__ (x) _x = (x); \
+ __typeof__ (y) _y = (y); \
+ _x < _y ? _x : _y; \
+})
+
+#define clib_abs(x) \
+({ \
+ __typeof__ (x) _x = (x); \
+ _x < 0 ? -_x : _x; \
+})
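+
+/* Illustration: the statement-expression form evaluates each argument
+   exactly once, so clib_max (n, *p++) advances p a single time;
+   clib_min (3, 5) == 3 and clib_abs (-7) == 7.  */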
+
+/* Standard standalone-only function declarations. */
+#ifndef CLIB_UNIX
+void clib_standalone_init (void *memory, uword memory_bytes);
+
+void qsort (void *base, uword n, uword size,
+ int (*)(const void *, const void *));
+#endif
+
+/* Stack backtrace. */
+uword
+clib_backtrace (uword * callers, uword max_callers, uword n_frames_to_skip);
+
+#endif /* included_clib_h */
+
+/*
+ * fd.io coding-style-patch-verification: ON
+ *
+ * Local Variables:
+ * eval: (c-set-style "gnu")
+ * End:
+ */