Diffstat (limited to 'src/vppinfra')
-rw-r--r--  src/vppinfra/crc32.h         80
-rw-r--r--  src/vppinfra/lb_hash_hash.h  14
2 files changed, 54 insertions(+), 40 deletions(-)
diff --git a/src/vppinfra/crc32.h b/src/vppinfra/crc32.h
index fec67cd9757..3b81daf28ca 100644
--- a/src/vppinfra/crc32.h
+++ b/src/vppinfra/crc32.h
@@ -21,67 +21,81 @@
#if __SSE4_2__
#define clib_crc32c_uses_intrinsics
#include <x86intrin.h>
-
-#define crc32_u64 _mm_crc32_u64
-#define crc32_u32 _mm_crc32_u32
-
static_always_inline u32
-clib_crc32c (u8 * s, int len)
+clib_crc32c_u8 (u32 last, u8 data)
{
- u32 v = 0;
-
-#if defined(__x86_64__)
- for (; len >= 8; len -= 8, s += 8)
- v = _mm_crc32_u64 (v, *((u64 *) s));
-#else
- /* workaround weird GCC bug when using _mm_crc32_u32
- which happens with -O2 optimization */
-#if !defined (__i686__)
- asm volatile ("":::"memory");
-#endif
-#endif
-
- for (; len >= 4; len -= 4, s += 4)
- v = _mm_crc32_u32 (v, *((u32 *) s));
+ return _mm_crc32_u8 (last, data);
+}
- for (; len >= 2; len -= 2, s += 2)
- v = _mm_crc32_u16 (v, *((u16 *) s));
+static_always_inline u32
+clib_crc32c_u16 (u32 last, u16 data)
+{
+ return _mm_crc32_u16 (last, data);
+}
- for (; len >= 1; len -= 1, s += 1)
- v = _mm_crc32_u8 (v, *((u16 *) s));
+static_always_inline u32
+clib_crc32c_u32 (u32 last, u32 data)
+{
+ return _mm_crc32_u32 (last, data);
+}
- return v;
+static_always_inline u32
+clib_crc32c_u64 (u32 last, u64 data)
+{
+ return _mm_crc32_u64 (last, data);
}
+#endif
-#elif __ARM_FEATURE_CRC32
+#if __ARM_FEATURE_CRC32
#define clib_crc32c_uses_intrinsics
#include <arm_acle.h>
+static_always_inline u32
+clib_crc32c_u8 (u32 last, u8 data)
+{
+ return __crc32cb (last, data);
+}
+static_always_inline u32
+clib_crc32c_u16 (u32 last, u16 data)
+{
+ return __crc32ch (last, data);
+}
-#define crc32_u64 __crc32cd
-#define crc32_u32 __crc32cw
+static_always_inline u32
+clib_crc32c_u32 (u32 last, u32 data)
+{
+ return __crc32cw (last, data);
+}
static_always_inline u32
+clib_crc32c_u64 (u32 last, u64 data)
+{
+ return __crc32cd (last, data);
+}
+#endif
+
+#ifdef clib_crc32c_uses_intrinsics
+static_always_inline u32
clib_crc32c (u8 * s, int len)
{
u32 v = 0;
for (; len >= 8; len -= 8, s += 8)
- v = __crc32cd (v, *((u64 *) s));
+ v = clib_crc32c_u64 (v, *((u64u *) s));
for (; len >= 4; len -= 4, s += 4)
- v = __crc32cw (v, *((u32 *) s));
+ v = clib_crc32c_u32 (v, *((u32u *) s));
for (; len >= 2; len -= 2, s += 2)
- v = __crc32ch (v, *((u16 *) s));
+ v = clib_crc32c_u16 (v, *((u16u *) s));
for (; len >= 1; len -= 1, s += 1)
- v = __crc32cb (v, *((u8 *) s));
+ v = clib_crc32c_u8 (v, *((u8 *) s));
return v;
}
-
#endif
+
#endif /* __included_crc32_h__ */
/*
diff --git a/src/vppinfra/lb_hash_hash.h b/src/vppinfra/lb_hash_hash.h
index fb251591eeb..f355515bce4 100644
--- a/src/vppinfra/lb_hash_hash.h
+++ b/src/vppinfra/lb_hash_hash.h
@@ -24,11 +24,11 @@ static_always_inline u32
lb_hash_hash (u64 k0, u64 k1, u64 k2, u64 k3, u64 k4)
{
u64 val = 0;
- val = crc32_u64 (val, k0);
- val = crc32_u64 (val, k1);
- val = crc32_u64 (val, k2);
- val = crc32_u64 (val, k3);
- val = crc32_u64 (val, k4);
+ val = clib_crc32c_u64 (val, k0);
+ val = clib_crc32c_u64 (val, k1);
+ val = clib_crc32c_u64 (val, k2);
+ val = clib_crc32c_u64 (val, k3);
+ val = clib_crc32c_u64 (val, k4);
return (u32) val;
}
@@ -37,8 +37,8 @@ static_always_inline u32
lb_hash_hash_2_tuples (u64 k0, u32 k1)
{
u64 val = 0;
- val = crc32_u64 (val, k0);
- val = crc32_u32 (val, k1);
+ val = clib_crc32c_u64 (val, k0);
+ val = clib_crc32c_u32 (val, k1);
return (u32) val;
}
#else
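
Below is an illustrative sketch (not part of the patch above) of how a caller might use the refactored, width-specific CRC32-C helpers. The function name and key layout are hypothetical; it assumes the vppinfra environment, where clib_crc32c_uses_intrinsics is defined only when hardware CRC32-C intrinsics (SSE4.2 or ARMv8 CRC) are available.

/* Hypothetical usage example, not part of this change. */
#include <vppinfra/clib.h>
#include <vppinfra/crc32.h>

static_always_inline u32
example_flow_hash (u64 addrs, u32 ports)
{
#ifdef clib_crc32c_uses_intrinsics
  u32 v = 0;
  v = clib_crc32c_u64 (v, addrs);	/* fold 8 bytes of key material */
  v = clib_crc32c_u32 (v, ports);	/* fold 4 more bytes */
  return v;
#else
  /* trivial fallback when no CRC32-C intrinsics are available */
  return (u32) addrs ^ ports;
#endif
}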