author      Damjan Marion <damarion@cisco.com>    2018-05-10 13:40:44 +0200
committer   Dave Barach <openvpp@barachs.net>     2018-05-10 17:16:56 +0000
commit      1105600416e0560cb05120a22e0a2e7359a13665 (patch)
tree        acf1df08de42344c380473d81886864e662890cf /src/vppinfra
parent      132dc49ee847a3e3b644de8b36499d73e8a8d37e (diff)
vppinfra: use count_trailing_zeros in sparse_vec_index
It is much cheaper to use ctzll than to do shift, subtract and mask in the likely case when we are looking for the first set bit in the uword.

Change-Id: I31954081571978878c7098bafad0c85a91755fa2
Signed-off-by: Damjan Marion <damarion@cisco.com>
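As an illustration of that rationale (a standalone sketch, not the vppinfra code itself; the function names below are made up), the fragment contrasts the shift/subtract/mask-and-popcount sequence with the single trailing-zero count used on the new fast path:

/* Sketch only: assumes a 64-bit word and GCC/Clang builtins. */
#include <stdint.h>

/* Old pattern: shift and subtract to build a mask of the bits below
 * position b, then mask and popcount them. */
static inline unsigned
members_below (uint64_t w, unsigned b)
{
  uint64_t mask = ((uint64_t) 1 << b) - 1;              /* shift + subtract */
  return (unsigned) __builtin_popcountll (w & mask);    /* mask + count */
}

/* New pattern: when bit b is the lowest set bit (the likely lookup
 * case), one ctzll answers the question directly. */
static inline int
bit_is_first_set (uint64_t w, unsigned b)
{
  return w != 0 && (unsigned) __builtin_ctzll (w) == b;
}

When bit_is_first_set() holds, no bits below b are set, so the popcount above is known to be zero and the whole mask-and-count sequence can be skipped.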
Diffstat (limited to 'src/vppinfra')
-rw-r--r--    src/vppinfra/bitmap.h        2
-rw-r--r--    src/vppinfra/clib.h         63
-rw-r--r--    src/vppinfra/sparse_vec.h   33
3 files changed, 30 insertions(+), 68 deletions(-)
diff --git a/src/vppinfra/bitmap.h b/src/vppinfra/bitmap.h
index 9e1ae493285..dbf9eeb2232 100644
--- a/src/vppinfra/bitmap.h
+++ b/src/vppinfra/bitmap.h
@@ -409,7 +409,7 @@ clib_bitmap_last_set (uword * ai)
if (x != 0)
{
uword first_bit;
- count_leading_zeros (first_bit, x);
+ first_bit = count_leading_zeros (x);
return (i) * BITS (ai[0]) - first_bit - 1;
}
}
diff --git a/src/vppinfra/clib.h b/src/vppinfra/clib.h
index 0d059a0778b..42748b0a34c 100644
--- a/src/vppinfra/clib.h
+++ b/src/vppinfra/clib.h
@@ -125,71 +125,20 @@
decl
/* Use __builtin_clz if available. */
-#ifdef __GNUC__
-#include <features.h>
-#if __GNUC_PREREQ(3, 4)
#if uword_bits == 64
-#define count_leading_zeros(count,x) count = __builtin_clzll (x)
-#define count_trailing_zeros(count,x) count = __builtin_ctzll (x)
+#define count_leading_zeros(x) __builtin_clzll (x)
+#define count_trailing_zeros(x) __builtin_ctzll (x)
#else
-#define count_leading_zeros(count,x) count = __builtin_clzl (x)
-#define count_trailing_zeros(count,x) count = __builtin_ctzl (x)
+#define count_leading_zeros(x) __builtin_clzl (x)
+#define count_trailing_zeros(x) __builtin_ctzl (x)
#endif
-#endif
-#endif
-
-#ifndef count_leading_zeros
-
-/* Misc. integer arithmetic functions. */
-#if defined (i386)
-#define count_leading_zeros(count, x) \
- do { \
- word _clz; \
- __asm__ ("bsrl %1,%0" \
- : "=r" (_clz) : "rm" ((word) (x)));\
- (count) = _clz ^ 31; \
- } while (0)
-
-#define count_trailing_zeros(count, x) \
- __asm__ ("bsfl %1,%0" : "=r" (count) : "rm" ((word)(x)))
-#endif /* i386 */
-
-#if defined (__alpha__) && defined (HAVE_CIX)
-#define count_leading_zeros(count, x) \
- __asm__ ("ctlz %1,%0" \
- : "=r" ((word) (count)) \
- : "r" ((word) (x)))
-#define count_trailing_zeros(count, x) \
- __asm__ ("cttz %1,%0" \
- : "=r" ((word) (count)) \
- : "r" ((word) (x)))
-#endif /* alpha && HAVE_CIX */
-
-#if __mips >= 4
-
-/* Select between 32/64 opcodes. */
-#if uword_bits == 32
-#define count_leading_zeros(_count, _x) \
- __asm__ ("clz %[count],%[x]" \
- : [count] "=r" ((word) (_count)) \
- : [x] "r" ((word) (_x)))
-#else
-#define count_leading_zeros(_count, _x) \
- __asm__ ("dclz %[count],%[x]" \
- : [count] "=r" ((word) (_count)) \
- : [x] "r" ((word) (_x)))
-#endif
-
-#endif /* __mips >= 4 */
-
-#endif /* count_leading_zeros */
#if defined (count_leading_zeros)
always_inline uword
min_log2 (uword x)
{
uword n;
- count_leading_zeros (n, x);
+ n = count_leading_zeros (x);
return BITS (uword) - n - 1;
}
#else
@@ -305,7 +254,7 @@ log2_first_set (uword x)
{
uword result;
#ifdef count_trailing_zeros
- count_trailing_zeros (result, x);
+ result = count_trailing_zeros (x);
#else
result = min_log2 (first_set (x));
#endif
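Note that the clib.h change above drops the old architecture-specific fallbacks and turns count_leading_zeros / count_trailing_zeros into expression-style wrappers, so callers now assign the result instead of passing a destination variable. A minimal usage sketch (assuming the 64-bit uword branch shown above; not the literal clib.h contents):

/* Sketch of the new calling convention; mirrors min_log2 above. */
#define count_leading_zeros(x)  __builtin_clzll (x)
#define count_trailing_zeros(x) __builtin_ctzll (x)

static inline unsigned long
min_log2_example (unsigned long x)   /* x must be non-zero */
{
  unsigned long n = count_leading_zeros (x);
  return 64 - n - 1;                 /* BITS (uword) - n - 1 */
}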
diff --git a/src/vppinfra/sparse_vec.h b/src/vppinfra/sparse_vec.h
index 0da154d8b68..cfa5778e290 100644
--- a/src/vppinfra/sparse_vec.h
+++ b/src/vppinfra/sparse_vec.h
@@ -108,15 +108,20 @@ sparse_vec_index_internal (void *v,
h = sparse_vec_header (v);
i = sparse_index / BITS (h->is_member_bitmap[0]);
- b = (uword) 1 << (uword) (sparse_index % BITS (h->is_member_bitmap[0]));
+ b = sparse_index % BITS (h->is_member_bitmap[0]);
ASSERT (i < vec_len (h->is_member_bitmap));
ASSERT (i < vec_len (h->member_counts));
w = h->is_member_bitmap[i];
- d = h->member_counts[i] + count_set_bits (w & (b - 1));
- is_member = (w & b) != 0;
+ if (PREDICT_TRUE (maybe_range == 0 && insert == 0 &&
+ count_trailing_zeros (w) == b))
+ return h->member_counts[i] + 1;
+
+ d = h->member_counts[i] + count_set_bits (w & ((1ULL << b) - 1));
+ is_member = (w & (1ULL << b)) != 0;
+
if (maybe_range)
{
u8 r = h->range_flags[d];
@@ -134,7 +139,7 @@ sparse_vec_index_internal (void *v,
if (!is_member)
{
uword j;
- w |= b;
+ w |= 1ULL << b;
h->is_member_bitmap[i] = w;
for (j = i + 1; j < vec_len (h->member_counts); j++)
h->member_counts[j] += 1;
@@ -170,8 +175,8 @@ sparse_vec_index2 (void *v,
i0 = si0 / BITS (h->is_member_bitmap[0]);
i1 = si1 / BITS (h->is_member_bitmap[0]);
- b0 = (uword) 1 << (uword) (si0 % BITS (h->is_member_bitmap[0]));
- b1 = (uword) 1 << (uword) (si1 % BITS (h->is_member_bitmap[0]));
+ b0 = si0 % BITS (h->is_member_bitmap[0]);
+ b1 = si1 % BITS (h->is_member_bitmap[0]);
ASSERT (i0 < vec_len (h->is_member_bitmap));
ASSERT (i1 < vec_len (h->is_member_bitmap));
@@ -182,8 +187,16 @@ sparse_vec_index2 (void *v,
w0 = h->is_member_bitmap[i0];
w1 = h->is_member_bitmap[i1];
- v0 = w0 & (b0 - 1);
- v1 = w1 & (b1 - 1);
+ if (PREDICT_TRUE ((count_trailing_zeros (w0) == b0) +
+ (count_trailing_zeros (w1) == b1) == 2))
+ {
+ *i0_return = h->member_counts[i0] + 1;
+ *i1_return = h->member_counts[i1] + 1;
+ return;
+ }
+
+ v0 = w0 & ((1ULL << b0) - 1);
+ v1 = w1 & ((1ULL << b1) - 1);
/* Speculate that masks will have zero or one bits set. */
d0 = h->member_counts[i0] + (v0 != 0);
@@ -196,8 +209,8 @@ sparse_vec_index2 (void *v,
d1 += count_set_bits (v1) - (v1 != 0);
}
- is_member0 = (w0 & b0) != 0;
- is_member1 = (w1 & b1) != 0;
+ is_member0 = (w0 & (1ULL << b0)) != 0;
+ is_member1 = (w1 & (1ULL << b1)) != 0;
d0 = is_member0 ? d0 : 0;
d1 = is_member1 ? d1 : 0;
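Taken together, the sparse_vec.h changes add a fast path for the common lookup in which the queried bit is the lowest set bit of its bitmap word. A condensed standalone sketch of that logic (simplified types, 64-bit bitmap words assumed, range/insert handling omitted; this is not the actual sparse_vec_header layout):

/* Condensed sketch of the lookup fast path. */
#include <stdint.h>

static inline uint32_t
sparse_index_sketch (const uint64_t *is_member_bitmap,
                     const uint16_t *member_counts,
                     uint32_t sparse_index)
{
  uint32_t i = sparse_index / 64;      /* which bitmap word */
  uint32_t b = sparse_index % 64;      /* bit within that word */
  uint64_t w = is_member_bitmap[i];

  /* Fast path: bit b is the lowest set bit, so no bits below it are
   * members and a single ctzll settles the lookup. */
  if (w != 0 && (uint32_t) __builtin_ctzll (w) == b)
    return member_counts[i] + 1;

  /* Slow path: popcount the member bits below b. */
  uint32_t d = member_counts[i]
             + (uint32_t) __builtin_popcountll (w & ((1ULL << b) - 1));
  return (w & (1ULL << b)) ? d + 1 : 0;   /* 0 here stands for "not a member" */
}

The w != 0 guard is added here only to keep __builtin_ctzll defined for an empty word; the PREDICT_TRUE branch in the patched code also requires maybe_range == 0 and insert == 0 before taking the shortcut.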