path: root/src/vppinfra/hash.c
author     Gabriel Ganne <gabriel.ganne@enea.com>  2017-11-03 10:30:45 +0100
committer  John Lo <loj@cisco.com>                 2017-11-03 14:16:15 +0000
commit     c5c2bb3f042fe11184faa731b6209444e2c05231 (patch)
tree       d4ac9a475233a6a0e80defe5212ec0deae5b0601 /src/vppinfra/hash.c
parent     a30d903956d387b9c5e97f4b50d1166361fb33da (diff)
silence clib_mem_unaligned() invalid read found by address-sanitizer
clib_mem_unaligned + zap64 casts its input as u64, computes a mask
according to the input length, and returns the cast, masked value.
Therefore all 8 bytes of the u64 are systematically read, and the
invalid ones are discarded. Since they are discarded correctly, this
invalid read can safely be ignored.

Revert "fix clib_mem_unaligned() invalid read"
This reverts commit 0ed3d81a5fa274283ae69b69a405c385189897d3.

Change-Id: I5cc33ad36063c414085636debe93707d9a75157a
Signed-off-by: Gabriel Ganne <gabriel.ganne@enea.com>
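For reference, a minimal standalone sketch of the masking the message describes
(illustrative only, not the vppinfra code; the typedef and function name are
made up here): the tail of a key is read as a full u64, and a zap64-style mask
then discards the bytes beyond the valid length. Little-endian case shown.

#include <stdint.h>
#include <stdio.h>

typedef uint64_t u64;

/* Keep only the low n valid bytes of an over-wide 8-byte read (1 <= n <= 8). */
static u64
zap64_sketch (u64 x, int n)
{
  u64 mask = (n >= 8) ? ~(u64) 0 : (((u64) 1 << (8 * n)) - 1);
  return x & mask;
}

int
main (void)
{
  /* Pretend only the low 3 bytes of this load were inside the key; the upper
     bytes are the "invalid read" address-sanitizer complains about. */
  u64 raw = 0xdeadbeef00332211ULL;
  printf ("%#llx\n", (unsigned long long) zap64_sketch (raw, 3));  /* 0x332211 */
  return 0;
}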
Diffstat (limited to 'src/vppinfra/hash.c')
-rw-r--r--  src/vppinfra/hash.c | 47
1 file changed, 28 insertions(+), 19 deletions(-)
diff --git a/src/vppinfra/hash.c b/src/vppinfra/hash.c
index b3db9f82b9c..121fa385705 100644
--- a/src/vppinfra/hash.c
+++ b/src/vppinfra/hash.c
@@ -80,27 +80,33 @@ static u8 *hash_format_pair_default (u8 * s, va_list * args);
#if uword_bits == 64
static inline u64
-get_unaligned_as_u64 (void const *data, int n)
+zap64 (u64 x, word n)
{
- int i;
- u64 r = 0;
- u8 const *p = (u8 const *) data;
-
+#define _(n) (((u64) 1 << (u64) (8*(n))) - (u64) 1)
+ static u64 masks_little_endian[] = {
+ 0, _(1), _(2), _(3), _(4), _(5), _(6), _(7),
+ };
+ static u64 masks_big_endian[] = {
+ 0, ~_(7), ~_(6), ~_(5), ~_(4), ~_(3), ~_(2), ~_(1),
+ };
+#undef _
if (clib_arch_is_big_endian)
- {
- for (i = 0; i < n; i++)
- r |= ((u64) ((*(p + i)) << (u8) (1 << (8 - i))));
- }
+ return x & masks_big_endian[n];
else
- {
- for (i = 0; i < n; i++)
- r |= ((u64) ((*(p + i)) << (u8) (1 << i)));
- }
-
- return r;
+ return x & masks_little_endian[n];
}
-static inline u64
+/**
+ * make address-sanitizer skip this:
+ * clib_mem_unaligned + zap64 casts its input as u64, computes a mask
+ * according to the input length, and returns the cast, masked value.
+ * Therefore all 8 bytes of the u64 are systematically read, which
+ * rightfully causes address-sanitizer to raise an error on smaller inputs.
+ *
+ * However, the invalid bytes are discarded within zap64(), which is why
+ * this can be silenced safely.
+ */
+static inline u64 __attribute__ ((no_sanitize_address))
hash_memory64 (void *p, word n_bytes, u64 state)
{
u64 *q = p;
@@ -126,16 +132,19 @@ hash_memory64 (void *p, word n_bytes, u64 state)
case 2:
a += clib_mem_unaligned (q + 0, u64);
b += clib_mem_unaligned (q + 1, u64);
- c += get_unaligned_as_u64 (q + 2, n % sizeof (u64)) << 8;
+ if (n % sizeof (u64))
+ c += zap64 (clib_mem_unaligned (q + 2, u64), n % sizeof (u64)) << 8;
break;
case 1:
a += clib_mem_unaligned (q + 0, u64);
- b += get_unaligned_as_u64 (q + 1, n % sizeof (u64));
+ if (n % sizeof (u64))
+ b += zap64 (clib_mem_unaligned (q + 1, u64), n % sizeof (u64));
break;
case 0:
- a += get_unaligned_as_u64 (q + 0, n % sizeof (u64));
+ if (n % sizeof (u64))
+ a += zap64 (clib_mem_unaligned (q + 0, u64), n % sizeof (u64));
break;
}
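
The same silencing pattern in isolation, as a hedged sketch (GCC/Clang
extensions assumed; names are illustrative, not from vppinfra): the function
deliberately loads a full 8 bytes even when fewer are valid, masks off the
excess, and carries the annotation so address-sanitizer's shadow-memory check
skips the over-wide load.

#include <stdint.h>

typedef uint64_t u64;

/* Packed wrapper so an unaligned 8-byte load is well-defined for GCC/Clang. */
typedef struct { u64 v; } __attribute__ ((packed)) u64_unaligned;

__attribute__ ((no_sanitize_address)) static u64
load_and_zap (const void *p, int n_valid)  /* 1 <= n_valid <= 8 */
{
  u64 x = ((const u64_unaligned *) p)->v;  /* always reads 8 bytes */
  if (n_valid < 8)
    x &= ((u64) 1 << (8 * n_valid)) - 1;   /* discard the never-valid bytes */
  return x;
}

int
main (void)
{
  unsigned char buf[8] = { 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88 };
  /* Treat only the first 3 bytes as the key; the rest is zapped (little-endian). */
  return load_and_zap (buf, 3) == 0x332211 ? 0 : 1;
}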