author    Damjan Marion <damarion@cisco.com>  2019-01-31 12:29:39 +0100
committer Damjan Marion <damarion@cisco.com>  2019-01-31 12:33:16 +0100
commit    24dcbe45209dfdbcf2f1851aa443e696fac050f2 (patch)
tree      95613b567a5df9d6e73e95ade88689a6c0b5d0a2 /src/vlib
parent    f6e9b014cbd0ba5a757565a6a0179065d7236463 (diff)
buffers: reinitialize metadata, add additional validation

- DPDK overwrites metadata as part of rte_pktmbuf_init(...), so we need to reinitialize it
- additional checks added to ensure ref_count is never < 1

Change-Id: Ida336f81c4723e8f2e0ad4a70cb7b1ecfff978a0
Signed-off-by: Damjan Marion <damarion@cisco.com>
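
For readers skimming the diff, here is a minimal, self-contained sketch (not part of the patch) of the invariants that the new vlib_buffer_validate() helper below enforces. The structs and constants are simplified stand-ins for the real vlib types (vlib_buffer_t, vlib_buffer_pool_t, VLIB_BUFFER_DATA_SIZE, CLIB_CACHE_LINE_BYTES); only the fields used by the checks are modeled.

/*
 * Minimal sketch of the invariants enforced by vlib_buffer_validate().
 * Simplified stand-ins for the real vlib types; illustrative constants.
 */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define BUFFER_DATA_SIZE 2048    /* stand-in for VLIB_BUFFER_DATA_SIZE */

typedef struct { uint8_t ref_count; uint8_t buffer_pool_index; } buffer_t;
typedef struct { uintptr_t start; uintptr_t size; } buffer_pool_t;
typedef struct
{
  uintptr_t buffer_mem_start;   /* start of the global buffer memory */
  uintptr_t buffer_mem_size;
  buffer_pool_t pools[1];       /* indexed by buffer_pool_index */
} buffer_main_t;

static void
buffer_validate (buffer_main_t * bm, buffer_t * b)
{
  uintptr_t p = (uintptr_t) b;
  buffer_pool_t *bp = &bm->pools[b->buffer_pool_index];

  /* reference count of an allocated buffer must always be 1 or higher */
  assert (b->ref_count > 0);

  /* buffer pointer must lie within the global buffer memory range ... */
  assert (p >= bm->buffer_mem_start);
  assert (p < bm->buffer_mem_start + bm->buffer_mem_size - BUFFER_DATA_SIZE);

  /* ... and within the pool the buffer claims to belong to */
  assert (p >= bp->start);
  assert (p < bp->start + bp->size - BUFFER_DATA_SIZE);
}

int
main (void)
{
  static uint8_t mem[1 << 20] __attribute__ ((aligned (64)));
  buffer_main_t bm = {
    .buffer_mem_start = (uintptr_t) mem,
    .buffer_mem_size = sizeof (mem),
    .pools = { { (uintptr_t) mem, sizeof (mem) } },
  };
  buffer_t *b = (buffer_t *) mem;

  b->ref_count = 1;             /* metadata that rte_pktmbuf_init() resets, */
  b->buffer_pool_index = 0;     /* hence the reinitialization in the patch  */
  buffer_validate (&bm, b);     /* all invariants hold -> no assert fires   */
  printf ("buffer validated\n");

  /* b->ref_count = 0; buffer_validate (&bm, b);  -> would trigger assert */
  return 0;
}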
Diffstat (limited to 'src/vlib')
-rw-r--r--  src/vlib/buffer.c        |  1
-rw-r--r--  src/vlib/buffer_funcs.h  | 56
2 files changed, 44 insertions, 13 deletions
diff --git a/src/vlib/buffer.c b/src/vlib/buffer.c
index 3e194113675..701ddde2227 100644
--- a/src/vlib/buffer.c
+++ b/src/vlib/buffer.c
@@ -569,6 +569,7 @@ vlib_buffer_pool_create (vlib_main_t * vm, u8 index, char *name,
bi = vlib_get_buffer_index (vm, (vlib_buffer_t *) p);
vec_add1_aligned (bp->buffers, bi, CLIB_CACHE_LINE_BYTES);
+ vlib_get_buffer (vm, bi);
bp->n_buffers += 1;
}
diff --git a/src/vlib/buffer_funcs.h b/src/vlib/buffer_funcs.h
index ce8d1ef9078..37ddcd4a206 100644
--- a/src/vlib/buffer_funcs.h
+++ b/src/vlib/buffer_funcs.h
@@ -47,6 +47,34 @@
vlib buffer access methods.
*/
+always_inline void
+vlib_buffer_validate (vlib_main_t * vm, vlib_buffer_t * b)
+{
+ vlib_buffer_main_t *bm = vm->buffer_main;
+ vlib_buffer_pool_t *bp;
+
+ /* reference count in an allocated buffer must always be 1 or higher */
+ ASSERT (b->ref_count > 0);
+
+ /* verify that the buffer pointer lies within the buffer memory range */
+ ASSERT (pointer_to_uword (b) >= bm->buffer_mem_start);
+ ASSERT (pointer_to_uword (b) < bm->buffer_mem_start + bm->buffer_mem_size -
+ VLIB_BUFFER_DATA_SIZE);
+
+ /* verify that buffer pool index is valid */
+ bp = vec_elt_at_index (bm->buffer_pools, b->buffer_pool_index);
+ ASSERT (pointer_to_uword (b) >= bp->start);
+ ASSERT (pointer_to_uword (b) < bp->start + bp->size -
+ VLIB_BUFFER_DATA_SIZE);
+}
+
+always_inline void *
+vlib_buffer_ptr_from_index (uword buffer_mem_start, u32 buffer_index,
+ uword offset)
+{
+ offset += ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
+ return uword_to_pointer (buffer_mem_start + offset, vlib_buffer_t *);
+}
/** \brief Translate buffer index into buffer pointer
@@ -58,10 +86,11 @@ always_inline vlib_buffer_t *
vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
{
vlib_buffer_main_t *bm = vm->buffer_main;
- uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
- ASSERT (offset < bm->buffer_mem_size);
+ vlib_buffer_t *b;
- return uword_to_pointer (bm->buffer_mem_start + offset, void *);
+ b = vlib_buffer_ptr_from_index (bm->buffer_mem_start, buffer_index, 0);
+ vlib_buffer_validate (vm, b);
+ return b;
}
static_always_inline void
@@ -108,9 +137,7 @@ static_always_inline void
vlib_get_buffers_with_offset (vlib_main_t * vm, u32 * bi, void **b, int count,
i32 offset)
{
-#if defined (CLIB_HAVE_VEC256) || defined (CLIB_HAVE_VEC128)
uword buffer_mem_start = vm->buffer_main->buffer_mem_start;
-#endif
#ifdef CLIB_HAVE_VEC256
u64x4 off = u64x4_splat (buffer_mem_start + offset);
/* if count is not const, compiler will not unroll while loop
@@ -146,10 +173,10 @@ vlib_get_buffers_with_offset (vlib_main_t * vm, u32 * bi, void **b, int count,
u64x2_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
u64x2_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 2);
#else
- b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
- b[1] = ((u8 *) vlib_get_buffer (vm, bi[1])) + offset;
- b[2] = ((u8 *) vlib_get_buffer (vm, bi[2])) + offset;
- b[3] = ((u8 *) vlib_get_buffer (vm, bi[3])) + offset;
+ b[0] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[0], offset);
+ b[1] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[1], offset);
+ b[2] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[2], offset);
+ b[3] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[3], offset);
#endif
b += 4;
bi += 4;
@@ -157,7 +184,7 @@ vlib_get_buffers_with_offset (vlib_main_t * vm, u32 * bi, void **b, int count,
}
while (count)
{
- b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
+ b[0] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[0], offset);
b += 1;
bi += 1;
count -= 1;
@@ -710,6 +737,11 @@ vlib_buffer_free_inline (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
vlib_buffer_validate_alloc_free (vm, buffers, 4,
VLIB_BUFFER_KNOWN_ALLOCATED);
+ vlib_buffer_validate (vm, b[0]);
+ vlib_buffer_validate (vm, b[1]);
+ vlib_buffer_validate (vm, b[2]);
+ vlib_buffer_validate (vm, b[3]);
+
VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[0]);
VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[1]);
VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[2]);
@@ -748,9 +780,7 @@ vlib_buffer_free_inline (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
}
}
- ASSERT (pointer_to_uword (b[0]) >= bp->start &&
- pointer_to_uword (b[0]) <
- bp->start + bp->size - (bp->data_size + sizeof (*b[0])));
+ vlib_buffer_validate (vm, b[0]);
VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[0]);
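
As a follow-up illustration, the index-to-pointer arithmetic that this patch factors into vlib_buffer_ptr_from_index() can be shown with a standalone sketch (illustrative names, not VPP API): a buffer index is simply the buffer's offset from buffer_mem_start measured in cache lines.

/*
 * Standalone sketch of the index -> pointer translation factored out above
 * into vlib_buffer_ptr_from_index().  Names here are illustrative.
 */
#include <stdint.h>
#include <stdio.h>

#define LOG2_CACHE_LINE_BYTES 6   /* stand-in for CLIB_LOG2_CACHE_LINE_BYTES */

static inline void *
buffer_ptr_from_index (uintptr_t buffer_mem_start, uint32_t buffer_index,
                       uintptr_t offset)
{
  offset += ((uintptr_t) buffer_index) << LOG2_CACHE_LINE_BYTES;
  return (void *) (buffer_mem_start + offset);
}

int
main (void)
{
  uintptr_t mem_start = 0x10000000;          /* arbitrary example base */

  /* index 0 maps to the very start of buffer memory */
  printf ("%p\n", buffer_ptr_from_index (mem_start, 0, 0));

  /* each index advances one cache line (64 bytes here), so index 3 */
  /* with a 16-byte data offset lands at base + 3 * 64 + 16         */
  printf ("%p\n", buffer_ptr_from_index (mem_start, 3, 16));
  return 0;
}

This is the same expression the scalar fallback in vlib_get_buffers_with_offset() now delegates to the helper, while the SIMD paths in that function compute it on several indices at once.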