Diffstat (limited to 'src/vlib')
-rw-r--r--  src/vlib/buffer_funcs.h  10
-rw-r--r--  src/vlib/buffer_node.h   12
2 files changed, 14 insertions, 8 deletions
diff --git a/src/vlib/buffer_funcs.h b/src/vlib/buffer_funcs.h
index 8fbb58d68b3..f5781c8b173 100644
--- a/src/vlib/buffer_funcs.h
+++ b/src/vlib/buffer_funcs.h
@@ -65,6 +65,12 @@ vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
}
static_always_inline void
+vlib_buffer_copy_indices (u32 * dst, u32 * src, u32 n_indices)
+{
+ clib_memcpy_fast (dst, src, n_indices * sizeof (u32));
+}
+
+static_always_inline void
vlib_buffer_copy_template (vlib_buffer_t * b, vlib_buffer_t * bt)
{
clib_memcpy_fast (b, bt, STRUCT_OFFSET_OF (vlib_buffer_t, template_end));
@@ -454,7 +460,7 @@ vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
/* following code is intentionally duplicated to allow compiler
to optimize fast path when n_buffers is constant value */
src = fl->buffers + len - n_buffers;
- clib_memcpy_fast (buffers, src, n_buffers * sizeof (u32));
+ vlib_buffer_copy_indices (buffers, src, n_buffers);
_vec_len (fl->buffers) -= n_buffers;
/* Verify that buffers are known free. */
@@ -465,7 +471,7 @@ vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
}
src = fl->buffers + len - n_buffers;
- clib_memcpy_fast (buffers, src, n_buffers * sizeof (u32));
+ vlib_buffer_copy_indices (buffers, src, n_buffers);
_vec_len (fl->buffers) -= n_buffers;
/* Verify that buffers are known free. */
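
For context, a minimal standalone sketch of the pattern this file's change introduces: call sites pass an index count instead of a byte size, so the sizeof (u32) arithmetic lives in one helper. This is an illustration, not vlib code as such; plain memcpy stands in for clib_memcpy_fast, and the free_list/alloced arrays plus main are invented here. Only vlib_buffer_copy_indices and its signature come from the patch.

#include <stdint.h>
#include <string.h>

typedef uint32_t u32;

/* Sketch of the new helper: copies n_indices buffer indices, not bytes.
 * In vlib this wraps clib_memcpy_fast; plain memcpy is used here. */
static inline void
vlib_buffer_copy_indices (u32 * dst, u32 * src, u32 n_indices)
{
  memcpy (dst, src, n_indices * sizeof (u32));
}

int
main (void)
{
  u32 free_list[8] = { 10, 11, 12, 13, 14, 15, 16, 17 };
  u32 alloced[4];

  /* Before the patch a caller spelled out the byte size:
   *   clib_memcpy_fast (alloced, free_list + 4, 4 * sizeof (u32));
   * The helper takes the element count directly: */
  vlib_buffer_copy_indices (alloced, free_list + 4, 4);

  return alloced[0] == 14 ? 0 : 1;	/* 0 on success */
}
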
diff --git a/src/vlib/buffer_node.h b/src/vlib/buffer_node.h
index 2163b72de5e..7e488783fc5 100644
--- a/src/vlib/buffer_node.h
+++ b/src/vlib/buffer_node.h
@@ -383,7 +383,7 @@ vlib_buffer_enqueue_to_next (vlib_main_t * vm, vlib_node_runtime_t * node,
#ifdef CLIB_HAVE_VEC512
if (n_enqueued >= 32)
{
- clib_memcpy_fast (to_next, buffers, 32 * sizeof (u32));
+ vlib_buffer_copy_indices (to_next, buffers, 32);
nexts += 32;
to_next += 32;
buffers += 32;
@@ -397,7 +397,7 @@ vlib_buffer_enqueue_to_next (vlib_main_t * vm, vlib_node_runtime_t * node,
#ifdef CLIB_HAVE_VEC256
if (n_enqueued >= 16)
{
- clib_memcpy_fast (to_next, buffers, 16 * sizeof (u32));
+ vlib_buffer_copy_indices (to_next, buffers, 16);
nexts += 16;
to_next += 16;
buffers += 16;
@@ -411,7 +411,7 @@ vlib_buffer_enqueue_to_next (vlib_main_t * vm, vlib_node_runtime_t * node,
#ifdef CLIB_HAVE_VEC128
if (n_enqueued >= 8)
{
- clib_memcpy_fast (to_next, buffers, 8 * sizeof (u32));
+ vlib_buffer_copy_indices (to_next, buffers, 8);
nexts += 8;
to_next += 8;
buffers += 8;
@@ -424,7 +424,7 @@ vlib_buffer_enqueue_to_next (vlib_main_t * vm, vlib_node_runtime_t * node,
if (n_enqueued >= 4)
{
- clib_memcpy_fast (to_next, buffers, 4 * sizeof (u32));
+ vlib_buffer_copy_indices (to_next, buffers, 4);
nexts += 4;
to_next += 4;
buffers += 4;
@@ -459,7 +459,7 @@ vlib_buffer_enqueue_to_single_next (vlib_main_t * vm,
if (PREDICT_TRUE (n_left_to_next >= count))
{
- clib_memcpy_fast (to_next, buffers, count * sizeof (u32));
+ vlib_buffer_copy_indices (to_next, buffers, count);
n_left_to_next -= count;
vlib_put_next_frame (vm, node, next_index, n_left_to_next);
return;
@@ -467,7 +467,7 @@ vlib_buffer_enqueue_to_single_next (vlib_main_t * vm,
n_enq = n_left_to_next;
next:
- clib_memcpy_fast (to_next, buffers, n_enq * sizeof (u32));
+ vlib_buffer_copy_indices (to_next, buffers, n_enq);
n_left_to_next -= n_enq;
if (PREDICT_FALSE (count > n_enq))
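
The buffer_node.h hunks above all land inside the same size-tiered copy cascade, so the shape of that pattern is sketched below for reference. This is a simplified illustration under stated assumptions, not the vlib implementation: the real vlib_buffer_enqueue_to_next also checks that the next-node indices in each chunk agree and does vlib frame accounting, and its 32/16/8 tiers are selected at compile time by CLIB_HAVE_VEC512/VEC256/VEC128 rather than always present. The function name copy_indices_tiered is hypothetical, and memcpy stands in for the new helper.

#include <stdint.h>
#include <string.h>

typedef uint32_t u32;

/* Hypothetical sketch of the tiered copy: take the widest chunk the
 * build's vector width favours (32, 16, 8, 4 indices), then finish
 * element by element.  Each memcpy below corresponds to one of the
 * calls the patch rewrites as vlib_buffer_copy_indices (to_next,
 * buffers, N). */
void
copy_indices_tiered (u32 * to_next, u32 * buffers, u32 n_left)
{
  while (n_left >= 32)		/* CLIB_HAVE_VEC512 tier */
    {
      memcpy (to_next, buffers, 32 * sizeof (u32));
      to_next += 32; buffers += 32; n_left -= 32;
    }
  while (n_left >= 16)		/* CLIB_HAVE_VEC256 tier */
    {
      memcpy (to_next, buffers, 16 * sizeof (u32));
      to_next += 16; buffers += 16; n_left -= 16;
    }
  while (n_left >= 8)		/* CLIB_HAVE_VEC128 tier */
    {
      memcpy (to_next, buffers, 8 * sizeof (u32));
      to_next += 8; buffers += 8; n_left -= 8;
    }
  while (n_left >= 4)
    {
      memcpy (to_next, buffers, 4 * sizeof (u32));
      to_next += 4; buffers += 4; n_left -= 4;
    }
  while (n_left)
    {
      *to_next++ = *buffers++;
      n_left--;
    }
}
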