Diffstat (limited to 'src/vlib')
-rw-r--r--  src/vlib/buffer.c        254
-rw-r--r--  src/vlib/buffer.h          4
-rw-r--r--  src/vlib/buffer_funcs.h  113
3 files changed, 171 insertions, 200 deletions
diff --git a/src/vlib/buffer.c b/src/vlib/buffer.c
index 95b4344f10b..4f5eb09da91 100644
--- a/src/vlib/buffer.c
+++ b/src/vlib/buffer.c
@@ -68,8 +68,9 @@ format_vlib_buffer (u8 * s, va_list * args)
vlib_buffer_t *b = va_arg (*args, vlib_buffer_t *);
uword indent = format_get_indent (s);
- s = format (s, "current data %d, length %d, free-list %d",
- b->current_data, b->current_length, b->free_list_index);
+ s = format (s, "current data %d, length %d, free-list %d, clone-count %u",
+ b->current_data, b->current_length, b->free_list_index,
+ b->n_add_refs);
if (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
s = format (s, ", totlen-nifb %d",
@@ -84,8 +85,10 @@ format_vlib_buffer (u8 * s, va_list * args)
u32 next_buffer = b->next_buffer;
b = vlib_get_buffer (vm, next_buffer);
- s = format (s, "\n%Unext-buffer 0x%x, segment length %d",
- format_white_space, indent, next_buffer, b->current_length);
+ s =
+ format (s, "\n%Unext-buffer 0x%x, segment length %d, clone-count %u",
+ format_white_space, indent, next_buffer, b->current_length,
+ b->n_add_refs);
}
return s;
@@ -262,7 +265,7 @@ vlib_main_t **vlib_mains;
/* When debugging validate that given buffers are either known allocated
or known free. */
-static void __attribute__ ((unused))
+static void
vlib_buffer_validate_alloc_free (vlib_main_t * vm,
u32 * buffers,
uword n_buffers,
@@ -362,6 +365,7 @@ vlib_buffer_create_free_list_helper (vlib_main_t * vm,
/* Setup free buffer template. */
f->buffer_init_template.free_list_index = f->index;
+ f->buffer_init_template.n_add_refs = 0;
if (is_public)
{
@@ -620,19 +624,11 @@ vlib_buffer_free_inline (vlib_main_t * vm,
{
vlib_buffer_main_t *bm = vm->buffer_main;
vlib_buffer_free_list_t *fl;
- static u32 *next_to_free[2]; /* smp bad */
- u32 i_next_to_free, *b, *n, *f, fi;
- uword n_left;
+ u32 fi;
int i;
- static vlib_buffer_free_list_t **announce_list;
- vlib_buffer_free_list_t *fl0 = 0, *fl1 = 0;
- u32 bi0 = (u32) ~ 0, bi1 = (u32) ~ 0, fi0, fi1 = (u32) ~ 0;
- u8 free0, free1 = 0, free_next0, free_next1;
u32 (*cb) (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
u32 follow_buffer_next);
- ASSERT (os_get_cpu_number () == 0);
-
cb = bm->buffer_free_callback;
if (PREDICT_FALSE (cb != 0))
@@ -641,203 +637,68 @@ vlib_buffer_free_inline (vlib_main_t * vm,
if (!n_buffers)
return;
- /* Use first buffer to get default free list. */
- {
- u32 bi0 = buffers[0];
- vlib_buffer_t *b0;
-
- b0 = vlib_get_buffer (vm, bi0);
- fl = vlib_buffer_get_buffer_free_list (vm, b0, &fi);
- if (fl->buffers_added_to_freelist_function)
- vec_add1 (announce_list, fl);
- }
-
- vec_validate (next_to_free[0], n_buffers - 1);
- vec_validate (next_to_free[1], n_buffers - 1);
-
- i_next_to_free = 0;
- n_left = n_buffers;
- b = buffers;
-
-again:
- /* Verify that buffers are known allocated. */
- vlib_buffer_validate_alloc_free (vm, b,
- n_left, VLIB_BUFFER_KNOWN_ALLOCATED);
-
- vec_add2_aligned (fl->buffers, f, n_left, CLIB_CACHE_LINE_BYTES);
-
- n = next_to_free[i_next_to_free];
- while (n_left >= 4)
- {
- vlib_buffer_t *b0, *b1, *binit0, *binit1, dummy_buffers[2];
-
- bi0 = b[0];
- bi1 = b[1];
-
- f[0] = bi0;
- f[1] = bi1;
- f += 2;
- b += 2;
- n_left -= 2;
-
- /* Prefetch buffers for next iteration. */
- vlib_prefetch_buffer_with_index (vm, b[0], WRITE);
- vlib_prefetch_buffer_with_index (vm, b[1], WRITE);
-
- b0 = vlib_get_buffer (vm, bi0);
- b1 = vlib_get_buffer (vm, bi1);
-
- free0 = (b0->flags & VLIB_BUFFER_RECYCLE) == 0;
- free1 = (b1->flags & VLIB_BUFFER_RECYCLE) == 0;
-
- /* Must be before init which will over-write buffer flags. */
- if (follow_buffer_next)
- {
- n[0] = b0->next_buffer;
- free_next0 = free0 && (b0->flags & VLIB_BUFFER_NEXT_PRESENT) != 0;
- n += free_next0;
-
- n[0] = b1->next_buffer;
- free_next1 = free1 && (b1->flags & VLIB_BUFFER_NEXT_PRESENT) != 0;
- n += free_next1;
- }
- else
- free_next0 = free_next1 = 0;
-
- /* Must be before init which will over-write buffer free list. */
- fi0 = b0->free_list_index;
- fi1 = b1->free_list_index;
-
- if (PREDICT_FALSE (fi0 != fi || fi1 != fi))
- goto slow_path_x2;
-
- binit0 = free0 ? b0 : &dummy_buffers[0];
- binit1 = free1 ? b1 : &dummy_buffers[1];
-
- vlib_buffer_init_two_for_free_list (binit0, binit1, fl);
- continue;
-
- slow_path_x2:
- /* Backup speculation. */
- f -= 2;
- n -= free_next0 + free_next1;
-
- _vec_len (fl->buffers) = f - fl->buffers;
-
- fl0 = pool_elt_at_index (bm->buffer_free_list_pool, fi0);
- fl1 = pool_elt_at_index (bm->buffer_free_list_pool, fi1);
-
- vlib_buffer_add_to_free_list (vm, fl0, bi0, free0);
- if (PREDICT_FALSE (fl0->buffers_added_to_freelist_function != 0))
- {
- int i;
- for (i = 0; i < vec_len (announce_list); i++)
- if (fl0 == announce_list[i])
- goto no_fl0;
- vec_add1 (announce_list, fl0);
- }
- no_fl0:
- if (PREDICT_FALSE (fl1->buffers_added_to_freelist_function != 0))
- {
- int i;
- for (i = 0; i < vec_len (announce_list); i++)
- if (fl1 == announce_list[i])
- goto no_fl1;
- vec_add1 (announce_list, fl1);
- }
-
- no_fl1:
- vlib_buffer_add_to_free_list (vm, fl1, bi1, free1);
-
- /* Possibly change current free list. */
- if (fi0 != fi && fi1 != fi)
- {
- fi = fi1;
- fl = pool_elt_at_index (bm->buffer_free_list_pool, fi);
- }
-
- vec_add2_aligned (fl->buffers, f, n_left, CLIB_CACHE_LINE_BYTES);
- }
-
- while (n_left >= 1)
+ for (i = 0; i < n_buffers; i++)
{
- vlib_buffer_t *b0, *binit0, dummy_buffers[1];
+ vlib_buffer_t *b;
+ u32 bi = buffers[i];
- bi0 = b[0];
- f[0] = bi0;
- f += 1;
- b += 1;
- n_left -= 1;
-
- b0 = vlib_get_buffer (vm, bi0);
+ b = vlib_get_buffer (vm, bi);
- free0 = (b0->flags & VLIB_BUFFER_RECYCLE) == 0;
+ fl = vlib_buffer_get_buffer_free_list (vm, b, &fi);
- /* Must be before init which will over-write buffer flags. */
- if (follow_buffer_next)
+ /* The only current use of this callback: multicast recycle */
+ if (PREDICT_FALSE (fl->buffers_added_to_freelist_function != 0))
{
- n[0] = b0->next_buffer;
- free_next0 = free0 && (b0->flags & VLIB_BUFFER_NEXT_PRESENT) != 0;
- n += free_next0;
+ int j;
+
+ vlib_buffer_add_to_free_list
+ (vm, fl, buffers[i], (b->flags & VLIB_BUFFER_RECYCLE) == 0);
+
+ for (j = 0; j < vec_len (bm->announce_list); j++)
+ {
+ if (fl == bm->announce_list[j])
+ goto already_announced;
+ }
+ vec_add1 (bm->announce_list, fl);
+ already_announced:
+ ;
}
else
- free_next0 = 0;
-
- /* Must be before init which will over-write buffer free list. */
- fi0 = b0->free_list_index;
-
- if (PREDICT_FALSE (fi0 != fi))
- goto slow_path_x1;
-
- binit0 = free0 ? b0 : &dummy_buffers[0];
-
- vlib_buffer_init_for_free_list (binit0, fl);
- continue;
-
- slow_path_x1:
- /* Backup speculation. */
- f -= 1;
- n -= free_next0;
-
- _vec_len (fl->buffers) = f - fl->buffers;
-
- fl0 = pool_elt_at_index (bm->buffer_free_list_pool, fi0);
-
- vlib_buffer_add_to_free_list (vm, fl0, bi0, free0);
- if (PREDICT_FALSE (fl0->buffers_added_to_freelist_function != 0))
{
- int i;
- for (i = 0; i < vec_len (announce_list); i++)
- if (fl0 == announce_list[i])
- goto no_fl00;
- vec_add1 (announce_list, fl0);
+ if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_RECYCLE) == 0))
+ {
+ u32 flags, next;
+
+ do
+ {
+ vlib_buffer_t *nb = vlib_get_buffer (vm, bi);
+ flags = nb->flags;
+ next = nb->next_buffer;
+ if (nb->n_add_refs)
+ nb->n_add_refs--;
+ else
+ {
+ vlib_buffer_validate_alloc_free (vm, &bi, 1,
+ VLIB_BUFFER_KNOWN_ALLOCATED);
+ vlib_buffer_add_to_free_list (vm, fl, bi, 1);
+ }
+ bi = next;
+ }
+ while (follow_buffer_next
+ && (flags & VLIB_BUFFER_NEXT_PRESENT));
+
+ }
}
-
- no_fl00:
- fi = fi0;
- fl = pool_elt_at_index (bm->buffer_free_list_pool, fi);
-
- vec_add2_aligned (fl->buffers, f, n_left, CLIB_CACHE_LINE_BYTES);
}
-
- if (follow_buffer_next && ((n_left = n - next_to_free[i_next_to_free]) > 0))
- {
- b = next_to_free[i_next_to_free];
- i_next_to_free ^= 1;
- goto again;
- }
-
- _vec_len (fl->buffers) = f - fl->buffers;
-
- if (vec_len (announce_list))
+ if (vec_len (bm->announce_list))
{
vlib_buffer_free_list_t *fl;
- for (i = 0; i < vec_len (announce_list); i++)
+ for (i = 0; i < vec_len (bm->announce_list); i++)
{
- fl = announce_list[i];
+ fl = bm->announce_list[i];
fl->buffers_added_to_freelist_function (vm, fl);
}
- _vec_len (announce_list) = 0;
+ _vec_len (bm->announce_list) = 0;
}
}
@@ -922,6 +783,7 @@ vlib_packet_template_init (vlib_main_t * vm,
fl->buffer_init_template.current_data = 0;
fl->buffer_init_template.current_length = n_packet_data_bytes;
fl->buffer_init_template.flags = 0;
+ fl->buffer_init_template.n_add_refs = 0;
vlib_worker_thread_barrier_release (vm);
}
diff --git a/src/vlib/buffer.h b/src/vlib/buffer.h
index 8ea79502455..b4015b302c3 100644
--- a/src/vlib/buffer.h
+++ b/src/vlib/buffer.h
@@ -119,7 +119,9 @@ typedef struct
feature node
*/
- u8 dont_waste_me[3]; /**< Available space in the (precious)
+ u8 n_add_refs; /**< Number of additional references to this buffer. */
+
+ u8 dont_waste_me[2]; /**< Available space in the (precious)
first 32 octets of buffer metadata
Before allocating any of it, discussion required!
*/
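
The n_add_refs counter added here drives the reworked free path in buffer.c above: when a buffer index is freed, a non-zero count is simply decremented, and the buffer only returns to its free list once the count reaches zero. Roughly, per segment:

  /* Reference-counted free decision (mirrors the new loop in buffer.c) */
  if (b->n_add_refs)
    b->n_add_refs--;                               /* a clone still references this segment */
  else
    vlib_buffer_add_to_free_list (vm, fl, bi, 1);  /* last reference, recycle the buffer */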
diff --git a/src/vlib/buffer_funcs.h b/src/vlib/buffer_funcs.h
index 0b583a61994..e0fde5f2194 100644
--- a/src/vlib/buffer_funcs.h
+++ b/src/vlib/buffer_funcs.h
@@ -530,6 +530,110 @@ vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
return fd;
}
+/** \brief Create multiple clones of a buffer and store them in the supplied array
+
+ @param vm - (vlib_main_t *) vlib main data structure pointer
+ @param src_buffer - (u32) source buffer index
+ @param buffers - (u32 * ) buffer index array
+ @param n_buffers - (u8) number of buffer clones requested
+ @param head_end_offset - (u16) offset relative to current position
+ where packet head ends
+ @return - (u8) number of buffers actually cloned, may be
+ less than the number requested or zero
+*/
+
+always_inline u8
+vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
+ u8 n_buffers, u16 head_end_offset)
+{
+ u8 i;
+ vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
+
+ ASSERT (s->n_add_refs == 0);
+ ASSERT (n_buffers);
+
+ if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
+ {
+ buffers[0] = src_buffer;
+ for (i = 1; i < n_buffers; i++)
+ {
+ vlib_buffer_t *d;
+ d = vlib_buffer_copy (vm, s);
+ if (d == 0)
+ return i;
+ buffers[i] = vlib_get_buffer_index (vm, d);
+
+ }
+ return n_buffers;
+ }
+
+ n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
+ s->free_list_index);
+ if (PREDICT_FALSE (n_buffers == 0))
+ {
+ buffers[0] = src_buffer;
+ return 1;
+ }
+
+ for (i = 0; i < n_buffers; i++)
+ {
+ vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
+ d->current_data = s->current_data;
+ d->current_length = head_end_offset;
+ d->free_list_index = s->free_list_index;
+ d->total_length_not_including_first_buffer =
+ s->total_length_not_including_first_buffer + s->current_length -
+ head_end_offset;
+ d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
+ d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
+ clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
+ clib_memcpy (vlib_buffer_get_current (d), vlib_buffer_get_current (s),
+ head_end_offset);
+ d->next_buffer = src_buffer;
+ }
+ vlib_buffer_advance (s, head_end_offset);
+ s->n_add_refs = n_buffers - 1;
+ while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
+ {
+ s = vlib_get_buffer (vm, s->next_buffer);
+ s->n_add_refs = n_buffers - 1;
+ }
+
+ return n_buffers;
+}
+
+/** \brief Attach cloned tail to the buffer
+
+ @param vm - (vlib_main_t *) vlib main data structure pointer
+ @param head - (vlib_buffer_t *) head buffer
+ @param tail - (vlib_buffer_t *) tail buffer to clone and attach to head
+*/
+
+always_inline void
+vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
+ vlib_buffer_t * tail)
+{
+ ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
+ ASSERT (head->free_list_index == tail->free_list_index);
+
+ head->flags |= VLIB_BUFFER_NEXT_PRESENT;
+ head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
+ head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
+ head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
+ head->next_buffer = vlib_get_buffer_index (vm, tail);
+ head->total_length_not_including_first_buffer = tail->current_length +
+ tail->total_length_not_including_first_buffer;
+
+next_segment:
+ __sync_add_and_fetch (&tail->n_add_refs, 1);
+
+ if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
+ {
+ tail = vlib_get_buffer (vm, tail->next_buffer);
+ goto next_segment;
+ }
+}
+
/* Initializes the buffer as an empty packet with no chained buffers. */
always_inline void
vlib_buffer_chain_init (vlib_buffer_t * first)
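
A minimal usage sketch for the new vlib_buffer_clone() (the buffer index bi0, clone count, and head_end_offset value are illustrative, not part of this patch):

  /* Hypothetical replication: make up to 8 clones that share the payload of bi0.
     head_end_offset covers the bytes each clone rewrites privately; the source
     buffer must not already be cloned (ASSERT (s->n_add_refs == 0)). */
  u32 clones[8];
  u8 i, n;

  n = vlib_buffer_clone (vm, bi0, clones, 8, 64 /* head_end_offset */);
  for (i = 0; i < n; i++)
    {
      vlib_buffer_t *c = vlib_get_buffer (vm, clones[i]);
      /* rewrite the private head of clone 'c' here; the shared tail is
         reference counted via n_add_refs and freed through the normal path */
    }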
@@ -695,7 +799,8 @@ vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
_(flags);
_(free_list_index);
#undef _
- ASSERT (dst->total_length_not_including_first_buffer == 0);
+ dst->total_length_not_including_first_buffer = 0;
+ ASSERT (dst->n_add_refs == 0);
}
always_inline void
@@ -727,8 +832,10 @@ vlib_buffer_init_two_for_free_list (vlib_buffer_t * dst0,
_(flags);
_(free_list_index);
#undef _
- ASSERT (dst0->total_length_not_including_first_buffer == 0);
- ASSERT (dst1->total_length_not_including_first_buffer == 0);
+ dst0->total_length_not_including_first_buffer = 0;
+ dst1->total_length_not_including_first_buffer = 0;
+ ASSERT (dst0->n_add_refs == 0);
+ ASSERT (dst1->n_add_refs == 0);
}
#if CLIB_DEBUG > 0
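
And a minimal sketch for vlib_buffer_attach_clone() (the buffer indices new_head_bi and shared_tail_bi are illustrative; per the ASSERTs above, head must not yet have a next buffer and both buffers must come from the same free list):

  /* Hypothetical: prepend a private head to an existing, possibly shared, chain.
     Each segment of the tail chain gains one reference via n_add_refs. */
  vlib_buffer_t *head = vlib_get_buffer (vm, new_head_bi);
  vlib_buffer_t *tail = vlib_get_buffer (vm, shared_tail_bi);

  /* fill in the private head data first, then attach the shared tail */
  vlib_buffer_attach_clone (vm, head, tail);

  /* freeing new_head_bi later only decrements the tail segments' n_add_refs;
     the tail is recycled when the last reference is dropped */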