author	Florin Coras <fcoras@cisco.com>	2019-12-12 12:09:29 -0800
committer	Dave Barach <openvpp@barachs.net>	2020-02-25 19:18:49 +0000
commit	b020806806c0e6c54886cdb4347a5fd1f19504b0 (patch)
tree	77322fd6c17967df0626e99c98076b5dac71d89d
parent	5cd31ec9405d2bb2fbc8152a08c4cfb64f2a8e73 (diff)
svm: refactor fifo chunk tracking
Avoid tracking with rbtrees all of the chunks associated to a fifo.
Instead, only track chunks when doing out-of-order operations (peek or
ooo enqueue).

Type: refactor
Change-Id: I9f8bd266211746637d98e6a12ffc4b2d6346950a
Signed-off-by: Florin Coras <fcoras@cisco.com>
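As a rough illustration of the new tracking scheme, here is a minimal standalone sketch (not the VPP code): chunks carry an rb_index that stays at the tnil sentinel until an out-of-order peek/enqueue touches them, while in-order operations just walk the circular chunk list. The names chunk_t, find_chunk and ooo_track are hypothetical, and the rbtree insert is modeled by stamping an index instead of calling rb_tree_add2.

/* Minimal sketch, assuming a circular chunk list; not the VPP implementation. */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define TNIL_INDEX ((uint32_t) ~0)	/* stands in for RBTREE_TNIL_INDEX */

typedef struct chunk_
{
  uint32_t start_byte;		/* first fifo byte covered by this chunk */
  uint32_t length;		/* chunk length in bytes */
  uint32_t rb_index;		/* TNIL_INDEX while the chunk is untracked */
  struct chunk_ *next;		/* circular singly linked list */
} chunk_t;

/* In-order operations simply walk the chunk list for a position ... */
static chunk_t *
find_chunk (chunk_t * start, uint32_t pos)
{
  chunk_t *c = start;
  do
    {
      if (pos >= c->start_byte && pos < c->start_byte + c->length)
	return c;
      c = c->next;
    }
  while (c != start);
  return 0;
}

/* ... and only an ooo operation registers the chunks it touches, stopping
 * once it reaches a chunk that is already tracked. */
static void
ooo_track (chunk_t * start, uint32_t pos, uint32_t len, uint32_t * next_index)
{
  chunk_t *c = find_chunk (start, pos);
  uint32_t end = pos + len - 1;
  while (c && c->rb_index == TNIL_INDEX)
    {
      c->rb_index = (*next_index)++;	/* would be an rb_tree_add2 () call */
      if (end < c->start_byte + c->length)
	break;
      c = c->next;
    }
}

int
main (void)
{
  chunk_t a = { 0, 4096, TNIL_INDEX, 0 };
  chunk_t b = { 4096, 4096, TNIL_INDEX, &a };
  uint32_t next_index = 0;
  a.next = &b;

  ooo_track (&a, 4000, 200, &next_index);	/* write spans both chunks */
  assert (a.rb_index != TNIL_INDEX && b.rb_index != TNIL_INDEX);
  printf ("tracked chunks: a=%u b=%u\n", a.rb_index, b.rb_index);
  return 0;
}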
-rw-r--r--	src/plugins/unittest/svm_fifo_test.c	14
-rw-r--r--	src/svm/fifo_segment.c	32
-rw-r--r--	src/svm/svm_fifo.c	338
-rw-r--r--	src/svm/svm_fifo.h	6
-rw-r--r--	src/vppinfra/rbtree.h	1
5 files changed, 307 insertions, 84 deletions
diff --git a/src/plugins/unittest/svm_fifo_test.c b/src/plugins/unittest/svm_fifo_test.c
index fd2e05f3c3f..8b43ee370f1 100644
--- a/src/plugins/unittest/svm_fifo_test.c
+++ b/src/plugins/unittest/svm_fifo_test.c
@@ -1194,8 +1194,8 @@ sfifo_test_fifo_grow (vlib_main_t * vm, unformat_input_t * input)
int test_n_bytes, deq_bytes, enq_bytes, n_deqs, n_enqs;
svm_fifo_chunk_t *c, *next, *prev;
u8 *test_data = 0, *data_buf = 0;
+ u32 old_tail, offset;
svm_fifo_t *f;
- u32 old_tail;
while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
{
@@ -1278,6 +1278,7 @@ sfifo_test_fifo_grow (vlib_main_t * vm, unformat_input_t * input)
svm_fifo_add_chunk (f, c);
SFIFO_TEST (f->size == fifo_size + 200, "size expected %u is %u",
fifo_size + 200, f->size);
+ SFIFO_TEST (svm_fifo_is_sane (f), "fifo should be sane");
prev = 0;
for (i = 0; i < 5; i++)
@@ -1435,14 +1436,22 @@ sfifo_test_fifo_grow (vlib_main_t * vm, unformat_input_t * input)
}
}
+ SFIFO_TEST (svm_fifo_is_sane (f), "fifo should be sane");
SFIFO_TEST (svm_fifo_max_dequeue (f) == 0, "max deq expected %u is %u",
0, svm_fifo_max_dequeue (f));
svm_fifo_enqueue (f, sizeof (u8), &test_data[0]);
memset (data_buf, 0, vec_len (data_buf));
+ offset = 0;
for (i = 0; i <= n_deqs; i++)
- svm_fifo_dequeue (f, deq_bytes, data_buf + i * deq_bytes);
+ {
+ rv = svm_fifo_peek (f, offset, deq_bytes, data_buf + i * deq_bytes);
+ if (rv < 0 || (rv != deq_bytes && i != n_deqs))
+ SFIFO_TEST (0, "unexpected peek %d", rv);
+ offset += rv;
+ }
+ svm_fifo_dequeue_drop (f, offset);
rv = compare_data (data_buf, test_data, 0, vec_len (test_data),
(u32 *) & j);
@@ -1750,6 +1759,7 @@ sfifo_test_fifo_shrink (vlib_main_t * vm, unformat_input_t * input)
svm_fifo_enqueue (f, 200, test_data);
svm_fifo_enqueue_with_offset (f, 50, vec_len (test_data) - 250,
&test_data[250]);
+ SFIFO_TEST (svm_fifo_is_sane (f), "fifo should be sane");
/* Free space */
rv = svm_fifo_max_enqueue (f);
diff --git a/src/svm/fifo_segment.c b/src/svm/fifo_segment.c
index 58c11383e43..bc46aba7bf6 100644
--- a/src/svm/fifo_segment.c
+++ b/src/svm/fifo_segment.c
@@ -412,6 +412,7 @@ fs_try_alloc_fifo_batch (fifo_segment_header_t * fsh,
c = (svm_fifo_chunk_t *) (fmem + sizeof (*f));
c->start_byte = 0;
c->length = rounded_data_size;
+ c->rb_index = RBTREE_TNIL_INDEX;
c->next = fss->free_chunks[fl_index];
fss->free_chunks[fl_index] = c;
fmem += hdrs + rounded_data_size;
@@ -506,11 +507,7 @@ fifo_segment_alloc_fifo_w_slice (fifo_segment_t * fs, u32 slice_index,
/* Initialize chunks and rbtree for multi-chunk fifos */
if (f->start_chunk->next != f->start_chunk)
- {
- void *oldheap = ssvm_push_heap (fsh->ssvm_sh);
- svm_fifo_init_chunks (f);
- ssvm_pop_heap (oldheap);
- }
+ svm_fifo_init_chunks (f);
/* If rx fifo type add to active fifos list. When cleaning up segment,
* we need a list of active sessions that should be disconnected. Since
@@ -541,7 +538,6 @@ fifo_segment_free_fifo (fifo_segment_t * fs, svm_fifo_t * f)
fifo_segment_header_t *fsh = fs->h;
svm_fifo_chunk_t *cur, *next;
fifo_segment_slice_t *fss;
- void *oldheap;
int fl_index;
ASSERT (f->refcnt > 0);
@@ -576,6 +572,7 @@ fifo_segment_free_fifo (fifo_segment_t * fs, svm_fifo_t * f)
fl_index = fs_freelist_for_size (cur->length);
ASSERT (fl_index < vec_len (fss->free_chunks));
cur->next = fss->free_chunks[fl_index];
+ cur->rb_index = RBTREE_TNIL_INDEX;
fss->free_chunks[fl_index] = cur;
fss->n_fl_chunk_bytes += fs_freelist_index_to_size (fl_index);
cur = next;
@@ -585,9 +582,7 @@ fifo_segment_free_fifo (fifo_segment_t * fs, svm_fifo_t * f)
f->start_chunk = f->end_chunk = f->new_chunks = 0;
f->head_chunk = f->tail_chunk = f->ooo_enq = f->ooo_deq = 0;
- oldheap = ssvm_push_heap (fsh->ssvm_sh);
svm_fifo_free_chunk_lookup (f);
- ssvm_pop_heap (oldheap);
/* not allocated on segment heap */
svm_fifo_free_ooo_data (f);
@@ -768,25 +763,21 @@ fifo_segment_grow_fifo (fifo_segment_t * fs, svm_fifo_t * f, u32 chunk_size)
fl_index = fs_freelist_for_size (chunk_size);
fss = fsh_slice_get (fsh, f->slice_index);
- oldheap = ssvm_push_heap (fsh->ssvm_sh);
-
c = fss->free_chunks[fl_index];
if (!c)
{
fsh_check_mem (fsh);
if (fsh_n_free_bytes (fsh) < chunk_size)
- {
- ssvm_pop_heap (oldheap);
- return -1;
- }
+ return -1;
+ oldheap = ssvm_push_heap (fsh->ssvm_sh);
c = svm_fifo_chunk_alloc (chunk_size);
+ ssvm_pop_heap (oldheap);
+
if (!c)
- {
- ssvm_pop_heap (oldheap);
- return -1;
- }
+ return -1;
+
fsh_free_bytes_sub (fsh, chunk_size + sizeof (*c));
}
else
@@ -798,7 +789,6 @@ fifo_segment_grow_fifo (fifo_segment_t * fs, svm_fifo_t * f, u32 chunk_size)
svm_fifo_add_chunk (f, c);
- ssvm_pop_heap (oldheap);
return 0;
}
@@ -808,10 +798,8 @@ fifo_segment_collect_fifo_chunks (fifo_segment_t * fs, svm_fifo_t * f)
fifo_segment_header_t *fsh = fs->h;
svm_fifo_chunk_t *cur, *next;
fifo_segment_slice_t *fss;
- void *oldheap;
int fl_index;
- oldheap = ssvm_push_heap (fsh->ssvm_sh);
cur = svm_fifo_collect_chunks (f);
fss = fsh_slice_get (fsh, f->slice_index);
@@ -825,8 +813,6 @@ fifo_segment_collect_fifo_chunks (fifo_segment_t * fs, svm_fifo_t * f)
cur = next;
}
- ssvm_pop_heap (oldheap);
-
return 0;
}
diff --git a/src/svm/svm_fifo.c b/src/svm/svm_fifo.c
index 975a82026f7..3df67135cfd 100644
--- a/src/svm/svm_fifo.c
+++ b/src/svm/svm_fifo.c
@@ -413,8 +413,8 @@ svm_fifo_init_chunks (svm_fifo_t * f)
return;
f->flags |= SVM_FIFO_F_MULTI_CHUNK;
- rb_tree_init (&f->chunk_lookup);
- rb_tree_add2 (&f->chunk_lookup, 0, pointer_to_uword (f->start_chunk));
+ rb_tree_init (&f->ooo_enq_lookup);
+ rb_tree_init (&f->ooo_deq_lookup);
f->start_chunk->start_byte = 0;
prev = f->start_chunk;
@@ -423,7 +423,6 @@ svm_fifo_init_chunks (svm_fifo_t * f)
while (c != f->start_chunk)
{
c->start_byte = prev->start_byte + prev->length;
- rb_tree_add2 (&f->chunk_lookup, c->start_byte, pointer_to_uword (c));
prev = c;
c = c->next;
}
@@ -458,6 +457,7 @@ svm_fifo_create (u32 data_size_in_bytes)
c->next = c;
c->start_byte = 0;
c->length = data_size_in_bytes;
+ c->rb_index = RBTREE_TNIL_INDEX;
f->start_chunk = f->end_chunk = c;
svm_fifo_init (f, data_size_in_bytes);
@@ -491,6 +491,60 @@ svm_fifo_chunk_includes_pos (svm_fifo_chunk_t * c, u32 pos)
return (pos >= c->start_byte && pos < c->start_byte + c->length);
}
+static rb_node_t *
+svm_fifo_find_node_rbtree (rb_tree_t * rt, u32 pos)
+{
+ rb_node_t *cur, *prev;
+
+ cur = rb_node (rt, rt->root);
+ if (PREDICT_FALSE (rb_node_is_tnil (rt, cur)))
+ return 0;
+
+ while (pos != cur->key)
+ {
+ prev = cur;
+ if (pos < cur->key)
+ {
+ cur = rb_node_left (rt, cur);
+ if (rb_node_is_tnil (rt, cur))
+ {
+ cur = rb_tree_predecessor (rt, prev);
+ break;
+ }
+ }
+ else
+ {
+ cur = rb_node_right (rt, cur);
+ if (rb_node_is_tnil (rt, cur))
+ {
+ cur = prev;
+ break;
+ }
+ }
+ }
+
+ if (rb_node_is_tnil (rt, cur))
+ return 0;
+
+ return cur;
+}
+
+static svm_fifo_chunk_t *
+svm_fifo_find_chunk_rbtree (rb_tree_t * rt, u32 pos)
+{
+ svm_fifo_chunk_t *c;
+ rb_node_t *n;
+
+ n = svm_fifo_find_node_rbtree (rt, pos);
+ if (!n)
+ return 0;
+ c = uword_to_pointer (n->opaque, svm_fifo_chunk_t *);
+ if (svm_fifo_chunk_includes_pos (c, pos))
+ return c;
+
+ return 0;
+}
+
/**
* Find chunk for given byte position
*
@@ -502,44 +556,170 @@ svm_fifo_chunk_includes_pos (svm_fifo_chunk_t * c, u32 pos)
static svm_fifo_chunk_t *
svm_fifo_find_chunk (svm_fifo_t * f, u32 pos)
{
- rb_tree_t *rt = &f->chunk_lookup;
- rb_node_t *cur, *prev;
svm_fifo_chunk_t *c;
- cur = rb_node (rt, rt->root);
- while (pos != cur->key)
+ c = f->start_chunk;
+ do
{
- prev = cur;
- if (pos < cur->key)
- cur = rb_node_left (rt, cur);
- else
- cur = rb_node_right (rt, cur);
+ if (svm_fifo_chunk_includes_pos (c, pos))
+ return c;
+ c = c->next;
+ }
+ while (c != f->start_chunk);
+
+ return 0;
+}
+
+static void
+svm_fifo_update_ooo_enq (svm_fifo_t * f, u32 ref_pos, u32 start_pos,
+ u32 end_pos)
+{
+ rb_tree_t *rt = &f->ooo_enq_lookup;
+ svm_fifo_chunk_t *c;
+ rb_node_t *cur;
+
+ if (svm_fifo_chunk_includes_pos (f->ooo_enq, start_pos)
+ && svm_fifo_chunk_includes_pos (f->ooo_enq, end_pos)
+ && ref_pos < start_pos)
+ return;
- if (rb_node_is_tnil (rt, cur))
+ if (rt->root == RBTREE_TNIL_INDEX)
+ {
+ c = f->tail_chunk;
+ c->rb_index = rb_tree_add2 (rt, c->start_byte, pointer_to_uword (c));
+ }
+ else
+ {
+ cur = svm_fifo_find_node_rbtree (rt, start_pos);
+ c = uword_to_pointer (cur->opaque, svm_fifo_chunk_t *);
+ if (ref_pos > start_pos && c->start_byte > start_pos)
{
- /* Hit tnil as a left child. Find predecessor */
- if (pos < prev->key)
- {
- cur = rb_tree_predecessor (rt, prev);
- if (rb_node_is_tnil (rt, cur))
- return 0;
- c = uword_to_pointer (cur->opaque, svm_fifo_chunk_t *);
- if (svm_fifo_chunk_includes_pos (c, pos))
- return c;
- return 0;
- }
- /* Hit tnil as a right child. Check if this is the one */
- c = uword_to_pointer (prev->opaque, svm_fifo_chunk_t *);
- if (svm_fifo_chunk_includes_pos (c, pos))
- return c;
+ c = f->end_chunk;
+ ASSERT (c->rb_index != RBTREE_TNIL_INDEX);
+ }
+ }
- return 0;
+ if (svm_fifo_chunk_includes_pos (c, start_pos))
+ f->ooo_enq = c;
+
+ if (svm_fifo_chunk_includes_pos (c, end_pos) && ref_pos < end_pos)
+ return;
+
+ do
+ {
+ c = c->next;
+ if (c->rb_index != RBTREE_TNIL_INDEX)
+ break;
+
+ c->rb_index = rb_tree_add2 (rt, c->start_byte, pointer_to_uword (c));
+
+ if (svm_fifo_chunk_includes_pos (c, start_pos))
+ f->ooo_enq = c;
+
+ }
+ while (!svm_fifo_chunk_includes_pos (c, end_pos));
+}
+
+static void
+svm_fifo_update_ooo_deq (svm_fifo_t * f, u32 ref_pos, u32 start_pos,
+ u32 end_pos)
+{
+ rb_tree_t *rt = &f->ooo_deq_lookup;
+ rb_node_t *cur;
+ svm_fifo_chunk_t *c;
+
+ if (svm_fifo_chunk_includes_pos (f->ooo_deq, start_pos)
+ && svm_fifo_chunk_includes_pos (f->ooo_deq, end_pos)
+ && ref_pos < start_pos)
+ return;
+
+ if (rt->root == RBTREE_TNIL_INDEX)
+ {
+ c = f->head_chunk;
+ c->rb_index = rb_tree_add2 (rt, c->start_byte, pointer_to_uword (c));
+ }
+ else
+ {
+ cur = svm_fifo_find_node_rbtree (rt, start_pos);
+ c = uword_to_pointer (cur->opaque, svm_fifo_chunk_t *);
+ if (ref_pos > start_pos && c->start_byte > start_pos)
+ {
+ c = f->end_chunk;
+ ASSERT (c->rb_index != RBTREE_TNIL_INDEX);
}
}
- if (!rb_node_is_tnil (rt, cur))
- return uword_to_pointer (cur->opaque, svm_fifo_chunk_t *);
- return 0;
+ if (svm_fifo_chunk_includes_pos (c, start_pos))
+ f->ooo_deq = c;
+
+ if (svm_fifo_chunk_includes_pos (c, end_pos) && ref_pos < end_pos)
+ return;
+
+ do
+ {
+ c = c->next;
+ if (c->rb_index != RBTREE_TNIL_INDEX)
+ break;
+
+ c->rb_index = rb_tree_add2 (rt, c->start_byte, pointer_to_uword (c));
+
+ if (svm_fifo_chunk_includes_pos (c, start_pos))
+ f->ooo_deq = c;
+
+ }
+ while (!svm_fifo_chunk_includes_pos (c, end_pos));
+}
+
+void
+svm_fifo_ooo_deq_track (svm_fifo_t * f, u32 start_pos, u32 end_pos)
+{
+ rb_tree_t *rt = &f->ooo_deq_lookup;
+ svm_fifo_chunk_t *c;
+
+ if (svm_fifo_chunk_includes_pos (f->ooo_deq, end_pos)
+ && start_pos < end_pos)
+ return;
+
+ c = f->ooo_deq->next;
+ do
+ {
+ ASSERT (c->rb_index == RBTREE_TNIL_INDEX);
+ rb_tree_add2 (rt, c->start_byte, pointer_to_uword (c));
+
+ c = c->next;
+ }
+ while (!svm_fifo_chunk_includes_pos (c, end_pos));
+}
+
+static svm_fifo_chunk_t *
+svm_fifo_lookup_clear_chunks (svm_fifo_t * f, rb_tree_t * rt,
+ svm_fifo_chunk_t * start, u32 start_pos,
+ u32 end_pos)
+{
+ svm_fifo_chunk_t *c;
+ rb_node_t *n;
+
+ /* Nothing to do if still in the same chunk and not wrapped */
+ if (svm_fifo_chunk_includes_pos (start, end_pos) && start_pos < end_pos)
+ return start;
+
+ c = start;
+ do
+ {
+ if (c->rb_index == RBTREE_TNIL_INDEX)
+ {
+ c = c->next;
+ continue;
+ }
+
+ n = rb_node (rt, c->rb_index);
+ rb_tree_del_node (rt, n);
+ c->rb_index = RBTREE_TNIL_INDEX;
+ c = c->next;
+ }
+ while (!svm_fifo_chunk_includes_pos (c, end_pos));
+
+ return c;
}
static inline void
@@ -585,8 +765,8 @@ svm_fifo_add_chunk (svm_fifo_t * f, svm_fifo_chunk_t * c)
if (!(f->flags & SVM_FIFO_F_MULTI_CHUNK))
{
ASSERT (f->start_chunk->next == f->start_chunk);
- rb_tree_init (&f->chunk_lookup);
- rb_tree_add2 (&f->chunk_lookup, 0, pointer_to_uword (f->start_chunk));
+ rb_tree_init (&f->ooo_enq_lookup);
+ rb_tree_init (&f->ooo_deq_lookup);
f->flags |= SVM_FIFO_F_MULTI_CHUNK;
}
@@ -608,8 +788,7 @@ svm_fifo_add_chunk (svm_fifo_t * f, svm_fifo_chunk_t * c)
while (cur)
{
cur->start_byte = prev->start_byte + prev->length;
- rb_tree_add2 (&f->chunk_lookup, cur->start_byte,
- pointer_to_uword (cur));
+ cur->rb_index = RBTREE_TNIL_INDEX;
prev = cur;
cur = cur->next;
}
@@ -637,7 +816,12 @@ svm_fifo_add_chunk (svm_fifo_t * f, svm_fifo_chunk_t * c)
while (cur != f->start_chunk)
{
/* remove any existing rb_tree entry */
- rb_tree_del (&f->chunk_lookup, cur->start_byte);
+ if (cur->rb_index != RBTREE_TNIL_INDEX)
+ {
+ rb_tree_del (&f->ooo_enq_lookup, cur->start_byte);
+ rb_tree_del (&f->ooo_deq_lookup, cur->start_byte);
+ }
+ cur->rb_index = RBTREE_TNIL_INDEX;
cur = cur->next;
}
@@ -647,8 +831,7 @@ svm_fifo_add_chunk (svm_fifo_t * f, svm_fifo_chunk_t * c)
{
add_bytes += c->length;
c->start_byte = prev->start_byte + prev->length;
- rb_tree_add2 (&f->chunk_lookup, c->start_byte,
- pointer_to_uword (c));
+ cur->rb_index = RBTREE_TNIL_INDEX;
prev = c;
c = c->next;
@@ -660,8 +843,6 @@ svm_fifo_add_chunk (svm_fifo_t * f, svm_fifo_chunk_t * c)
while (cur != f->start_chunk)
{
cur->start_byte = prev->start_byte + prev->length;
- rb_tree_add2 (&f->chunk_lookup, cur->start_byte,
- pointer_to_uword (cur));
prev = cur;
cur = cur->next;
}
@@ -694,8 +875,7 @@ svm_fifo_add_chunk (svm_fifo_t * f, svm_fifo_chunk_t * c)
while (cur)
{
cur->start_byte = prev->start_byte + prev->length;
- rb_tree_add2 (&f->chunk_lookup, cur->start_byte,
- pointer_to_uword (cur));
+ cur->rb_index = RBTREE_TNIL_INDEX;
prev = cur;
cur = cur->next;
}
@@ -723,7 +903,11 @@ svm_fifo_collect_chunks (svm_fifo_t * f)
cur = list;
while (cur)
{
- rb_tree_del (&f->chunk_lookup, cur->start_byte);
+ if (cur->rb_index != RBTREE_TNIL_INDEX)
+ {
+ rb_tree_del (&f->ooo_enq_lookup, cur->start_byte);
+ rb_tree_del (&f->ooo_deq_lookup, cur->start_byte);
+ }
cur = cur->next;
}
@@ -846,7 +1030,8 @@ svm_fifo_reduce_size (svm_fifo_t * f, u32 len, u8 try_shrink)
void
svm_fifo_free_chunk_lookup (svm_fifo_t * f)
{
- rb_tree_free_nodes (&f->chunk_lookup);
+ rb_tree_free_nodes (&f->ooo_enq_lookup);
+ rb_tree_free_nodes (&f->ooo_deq_lookup);
}
void
@@ -910,8 +1095,10 @@ svm_fifo_enqueue (svm_fifo_t * f, u32 len, const u8 * src)
if (PREDICT_FALSE (f->ooos_list_head != OOO_SEGMENT_INVALID_INDEX))
{
len += ooo_segment_try_collect (f, len, &tail);
- if (!svm_fifo_chunk_includes_pos (f->tail_chunk, tail))
- f->tail_chunk = svm_fifo_find_chunk (f, tail);
+ if (f->flags & SVM_FIFO_F_MULTI_CHUNK)
+ f->tail_chunk = svm_fifo_lookup_clear_chunks (f, &f->ooo_enq_lookup,
+ f->tail_chunk, f->tail,
+ tail);
}
/* store-rel: producer owned index (paired with load-acq in consumer) */
@@ -949,8 +1136,9 @@ svm_fifo_enqueue_with_offset (svm_fifo_t * f, u32 offset, u32 len, u8 * src)
ooo_segment_add (f, offset, head, tail, len);
tail_idx = (tail + offset) % f->size;
- if (!svm_fifo_chunk_includes_pos (f->ooo_enq, tail_idx))
- f->ooo_enq = svm_fifo_find_chunk (f, tail_idx);
+ if (f->flags & SVM_FIFO_F_MULTI_CHUNK)
+ svm_fifo_update_ooo_enq (f, f->tail, tail_idx,
+ (tail_idx + len) % f->size);
svm_fifo_copy_to_chunk (f, f->ooo_enq, tail_idx, src, len, &f->ooo_enq);
@@ -970,8 +1158,10 @@ svm_fifo_enqueue_nocopy (svm_fifo_t * f, u32 len)
tail = f->tail;
tail = (tail + len) % f->size;
- if (!svm_fifo_chunk_includes_pos (f->tail_chunk, tail))
- f->tail_chunk = svm_fifo_find_chunk (f, tail);
+ if (f->flags & SVM_FIFO_F_MULTI_CHUNK)
+ f->tail_chunk = svm_fifo_lookup_clear_chunks (f, &f->ooo_enq_lookup,
+ f->tail_chunk, f->tail,
+ tail);
/* store-rel: producer owned index (paired with load-acq in consumer) */
clib_atomic_store_rel_n (&f->tail, tail);
@@ -1018,8 +1208,9 @@ svm_fifo_peek (svm_fifo_t * f, u32 offset, u32 len, u8 * dst)
len = clib_min (cursize - offset, len);
head_idx = (head + offset) % f->size;
- if (!svm_fifo_chunk_includes_pos (f->ooo_deq, head_idx))
- f->ooo_deq = svm_fifo_find_chunk (f, head_idx);
+
+ if (f->flags & SVM_FIFO_F_MULTI_CHUNK)
+ svm_fifo_update_ooo_deq (f, head, head_idx, (head_idx + len) % f->size);
svm_fifo_copy_from_chunk (f, f->ooo_deq, head_idx, dst, len, &f->ooo_deq);
return len;
@@ -1045,8 +1236,10 @@ svm_fifo_dequeue_drop (svm_fifo_t * f, u32 len)
/* move head */
head = (head + total_drop_bytes) % f->size;
- if (!svm_fifo_chunk_includes_pos (f->head_chunk, head))
- f->head_chunk = svm_fifo_find_chunk (f, head);
+ if (f->flags & SVM_FIFO_F_MULTI_CHUNK)
+ f->head_chunk = svm_fifo_lookup_clear_chunks (f, &f->ooo_deq_lookup,
+ f->head_chunk, f->head,
+ head);
if (PREDICT_FALSE (f->flags & SVM_FIFO_F_GROW))
svm_fifo_try_grow (f, head);
@@ -1063,8 +1256,10 @@ svm_fifo_dequeue_drop_all (svm_fifo_t * f)
/* consumer foreign index */
u32 tail = clib_atomic_load_acq_n (&f->tail);
- if (!svm_fifo_chunk_includes_pos (f->head_chunk, tail))
- f->head_chunk = svm_fifo_find_chunk (f, tail);
+ if (f->flags & SVM_FIFO_F_MULTI_CHUNK)
+ f->head_chunk = svm_fifo_lookup_clear_chunks (f, &f->ooo_deq_lookup,
+ f->head_chunk, tail,
+ tail - 1);
if (PREDICT_FALSE (f->flags & SVM_FIFO_F_GROW))
svm_fifo_try_grow (f, tail);
@@ -1220,6 +1415,35 @@ svm_fifo_is_sane (svm_fifo_t * f)
return 0;
if (prev && (prev->start_byte + prev->length != c->start_byte))
return 0;
+
+ if (c->rb_index != RBTREE_TNIL_INDEX)
+ {
+ u8 found = 0;
+
+ tmp = svm_fifo_find_chunk_rbtree (&f->ooo_enq_lookup,
+ c->start_byte);
+ if (tmp)
+ {
+ found = 1;
+ if (tmp != c)
+ return 0;
+ }
+
+ tmp = svm_fifo_find_chunk_rbtree (&f->ooo_deq_lookup,
+ c->start_byte);
+ if (tmp)
+ {
+ if (found)
+ return 0;
+
+ found = 1;
+ if (tmp != c)
+ return 0;
+ }
+ if (!found)
+ return 0;
+ }
+
size += c->length;
prev = c;
c = c->next;
diff --git a/src/svm/svm_fifo.h b/src/svm/svm_fifo.h
index 0b7c70790c0..2b6e8542cdf 100644
--- a/src/svm/svm_fifo.h
+++ b/src/svm/svm_fifo.h
@@ -60,6 +60,7 @@ typedef struct svm_fifo_chunk_
u32 start_byte; /**< chunk start byte */
u32 length; /**< length of chunk in bytes */
struct svm_fifo_chunk_ *next; /**< pointer to next chunk in linked-lists */
+ rb_node_index_t rb_index; /**< node index if chunk in rbtree */
u8 data[0]; /**< start of chunk data */
} svm_fifo_chunk_t;
@@ -80,8 +81,8 @@ typedef struct _svm_fifo
u32 nitems; /**< usable size (size-1) */
svm_fifo_chunk_t *start_chunk;/**< first chunk in fifo chunk list */
svm_fifo_chunk_t *end_chunk; /**< end chunk in fifo chunk list */
- svm_fifo_chunk_t *new_chunks; /**< chunks yet to be added to list */
- rb_tree_t chunk_lookup; /**< rbtree for chunk lookup */
+ rb_tree_t ooo_enq_lookup; /**< rbtree for ooo enq chunk lookup */
+ rb_tree_t ooo_deq_lookup; /**< rbtree for ooo deq chunk lookup */
u8 flags; /**< fifo flags */
u8 slice_index; /**< segment slice for fifo */
@@ -96,6 +97,7 @@ typedef struct _svm_fifo
u32 segment_index; /**< segment index in segment manager */
struct _svm_fifo *next; /**< next in freelist/active chain */
struct _svm_fifo *prev; /**< prev in active chain */
+ svm_fifo_chunk_t *new_chunks; /**< chunks yet to be added to list */
u32 size_decrement; /**< bytes to remove from fifo */
CLIB_CACHE_LINE_ALIGN_MARK (consumer);
diff --git a/src/vppinfra/rbtree.h b/src/vppinfra/rbtree.h
index 65580584b6d..dde2fbfb836 100644
--- a/src/vppinfra/rbtree.h
+++ b/src/vppinfra/rbtree.h
@@ -53,6 +53,7 @@ rb_node_index_t rb_tree_add2 (rb_tree_t * rt, u32 key, uword opaque);
rb_node_index_t rb_tree_add_custom (rb_tree_t * rt, u32 key, uword opaque,
rb_tree_lt_fn ltfn);
void rb_tree_del (rb_tree_t * rt, u32 key);
+void rb_tree_del_node (rb_tree_t * rt, rb_node_t * z);
void rb_tree_del_custom (rb_tree_t * rt, u32 key, rb_tree_lt_fn ltfn);
void rb_tree_free_nodes (rb_tree_t * rt);
u32 rb_tree_n_nodes (rb_tree_t * rt);