f->ooos_list_head = OOO_SEGMENT_INVALID_INDEX;
f->segment_index = SVM_FIFO_INVALID_INDEX;
f->refcnt = 1;
- f->flags = 0;
+ f->head = f->tail = f->flags = 0;
f->head_chunk = f->tail_chunk = f->ooo_enq = f->ooo_deq = f->start_chunk;
}
return;
f->flags |= SVM_FIFO_F_MULTI_CHUNK;
- rb_tree_init (&f->chunk_lookup);
- rb_tree_add2 (&f->chunk_lookup, 0, pointer_to_uword (f->start_chunk));
+ rb_tree_init (&f->ooo_enq_lookup);
+ rb_tree_init (&f->ooo_deq_lookup);
f->start_chunk->start_byte = 0;
prev = f->start_chunk;
while (c != f->start_chunk)
{
c->start_byte = prev->start_byte + prev->length;
- rb_tree_add2 (&f->chunk_lookup, c->start_byte, pointer_to_uword (c));
prev = c;
c = c->next;
}
c->next = c;
c->start_byte = 0;
c->length = data_size_in_bytes;
+ c->rb_index = RBTREE_TNIL_INDEX;
f->start_chunk = f->end_chunk = c;
svm_fifo_init (f, data_size_in_bytes);
return (pos >= c->start_byte && pos < c->start_byte + c->length);
}
+/**
+ * Find lookup-tree node covering a byte position.
+ *
+ * Walks @a rt and returns the node whose key is the largest key less than
+ * or equal to @a pos, i.e., the candidate chunk whose start byte precedes
+ * the position. Returns 0 if the tree is empty or no such node exists.
+ * NOTE(review): caller must still verify the returned chunk actually
+ * includes @a pos (see svm_fifo_find_chunk_rbtree).
+ */
+static rb_node_t *
+svm_fifo_find_node_rbtree (rb_tree_t * rt, u32 pos)
+{
+ rb_node_t *cur, *prev;
+
+ cur = rb_node (rt, rt->root);
+ if (PREDICT_FALSE (rb_node_is_tnil (rt, cur)))
+ return 0;
+
+ while (pos != cur->key)
+ {
+ prev = cur;
+ if (pos < cur->key)
+ {
+ cur = rb_node_left (rt, cur);
+ if (rb_node_is_tnil (rt, cur))
+ {
+ /* Hit tnil as a left child: the in-order predecessor holds the
+ * largest key smaller than pos */
+ cur = rb_tree_predecessor (rt, prev);
+ break;
+ }
+ }
+ else
+ {
+ cur = rb_node_right (rt, cur);
+ if (rb_node_is_tnil (rt, cur))
+ {
+ /* Hit tnil as a right child: prev is the closest key below pos */
+ cur = prev;
+ break;
+ }
+ }
+ }
+
+ if (rb_node_is_tnil (rt, cur))
+ return 0;
+
+ return cur;
+}
+
+/**
+ * Find chunk for byte position using a lookup tree.
+ *
+ * Resolves the candidate node via svm_fifo_find_node_rbtree and returns
+ * the chunk stored in its opaque field only if that chunk actually
+ * includes @a pos; otherwise returns 0.
+ */
+static svm_fifo_chunk_t *
+svm_fifo_find_chunk_rbtree (rb_tree_t * rt, u32 pos)
+{
+ svm_fifo_chunk_t *c;
+ rb_node_t *n;
+
+ n = svm_fifo_find_node_rbtree (rt, pos);
+ if (!n)
+ return 0;
+ c = uword_to_pointer (n->opaque, svm_fifo_chunk_t *);
+ if (svm_fifo_chunk_includes_pos (c, pos))
+ return c;
+
+ return 0;
+}
+
/**
* Find chunk for given byte position
*
static svm_fifo_chunk_t *
svm_fifo_find_chunk (svm_fifo_t * f, u32 pos)
{
- rb_tree_t *rt = &f->chunk_lookup;
- rb_node_t *cur, *prev;
svm_fifo_chunk_t *c;
- cur = rb_node (rt, rt->root);
- while (pos != cur->key)
+ /* Linear scan of the circular chunk list; replaces the old rbtree
+ * lookup. Returns 0 if no chunk covers pos. */
+ c = f->start_chunk;
+ do
{
- prev = cur;
- if (pos < cur->key)
- cur = rb_node_left (rt, cur);
- else
- cur = rb_node_right (rt, cur);
+ if (svm_fifo_chunk_includes_pos (c, pos))
+ return c;
+ c = c->next;
+ }
+ while (c != f->start_chunk);
+
+ return 0;
+}
+
+/**
+ * Update ooo enqueue chunk and lookup tree for segment [start_pos, end_pos].
+ *
+ * Ensures every chunk the segment touches is registered in the ooo_enq
+ * lookup tree (rb_index recorded on the chunk) and points f->ooo_enq at
+ * the chunk containing start_pos.
+ * NOTE(review): ref_pos appears to be the producer tail used to detect
+ * fifo wrap in the position comparisons below — confirm against callers.
+ */
+static void
+svm_fifo_update_ooo_enq (svm_fifo_t * f, u32 ref_pos, u32 start_pos,
+ u32 end_pos)
+{
+ rb_tree_t *rt = &f->ooo_enq_lookup;
+ svm_fifo_chunk_t *c;
+ rb_node_t *cur;
+
+ /* Fast path: segment fits in the cached chunk and does not wrap */
+ if (svm_fifo_chunk_includes_pos (f->ooo_enq, start_pos)
+ && svm_fifo_chunk_includes_pos (f->ooo_enq, end_pos)
+ && ref_pos < start_pos)
+ return;
- if (rb_node_is_tnil (rt, cur))
+ if (rt->root == RBTREE_TNIL_INDEX)
+ {
+ /* Empty tree: seed it with the current tail chunk */
+ c = f->tail_chunk;
+ c->rb_index = rb_tree_add2 (rt, c->start_byte, pointer_to_uword (c));
+ }
+ else
+ {
+ cur = svm_fifo_find_node_rbtree (rt, start_pos);
+ c = uword_to_pointer (cur->opaque, svm_fifo_chunk_t *);
+ if (ref_pos > start_pos && c->start_byte > start_pos)
{
- /* Hit tnil as a left child. Find predecessor */
- if (pos < prev->key)
- {
- cur = rb_tree_predecessor (rt, prev);
- if (rb_node_is_tnil (rt, cur))
- return 0;
- c = uword_to_pointer (cur->opaque, svm_fifo_chunk_t *);
- if (svm_fifo_chunk_includes_pos (c, pos))
- return c;
- return 0;
- }
- /* Hit tnil as a right child. Check if this is the one */
- c = uword_to_pointer (prev->opaque, svm_fifo_chunk_t *);
- if (svm_fifo_chunk_includes_pos (c, pos))
- return c;
+ /* Segment wrapped past fifo end: restart from the last chunk,
+ * which must already be tracked */
+ c = f->end_chunk;
+ ASSERT (c->rb_index != RBTREE_TNIL_INDEX);
+ }
+ }
- return 0;
+ if (svm_fifo_chunk_includes_pos (c, start_pos))
+ f->ooo_enq = c;
+
+ if (svm_fifo_chunk_includes_pos (c, end_pos) && ref_pos < end_pos)
+ return;
+
+ /* Register every chunk up to the one covering end_pos; stop early if a
+ * chunk is already in the tree */
+ do
+ {
+ c = c->next;
+ if (c->rb_index != RBTREE_TNIL_INDEX)
+ break;
+
+ c->rb_index = rb_tree_add2 (rt, c->start_byte, pointer_to_uword (c));
+
+ if (svm_fifo_chunk_includes_pos (c, start_pos))
+ f->ooo_enq = c;
+
+ }
+ while (!svm_fifo_chunk_includes_pos (c, end_pos));
+}
+
+/**
+ * Update ooo dequeue chunk and lookup tree for segment [start_pos, end_pos].
+ *
+ * Mirror of svm_fifo_update_ooo_enq for the consumer side: registers the
+ * chunks touched by the segment in the ooo_deq lookup tree and points
+ * f->ooo_deq at the chunk containing start_pos.
+ * NOTE(review): ref_pos appears to be the consumer head used to detect
+ * fifo wrap — confirm against callers.
+ */
+static void
+svm_fifo_update_ooo_deq (svm_fifo_t * f, u32 ref_pos, u32 start_pos,
+ u32 end_pos)
+{
+ rb_tree_t *rt = &f->ooo_deq_lookup;
+ rb_node_t *cur;
+ svm_fifo_chunk_t *c;
+
+ /* Fast path: segment fits in the cached chunk and does not wrap */
+ if (svm_fifo_chunk_includes_pos (f->ooo_deq, start_pos)
+ && svm_fifo_chunk_includes_pos (f->ooo_deq, end_pos)
+ && ref_pos < start_pos)
+ return;
+
+ if (rt->root == RBTREE_TNIL_INDEX)
+ {
+ /* Empty tree: seed it with the current head chunk */
+ c = f->head_chunk;
+ c->rb_index = rb_tree_add2 (rt, c->start_byte, pointer_to_uword (c));
+ }
+ else
+ {
+ cur = svm_fifo_find_node_rbtree (rt, start_pos);
+ c = uword_to_pointer (cur->opaque, svm_fifo_chunk_t *);
+ if (ref_pos > start_pos && c->start_byte > start_pos)
+ {
+ /* Segment wrapped past fifo end: restart from the last chunk */
+ c = f->end_chunk;
+ ASSERT (c->rb_index != RBTREE_TNIL_INDEX);
}
}
- if (!rb_node_is_tnil (rt, cur))
- return uword_to_pointer (cur->opaque, svm_fifo_chunk_t *);
- return 0;
+ if (svm_fifo_chunk_includes_pos (c, start_pos))
+ f->ooo_deq = c;
+
+ if (svm_fifo_chunk_includes_pos (c, end_pos) && ref_pos < end_pos)
+ return;
+
+ /* Register every chunk up to the one covering end_pos; stop early if a
+ * chunk is already in the tree */
+ do
+ {
+ c = c->next;
+ if (c->rb_index != RBTREE_TNIL_INDEX)
+ break;
+
+ c->rb_index = rb_tree_add2 (rt, c->start_byte, pointer_to_uword (c));
+
+ if (svm_fifo_chunk_includes_pos (c, start_pos))
+ f->ooo_deq = c;
+
+ }
+ while (!svm_fifo_chunk_includes_pos (c, end_pos));
+}
+
+/**
+ * Pre-register dequeue chunks for segment [start_pos, end_pos] in the
+ * ooo_deq lookup tree, starting from the chunk after f->ooo_deq.
+ *
+ * No-op if end_pos already falls in the current ooo_deq chunk and the
+ * segment does not wrap.
+ */
+void
+svm_fifo_ooo_deq_track (svm_fifo_t * f, u32 start_pos, u32 end_pos)
+{
+ rb_tree_t *rt = &f->ooo_deq_lookup;
+ svm_fifo_chunk_t *c;
+
+ if (svm_fifo_chunk_includes_pos (f->ooo_deq, end_pos)
+ && start_pos < end_pos)
+ return;
+
+ c = f->ooo_deq->next;
+ do
+ {
+ ASSERT (c->rb_index == RBTREE_TNIL_INDEX);
+ /* Record the returned tree index on the chunk, as every other add
+ * site does; otherwise the rb_index-based cleanup and lookup paths
+ * treat the chunk as untracked and the tree node leaks */
+ c->rb_index = rb_tree_add2 (rt, c->start_byte, pointer_to_uword (c));
+
+ c = c->next;
+ }
+ while (!svm_fifo_chunk_includes_pos (c, end_pos));
+}
+
+/**
+ * Advance over segment [start_pos, end_pos], removing traversed chunks
+ * from the given lookup tree.
+ *
+ * Returns the chunk that includes @a end_pos, i.e., the new cached chunk
+ * for the caller's head/tail pointer.
+ */
+static svm_fifo_chunk_t *
+svm_fifo_lookup_clear_chunks (svm_fifo_t * f, rb_tree_t * rt,
+ svm_fifo_chunk_t * start, u32 start_pos,
+ u32 end_pos)
+{
+ svm_fifo_chunk_t *c;
+ rb_node_t *n;
+
+ /* Nothing to do if still in the same chunk and not wrapped */
+ if (svm_fifo_chunk_includes_pos (start, end_pos) && start_pos < end_pos)
+ return start;
+
+ c = start;
+ do
+ {
+ /* Untracked chunks are simply skipped */
+ if (c->rb_index == RBTREE_TNIL_INDEX)
+ {
+ c = c->next;
+ continue;
+ }
+
+ /* Drop the chunk's tree node and mark it untracked */
+ n = rb_node (rt, c->rb_index);
+ rb_tree_del_node (rt, n);
+ c->rb_index = RBTREE_TNIL_INDEX;
+ c = c->next;
+ }
+ while (!svm_fifo_chunk_includes_pos (c, end_pos));
+
+ return c;
}
static inline void
if (!(f->flags & SVM_FIFO_F_MULTI_CHUNK))
{
ASSERT (f->start_chunk->next == f->start_chunk);
- rb_tree_init (&f->chunk_lookup);
- rb_tree_add2 (&f->chunk_lookup, 0, pointer_to_uword (f->start_chunk));
+ rb_tree_init (&f->ooo_enq_lookup);
+ rb_tree_init (&f->ooo_deq_lookup);
f->flags |= SVM_FIFO_F_MULTI_CHUNK;
}
+ /* If fifo is not wrapped, update the size now */
+ if (!svm_fifo_is_wrapped (f))
+ {
+ /* Initialize chunks and add to lookup rbtree */
+ cur = c;
+ if (f->new_chunks)
+ {
+ prev = f->new_chunks;
+ while (prev->next)
+ prev = prev->next;
+ prev->next = c;
+ }
+ else
+ prev = f->end_chunk;
+
+ while (cur)
+ {
+ cur->start_byte = prev->start_byte + prev->length;
+ cur->rb_index = RBTREE_TNIL_INDEX;
+ prev = cur;
+ cur = cur->next;
+ }
+
+ ASSERT (!f->new_chunks);
+ svm_fifo_grow (f, c);
+ return;
+ }
+
+ /* Wrapped */
+ if (f->flags & SVM_FIFO_F_SINGLE_THREAD_OWNED)
+ {
+ ASSERT (f->master_thread_index == os_get_thread_index ());
+
+ if (!f->new_chunks && f->head_chunk != f->tail_chunk)
+ {
+ u32 head = 0, tail = 0;
+ f_load_head_tail_cons (f, &head, &tail);
+
+ svm_fifo_chunk_t *tmp = f->tail_chunk->next;
+
+ prev = f->tail_chunk;
+ u32 add_bytes = 0;
+ cur = prev->next;
+ while (cur != f->start_chunk)
+ {
+ /* remove any existing rb_tree entry */
+ if (cur->rb_index != RBTREE_TNIL_INDEX)
+ {
+ rb_tree_del (&f->ooo_enq_lookup, cur->start_byte);
+ rb_tree_del (&f->ooo_deq_lookup, cur->start_byte);
+ }
+ cur->rb_index = RBTREE_TNIL_INDEX;
+ cur = cur->next;
+ }
+
+ /* insert new chunk after the tail_chunk */
+ f->tail_chunk->next = c;
+ while (c)
+ {
+ add_bytes += c->length;
+ c->start_byte = prev->start_byte + prev->length;
+ cur->rb_index = RBTREE_TNIL_INDEX;
+
+ prev = c;
+ c = c->next;
+ }
+ prev->next = tmp;
+
+ /* shift existing chunks along */
+ cur = tmp;
+ while (cur != f->start_chunk)
+ {
+ cur->start_byte = prev->start_byte + prev->length;
+ prev = cur;
+ cur = cur->next;
+ }
+
+ f->size += add_bytes;
+ f->nitems = f->size - 1;
+ f->new_chunks = 0;
+ head += add_bytes;
+
+ clib_atomic_store_rel_n (&f->head, head);
+ ASSERT (svm_fifo_is_sane (f));
+
+ return;
+ }
+ }
+
+ /* Wrapped, and optimization of single-thread-owned fifo cannot be applied */
/* Initialize chunks and add to lookup rbtree */
cur = c;
if (f->new_chunks)
while (cur)
{
cur->start_byte = prev->start_byte + prev->length;
- rb_tree_add2 (&f->chunk_lookup, cur->start_byte,
- pointer_to_uword (cur));
+ cur->rb_index = RBTREE_TNIL_INDEX;
prev = cur;
cur = cur->next;
}
- /* If fifo is not wrapped, update the size now */
- if (!svm_fifo_is_wrapped (f))
- {
- ASSERT (!f->new_chunks);
- svm_fifo_grow (f, c);
- return;
- }
-
/* Postpone size update */
if (!f->new_chunks)
{
cur = list;
while (cur)
{
- rb_tree_del (&f->chunk_lookup, cur->start_byte);
+ if (cur->rb_index != RBTREE_TNIL_INDEX)
+ {
+ rb_tree_del (&f->ooo_enq_lookup, cur->start_byte);
+ rb_tree_del (&f->ooo_deq_lookup, cur->start_byte);
+ }
cur = cur->next;
}
void
svm_fifo_free_chunk_lookup (svm_fifo_t * f)
{
- rb_tree_free_nodes (&f->chunk_lookup);
+ /* Release the nodes of both ooo lookup trees; the chunks themselves
+ * are owned elsewhere and are not freed here */
+ rb_tree_free_nodes (&f->ooo_enq_lookup);
+ rb_tree_free_nodes (&f->ooo_deq_lookup);
}
void
if (PREDICT_FALSE (f->ooos_list_head != OOO_SEGMENT_INVALID_INDEX))
{
len += ooo_segment_try_collect (f, len, &tail);
- if (!svm_fifo_chunk_includes_pos (f->tail_chunk, tail))
- f->tail_chunk = svm_fifo_find_chunk (f, tail);
+ if (f->flags & SVM_FIFO_F_MULTI_CHUNK)
+ f->tail_chunk = svm_fifo_lookup_clear_chunks (f, &f->ooo_enq_lookup,
+ f->tail_chunk, f->tail,
+ tail);
}
/* store-rel: producer owned index (paired with load-acq in consumer) */
ooo_segment_add (f, offset, head, tail, len);
tail_idx = (tail + offset) % f->size;
- if (!svm_fifo_chunk_includes_pos (f->ooo_enq, tail_idx))
- f->ooo_enq = svm_fifo_find_chunk (f, tail_idx);
+ if (f->flags & SVM_FIFO_F_MULTI_CHUNK)
+ svm_fifo_update_ooo_enq (f, f->tail, tail_idx,
+ (tail_idx + len) % f->size);
svm_fifo_copy_to_chunk (f, f->ooo_enq, tail_idx, src, len, &f->ooo_enq);
tail = f->tail;
tail = (tail + len) % f->size;
- if (!svm_fifo_chunk_includes_pos (f->tail_chunk, tail))
- f->tail_chunk = svm_fifo_find_chunk (f, tail);
+ if (f->flags & SVM_FIFO_F_MULTI_CHUNK)
+ f->tail_chunk = svm_fifo_lookup_clear_chunks (f, &f->ooo_enq_lookup,
+ f->tail_chunk, f->tail,
+ tail);
/* store-rel: producer owned index (paired with load-acq in consumer) */
clib_atomic_store_rel_n (&f->tail, tail);
len = clib_min (cursize - offset, len);
head_idx = (head + offset) % f->size;
- if (!svm_fifo_chunk_includes_pos (f->ooo_deq, head_idx))
- f->ooo_deq = svm_fifo_find_chunk (f, head_idx);
+
+ if (f->flags & SVM_FIFO_F_MULTI_CHUNK)
+ svm_fifo_update_ooo_deq (f, head, head_idx, (head_idx + len) % f->size);
svm_fifo_copy_from_chunk (f, f->ooo_deq, head_idx, dst, len, &f->ooo_deq);
return len;
/* move head */
head = (head + total_drop_bytes) % f->size;
- if (!svm_fifo_chunk_includes_pos (f->head_chunk, head))
- f->head_chunk = svm_fifo_find_chunk (f, head);
+ if (f->flags & SVM_FIFO_F_MULTI_CHUNK)
+ f->head_chunk = svm_fifo_lookup_clear_chunks (f, &f->ooo_deq_lookup,
+ f->head_chunk, f->head,
+ head);
if (PREDICT_FALSE (f->flags & SVM_FIFO_F_GROW))
svm_fifo_try_grow (f, head);
/* consumer foreign index */
u32 tail = clib_atomic_load_acq_n (&f->tail);
- if (!svm_fifo_chunk_includes_pos (f->head_chunk, tail))
- f->head_chunk = svm_fifo_find_chunk (f, tail);
+ if (f->flags & SVM_FIFO_F_MULTI_CHUNK)
+ f->head_chunk = svm_fifo_lookup_clear_chunks (f, &f->ooo_deq_lookup,
+ f->head_chunk, tail,
+ tail - 1);
if (PREDICT_FALSE (f->flags & SVM_FIFO_F_GROW))
svm_fifo_try_grow (f, tail);
return 0;
if (prev && (prev->start_byte + prev->length != c->start_byte))
return 0;
+
+ if (c->rb_index != RBTREE_TNIL_INDEX)
+ {
+ u8 found = 0;
+
+ tmp = svm_fifo_find_chunk_rbtree (&f->ooo_enq_lookup,
+ c->start_byte);
+ if (tmp)
+ {
+ found = 1;
+ if (tmp != c)
+ return 0;
+ }
+
+ tmp = svm_fifo_find_chunk_rbtree (&f->ooo_deq_lookup,
+ c->start_byte);
+ if (tmp)
+ {
+ if (found)
+ return 0;
+
+ found = 1;
+ if (tmp != c)
+ return 0;
+ }
+ if (!found)
+ return 0;
+ }
+
size += c->length;
prev = c;
c = c->next;
return 1;
}
+/**
+ * Claim single-thread ownership of the fifo.
+ *
+ * Returns 0 on success (first claim, or repeated claim by the owning
+ * thread), 1 if the fifo is already owned by a different thread.
+ */
+u8
+svm_fifo_set_single_thread_owned (svm_fifo_t * f)
+{
+ if (!(f->flags & SVM_FIFO_F_SINGLE_THREAD_OWNED))
+ {
+ /* First claim: mark the fifo as single-thread owned */
+ f->flags |= SVM_FIFO_F_SINGLE_THREAD_OWNED;
+ return 0;
+ }
+
+ /* Repeated claim by the owner is a harmless no-op; any other thread
+ * must be refused */
+ if (f->master_thread_index == os_get_thread_index ())
+ return 0;
+
+ return 1;
+}
+
u8 *
format_ooo_segment (u8 * s, va_list * args)
{