+/**
+ * Return the last segment on the fifo's out-of-order segment list.
+ *
+ * Walks the singly-linked list from the head, so cost is linear in the
+ * number of ooo segments.
+ *
+ * @return last ooo segment, or 0 if the list is empty
+ */
+__clib_unused static ooo_segment_t *
+ooo_segment_last (svm_fifo_t * f)
+{
+ ooo_segment_t *s;
+
+ if (f->ooos_list_head == OOO_SEGMENT_INVALID_INDEX)
+ return 0;
+
+ s = svm_fifo_first_ooo_segment (f);
+ /* Follow next indices until the tail of the list */
+ while (s->next != OOO_SEGMENT_INVALID_INDEX)
+ s = pool_elt_at_index (f->ooo_segments, s->next);
+ return s;
+}
+
+/**
+ * Initialize an already-allocated fifo of the given size.
+ *
+ * Resets head/tail/flags, clears the ooo list head and chunk cursors,
+ * computes the minimum chunk-allocation granularity and assigns
+ * consecutive start_byte offsets to the chunks already linked from
+ * f->start_chunk.
+ */
+void
+svm_fifo_init (svm_fifo_t * f, u32 size)
+{
+ svm_fifo_chunk_t *c, *prev;
+ u32 min_alloc;
+
+ f->size = size;
+ f->ooos_list_head = OOO_SEGMENT_INVALID_INDEX;
+ f->segment_index = SVM_FIFO_INVALID_INDEX;
+ f->refcnt = 1;
+ f->head = f->tail = f->flags = 0;
+ f->head_chunk = f->tail_chunk = f->start_chunk;
+ f->ooo_deq = f->ooo_enq = 0;
+
+ /* Grow granularity: size/8 for fifos larger than 32KB, otherwise 4KB,
+ * and never more than 64KB per allocation */
+ min_alloc = size > 32 << 10 ? size >> 3 : 4096;
+ min_alloc = clib_min (min_alloc, 64 << 10);
+ f->min_alloc = min_alloc;
+
+ /*
+ * Initialize chunks
+ */
+ f->start_chunk->start_byte = 0;
+ prev = f->start_chunk;
+ c = prev->next;
+
+ /* Each chunk starts where the previous one ends */
+ while (c)
+ {
+ c->start_byte = prev->start_byte + prev->length;
+ prev = c;
+ c = c->next;
+ }
+}
+
+/**
+ * Initialize one of the fifo's out-of-order chunk lookup rbtrees.
+ *
+ * @param f fifo
+ * @param ooo_type 0 to initialize the enqueue lookup tree, any other
+ *                 value for the dequeue lookup tree. The selected tree
+ *                 must not already be initialized (asserted).
+ */
+void
+svm_fifo_init_ooo_lookup (svm_fifo_t * f, u8 ooo_type)
+{
+ if (ooo_type == 0)
+ {
+ ASSERT (!rb_tree_is_init (&f->ooo_enq_lookup));
+ rb_tree_init (&f->ooo_enq_lookup);
+ }
+ else
+ {
+ ASSERT (!rb_tree_is_init (&f->ooo_deq_lookup));
+ rb_tree_init (&f->ooo_deq_lookup);
+ }
+}
+
+/**
+ * Creates a fifo in the current heap. Fails vs blow up the process
+ *
+ * Allocates the fifo header and its first chunk. The chunk's backing
+ * store is rounded up to the next power of two, but the chunk's usable
+ * length is the requested size. On any allocation failure, everything
+ * already allocated is freed and 0 is returned.
+ */
+svm_fifo_t *
+svm_fifo_alloc (u32 data_size_in_bytes)
+{
+ u32 rounded_data_size;
+ svm_fifo_chunk_t *c;
+ svm_fifo_t *f;
+
+ f = clib_mem_alloc_aligned_or_null (sizeof (*f), CLIB_CACHE_LINE_BYTES);
+ if (f == 0)
+ return 0;
+
+ clib_memset (f, 0, sizeof (*f));
+
+ /* always round fifo data size to the next highest power-of-two */
+ rounded_data_size = (1 << (max_log2 (data_size_in_bytes)));
+ c = clib_mem_alloc_aligned_or_null (sizeof (*c) + rounded_data_size,
+ CLIB_CACHE_LINE_BYTES);
+ if (!c)
+ {
+ /* Don't leak the header if the chunk allocation failed */
+ clib_mem_free (f);
+ return 0;
+ }
+
+ clib_memset (c, 0, sizeof (*c));
+ c->start_byte = 0;
+ /* NOTE: usable length is the requested size, not the rounded
+ * allocation size */
+ c->length = data_size_in_bytes;
+ c->enq_rb_index = RBTREE_TNIL_INDEX;
+ c->deq_rb_index = RBTREE_TNIL_INDEX;
+ f->start_chunk = f->end_chunk = c;
+
+ return f;
+}
+
+/**
+ * Creates a fifo chunk in the current heap
+ *
+ * The requested size is rounded up to the next power of two and, unlike
+ * svm_fifo_alloc, the chunk's usable length is the full rounded size.
+ *
+ * @return new chunk, or 0 on allocation failure
+ */
+svm_fifo_chunk_t *
+svm_fifo_chunk_alloc (u32 size)
+{
+ svm_fifo_chunk_t *c;
+ u32 rounded_size;
+
+ /* round chunk size to the next highest power-of-two */
+ rounded_size = (1 << (max_log2 (size)));
+ c = clib_mem_alloc_aligned_or_null (sizeof (*c) + rounded_size,
+ CLIB_CACHE_LINE_BYTES);
+ if (c == 0)
+ return 0;
+
+ clib_memset (c, 0, sizeof (*c));
+ c->length = rounded_size;
+ return c;
+}
+
+/**
+ * Find chunk for given byte position
+ *
+ * Linear scan of the chunk list starting from the fifo's first chunk.
+ *
+ * @param f fifo
+ * @param pos normalized position in fifo
+ *
+ * @return chunk that includes given position or 0
+ */
+static svm_fifo_chunk_t *
+svm_fifo_find_chunk (svm_fifo_t * f, u32 pos)
+{
+ svm_fifo_chunk_t *c;
+
+ c = f->start_chunk;
+ while (c && !f_chunk_includes_pos (c, pos))
+ c = c->next;
+
+ return c;
+}
+
+/**
+ * Find chunk that includes @a pos, scanning forward from @a start.
+ *
+ * Same linear scan as svm_fifo_find_chunk but with a caller-supplied
+ * starting chunk, which must be non-null (asserted).
+ *
+ * @return chunk that includes given position or 0
+ */
+static svm_fifo_chunk_t *
+svm_fifo_find_next_chunk (svm_fifo_t * f, svm_fifo_chunk_t * start, u32 pos)
+{
+ svm_fifo_chunk_t *c;
+
+ ASSERT (start != 0);
+
+ c = start;
+ while (c && !f_chunk_includes_pos (c, pos))
+ c = c->next;
+
+ return c;
+}
+
+/**
+ * Number of bytes that can be read contiguously from the head chunk.
+ *
+ * Consumer-side helper: loads head/tail with consumer ordering and,
+ * if the cached head chunk is unset, repositions it with a linear
+ * search. Result is capped at the number of bytes pending in the fifo.
+ */
+u32
+svm_fifo_max_read_chunk (svm_fifo_t * f)
+{
+ u32 head, tail, end_chunk;
+
+ f_load_head_tail_cons (f, &head, &tail);
+ ASSERT (!f->head_chunk || f_chunk_includes_pos (f->head_chunk, head));
+
+ if (!f->head_chunk)
+ {
+ /* Cached head chunk invalidated; re-resolve it from the position */
+ f->head_chunk = svm_fifo_find_chunk (f, head);
+ if (PREDICT_FALSE (!f->head_chunk))
+ return 0;
+ }
+
+ end_chunk = f_chunk_end (f->head_chunk);
+
+ /* Stop at whichever comes first: end of chunk or the tail */
+ return f_pos_lt (end_chunk, tail) ? end_chunk - head : tail - head;
+}
+
+/**
+ * Number of bytes that can be written contiguously into the tail chunk.
+ *
+ * Producer-side helper: loads head/tail with producer ordering. Returns
+ * 0 if the cached tail chunk is unset (unlike the read-side helper, no
+ * re-resolution is attempted here).
+ */
+u32
+svm_fifo_max_write_chunk (svm_fifo_t * f)
+{
+ u32 head, tail;
+
+ f_load_head_tail_prod (f, &head, &tail);
+ ASSERT (!f->tail_chunk || f_chunk_includes_pos (f->tail_chunk, tail));
+
+ return f->tail_chunk ? f_chunk_end (f->tail_chunk) - tail : 0;
+}
+
+/**
+ * Find the rbtree node whose key is the closest lower (or equal) bound
+ * for @a pos, under the fifo's wrapping position order (f_pos_lt).
+ *
+ * If the search falls off a left child, the answer is the predecessor
+ * of the last node visited; if it falls off a right child, that last
+ * node itself is the bound.
+ *
+ * @return bounding node, or 0 if the tree is empty or no node precedes
+ *         @a pos
+ */
+static rb_node_t *
+f_find_node_rbtree (rb_tree_t * rt, u32 pos)
+{
+ rb_node_t *cur, *prev;
+
+ cur = rb_node (rt, rt->root);
+ if (PREDICT_FALSE (rb_node_is_tnil (rt, cur)))
+ return 0;
+
+ while (pos != cur->key)
+ {
+ prev = cur;
+ if (f_pos_lt (pos, cur->key))
+ {
+ cur = rb_node_left (rt, cur);
+ if (rb_node_is_tnil (rt, cur))
+ {
+ /* pos is below this subtree; closest lower key is the
+ * in-order predecessor */
+ cur = rb_tree_predecessor (rt, prev);
+ break;
+ }
+ }
+ else
+ {
+ cur = rb_node_right (rt, cur);
+ if (rb_node_is_tnil (rt, cur))
+ {
+ /* No larger key below; prev is the lower bound */
+ cur = prev;
+ break;
+ }
+ }
+ }
+
+ if (rb_node_is_tnil (rt, cur))
+ return 0;
+
+ return cur;
+}
+
+/**
+ * Look up the chunk that contains @a pos via the rbtree.
+ *
+ * Finds the bounding node for @a pos and checks that the chunk stored
+ * in its opaque field actually covers the position.
+ *
+ * @return chunk containing @a pos, or 0 if the tree is uninitialized,
+ *         empty, or the bounding chunk does not include the position
+ */
+static svm_fifo_chunk_t *
+f_find_chunk_rbtree (rb_tree_t * rt, u32 pos)
+{
+ svm_fifo_chunk_t *c;
+ rb_node_t *n;
+
+ if (!rb_tree_is_init (rt))
+ return 0;
+
+ n = f_find_node_rbtree (rt, pos);
+ if (!n)
+ return 0;
+ c = uword_to_pointer (n->opaque, svm_fifo_chunk_t *);
+ if (f_chunk_includes_pos (c, pos))
+ return c;
+
+ return 0;
+}
+
+/**
+ * Position the fifo's ooo enqueue chunk cursor (f->ooo_enq) for an
+ * out-of-order write covering [start_pos, end_pos].
+ *
+ * Chunks spanned by the segment are lazily inserted into the enqueue
+ * lookup rbtree (keyed by chunk start_byte, ordered by f_pos_lt) so
+ * later lookups avoid linear scans. Falls back to a linear search from
+ * the tail chunk if the tree is not initialized.
+ */
+static void
+f_update_ooo_enq (svm_fifo_t * f, u32 start_pos, u32 end_pos)
+{
+ rb_tree_t *rt = &f->ooo_enq_lookup;
+ svm_fifo_chunk_t *c;
+ rb_node_t *cur;
+
+ /* Use linear search if rbtree is not initialized */
+ if (PREDICT_FALSE (!rb_tree_is_init (rt)))
+ {
+ f->ooo_enq = svm_fifo_find_next_chunk (f, f->tail_chunk, start_pos);
+ return;
+ }
+
+ if (rt->root == RBTREE_TNIL_INDEX)
+ {
+ /* Empty tree: seed it with the tail chunk */
+ c = f->tail_chunk;
+ ASSERT (c->enq_rb_index == RBTREE_TNIL_INDEX);
+ c->enq_rb_index = rb_tree_add_custom (rt, c->start_byte,
+ pointer_to_uword (c), f_pos_lt);
+ }
+ else
+ {
+ /* Start from the tracked chunk bounding start_pos from below */
+ cur = f_find_node_rbtree (rt, start_pos);
+ c = uword_to_pointer (cur->opaque, svm_fifo_chunk_t *);
+ ASSERT (f_pos_leq (c->start_byte, start_pos));
+ }
+
+ if (f_chunk_includes_pos (c, start_pos))
+ f->ooo_enq = c;
+
+ if (f_chunk_includes_pos (c, end_pos))
+ return;
+
+ /* Walk forward, adding untracked chunks to the tree, until the
+ * chunk containing end_pos is reached or a tracked chunk / end of
+ * list is hit */
+ do
+ {
+ c = c->next;
+ if (!c || c->enq_rb_index != RBTREE_TNIL_INDEX)
+ break;
+
+ c->enq_rb_index = rb_tree_add_custom (rt, c->start_byte,
+ pointer_to_uword (c), f_pos_lt);
+
+ if (f_chunk_includes_pos (c, start_pos))
+ f->ooo_enq = c;
+ }
+ while (!f_chunk_includes_pos (c, end_pos));
+}
+
+/**
+ * Position the fifo's ooo dequeue chunk cursor (f->ooo_deq) for an
+ * out-of-order read covering [start_pos, end_pos].
+ *
+ * Mirror of f_update_ooo_enq for the dequeue side: chunks spanned by
+ * the range are lazily inserted into the dequeue lookup rbtree. Falls
+ * back to a linear search from the start chunk (not the tail chunk)
+ * when the tree is not initialized.
+ */
+static void
+f_update_ooo_deq (svm_fifo_t * f, u32 start_pos, u32 end_pos)
+{
+ rb_tree_t *rt = &f->ooo_deq_lookup;
+ rb_node_t *cur;
+ svm_fifo_chunk_t *c;
+
+ /* Use linear search if rbtree is not initialized */
+ if (PREDICT_FALSE (!rb_tree_is_init (rt)))
+ {
+ f->ooo_deq = svm_fifo_find_chunk (f, start_pos);
+ return;
+ }
+
+ if (rt->root == RBTREE_TNIL_INDEX)
+ {
+ /* Empty tree: seed it with the start chunk */
+ c = f->start_chunk;
+ ASSERT (c->deq_rb_index == RBTREE_TNIL_INDEX);
+ c->deq_rb_index = rb_tree_add_custom (rt, c->start_byte,
+ pointer_to_uword (c), f_pos_lt);
+ }
+ else
+ {
+ /* Start from the tracked chunk bounding start_pos from below */
+ cur = f_find_node_rbtree (rt, start_pos);
+ c = uword_to_pointer (cur->opaque, svm_fifo_chunk_t *);
+ ASSERT (f_pos_leq (c->start_byte, start_pos));
+ }
+
+ if (f_chunk_includes_pos (c, start_pos))
+ f->ooo_deq = c;
+
+ if (f_chunk_includes_pos (c, end_pos))
+ return;
+
+ /* Walk forward, adding untracked chunks to the tree, until the
+ * chunk containing end_pos is reached or a tracked chunk / end of
+ * list is hit */
+ do
+ {
+ c = c->next;
+ if (!c || c->deq_rb_index != RBTREE_TNIL_INDEX)
+ break;
+
+ c->deq_rb_index = rb_tree_add_custom (rt, c->start_byte,
+ pointer_to_uword (c), f_pos_lt);
+
+ if (f_chunk_includes_pos (c, start_pos))
+ f->ooo_deq = c;
+ }
+ while (!f_chunk_includes_pos (c, end_pos));
+}
+
+/**
+ * Remove enqueue-rbtree entries for chunks from @a start up to (but not
+ * including) the chunk that contains @a end_pos.
+ *
+ * If no ooo segments remain, the stopping chunk's own entry is removed
+ * as well, so the tree does not keep a stale reference to it.
+ *
+ * @return first chunk that includes @a end_pos, or 0 if the list ends
+ *         before reaching it
+ */
+static svm_fifo_chunk_t *
+f_lookup_clear_enq_chunks (svm_fifo_t * f, svm_fifo_chunk_t * start,
+ u32 end_pos)
+{
+ rb_tree_t *rt = &f->ooo_enq_lookup;
+ svm_fifo_chunk_t *c;
+ rb_node_t *n;
+
+ c = start;
+ while (c && !f_chunk_includes_pos (c, end_pos))
+ {
+ if (c->enq_rb_index != RBTREE_TNIL_INDEX)
+ {
+ n = rb_node (rt, c->enq_rb_index);
+ rb_tree_del_node (rt, n);
+ c->enq_rb_index = RBTREE_TNIL_INDEX;
+ }
+
+ c = c->next;
+ }
+
+ /* No ooo segments left, so make sure the current chunk
+ * is not tracked in the enq rbtree */
+ if (f->ooos_list_head == OOO_SEGMENT_INVALID_INDEX
+ && c && c->enq_rb_index != RBTREE_TNIL_INDEX)
+ {
+ n = rb_node (rt, c->enq_rb_index);
+ rb_tree_del_node (rt, n);
+ c->enq_rb_index = RBTREE_TNIL_INDEX;
+ }
+
+ return c;
+}
+
+/**
+ * Remove dequeue-rbtree entries for chunks from @a start up to (but not
+ * including) the chunk that contains @a end_pos.
+ *
+ * Unlike the enqueue-side variant, the stopping chunk's entry is kept.
+ *
+ * @return first chunk that includes @a end_pos, or 0 if the list ends
+ *         before reaching it
+ */
+static svm_fifo_chunk_t *
+f_lookup_clear_deq_chunks (svm_fifo_t * f, svm_fifo_chunk_t * start,
+ u32 end_pos)
+{
+ rb_tree_t *rt = &f->ooo_deq_lookup;
+ svm_fifo_chunk_t *c;
+ rb_node_t *n;
+
+ c = start;
+ while (c && !f_chunk_includes_pos (c, end_pos))
+ {
+ if (c->deq_rb_index != RBTREE_TNIL_INDEX)
+ {
+ n = rb_node (rt, c->deq_rb_index);
+ rb_tree_del_node (rt, n);
+ c->deq_rb_index = RBTREE_TNIL_INDEX;
+ }
+
+ c = c->next;
+ }
+
+ return c;
+}
+