+  if (f_chunk_includes_pos (c, end_pos))
+    return;
+
+  do
+    {
+      c = c->next;
+      if (!c || c->deq_rb_index != RBTREE_TNIL_INDEX)
+        break;
+
+      c->deq_rb_index = rb_tree_add_custom (rt, c->start_byte,
+                                            pointer_to_uword (c), f_pos_lt);
+
+      if (f_chunk_includes_pos (c, start_pos))
+        f->ooo_deq = c;
+    }
+  while (!f_chunk_includes_pos (c, end_pos));
+}
+
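+/* Walk the chunk list from @start and remove the ooo enqueue rbtree
+ * entry of every chunk that ends before @end_pos. Returns the chunk
+ * that includes @end_pos, or 0 if the list is exhausted first. */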
+static svm_fifo_chunk_t *
+f_lookup_clear_enq_chunks (svm_fifo_t * f, svm_fifo_chunk_t * start,
+                           u32 end_pos)
+{
+  rb_tree_t *rt = &f->ooo_enq_lookup;
+  svm_fifo_chunk_t *c;
+  rb_node_t *n;
+
+  c = start;
+  while (c && !f_chunk_includes_pos (c, end_pos))
+    {
+      if (c->enq_rb_index != RBTREE_TNIL_INDEX)
+        {
+          n = rb_node (rt, c->enq_rb_index);
+          rb_tree_del_node (rt, n);
+          c->enq_rb_index = RBTREE_TNIL_INDEX;
+        }
+
+      c = c->next;
+    }
+
+  /* No ooo segments left, so make sure the current chunk
+   * is not tracked in the enq rbtree */
+  if (f->ooos_list_head == OOO_SEGMENT_INVALID_INDEX
+      && c && c->enq_rb_index != RBTREE_TNIL_INDEX)
+    {
+      n = rb_node (rt, c->enq_rb_index);
+      rb_tree_del_node (rt, n);
+      c->enq_rb_index = RBTREE_TNIL_INDEX;
+    }
+
+  return c;
+}
+
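+/* Dequeue-side counterpart of f_lookup_clear_enq_chunks: clears the ooo
+ * dequeue rbtree entries for chunks that end before @end_pos and returns
+ * the chunk that includes it, if any. */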
+static svm_fifo_chunk_t *
+f_lookup_clear_deq_chunks (svm_fifo_t * f, svm_fifo_chunk_t * start,
+                           u32 end_pos)
+{
+  rb_tree_t *rt = &f->ooo_deq_lookup;
+  svm_fifo_chunk_t *c;
+  rb_node_t *n;
+
+  c = start;
+  while (c && !f_chunk_includes_pos (c, end_pos))
+    {
+      if (c->deq_rb_index != RBTREE_TNIL_INDEX)
+        {
+          n = rb_node (rt, c->deq_rb_index);
+          rb_tree_del_node (rt, n);
+          c->deq_rb_index = RBTREE_TNIL_INDEX;
+        }
+
+      c = c->next;
+    }
+
+  return c;
+}
+
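+/* Append chunk (or chain of chunks) @c to the fifo. Start offsets are
+ * assigned to continue from the current end chunk, and the rbtree indices
+ * of the new chunks are reset. Assumes the fifo already has at least one
+ * chunk, i.e., f->end_chunk is valid. Callers typically allocate the
+ * chunk first, as f_try_grow () does below with fsh_alloc_chunk (). */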
+void
+svm_fifo_add_chunk (svm_fifo_t * f, svm_fifo_chunk_t * c)
+{
+  svm_fifo_chunk_t *cur, *prev;
+
+  cur = c;
+  prev = f->end_chunk;
+
+  while (cur)
+    {
+      cur->start_byte = prev->start_byte + prev->length;
+      cur->enq_rb_index = RBTREE_TNIL_INDEX;
+      cur->deq_rb_index = RBTREE_TNIL_INDEX;
+
+      prev = cur;
+      cur = cur->next;
+    }
+
+  prev->next = 0;
+  f->end_chunk->next = c;
+  f->end_chunk = prev;
+
+  if (!f->tail_chunk)
+    f->tail_chunk = c;
+
+  return;
+}
+
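+/* Release the nodes of both ooo lookup rbtrees. The chunks themselves
+ * are not freed here. */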
+void
+svm_fifo_free_chunk_lookup (svm_fifo_t * f)
+{
+  rb_tree_free_nodes (&f->ooo_enq_lookup);
+  rb_tree_free_nodes (&f->ooo_deq_lookup);
+}
+
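+/* Drop a reference to the fifo and free it once the last one is gone. */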
+void
+svm_fifo_free (svm_fifo_t * f)
+{
+  ASSERT (f->refcnt > 0);
+
+  if (--f->refcnt == 0)
+    {
+      /* ooo data is not allocated on segment heap */
+      svm_fifo_free_chunk_lookup (f);
+      clib_mem_free (f);
+    }
+}
+
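+/* Overwrite the first @len bytes at the fifo head with @src without
+ * moving the head pointer. The write may straddle at most two chunks;
+ * the assert below enforces that the spillover fits in the next one. */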
+void
+svm_fifo_overwrite_head (svm_fifo_t * f, u8 * src, u32 len)
+{
+  u32 n_chunk;
+  u32 head, tail, head_idx;
+  svm_fifo_chunk_t *c;
+
+  ASSERT (len <= f->size);
+
+  f_load_head_tail_cons (f, &head, &tail);
+
+  if (!f->head_chunk)
+    f->head_chunk = svm_fifo_find_chunk (f, head);
+
+  c = f->head_chunk;
+  head_idx = head - c->start_byte;
+  n_chunk = c->length - head_idx;
+  if (len <= n_chunk)
+    clib_memcpy_fast (&c->data[head_idx], src, len);
+  else
+    {
+      ASSERT (len - n_chunk <= c->next->length);
+      clib_memcpy_fast (&c->data[head_idx], src, n_chunk);
+      clib_memcpy_fast (&c->next->data[0], src + n_chunk, len - n_chunk);
+    }
+}
+
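+/* Grow the fifo by allocating and appending a new chunk. The chunk is
+ * sized to the smaller of f->min_alloc and the space left up to f->size,
+ * but never less than what the pending write still needs. Returns 0 on
+ * success, -1 if allocation fails. */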
+static int
+f_try_grow (svm_fifo_t * f, u32 head, u32 tail, u32 len)
+{
+  svm_fifo_chunk_t *c;
+  u32 alloc_size, free_alloced;
+
+  free_alloced = f_chunk_end (f->end_chunk) - tail;
+  ASSERT (free_alloced < len);
+
+  alloc_size = clib_min (f->min_alloc, f->size - (tail - head));
+  alloc_size = clib_max (alloc_size, len - free_alloced);
+
+  c = fsh_alloc_chunk (f->fs_hdr, f->slice_index, alloc_size);
+  if (PREDICT_FALSE (!c))
+    return -1;
+
+  svm_fifo_add_chunk (f, c);
+  return 0;
+}