+/**
+ * Apply a signed delta to a slice's virtual memory accounting.
+ *
+ * @param fsh		fifo segment header
+ * @param slice_index	slice whose counter is updated
+ * @param n_bytes	signed byte delta added to the slice's virtual_mem
+ *
+ * NOTE(review): the += is not atomic; presumably callers serialize updates
+ * to a given slice — confirm against call sites.
+ */
+void
+fsh_virtual_mem_update (fifo_segment_header_t * fsh, u32 slice_index,
+ int n_bytes)
+{
+ fifo_segment_slice_t *fss = fsh_slice_get (fsh, slice_index);
+ fss->virtual_mem += n_bytes;
+}
+
+/**
+ * Check whether the segment is running low on memory and, if so, latch
+ * the FIFO_SEGMENT_F_MEM_LIMIT flag.
+ *
+ * No-op once the flag is already set. The low-memory threshold is the
+ * larger of 1% of the segment size and twice the reserved bytes; when
+ * free bytes fall to or below it, the flag is set and the cached
+ * free-byte count is refreshed.
+ */
+static void
+fsh_check_mem (fifo_segment_header_t * fsh)
+{
+ uword thresh;
+
+ /* Flag is sticky: once latched, skip the threshold computation */
+ if (fsh->flags & FIFO_SEGMENT_F_MEM_LIMIT)
+ return;
+
+ /* max (1% of segment, 2x reserved bytes) */
+ thresh = clib_max (0.01 * fsh->ssvm_sh->ssvm_size,
+ 2 * fsh->n_reserved_bytes);
+ if (fsh->n_free_bytes > thresh)
+ return;
+
+ fsh->flags |= FIFO_SEGMENT_F_MEM_LIMIT;
+ fsh_update_free_bytes (fsh);
+}
+
+/* Predicate: does fl_index name an existing free-chunk list on this slice? */
+static inline int
+fss_chunk_fl_index_is_valid (fifo_segment_slice_t * fss, u32 fl_index)
+{
+ if (fl_index >= vec_len (fss->free_chunks))
+ return 0;
+ return 1;
+}
+
+/**
+ * Push one chunk onto slice free list @fl_index.
+ *
+ * Takes the slice chunk spinlock; @c becomes the new list head.
+ */
+static void
+fss_chunk_free_list_push (fifo_segment_slice_t * fss, u32 fl_index,
+ svm_fifo_chunk_t * c)
+{
+ /* Validate the index, consistent with fss_chunk_free_list_pop; an
+ * out-of-range index would silently corrupt memory otherwise */
+ ASSERT (fss_chunk_fl_index_is_valid (fss, fl_index));
+
+ clib_spinlock_lock (&fss->chunk_lock);
+ c->next = fss->free_chunks[fl_index];
+ fss->free_chunks[fl_index] = c;
+ clib_spinlock_unlock (&fss->chunk_lock);
+}
+
+/**
+ * Push a pre-linked chain of chunks onto slice free list @fl_index.
+ *
+ * Takes the slice chunk spinlock; @head becomes the new list head and
+ * @tail is spliced onto the previous head. Caller guarantees @head..@tail
+ * is a valid chain.
+ */
+static void
+fss_chunk_free_list_push_list (fifo_segment_slice_t * fss, u32 fl_index,
+ svm_fifo_chunk_t * head,
+ svm_fifo_chunk_t * tail)
+{
+ /* Validate the index, consistent with fss_chunk_free_list_pop; an
+ * out-of-range index would silently corrupt memory otherwise */
+ ASSERT (fss_chunk_fl_index_is_valid (fss, fl_index));
+
+ clib_spinlock_lock (&fss->chunk_lock);
+ tail->next = fss->free_chunks[fl_index];
+ fss->free_chunks[fl_index] = head;
+ clib_spinlock_unlock (&fss->chunk_lock);
+}
+
+/**
+ * Pop the head chunk from slice free list @fl_index.
+ *
+ * @return the removed chunk, or 0 if the list is empty.
+ */
+static svm_fifo_chunk_t *
+fss_chunk_free_list_pop (fifo_segment_slice_t * fss, u32 fl_index)
+{
+ svm_fifo_chunk_t *c;
+
+ ASSERT (fss_chunk_fl_index_is_valid (fss, fl_index));
+
+ clib_spinlock_lock (&fss->chunk_lock);
+
+ /* Single unlock path: detach the head only if one exists */
+ c = fss->free_chunks[fl_index];
+ if (c)
+ fss->free_chunks[fl_index] = c->next;
+
+ clib_spinlock_unlock (&fss->chunk_lock);
+
+ return c;
+}
+
+/* Link fifo f at the head of the slice's active fifo list. */
+static inline void
+fss_fifo_add_active_list (fifo_segment_slice_t * fss, svm_fifo_t * f)
+{
+ svm_fifo_t *old_head = fss->fifos;
+
+ if (old_head)
+ {
+ old_head->prev = f;
+ f->next = old_head;
+ }
+ fss->fifos = f;
+}
+
+/* Unlink fifo f from the slice's active list; no-op if it is not tracked. */
+static inline void
+fss_fifo_del_active_list (fifo_segment_slice_t * fss, svm_fifo_t * f)
+{
+ if (!(f->flags & SVM_FIFO_F_LL_TRACKED))
+ return;
+
+ /* Splice f out: fix the predecessor (or list head), then the successor */
+ if (f->prev)
+ f->prev->next = f->next;
+ else
+ fss->fifos = f->next;
+ if (f->next)
+ f->next->prev = f->prev;
+}
+
+/* Read the slice's free-chunk byte counter (relaxed atomic load). */
+static inline uword
+fss_fl_chunk_bytes (fifo_segment_slice_t * fss)
+{
+ return clib_atomic_load_relax_n (&fss->n_fl_chunk_bytes);
+}
+
+/* Add @size bytes to the slice's free-chunk counter (relaxed atomic add). */
+static inline void
+fss_fl_chunk_bytes_add (fifo_segment_slice_t * fss, uword size)
+{
+ clib_atomic_fetch_add_relax (&fss->n_fl_chunk_bytes, size);
+}
+
+static inline void
+fss_fl_chunk_bytes_sub (fifo_segment_slice_t * fss, uword size)
+{
+ clib_atomic_fetch_sub_relax (&fss->n_fl_chunk_bytes, size);