/** \brief Translate a buffer index to the physical address of its data area.

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffer_index - (u32) buffer index
    @return - (u64) physical address of the buffer's data region
*/
always_inline u64
vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
{
  vlib_buffer_main_t *bmain = vm->buffer_main;
  vlib_buffer_t *buf = vlib_get_buffer (vm, buffer_index);
  vlib_buffer_pool_t *bp;

  /* each buffer records the pool it came from; that pool owns the physmem
     region used for the virtual-to-physical translation */
  bp = vec_elt_at_index (bmain->buffer_pools, buf->buffer_pool_index);

  return vlib_physmem_virtual_to_physical (vm, bp->physmem_region, buf->data);
}
/** \brief Prefetch buffer metadata by buffer index
VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
/* Forward declaration: used by the inline allocators below to verify that
   buffers being allocated or freed are in the expected known state. */
+void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
+ uword n_buffers,
+ vlib_buffer_known_state_t
+ expected_state);
+
/* NOTE(review): this hunk drops the vlib_main_t argument and reads the
   buffer main from vlib_global_main instead — callers must be updated to
   the one-argument form.  The tail of the function (use of the hash lookup
   result, spinlock unlock, return) is elided by the diff context; confirm
   it against the full file. */
always_inline vlib_buffer_known_state_t
-vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
+vlib_buffer_is_known (u32 buffer_index)
{
- vlib_buffer_main_t *bm = vm->buffer_main;
+ vlib_buffer_main_t *bm = vlib_global_main.buffer_main;
clib_spinlock_lock (&bm->buffer_known_hash_lockp);
uword *p = hash_get (bm->buffer_known_hash, buffer_index);
}
/* NOTE(review): same refactor as vlib_buffer_is_known — the vm parameter is
   dropped and the buffer main is taken from vlib_global_main.  The hash
   update is serialized by buffer_known_hash_lockp; the function's closing
   brace is elided by the diff context. */
always_inline void
-vlib_buffer_set_known_state (vlib_main_t * vm,
- u32 buffer_index,
+vlib_buffer_set_known_state (u32 buffer_index,
vlib_buffer_known_state_t state)
{
- vlib_buffer_main_t *bm = vm->buffer_main;
+ vlib_buffer_main_t *bm = vlib_global_main.buffer_main;
+
clib_spinlock_lock (&bm->buffer_known_hash_lockp);
hash_set (bm->buffer_known_hash, buffer_index, state);
clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
uword follow_chain);
/* NOTE(review): the hunk below deletes the callback-indirected allocator
   (bm->cb.vlib_buffer_alloc_cb); this same patch re-adds vlib_buffer_alloc
   as a thin wrapper over vlib_buffer_alloc_from_free_list with an unchanged
   signature, so external callers are unaffected. */
-/** \brief Allocate buffers into supplied array
-
- @param vm - (vlib_main_t *) vlib main data structure pointer
- @param buffers - (u32 * ) buffer index array
- @param n_buffers - (u32) number of buffers requested
- @return - (u32) number of buffers actually allocated, may be
- less than the number requested or zero
-*/
-always_inline u32
-vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
-{
- vlib_buffer_main_t *bm = vm->buffer_main;
-
- ASSERT (bm->cb.vlib_buffer_alloc_cb);
-
- return bm->cb.vlib_buffer_alloc_cb (vm, buffers, n_buffers);
-}
-
always_inline u32
vlib_buffer_round_size (u32 size)
{
/* NOTE(review): diff context jumps here — the lines below belong to
   vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
   free_list_index); the start of its signature is elided. */
u32 n_buffers, u32 free_list_index)
{
vlib_buffer_main_t *bm = vm->buffer_main;
+ vlib_buffer_free_list_t *fl;
+ u32 *src;
+ uword len;
+
+ ASSERT (bm->cb.vlib_buffer_fill_free_list_cb);
+
+ fl = pool_elt_at_index (bm->buffer_free_list_pool, free_list_index);
+
+ len = vec_len (fl->buffers);
+
/* slow path: not enough cached indices — ask the backend callback to
   refill, then hand out whatever is available */
+ if (PREDICT_FALSE (len < n_buffers))
+ {
+ bm->cb.vlib_buffer_fill_free_list_cb (vm, fl, n_buffers);
+ len = vec_len (fl->buffers);
+
+ /* even if fill free list didn't manage to refill free list
+ we should give what we have */
+ n_buffers = clib_min (len, n_buffers);
+
+ /* following code is intentionaly duplicated to allow compiler
+ to optimize fast path when n_buffers is constant value */
+ src = fl->buffers + len - n_buffers;
+ clib_memcpy (buffers, src, n_buffers * sizeof (u32));
+ _vec_len (fl->buffers) -= n_buffers;
+
+ /* Verify that buffers are known free. */
+ vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
+ VLIB_BUFFER_KNOWN_FREE);
+
+ return n_buffers;
+ }
+
/* fast path: pop n_buffers indices off the tail of the free-list vector */
+ src = fl->buffers + len - n_buffers;
+ clib_memcpy (buffers, src, n_buffers * sizeof (u32));
+ _vec_len (fl->buffers) -= n_buffers;
+
+ /* Verify that buffers are known free. */
+ vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
+ VLIB_BUFFER_KNOWN_FREE);
+
+ return n_buffers;
+}
+
+/** \brief Allocate buffers into supplied array
+
+ @param vm - (vlib_main_t *) vlib main data structure pointer
+ @param buffers - (u32 * ) buffer index array
+ @param n_buffers - (u32) number of buffers requested
+ @return - (u32) number of buffers actually allocated, may be
+ less than the number requested or zero
+*/
+always_inline u32
+vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
+{
+ return vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
+ VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
+}
+
+/** \brief Allocate buffers into ring
- ASSERT (bm->cb.vlib_buffer_alloc_from_free_list_cb);
+ @param vm - (vlib_main_t *) vlib main data structure pointer
+ @param buffers - (u32 * ) buffer index ring
+ @param start - (u32) first slot in the ring
+ @param ring_size - (u32) ring size
+ @param n_buffers - (u32) number of buffers requested
+ @return - (u32) number of buffers actually allocated, may be
+ less than the number requested or zero
+*/
+always_inline u32
+vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
+ u32 ring_size, u32 n_buffers)
+{
+ u32 n_alloc;
- return bm->cb.vlib_buffer_alloc_from_free_list_cb (vm, buffers, n_buffers,
- free_list_index);
+ ASSERT (n_buffers <= ring_size);
+
+ if (PREDICT_TRUE (start + n_buffers <= ring_size))
+ return vlib_buffer_alloc (vm, ring + start, n_buffers);
+
+ n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
+
+ if (PREDICT_TRUE (n_alloc == ring_size - start))
+ n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
+
+ return n_alloc;
}
/* NOTE(review): fragment of an enclosing frame-free routine, elided by diff
   context.  The added f->n_alloc accounting keeps the frame's allocated
   count consistent when VLIB_FRAME_SIZE buffer indices are moved from this
   frame into the shared global_buffers vector under global_buffers_lock. */
/** \brief Free buffers
vec_add_aligned (mf->global_buffers, f->buffers, VLIB_FRAME_SIZE,
CLIB_CACHE_LINE_BYTES);
vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
+ f->n_alloc -= VLIB_FRAME_SIZE;
clib_spinlock_unlock (&mf->global_buffers_lock);
}
}
#endif
}
+/** minimum data size of first buffer in a buffer chain */
+#define VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE (256)
+
+/**
+ * @brief compress buffer chain in a way where the first buffer is at least
+ * VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE long
+ *
+ * @param[in] vm - vlib_main
+ * @param[in,out] first - first buffer in chain
+ * @param[in,out] discard_vector - vector of buffer indexes which were removed
+ * from the chain
+ */
+always_inline void
+vlib_buffer_chain_compress (vlib_main_t * vm,
+ vlib_buffer_t * first, u32 ** discard_vector)
+{
+ if (first->current_length >= VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE ||
+ !(first->flags & VLIB_BUFFER_NEXT_PRESENT))
+ {
+ /* this is already big enough or not a chain */
+ return;
+ }
+ /* probe free list to find allocated buffer size to avoid overfill */
+ u32 index;
+ vlib_buffer_free_list_t *free_list =
+ vlib_buffer_get_buffer_free_list (vm, first, &index);
+
+ u32 want_first_size = clib_min (VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE,
+ free_list->n_data_bytes -
+ first->current_data);
+ do
+ {
+ vlib_buffer_t *second = vlib_get_buffer (vm, first->next_buffer);
+ u32 need = want_first_size - first->current_length;
+ u32 amount_to_copy = clib_min (need, second->current_length);
+ clib_memcpy (((u8 *) vlib_buffer_get_current (first)) +
+ first->current_length,
+ vlib_buffer_get_current (second), amount_to_copy);
+ first->current_length += amount_to_copy;
+ vlib_buffer_advance (second, amount_to_copy);
+ if (first->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
+ {
+ first->total_length_not_including_first_buffer -= amount_to_copy;
+ }
+ if (!second->current_length)
+ {
+ vec_add1 (*discard_vector, first->next_buffer);
+ if (second->flags & VLIB_BUFFER_NEXT_PRESENT)
+ {
+ first->next_buffer = second->next_buffer;
+ }
+ else
+ {
+ first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
+ }
+ second->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
+ }
+ }
+ while ((first->current_length < want_first_size) &&
+ (first->flags & VLIB_BUFFER_NEXT_PRESENT));
+}
+
#endif /* included_vlib_buffer_funcs_h */
/*