{
  vlib_get_buffer_indices_with_offset (vm, (void **) obj_table, bufs,
                                       n, sizeof (struct rte_mbuf));
-  vlib_buffer_pool_put (vm, buffer_pool_index, bufs, batch_size);
+  vlib_buffer_pool_put (vm, buffer_pool_index, bufs, n);
}
return 0;

  return n_alloc;
}
-static void
+static_always_inline void
vlib_buffer_pool_put (vlib_main_t * vm, u8 buffer_pool_index,
                      u32 * buffers, u32 n_buffers)
{
  vlib_buffer_pool_t *bp = vlib_get_buffer_pool (vm, buffer_pool_index);
  vlib_buffer_pool_thread_t *bpt =
    vec_elt_at_index (bp->threads, vm->thread_index);

+  if (CLIB_DEBUG > 0)
+    vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
+                                     VLIB_BUFFER_KNOWN_ALLOCATED);
+
  vec_add_aligned (bpt->cached_buffers, buffers, n_buffers,
                   CLIB_CACHE_LINE_BYTES);

      vlib_buffer_copy_template (b[3], &bt);
      n_queue += 4;
-      if (CLIB_DEBUG > 0)
-        vlib_buffer_validate_alloc_free (vm, buffers, 4,
-                                         VLIB_BUFFER_KNOWN_ALLOCATED);
-
      vlib_buffer_validate (vm, b[0]);
      vlib_buffer_validate (vm, b[1]);
      vlib_buffer_validate (vm, b[2]);

      if (clib_atomic_sub_fetch (&b[0]->ref_count, 1) == 0)
        {
-          if (CLIB_DEBUG > 0)
-            vlib_buffer_validate_alloc_free (vm, &bi, 1,
-                                             VLIB_BUFFER_KNOWN_ALLOCATED);
          vlib_buffer_copy_template (b[0], &bt);
          queue[n_queue++] = bi;
        }
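
/* Caller-side sketch, not part of the patch above: with the batch
   validation now performed once inside vlib_buffer_pool_put when
   CLIB_DEBUG > 0, code returning buffers to a pool no longer needs its
   own per-buffer debug checks.  The function and parameter names below
   (my_return_buffers, pool_index) are illustrative assumptions only. */
static_always_inline void
my_return_buffers (vlib_main_t * vm, u8 pool_index, u32 * bufs, u32 n)
{
  /* validates all n indices in debug builds, then adds them to the
     per-thread cache of the selected buffer pool */
  vlib_buffer_pool_put (vm, pool_index, bufs, n);
}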