n_cache += cf->n_elts;
if (n_cache >= VLIB_FRAME_SIZE)
  {
-    vlib_buffer_enqueue_to_next (vm, node, ct->buffer_indices,
-                                 ct->nexts, n_cache);
+    vlib_buffer_enqueue_to_next_vec (vm, node, &ct->buffer_indices,
+                                     &ct->nexts, n_cache);
    n_cache = 0;
  }
if (cm->dispatch_mode == VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT
    && n_elts > 0)
  {
-    vlib_node_set_interrupt_pending (vlib_mains[enqueue_thread_idx],
-                                     cm->crypto_node_index);
+    vlib_node_set_interrupt_pending (
+      vlib_get_main_by_index (enqueue_thread_idx),
+      cm->crypto_node_index);
  }
n_elts = 0;
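
The hunk above makes two substitutions: cached buffers are now flushed with vlib_buffer_enqueue_to_next_vec(), which takes pointers to the per-thread buffer_indices/nexts vectors rather than raw arrays, and the target thread's vlib_main_t is fetched through vlib_get_main_by_index() instead of indexing vlib_mains[] directly. As a rough sketch of what that accessor is assumed to do (not a verbatim copy of the VPP header, shown only so the replacement reads as equivalent):

    /* sketch only: assumed shape of the accessor used above,
       relies on VPP's vlib headers (vlib/vlib.h) */
    static_always_inline vlib_main_t *
    vlib_get_main_by_index (u32 thread_index)
    {
      /* look up the per-thread main; one must exist for every registered thread */
      vlib_main_t *vm = vlib_global_main.vlib_mains[thread_index];
      ASSERT (vm);
      return vm;
    }
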
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
-  u32 n_dispatched = 0, n_cache = 0;
-  u32 index;
+  u32 n_dispatched = 0, n_cache = 0, index;
-  /* *INDENT-OFF* */
-  clib_bitmap_foreach (index, cm->async_active_ids) {
+  vec_foreach_index (index, cm->dequeue_handlers)
    n_cache = crypto_dequeue_frame (vm, node, ct, cm->dequeue_handlers[index],
                                    n_cache, &n_dispatched);
-  }
-  /* *INDENT-ON* */
+
  if (n_cache)
-    vlib_buffer_enqueue_to_next (vm, node, ct->buffer_indices, ct->nexts,
-                                 n_cache);
+    vlib_buffer_enqueue_to_next_vec (vm, node, &ct->buffer_indices, &ct->nexts,
+                                     n_cache);
  return n_dispatched;
}
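
Applied together, the second hunk leaves the dispatch path reading roughly as below. This is reconstructed only from the added and unchanged lines shown above; the enclosing node function's name and signature are not part of this diff and are assumed:

    /* reconstruction of the post-patch body, for reading convenience only */
    vnet_crypto_main_t *cm = &crypto_main;
    vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
    u32 n_dispatched = 0, n_cache = 0, index;

    /* poll every registered dequeue handler, accumulating buffers per thread */
    vec_foreach_index (index, cm->dequeue_handlers)
      n_cache = crypto_dequeue_frame (vm, node, ct, cm->dequeue_handlers[index],
                                      n_cache, &n_dispatched);

    /* flush whatever is still cached before returning the dispatch count */
    if (n_cache)
      vlib_buffer_enqueue_to_next_vec (vm, node, &ct->buffer_indices, &ct->nexts,
                                       n_cache);

    return n_dispatched;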