+/* Refill the additional (2nd..Nth) buffer segments of the multi-segment RX
+ * WQEs in slots [first_slot, first_slot + n_alloc).  For every slot whose
+ * chain has consumed segments (n_used_per_chain != 0) this allocates that
+ * many fresh buffers, links them into a chain, splices the chain in front
+ * of the surviving tail (second_bufs), and rewrites the matching WQE
+ * data-segment addresses.
+ *
+ * Returns the number of slots processed; stops early (freeing any partial
+ * allocation) if the buffer pool runs dry, so the caller can retry the
+ * remaining slots later. */
+static_always_inline u32
+rdma_device_legacy_input_refill_additional (vlib_main_t * vm,
+ rdma_device_t * rd,
+ rdma_rxq_t * rxq,
+ rdma_per_thread_data_t * ptd,
+ vlib_buffer_t * bt,
+ u32 first_slot, u32 n_alloc)
+{
+ int i;
+ u8 log_wqe_sz = rxq->log_wqe_sz;
+ /* per-thread scratch space for buffer indices / pointers of one chain */
+ u32 *bi = ptd->tmp_bi;
+ vlib_buffer_t **bufs = ptd->tmp_bufs;
+
+ for (i = 0; i < n_alloc; i++)
+ {
+ /* Number of segments consumed from this slot's chain, i.e. how many
+ * replacement buffers we must allocate. */
+ u8 chain_sz = rxq->n_used_per_chain[first_slot + i];
+ u8 chain_sz_alloc;
+ /* Each WQE occupies (1 << log_wqe_sz) data-segment descriptors. */
+ mlx5dv_wqe_ds_t *current_wqe =
+ rxq->wqes + ((first_slot + i) << log_wqe_sz);
+ if (chain_sz == 0)
+ continue;
+ /* Partial allocation is useless for a chain: roll it back and stop,
+ * leaving this and all following slots for a later refill pass. */
+ if (PREDICT_FALSE ((chain_sz_alloc =
+ vlib_buffer_alloc_from_pool (vm, bi, chain_sz,
+ rd->pool)) !=
+ chain_sz))
+ {
+ vlib_buffer_free (vm, bi, chain_sz_alloc);
+ break;
+ }
+ /* Build the chain: link each new buffer to the next one. */
+ vlib_get_buffers (vm, bi, bufs, chain_sz);
+ for (int j = 0; j < chain_sz - 1; j++)
+ {
+ vlib_buffer_copy_template (bufs[j], bt);
+ bufs[j]->next_buffer = bi[j + 1];
+ bufs[j]->flags |= VLIB_BUFFER_NEXT_PRESENT;
+ }
+ /* The chain starting at the second buffer is pre-initialised */
+ vlib_buffer_copy_template (bufs[chain_sz - 1], bt);
+ /* Stick with the already existing chain: if only part of the chain
+ * was consumed, link our last new buffer to the surviving tail kept
+ * in second_bufs.  NOTE(review): this assumes a full chain holds
+ * n_ds_per_wqe - 1 additional segments — confirm against the rxq
+ * setup code. */
+ if (chain_sz < rxq->n_ds_per_wqe - 1)
+ {
+ bufs[chain_sz - 1]->next_buffer = rxq->second_bufs[first_slot + i];
+ bufs[chain_sz - 1]->flags |= VLIB_BUFFER_NEXT_PRESENT;
+ }
+ else
+ {
+ /* Whole chain was consumed: no tail to attach, terminate here. */
+ bufs[chain_sz - 1]->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
+ }
+
+ /* Update the wqes: rewrite one data-segment address per new buffer.
+ * ds[0] is skipped — presumably it belongs to the first buffer,
+ * refilled elsewhere; TODO confirm.  The offset of sizeof
+ * (vlib_buffer_t) yields the address of the buffer's data area,
+ * and WQE addresses are stored big-endian, hence the byte swap. */
+ for (int j = 0; j < chain_sz; j++)
+ {
+ u64 addr;
+ vlib_get_buffers_with_offset (vm, bi + j,
+ (void *) &addr, 1,
+ sizeof (vlib_buffer_t));
+ current_wqe[j + 1].addr = clib_host_to_net_u64 (addr);
+ }
+ /* Slot fully refilled: reset its consumed-segment count, account for
+ * the restored segments globally, and publish the new chain head. */
+ rxq->n_used_per_chain[first_slot + i] = 0;
+ rxq->n_total_additional_segs -= chain_sz;
+ rxq->second_bufs[first_slot + i] = bi[0];
+ }
+ return i;
+}
+