- /* Slow path to support ring wrap */
- /* One-packet-at-a-time path taken when the fast path could not consume
-    everything (ring wrap, chained buffers, or TSO packets remaining). */
- if (PREDICT_FALSE (n_packets_left))
- {
- /* Fold the descriptors the fast path already wrote into the enqueued
-    count, then restart per-packet descriptor accounting from zero. */
- txq->n_enqueued += n_desc;
-
- n_desc = 0;
- d = txq->descs + (next & mask);
-
- /* +8 to be consistent with fast path */
- /* NOTE(review): assumes txq->n_enqueued + 8 <= txq->size here; if that
-    invariant does not hold the subtraction wraps — confirm against the
-    caller's admission check (not visible in this chunk). */
- n_desc_left = txq->size - (txq->n_enqueued + 8);
-
- /* Emit descriptors for one packet per iteration until we run out of
-    packets or free descriptor slots. */
- while (n_packets_left && n_desc_left)
- {
-
- /* Record the head buffer index for this ring slot and fetch the buffer. */
- txq->bufs[next & mask] = buffers[0];
- b[0] = vlib_get_buffer (vm, buffers[0]);
-
- /* Compute checksum/TSO offload flags only when the buffer asks for them;
-    !! normalizes the GSO flag test to 0/1 so it can be used as a count. */
- one_by_one_offload_flags = 0;
- is_tso = ! !(b[0]->flags & VNET_BUFFER_F_GSO);
- if (PREDICT_FALSE (is_tso || b[0]->flags & offload_mask))
- one_by_one_offload_flags |= avf_tx_prepare_cksum (b[0], is_tso);
-
- /* Deal with chain buffer if present */
- if (is_tso || b[0]->flags & VLIB_BUFFER_NEXT_PRESENT)
- {
- /* One data descriptor per segment, plus one context descriptor for TSO. */
- n_desc_needed = 1 + is_tso;
- b0 = b[0];
-
- /* Walk the chain to count how many descriptors this packet needs. */
- while (b0->flags & VLIB_BUFFER_NEXT_PRESENT)
- {
- b0 = vlib_get_buffer (vm, b0->next_buffer);
- n_desc_needed++;
- }
-
- /* Spec says data descriptor is limited to 8 segments */
- /* Non-TSO packets exceeding the segment limit are dropped and counted;
-    the packet is skipped, not truncated. */
- if (PREDICT_FALSE (!is_tso && n_desc_needed > 8))
- {
- vlib_buffer_free_one (vm, buffers[0]);
- vlib_error_count (vm, node->node_index,
- AVF_TX_ERROR_SEGMENT_SIZE_EXCEEDED, 1);
- n_packets_left -= 1;
- buffers += 1;
- continue;
- }
-
- /* Not enough room for the whole chain: stop and leave the rest for the
-    next invocation rather than writing a partial packet. */
- if (PREDICT_FALSE (n_desc_left < n_desc_needed))
- break;
-
- /* Enqueue a context descriptor if needed */
- if (PREDICT_FALSE (is_tso))
- {
- if (avf_tx_fill_ctx_desc (vm, txq, d, b[0]))
- /* Failure to acquire ref on ctx placeholder */
- break;
-
- /* Shift the packet's buffer index one slot forward and park the ctx
-    placeholder bi in the slot the context descriptor occupies, so the
-    completion path's per-slot buffer accounting stays consistent. */
- txq->bufs[(next + 1) & mask] = txq->bufs[next & mask];
- txq->bufs[next & mask] = txq->ctx_desc_placeholder_bi;
- next += 1;
- n_desc += 1;
- n_desc_left -= 1;
- d = txq->descs + (next & mask);
- }
- /* Fill one data descriptor per non-final segment; the last segment is
-    handled by the common code below. qword[1] packs the segment length
-    (<< 34) with command bits — RSV here since end-of-packet bits belong
-    only on the final descriptor. */
- while (b[0]->flags & VLIB_BUFFER_NEXT_PRESENT)
- {
- if (use_va_dma)
- d[0].qword[0] = vlib_buffer_get_current_va (b[0]);
- else
- d[0].qword[0] = vlib_buffer_get_current_pa (vm, b[0]);
-
- d[0].qword[1] = (((u64) b[0]->current_length) << 34) |
- AVF_TXD_CMD_RSV | one_by_one_offload_flags;
-
- next += 1;
- n_desc += 1;
- n_desc_left -= 1;
- d = txq->descs + (next & mask);
-
- /* Advance to the next segment in the chain. */
- txq->bufs[next & mask] = b[0]->next_buffer;
- b[0] = vlib_get_buffer (vm, b[0]->next_buffer);
- }
- }
-
- /* Final (or only) data descriptor for this packet. `bits` is defined
-    outside this chunk — presumably carries the end-of-packet command
-    flags; confirm against the enclosing function. */
- if (use_va_dma)
- d[0].qword[0] = vlib_buffer_get_current_va (b[0]);
- else
- d[0].qword[0] = vlib_buffer_get_current_pa (vm, b[0]);
-
- d[0].qword[1] =
- (((u64) b[0]->current_length) << 34) | bits |
- one_by_one_offload_flags;
-
- next += 1;
- n_desc += 1;
- buffers += 1;
- n_packets_left -= 1;
- n_desc_left -= 1;
- d = txq->descs + (next & mask);
- }
- }
-
- /* If the report-status ring has a free slot, tag the last descriptor
-    written with RS so the device reports completion for everything up to
-    and including it; the recorded slot is used later to reclaim buffers. */
- if ((slot = clib_ring_enq (txq->rs_slots)))
- {
- u16 rs_slot = slot[0] = (next - 1) & mask;
- d = txq->descs + rs_slot;
- d[0].qword[1] |= AVF_TXD_CMD_RS;
- }
-
- /* Publish the new tail to the device doorbell, then account for the
-    descriptors written since the last n_enqueued update (all fast-path
-    descriptors if the slow path was skipped, slow-path ones otherwise). */
- txq->next = next & mask;
- avf_tail_write (txq->qtx_tail, txq->next);
- txq->n_enqueued += n_desc;