#include <vnet/udp/udp_packet.h>
#include <vnet/devices/virtio/virtio.h>
+#define VIRTIO_TX_MAX_CHAIN_LEN 127
+
#define foreach_virtio_tx_func_error \
_(NO_FREE_SLOTS, "no free tx slots") \
_(TRUNC_PACKET, "packet > buffer size -- truncated in tx ring") \
virtio_tx_trace_t *t = va_arg (*va, virtio_tx_trace_t *);
u32 indent = format_get_indent (s);
- s = format (s, "%Ubuffer 0x%x: %U\n",
- format_white_space, indent,
- t->buffer_index, format_vnet_buffer, &t->buffer);
+ s = format (s, "%Ubuffer 0x%x: %U\n", format_white_space, indent,
+ t->buffer_index, format_vnet_buffer_no_chain, &t->buffer);
s =
format (s, "%U%U\n", format_white_space, indent,
format_generic_header_offset, &t->gho);
}
static void
-virtio_interface_drop_inline (vlib_main_t *vm, uword node_index, u32 *buffers,
-			      u16 n, virtio_tx_func_error_t error)
+/* Drop helper: records the error on the TX node, bumps the per-interface
+   drop counter (new: requires vif for sw_if_index), and frees the buffers.
+   NOTE(review): assumes the global vnet_main is the intended vnet instance —
+   confirm against the file's other uses (vs. vnet_get_main ()). */
+virtio_interface_drop_inline (vlib_main_t *vm, virtio_if_t *vif,
+			      uword node_index, u32 *buffers, u16 n,
+			      virtio_tx_func_error_t error)
{
  vlib_error_count (vm, node_index, error, n);
+  /* Also count the drops on the interface so they show up in interface
+     stats, not only in node error counters. */
+  vlib_increment_simple_counter (vnet_main.interface_main.sw_if_counters +
+				   VNET_INTERFACE_COUNTER_DROP,
+				   vm->thread_index, vif->sw_if_index, n);
  vlib_buffer_free (vm, buffers, n);
}
set_checksum_offsets (vlib_buffer_t *b, virtio_net_hdr_v1_t *hdr,
const int is_l2)
{
- u32 oflags = vnet_buffer2 (b)->oflags;
+ vnet_buffer_oflags_t oflags = vnet_buffer (b)->oflags;
if (b->flags & VNET_BUFFER_F_IS_IP4)
{
static void
set_gso_offsets (vlib_buffer_t *b, virtio_net_hdr_v1_t *hdr, const int is_l2)
{
- u32 oflags = vnet_buffer2 (b)->oflags;
+ vnet_buffer_oflags_t oflags = vnet_buffer (b)->oflags;
if (b->flags & VNET_BUFFER_F_IS_IP4)
{
static u16
add_buffer_to_slot (vlib_main_t *vm, vlib_node_runtime_t *node,
- virtio_vring_t *vring, u32 bi, u16 free_desc_count,
- u16 avail, u16 next, u16 mask, int hdr_sz, int do_gso,
- int csum_offload, int is_pci, int is_tun, int is_indirect,
- int is_any_layout)
+ virtio_if_t *vif, virtio_vring_t *vring, u32 bi,
+ u16 free_desc_count, u16 avail, u16 next, u16 mask,
+ int hdr_sz, int do_gso, int csum_offload, int is_pci,
+ int is_tun, int is_indirect, int is_any_layout)
{
u16 n_added = 0;
vring_desc_t *d;
b = vlib_get_buffer (vm, b->next_buffer);
id->addr = vlib_buffer_get_current_pa (vm, b);
id->len = b->current_length;
+ if (PREDICT_FALSE (count == VIRTIO_TX_MAX_CHAIN_LEN))
+ {
+ if (b->flags & VLIB_BUFFER_NEXT_PRESENT)
+ vlib_error_count (vm, node->node_index,
+ VIRTIO_TX_ERROR_TRUNC_PACKET, 1);
+ break;
+ }
}
}
else /* VIRTIO_IF_TYPE_[TAP | TUN] */
b = vlib_get_buffer (vm, b->next_buffer);
id->addr = pointer_to_uword (vlib_buffer_get_current (b));
id->len = b->current_length;
+ if (PREDICT_FALSE (count == VIRTIO_TX_MAX_CHAIN_LEN))
+ {
+ if (b->flags & VLIB_BUFFER_NEXT_PRESENT)
+ vlib_error_count (vm, node->node_index,
+ VIRTIO_TX_ERROR_TRUNC_PACKET, 1);
+ break;
+ }
}
}
id->flags = 0;
done:
if (drop_inline != ~0)
- virtio_interface_drop_inline (vm, node->node_index, &bi, 1, drop_inline);
+ virtio_interface_drop_inline (vm, vif, node->node_index, &bi, 1,
+ drop_inline);
return n_added;
}
static u16
add_buffer_to_slot_packed (vlib_main_t *vm, vlib_node_runtime_t *node,
- virtio_vring_t *vring, u32 bi, u16 next, int hdr_sz,
- int do_gso, int csum_offload, int is_pci,
- int is_tun, int is_indirect, int is_any_layout)
+ virtio_if_t *vif, virtio_vring_t *vring, u32 bi,
+ u16 next, int hdr_sz, int do_gso, int csum_offload,
+ int is_pci, int is_tun, int is_indirect,
+ int is_any_layout)
{
u16 n_added = 0, flags = 0;
int is_l2 = !is_tun;
b = vlib_get_buffer (vm, b->next_buffer);
id->addr = vlib_buffer_get_current_pa (vm, b);
id->len = b->current_length;
+ if (PREDICT_FALSE (count == VIRTIO_TX_MAX_CHAIN_LEN))
+ {
+ if (b->flags & VLIB_BUFFER_NEXT_PRESENT)
+ vlib_error_count (vm, node->node_index,
+ VIRTIO_TX_ERROR_TRUNC_PACKET, 1);
+ break;
+ }
}
}
id->flags = 0;
done:
if (drop_inline != ~0)
- virtio_interface_drop_inline (vm, node->node_index, &bi, 1, drop_inline);
+ virtio_interface_drop_inline (vm, vif, node->node_index, &bi, 1,
+ drop_inline);
return n_added;
}
u32 bi = virtio_vring_buffering_read_from_front (vring->buffering);
if (bi == ~0)
break;
- n_added = add_buffer_to_slot_packed (vm, node,
- vring, bi, next,
- hdr_sz, do_gso, csum_offload,
- is_pci, is_tun, is_indirect,
- is_any_layout);
+ n_added = add_buffer_to_slot_packed (
+ vm, node, vif, vring, bi, next, hdr_sz, do_gso, csum_offload,
+ is_pci, is_tun, is_indirect, is_any_layout);
n_buffers_left--;
if (PREDICT_FALSE (n_added == 0))
continue;
{
u16 n_added = 0;
- n_added = add_buffer_to_slot_packed (vm, node,
- vring, buffers[0], next,
- hdr_sz, do_gso, csum_offload,
- is_pci, is_tun, is_indirect,
- is_any_layout);
+ n_added = add_buffer_to_slot_packed (
+ vm, node, vif, vring, buffers[0], next, hdr_sz, do_gso, csum_offload,
+ is_pci, is_tun, is_indirect, is_any_layout);
buffers++;
n_left--;
if (PREDICT_FALSE (n_added == 0))
if (bi == ~0)
break;
- n_added = add_buffer_to_slot (vm, node, vring, bi, free_desc_count,
- avail, next, mask, hdr_sz, do_gso,
- csum_offload, is_pci, is_tun,
- is_indirect, is_any_layout);
+ n_added = add_buffer_to_slot (vm, node, vif, vring, bi,
+ free_desc_count, avail, next, mask,
+ hdr_sz, do_gso, csum_offload, is_pci,
+ is_tun, is_indirect, is_any_layout);
if (PREDICT_FALSE (n_added == 0))
{
n_buffers_left--;
{
u16 n_added = 0;
- n_added = add_buffer_to_slot (vm, node, vring, buffers[0],
- free_desc_count, avail, next, mask,
- hdr_sz, do_gso, csum_offload, is_pci,
- is_tun, is_indirect, is_any_layout);
+ n_added =
+ add_buffer_to_slot (vm, node, vif, vring, buffers[0], free_desc_count,
+ avail, next, mask, hdr_sz, do_gso, csum_offload,
+ is_pci, is_tun, is_indirect, is_any_layout);
if (PREDICT_FALSE (n_added == 0))
{
n_left -= n_buffered;
}
if (n_left)
- virtio_interface_drop_inline (vm, node->node_index,
+ virtio_interface_drop_inline (vm, vif, node->node_index,
&buffers[n_vectors - n_left], n_left,
VIRTIO_TX_ERROR_NO_FREE_SLOTS);