+static_always_inline void
+eth_input_adv_and_flags_x1 (vlib_buffer_t ** b, i16 adv, u32 flags, int is_l3)
+{
+ vnet_buffer (b[0])->l2_hdr_offset = b[0]->current_data;
+ vnet_buffer (b[0])->l3_hdr_offset = b[0]->current_data + adv;
+
+ if (is_l3)
+ vlib_buffer_advance (b[0], adv);
+ b[0]->flags |= flags;
+ if (!is_l3)
+ vnet_buffer (b[0])->l2.l2_len = adv;
+}
+
/* Walk all packets in the frame: store each packet's ethertype (as read
 * from the wire, i.e. network byte order) into the caller-supplied etype
 * array, and set the L2/L3 header offsets/flags on every buffer via
 * eth_input_adv_and_flags_x4/x1.  For L3 interfaces (is_l3) the buffers
 * are also advanced past the ethernet header. */
static_always_inline void
eth_input_process_frame (vlib_main_t * vm, u32 * from, u16 * etype,
			 u32 n_left, int is_l3)
{
  vlib_buffer_t *b[16];
  ethernet_header_t *e;
  int adv = sizeof (ethernet_header_t);

  u32 flags = VNET_BUFFER_F_L2_HDR_OFFSET_VALID |
    VNET_BUFFER_F_L3_HDR_OFFSET_VALID;

  /* Main loop: 4 buffers are processed per iteration while buffers
   * 8..15 ahead are translated only to issue prefetches - headers for
   * b[12..15] (3 iterations ahead), packet data for b[8..11] (2
   * iterations ahead).  Only b[0..3] are actually consumed here, hence
   * the per-iteration advance of 4. */
  while (n_left >= 16)
    {
      vlib_buffer_t **ph = b + 12, **pd = b + 8;
      vlib_get_buffers (vm, from, b, 4);
      vlib_get_buffers (vm, from + 8, b + 8, 8);

      vlib_prefetch_buffer_header (ph[0], LOAD);
      vlib_prefetch_buffer_data (pd[0], LOAD);
      e = vlib_buffer_get_current (b[0]);
      etype[0] = e->type;

      vlib_prefetch_buffer_header (ph[1], LOAD);
      vlib_prefetch_buffer_data (pd[1], LOAD);
      e = vlib_buffer_get_current (b[1]);
      etype[1] = e->type;

      vlib_prefetch_buffer_header (ph[2], LOAD);
      vlib_prefetch_buffer_data (pd[2], LOAD);
      e = vlib_buffer_get_current (b[2]);
      etype[2] = e->type;

      vlib_prefetch_buffer_header (ph[3], LOAD);
      vlib_prefetch_buffer_data (pd[3], LOAD);
      e = vlib_buffer_get_current (b[3]);
      etype[3] = e->type;

      eth_input_adv_and_flags_x4 (b, adv, flags, is_l3);

      /* next */
      n_left -= 4;
      etype += 4;
      from += 4;
    }
  /* Tail: quads without prefetching (fewer than 16 left). */
  while (n_left >= 4)
    {
      vlib_get_buffers (vm, from, b, 4);

      e = vlib_buffer_get_current (b[0]);
      etype[0] = e->type;

      e = vlib_buffer_get_current (b[1]);
      etype[1] = e->type;

      e = vlib_buffer_get_current (b[2]);
      etype[2] = e->type;

      e = vlib_buffer_get_current (b[3]);
      etype[3] = e->type;

      eth_input_adv_and_flags_x4 (b, adv, flags, is_l3);

      /* next */
      n_left -= 4;
      etype += 4;
      from += 4;
    }
  /* Tail: remaining 1-3 buffers, one at a time. */
  while (n_left)
    {
      vlib_get_buffers (vm, from, b, 1);

      e = vlib_buffer_get_current (b[0]);
      etype[0] = e->type;

      eth_input_adv_and_flags_x1 (b, adv, flags, is_l3);

      /* next */
      n_left -= 1;
      etype += 1;
      from += 1;
    }
}
+
/* Two passes over d->etypes (filled by eth_input_process_frame with
 * network-order ethertypes):
 *   1. translate each ethertype in place into a small ETYPE_ID_* value
 *      (vectorized with AVX2/SSE where available);
 *   2. bucket the buffer indices in 'from' by that id into
 *      d->bufs_by_etype[id], counting into d->n_bufs_by_etype[id]. */
static_always_inline void
eth_input_sort (vlib_main_t * vm, u32 * from, u32 n_packets,
		eth_input_data_t * d)
{
  u16 *etype = d->etypes;
  i32 n_left = n_packets;

#if defined (CLIB_HAVE_VEC256)
  /* AVX2 path: compare 16 ethertypes at once against each known type
   * and accumulate the matching id (exactly one compare can match, so
   * the masked adds never collide). */
  u16x16 e16;
  u16x16 et16_ip4 = u16x16_splat (clib_host_to_net_u16 (ETHERNET_TYPE_IP4));
  u16x16 et16_ip6 = u16x16_splat (clib_host_to_net_u16 (ETHERNET_TYPE_IP6));
  u16x16 et16_mpls = u16x16_splat (clib_host_to_net_u16 (ETHERNET_TYPE_MPLS));
  u16x16 id16_ip4 = u16x16_splat (ETYPE_ID_IP4);
  u16x16 id16_ip6 = u16x16_splat (ETYPE_ID_IP6);
  u16x16 id16_mpls = u16x16_splat (ETYPE_ID_MPLS);

  /* NOTE(review): unmatched lanes are left as 0, whereas the scalar
   * fallback below writes ETYPE_ID_UNKNOWN - this assumes
   * ETYPE_ID_UNKNOWN == 0; verify against the enum definition.
   * Also, the last iteration may read/write up to 15 entries past
   * n_left - presumably d->etypes is padded to allow this; confirm. */
  while (n_left > 0)
    {
      u16x16 r = { 0 };
      e16 = u16x16_load_unaligned (etype);
      r += (e16 == et16_ip4) & id16_ip4;
      r += (e16 == et16_ip6) & id16_ip6;
      r += (e16 == et16_mpls) & id16_mpls;
      u16x16_store_unaligned (r, etype);
      etype += 16;
      n_left -= 16;
    }
#elif defined (CLIB_HAVE_VEC128)
  /* SSE path: same scheme, 8 ethertypes per iteration. */
  u16x8 e8;
  u16x8 et8_ip4 = u16x8_splat (clib_host_to_net_u16 (ETHERNET_TYPE_IP4));
  u16x8 et8_ip6 = u16x8_splat (clib_host_to_net_u16 (ETHERNET_TYPE_IP6));
  u16x8 et8_mpls = u16x8_splat (clib_host_to_net_u16 (ETHERNET_TYPE_MPLS));
  u16x8 id8_ip4 = u16x8_splat (ETYPE_ID_IP4);
  u16x8 id8_ip6 = u16x8_splat (ETYPE_ID_IP6);
  u16x8 id8_mpls = u16x8_splat (ETYPE_ID_MPLS);

  while (n_left > 0)
    {
      u16x8 r = { 0 };
      e8 = u16x8_load_unaligned (etype);
      r += (e8 == et8_ip4) & id8_ip4;
      r += (e8 == et8_ip6) & id8_ip6;
      r += (e8 == et8_mpls) & id8_mpls;
      u16x8_store_unaligned (r, etype);
      etype += 8;
      n_left -= 8;
    }
#else
  /* Scalar fallback: exact translation, including explicit UNKNOWN. */
  while (n_left)
    {
      if (etype[0] == ETHERNET_TYPE_IP4)
	etype[0] = ETYPE_ID_IP4;
      else if (etype[0] == ETHERNET_TYPE_IP6)
	etype[0] = ETYPE_ID_IP6;
      else if (etype[0] == ETHERNET_TYPE_MPLS)
	etype[0] = ETYPE_ID_MPLS;
      else
	etype[0] = ETYPE_ID_UNKNOWN;

      etype += 1;
      n_left -= 1;
    }
#endif

  /* Pass 2: scatter buffer indices into per-ethertype buckets. */
  etype = d->etypes;
  n_left = n_packets;

  clib_memset_u16 (d->n_bufs_by_etype, 0, ETYPE_N_IDS);
  while (n_left)
    {
      u16 x, y;
      x = etype[0];			/* bucket id for this packet */
      y = d->n_bufs_by_etype[x];	/* current fill level of bucket */

#ifdef CLIB_HAVE_VEC256
      /* Fast path: if the next 16 packets all share this ethertype id,
       * bulk-copy their buffer indices into the bucket in one go. */
      if (n_left >= 16 && u16x16_is_all_equal (u16x16_load_unaligned (etype),
					       etype[0]))
	{
	  clib_memcpy_fast (&d->bufs_by_etype[x][y], from, 16 * sizeof (u32));
	  d->n_bufs_by_etype[x] += 16;

	  /* next */
	  n_left -= 16;
	  etype += 16;
	  from += 16;
	  continue;
	}
#endif
#ifdef CLIB_HAVE_VEC128
      /* Same run-detection for 8-packet runs. */
      if (n_left >= 8 && u16x8_is_all_equal (u16x8_load_unaligned (etype),
					     etype[0]))
	{
	  clib_memcpy_fast (&d->bufs_by_etype[x][y], from, 8 * sizeof (u32));
	  d->n_bufs_by_etype[x] += 8;

	  /* next */
	  n_left -= 8;
	  etype += 8;
	  from += 8;
	  continue;
	}
#endif
      /* Slow path: one buffer index at a time. */
      d->bufs_by_etype[x][y] = from[0];
      d->n_bufs_by_etype[x]++;

      /* next */
      n_left -= 1;
      etype += 1;
      from += 1;
    }
}
+
+static_always_inline void
+ethernet_input_trace (vlib_main_t * vm, vlib_node_runtime_t * node,
+ vlib_frame_t * from_frame)
+{
+ u32 *from, n_left;
+ if ((node->flags & VLIB_NODE_FLAG_TRACE) == 0)
+ return;
+
+ from = vlib_frame_vector_args (from_frame);
+ n_left = from_frame->n_vectors;
+
+ while (n_left)
+ {
+ ethernet_input_trace_t *t0;
+ vlib_buffer_t *b0 = vlib_get_buffer (vm, from[0]);
+
+ if (b0->flags & VLIB_BUFFER_IS_TRACED)
+ {
+ t0 = vlib_add_trace (vm, node, b0, sizeof (ethernet_input_trace_t));
+ clib_memcpy_fast (t0->packet_data, b0->data + b0->current_data,
+ sizeof (t0->packet_data));
+ t0->frame_flags = from_frame->flags;
+ clib_memcpy_fast (&t0->frame_data,
+ vlib_frame_scalar_args (from_frame),
+ sizeof (ethernet_input_frame_t));
+ }
+ from += 1;
+ n_left -= 1;
+ }
+}
+
+static_always_inline void