matching a local VTEP address */
vtep6_key_t last_vtep6; /* last IPv6 address / fib index
matching a local VTEP address */
+ vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
+#ifdef CLIB_HAVE_VEC512
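+ /* cache of recently matched (VTEP address, fib index) keys, used by
+  * vtep4_check_vector for 512-bit wide compares before the hash lookup */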
+ vtep4_cache_t vtep4_u512;
+ clib_memset (&vtep4_u512, 0, sizeof (vtep4_u512));
+#endif
from = vlib_frame_vector_args (frame);
n_left_from = frame->n_vectors;
next_index = node->cached_next_index;
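+ /* translate all buffer indices in the frame to buffer pointers up front */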
+ vlib_get_buffers (vm, from, bufs, n_left_from);
if (node->flags & VLIB_NODE_FLAG_TRACE)
ip4_forward_next_trace (vm, node, frame, VLIB_TX);
/* Prefetch next iteration. */
{
- vlib_buffer_t * p2, * p3;
+ vlib_prefetch_buffer_header (b[2], LOAD);
+ vlib_prefetch_buffer_header (b[3], LOAD);
- p2 = vlib_get_buffer (vm, from[2]);
- p3 = vlib_get_buffer (vm, from[3]);
-
- vlib_prefetch_buffer_header (p2, LOAD);
- vlib_prefetch_buffer_header (p3, LOAD);
-
- CLIB_PREFETCH (p2->data, 2*CLIB_CACHE_LINE_BYTES, LOAD);
- CLIB_PREFETCH (p3->data, 2*CLIB_CACHE_LINE_BYTES, LOAD);
+ CLIB_PREFETCH (b[2]->data, 2*CLIB_CACHE_LINE_BYTES, LOAD);
+ CLIB_PREFETCH (b[3]->data, 2*CLIB_CACHE_LINE_BYTES, LOAD);
}
bi0 = to_next[0] = from[0];
to_next += 2;
n_left_to_next -= 2;
- b0 = vlib_get_buffer (vm, bi0);
- b1 = vlib_get_buffer (vm, bi1);
+ b0 = b[0];
+ b1 = b[1];
+ b += 2;
if (is_ip4)
{
ip40 = vlib_buffer_get_current (b0);
/* Validate DIP against VTEPs*/
if (is_ip4)
{
+#ifdef CLIB_HAVE_VEC512
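+ /* vector path: also checks the vtep4_u512 cache of recent matches */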
+ if (!vtep4_check_vector
+ (&gtm->vtep_table, b0, ip40, &last_vtep4, &vtep4_u512))
+#else
if (!vtep4_check (&gtm->vtep_table, b0, ip40, &last_vtep4))
+#endif
goto exit0; /* no local VTEP for GTPU packet */
}
else
/* Validate DIP against VTEPs*/
if (is_ip4)
{
+#ifdef CLIB_HAVE_VEC512
+ if (!vtep4_check_vector
+ (&gtm->vtep_table, b1, ip41, &last_vtep4, &vtep4_u512))
+#else
if (!vtep4_check (&gtm->vtep_table, b1, ip41, &last_vtep4))
+#endif
goto exit1; /* no local VTEP for GTPU packet */
}
else
to_next += 1;
n_left_to_next -= 1;
- b0 = vlib_get_buffer (vm, bi0);
+ b0 = b[0];
+ b++;
if (is_ip4)
ip40 = vlib_buffer_get_current (b0);
else
/* Validate DIP against VTEPs*/
if (is_ip4)
{
+#ifdef CLIB_HAVE_VEC512
+ if (!vtep4_check_vector
+ (&gtm->vtep_table, b0, ip40, &last_vtep4, &vtep4_u512))
+#else
if (!vtep4_check (&gtm->vtep_table, b0, ip40, &last_vtep4))
+#endif
goto exit; /* no local VTEP for GTPU packet */
}
else
#define foreach_gtpu_flow_error \
_(NONE, "no error") \
+ _(PAYLOAD_ERROR, "Payload type errors") \
_(IP_CHECKSUM_ERROR, "Rx ip checksum errors") \
_(IP_HEADER_ERROR, "Rx ip header errors") \
_(UDP_CHECKSUM_ERROR, "Rx udp checksum errors") \
/* Pop gtpu header */
vlib_buffer_advance (b0, gtpu_hdr_len0);
- next0 = GTPU_INPUT_NEXT_IP4_INPUT;
+ /* assign the next node */
+ if (PREDICT_FALSE ((t0->decap_next_index != GTPU_INPUT_NEXT_IP4_INPUT) &&
+     (t0->decap_next_index != GTPU_INPUT_NEXT_IP6_INPUT)))
+ {
+ error0 = GTPU_FLOW_ERROR_PAYLOAD_ERROR;
+ next0 = GTPU_INPUT_NEXT_DROP;
+ goto trace0;
+ }
+ next0 = t0->decap_next_index;
+
sw_if_index0 = t0->sw_if_index;
/* Set packet input sw_if_index to unicast GTPU tunnel for learning */
/* Pop gtpu header */
vlib_buffer_advance (b1, gtpu_hdr_len1);
- next1 = GTPU_INPUT_NEXT_IP4_INPUT;
+ /* assign the next node */
+ if (PREDICT_FALSE ((t1->decap_next_index != GTPU_INPUT_NEXT_IP4_INPUT) &&
+     (t1->decap_next_index != GTPU_INPUT_NEXT_IP6_INPUT)))
+ {
+ error1 = GTPU_FLOW_ERROR_PAYLOAD_ERROR;
+ next1 = GTPU_INPUT_NEXT_DROP;
+ goto trace1;
+ }
+ next1 = t1->decap_next_index;
+
sw_if_index1 = t1->sw_if_index;
/* Required to make the l2 tag push / pop code work on l2 subifs */
/* Pop gtpu header */
vlib_buffer_advance (b0, gtpu_hdr_len0);
- next0 = GTPU_INPUT_NEXT_IP4_INPUT;
+ /* assign the next node */
+ if (PREDICT_FALSE ((t0->decap_next_index != GTPU_INPUT_NEXT_IP4_INPUT) &&
+     (t0->decap_next_index != GTPU_INPUT_NEXT_IP6_INPUT)))
+ {
+ error0 = GTPU_FLOW_ERROR_PAYLOAD_ERROR;
+ next0 = GTPU_INPUT_NEXT_DROP;
+ goto trace00;
+ }
+ next0 = t0->decap_next_index;
+
sw_if_index0 = t0->sw_if_index;
/* Set packet input sw_if_index to unicast GTPU tunnel for learning */