l2: convert l2_patch to new multiarch scheme (Gerrit change-ref 40/15740/2)
author     Damjan Marion <damarion@cisco.com>
           Tue, 6 Nov 2018 12:33:27 +0000 (13:33 +0100)
committer  Damjan Marion <dmarion@me.com>
           Wed, 7 Nov 2018 12:00:55 +0000 (12:00 +0000)
Change-Id: I30487bd736407378fb5a6d313e4eef12bbb262b8
Signed-off-by: Damjan Marion <damarion@cisco.com>
src/vnet/CMakeLists.txt
src/vnet/l2/l2_patch.c
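
Reviewer context: under the new multiarch scheme the node function is
declared with VLIB_NODE_FN (), which builds one copy of the function per
supported CPU architecture and selects the best one at runtime, so the
explicit .function member and VLIB_NODE_FUNCTION_MULTIARCH () go away;
the old speculative-enqueue double loop is likewise replaced by
vlib_get_buffers () plus a single vlib_buffer_enqueue_to_next (). A
minimal sketch of the pattern, with illustrative names (not part of this
change):

  #include <vlib/vlib.h>

  /* Toy node: send every packet to next index 0 ("error-drop"). */
  VLIB_NODE_FN (example_node) (vlib_main_t * vm, vlib_node_runtime_t * node,
			       vlib_frame_t * frame)
  {
    vlib_buffer_t *bufs[VLIB_FRAME_SIZE];
    u16 nexts[VLIB_FRAME_SIZE];
    u32 *from = vlib_frame_vector_args (frame);

    /* translate buffer indices into buffer pointers once, up front */
    vlib_get_buffers (vm, from, bufs, frame->n_vectors);

    /* pick a next index per packet; fixed here for brevity */
    for (u32 i = 0; i < frame->n_vectors; i++)
      nexts[i] = 0;

    /* one bulk enqueue replaces the old get/put next-frame loop */
    vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);
    return frame->n_vectors;
  }

  VLIB_REGISTER_NODE (example_node) = {
    .name = "example",
    .vector_size = sizeof (u32),
    .n_next_nodes = 1,
    .next_nodes = { [0] = "error-drop" },
    /* no .function member: VLIB_NODE_FN () supplies the variants */
  };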

diff --git a/src/vnet/CMakeLists.txt b/src/vnet/CMakeLists.txt
index 6d26b5a..708e56d 100644
--- a/src/vnet/CMakeLists.txt
+++ b/src/vnet/CMakeLists.txt
@@ -164,6 +164,7 @@ list(APPEND VNET_MULTIARCH_SOURCES
   l2/l2_fwd.c
   l2/l2_learn.c
   l2/l2_output.c
+  l2/l2_patch.c
 )
 
 list(APPEND VNET_HEADERS
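
Note: listing l2/l2_patch.c under VNET_MULTIARCH_SOURCES is what makes
the VLIB_NODE_FN () variants exist at all: every file on this list is
compiled once per supported CPU variant, each with its own -march flags
and with CLIB_MARCH_VARIANT defined, in addition to the default object.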
diff --git a/src/vnet/l2/l2_patch.c b/src/vnet/l2/l2_patch.c
index 83e14a7..e2d2a67 100644
--- a/src/vnet/l2/l2_patch.c
+++ b/src/vnet/l2/l2_patch.c
@@ -77,182 +77,126 @@ typedef enum
   L2_PATCH_N_NEXT,
 } l2_patch_next_t;
 
-static uword
-l2_patch_node_fn (vlib_main_t * vm,
-                 vlib_node_runtime_t * node, vlib_frame_t * frame)
+static_always_inline void
+l2_patch_trace (vlib_main_t * vm, vlib_node_runtime_t * node,
+               l2_patch_main_t * l2pm, vlib_buffer_t * b, u32 sw_if_index)
 {
-  u32 n_left_from, *from, *to_next;
-  l2_patch_next_t next_index;
-  l2_patch_main_t *l2pm = &l2_patch_main;
-  vlib_node_t *n = vlib_get_node (vm, l2_patch_node.index);
-  u32 node_counter_base_index = n->error_heap_index;
-  vlib_error_main_t *em = &vm->error_main;
+  l2_patch_trace_t *t;
 
-  from = vlib_frame_vector_args (frame);
-  n_left_from = frame->n_vectors;
-  next_index = node->cached_next_index;
+  if ((b->flags & VLIB_BUFFER_IS_TRACED) == 0)
+    return;
 
-  while (n_left_from > 0)
-    {
-      u32 n_left_to_next;
+  t = vlib_add_trace (vm, node, b, sizeof (*t));
+  t->rx_sw_if_index = sw_if_index;
+  t->tx_sw_if_index = l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index];
+}
 
-      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
+static_always_inline void
+l2_patch_inline (vlib_main_t * vm, vlib_node_runtime_t * node,
+                l2_patch_main_t * l2pm, vlib_buffer_t ** b, u16 * next,
+                u32 n_left, int do_trace)
+{
+  u32 sw_if_index[4];
 
-      while (n_left_from >= 8 && n_left_to_next >= 4)
+  while (n_left >= 4)
+    {
+      /* Prefetch next iteration. */
+      if (n_left >= 8)
        {
-         u32 bi0, bi1, bi2, bi3;
-         vlib_buffer_t *b0, *b1, *b2, *b3;
-         u32 next0, next1, next2, next3;
-         u32 sw_if_index0, sw_if_index1, sw_if_index2, sw_if_index3;
-
-         /* Prefetch next iteration. */
-         {
-           vlib_buffer_t *p4, *p5, *p6, *p7;
-
-           p4 = vlib_get_buffer (vm, from[4]);
-           p5 = vlib_get_buffer (vm, from[5]);
-           p6 = vlib_get_buffer (vm, from[6]);
-           p7 = vlib_get_buffer (vm, from[7]);
-
-           vlib_prefetch_buffer_header (p4, LOAD);
-           vlib_prefetch_buffer_header (p5, LOAD);
-           vlib_prefetch_buffer_header (p6, LOAD);
-           vlib_prefetch_buffer_header (p7, LOAD);
-         }
-
-         /* speculatively enqueue b0 and b1 to the current next frame */
-         to_next[0] = bi0 = from[0];
-         to_next[1] = bi1 = from[1];
-         to_next[2] = bi2 = from[2];
-         to_next[3] = bi3 = from[3];
-         from += 4;
-         to_next += 4;
-         n_left_from -= 4;
-         n_left_to_next -= 4;
-
-         b0 = vlib_get_buffer (vm, bi0);
-         b1 = vlib_get_buffer (vm, bi1);
-         b2 = vlib_get_buffer (vm, bi2);
-         b3 = vlib_get_buffer (vm, bi3);
-
-         sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
-         sw_if_index1 = vnet_buffer (b1)->sw_if_index[VLIB_RX];
-         sw_if_index2 = vnet_buffer (b2)->sw_if_index[VLIB_RX];
-         sw_if_index3 = vnet_buffer (b3)->sw_if_index[VLIB_RX];
-
-         ASSERT (l2pm->tx_next_by_rx_sw_if_index[sw_if_index0] != ~0);
-         ASSERT (l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index0] != ~0);
-         ASSERT (l2pm->tx_next_by_rx_sw_if_index[sw_if_index1] != ~0);
-         ASSERT (l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index1] != ~0);
-         ASSERT (l2pm->tx_next_by_rx_sw_if_index[sw_if_index2] != ~0);
-         ASSERT (l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index2] != ~0);
-         ASSERT (l2pm->tx_next_by_rx_sw_if_index[sw_if_index3] != ~0);
-         ASSERT (l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index3] != ~0);
-
-         next0 = l2pm->tx_next_by_rx_sw_if_index[sw_if_index0];
-         next1 = l2pm->tx_next_by_rx_sw_if_index[sw_if_index1];
-         next2 = l2pm->tx_next_by_rx_sw_if_index[sw_if_index2];
-         next3 = l2pm->tx_next_by_rx_sw_if_index[sw_if_index3];
-
-         vnet_buffer (b0)->sw_if_index[VLIB_TX] =
-           l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index0];
-         vnet_buffer (b1)->sw_if_index[VLIB_TX] =
-           l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index1];
-         vnet_buffer (b2)->sw_if_index[VLIB_TX] =
-           l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index2];
-         vnet_buffer (b3)->sw_if_index[VLIB_TX] =
-           l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index3];
-
-         if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)))
-           {
-             if (b0->flags & VLIB_BUFFER_IS_TRACED)
-               {
-                 l2_patch_trace_t *t =
-                   vlib_add_trace (vm, node, b0, sizeof (*t));
-                 t->rx_sw_if_index = sw_if_index0;
-                 t->tx_sw_if_index =
-                   l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index0];
-               }
-             if (b1->flags & VLIB_BUFFER_IS_TRACED)
-               {
-                 l2_patch_trace_t *t =
-                   vlib_add_trace (vm, node, b1, sizeof (*t));
-                 t->rx_sw_if_index = sw_if_index1;
-                 t->tx_sw_if_index =
-                   l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index1];
-               }
-             if (b2->flags & VLIB_BUFFER_IS_TRACED)
-               {
-                 l2_patch_trace_t *t =
-                   vlib_add_trace (vm, node, b2, sizeof (*t));
-                 t->rx_sw_if_index = sw_if_index2;
-                 t->tx_sw_if_index =
-                   l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index2];
-               }
-             if (b3->flags & VLIB_BUFFER_IS_TRACED)
-               {
-                 l2_patch_trace_t *t =
-                   vlib_add_trace (vm, node, b3, sizeof (*t));
-                 t->rx_sw_if_index = sw_if_index3;
-                 t->tx_sw_if_index =
-                   l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index3];
-               }
-           }
-
-         /* verify speculative enqueues, maybe switch current next frame */
-         vlib_validate_buffer_enqueue_x4 (vm, node, next_index,
-                                          to_next, n_left_to_next,
-                                          bi0, bi1, bi2, bi3,
-                                          next0, next1, next2, next3);
+         vlib_buffer_t **p = b + 4;
+         vlib_prefetch_buffer_header (p[0], LOAD);
+         vlib_prefetch_buffer_header (p[1], LOAD);
+         vlib_prefetch_buffer_header (p[2], LOAD);
+         vlib_prefetch_buffer_header (p[3], LOAD);
        }
 
-      while (n_left_from > 0 && n_left_to_next > 0)
+      sw_if_index[0] = vnet_buffer (b[0])->sw_if_index[VLIB_RX];
+      sw_if_index[1] = vnet_buffer (b[1])->sw_if_index[VLIB_RX];
+      sw_if_index[2] = vnet_buffer (b[2])->sw_if_index[VLIB_RX];
+      sw_if_index[3] = vnet_buffer (b[3])->sw_if_index[VLIB_RX];
+
+      ASSERT (l2pm->tx_next_by_rx_sw_if_index[sw_if_index[0]] != ~0);
+      ASSERT (l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index[0]] != ~0);
+      ASSERT (l2pm->tx_next_by_rx_sw_if_index[sw_if_index[1]] != ~0);
+      ASSERT (l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index[1]] != ~0);
+      ASSERT (l2pm->tx_next_by_rx_sw_if_index[sw_if_index[2]] != ~0);
+      ASSERT (l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index[2]] != ~0);
+      ASSERT (l2pm->tx_next_by_rx_sw_if_index[sw_if_index[3]] != ~0);
+      ASSERT (l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index[3]] != ~0);
+
+      next[0] = l2pm->tx_next_by_rx_sw_if_index[sw_if_index[0]];
+      next[1] = l2pm->tx_next_by_rx_sw_if_index[sw_if_index[1]];
+      next[2] = l2pm->tx_next_by_rx_sw_if_index[sw_if_index[2]];
+      next[3] = l2pm->tx_next_by_rx_sw_if_index[sw_if_index[3]];
+
+      vnet_buffer (b[0])->sw_if_index[VLIB_TX] =
+       l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index[0]];
+      vnet_buffer (b[1])->sw_if_index[VLIB_TX] =
+       l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index[1]];
+      vnet_buffer (b[2])->sw_if_index[VLIB_TX] =
+       l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index[2]];
+      vnet_buffer (b[3])->sw_if_index[VLIB_TX] =
+       l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index[3]];
+
+      if (do_trace)
        {
-         u32 bi0;
-         vlib_buffer_t *b0;
-         u32 next0;
-         u32 sw_if_index0;
-
-         /* speculatively enqueue b0 to the current next frame */
-         bi0 = from[0];
-         to_next[0] = bi0;
-         from += 1;
-         to_next += 1;
-         n_left_from -= 1;
-         n_left_to_next -= 1;
-
-         b0 = vlib_get_buffer (vm, bi0);
-
-         sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
-
-         ASSERT (l2pm->tx_next_by_rx_sw_if_index[sw_if_index0] != ~0);
-         ASSERT (l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index0] != ~0);
-
-         next0 = l2pm->tx_next_by_rx_sw_if_index[sw_if_index0];
-         vnet_buffer (b0)->sw_if_index[VLIB_TX] =
-           l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index0];
-
-         if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)))
-           {
-             if (b0->flags & VLIB_BUFFER_IS_TRACED)
-               {
-                 l2_patch_trace_t *t =
-                   vlib_add_trace (vm, node, b0, sizeof (*t));
-                 t->rx_sw_if_index = sw_if_index0;
-                 t->tx_sw_if_index =
-                   l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index0];
-               }
-           }
-
-         /* verify speculative enqueue, maybe switch current next frame */
-         vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
-                                          to_next, n_left_to_next,
-                                          bi0, next0);
+         l2_patch_trace (vm, node, l2pm, b[0], sw_if_index[0]);
+         l2_patch_trace (vm, node, l2pm, b[1], sw_if_index[1]);
+         l2_patch_trace (vm, node, l2pm, b[2], sw_if_index[2]);
+         l2_patch_trace (vm, node, l2pm, b[3], sw_if_index[3]);
        }
 
-      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
+      /* next */
+      next += 4;
+      b += 4;
+      n_left -= 4;
     }
 
+  while (n_left)
+    {
+      sw_if_index[0] = vnet_buffer (b[0])->sw_if_index[VLIB_RX];
+
+      ASSERT (l2pm->tx_next_by_rx_sw_if_index[sw_if_index[0]] != ~0);
+      ASSERT (l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index[0]] != ~0);
+
+      next[0] = l2pm->tx_next_by_rx_sw_if_index[sw_if_index[0]];
+
+      vnet_buffer (b[0])->sw_if_index[VLIB_TX] =
+       l2pm->tx_sw_if_index_by_rx_sw_if_index[sw_if_index[0]];
+
+      if (do_trace)
+       l2_patch_trace (vm, node, l2pm, b[0], sw_if_index[0]);
+
+      /* next */
+      next += 1;
+      b += 1;
+      n_left -= 1;
+    }
+}
+
+VLIB_NODE_FN (l2_patch_node) (vlib_main_t * vm,
+                             vlib_node_runtime_t * node,
+                             vlib_frame_t * frame)
+{
+  u32 *from;
+  l2_patch_main_t *l2pm = &l2_patch_main;
+  vlib_node_t *n = vlib_get_node (vm, l2_patch_node.index);
+  u32 node_counter_base_index = n->error_heap_index;
+  vlib_error_main_t *em = &vm->error_main;
+  vlib_buffer_t *bufs[VLIB_FRAME_SIZE];
+  u16 nexts[VLIB_FRAME_SIZE];
+
+  from = vlib_frame_vector_args (frame);
+
+  vlib_get_buffers (vm, from, bufs, frame->n_vectors);
+
+  if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)))
+    l2_patch_inline (vm, node, l2pm, bufs, nexts, frame->n_vectors, 1);
+  else
+    l2_patch_inline (vm, node, l2pm, bufs, nexts, frame->n_vectors, 0);
+
+  vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);
+
   em->counters[node_counter_base_index + L2_PATCH_ERROR_PATCHED] +=
     frame->n_vectors;
 
@@ -261,7 +205,6 @@ l2_patch_node_fn (vlib_main_t * vm,
 
 /* *INDENT-OFF* */
 VLIB_REGISTER_NODE (l2_patch_node, static) = {
-  .function = l2_patch_node_fn,
   .name = "l2-patch",
   .vector_size = sizeof (u32),
   .format_trace = format_l2_patch_trace,
@@ -279,9 +222,11 @@ VLIB_REGISTER_NODE (l2_patch_node, static) = {
 };
 /* *INDENT-ON* */
 
-VLIB_NODE_FUNCTION_MULTIARCH (l2_patch_node, l2_patch_node_fn)
-     int vnet_l2_patch_add_del (u32 rx_sw_if_index, u32 tx_sw_if_index,
-                               int is_add)
+extern int
+vnet_l2_patch_add_del (u32 rx_sw_if_index, u32 tx_sw_if_index, int is_add);
+#ifndef CLIB_MARCH_VARIANT
+int
+vnet_l2_patch_add_del (u32 rx_sw_if_index, u32 tx_sw_if_index, int is_add)
 {
   l2_patch_main_t *l2pm = &l2_patch_main;
   vnet_hw_interface_t *rxhi, *txhi;
@@ -339,6 +284,7 @@ VLIB_NODE_FUNCTION_MULTIARCH (l2_patch_node, l2_patch_node_fn)
 
   return 0;
 }
+#endif
 
 static clib_error_t *
 test_patch_command_fn (vlib_main_t * vm,
@@ -480,7 +426,7 @@ VLIB_CLI_COMMAND (show_l2patch_cli, static) = {
 };
 /* *INDENT-ON* */
 
-clib_error_t *
+static clib_error_t *
 l2_patch_init (vlib_main_t * vm)
 {
   l2_patch_main_t *mp = &l2_patch_main;
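
A note on the #ifndef CLIB_MARCH_VARIANT guard introduced above: since
the file is now compiled once per CPU variant, code that must be defined
exactly once, such as vnet_l2_patch_add_del (), is emitted only in the
default build, while the extern declaration keeps it callable from every
variant; making l2_patch_init () static likewise keeps the per-variant
objects from exporting duplicate symbols. A minimal sketch of the idiom,
with an illustrative helper name:

  #include <vppinfra/types.h>	/* u32 */

  /* Declaration is seen by all per-march compilations of this file. */
  extern int my_helper (u32 arg);

  #ifndef CLIB_MARCH_VARIANT	/* defined in every build but the default */
  int
  my_helper (u32 arg)		/* definition emitted exactly once */
  {
    return arg + 1;
  }
  #endif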