/*
 * Copyright (c) 2016 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <vlib/vlib.h>
#include <vnet/vnet.h>
#include <vppinfra/error.h>
#include <vppinfra/elog.h>

#include <vnet/span/span.h>

#include <vnet/l2/l2_input.h>
#include <vnet/l2/l2_output.h>
#include <vnet/l2/feat_bitmap.h>
28 /* packet trace format function */
30 format_span_trace (u8 * s, va_list * args)
32 CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
33 CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
34 span_trace_t *t = va_arg (*args, span_trace_t *);
36 vnet_main_t *vnm = vnet_get_main ();
37 s = format (s, "SPAN: mirrored %U -> %U",
38 format_vnet_sw_if_index_name, vnm, t->src_sw_if_index,
39 format_vnet_sw_if_index_name, vnm, t->mirror_sw_if_index);
/* Per-node error counters; the x-macro keeps the enum and the
   string table in lock-step. */
#define foreach_span_error                      \
_(HITS, "SPAN incoming packets processed")

typedef enum
{
#define _(sym,str) SPAN_ERROR_##sym,
  foreach_span_error
#undef _
    SPAN_N_ERROR,
} span_error_t;

static char *span_error_strings[] = {
#define _(sym,string) string,
  foreach_span_error
#undef _
};
61 static_always_inline void
62 span_mirror (vlib_main_t * vm, vlib_node_runtime_t * node, u32 sw_if_index0,
63 vlib_buffer_t * b0, vlib_frame_t ** mirror_frames,
64 vlib_rx_or_tx_t rxtx, span_feat_t sf)
67 span_main_t *sm = &span_main;
68 vnet_main_t *vnm = vnet_get_main ();
69 u32 *to_mirror_next = 0;
71 span_interface_t *si0;
74 if (sw_if_index0 >= vec_len (sm->interfaces))
77 si0 = vec_elt_at_index (sm->interfaces, sw_if_index0);
78 sm0 = &si0->mirror_rxtx[sf][rxtx];
80 if (sm0->num_mirror_ports == 0)
83 /* Don't do it again */
84 if (PREDICT_FALSE (b0->flags & VNET_BUFFER_F_SPAN_CLONE))
87 clib_bitmap_foreach (i, sm0->mirror_ports)
89 if (mirror_frames[i] == 0)
91 if (sf == SPAN_FEAT_L2)
92 mirror_frames[i] = vlib_get_frame_to_node (vm, l2output_node.index);
94 mirror_frames[i] = vnet_get_frame_to_sw_interface (vnm, i);
96 to_mirror_next = vlib_frame_vector_args (mirror_frames[i]);
97 to_mirror_next += mirror_frames[i]->n_vectors;
99 c0 = vlib_buffer_copy (vm, b0);
100 if (PREDICT_TRUE(c0 != 0))
102 vnet_buffer (c0)->sw_if_index[VLIB_TX] = i;
103 c0->flags |= VNET_BUFFER_F_SPAN_CLONE;
104 if (sf == SPAN_FEAT_L2)
105 vnet_buffer (c0)->l2.feature_bitmap = L2OUTPUT_FEAT_OUTPUT;
106 to_mirror_next[0] = vlib_get_buffer_index (vm, c0);
107 mirror_frames[i]->n_vectors++;
108 if (PREDICT_FALSE (b0->flags & VLIB_BUFFER_IS_TRACED))
110 span_trace_t *t = vlib_add_trace (vm, node, b0, sizeof (*t));
111 t->src_sw_if_index = sw_if_index0;
112 t->mirror_sw_if_index = i;
114 /* Enable this path to allow packet trace of SPAN packets.
115 Note that all SPAN packets will show up on the trace output
116 with the first SPAN packet (since they are in the same frame)
117 thus making trace output of the original packet confusing */
118 mirror_frames[i]->flags |= VLIB_FRAME_TRACE;
119 c0->flags |= VLIB_BUFFER_IS_TRACED;
126 static_always_inline uword
127 span_node_inline_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
128 vlib_frame_t * frame, vlib_rx_or_tx_t rxtx,
131 span_main_t *sm = &span_main;
132 vnet_main_t *vnm = vnet_get_main ();
133 u32 n_left_from, *from, *to_next;
136 static __thread vlib_frame_t **mirror_frames = 0;
138 from = vlib_frame_vector_args (frame);
139 n_left_from = frame->n_vectors;
140 next_index = node->cached_next_index;
142 vec_validate_aligned (mirror_frames, sm->max_sw_if_index,
143 CLIB_CACHE_LINE_BYTES);
145 while (n_left_from > 0)
149 vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
151 while (n_left_from >= 4 && n_left_to_next >= 2)
162 /* speculatively enqueue b0, b1 to the current next frame */
163 to_next[0] = bi0 = from[0];
164 to_next[1] = bi1 = from[1];
170 b0 = vlib_get_buffer (vm, bi0);
171 b1 = vlib_get_buffer (vm, bi1);
172 sw_if_index0 = vnet_buffer (b0)->sw_if_index[rxtx];
173 sw_if_index1 = vnet_buffer (b1)->sw_if_index[rxtx];
175 span_mirror (vm, node, sw_if_index0, b0, mirror_frames, rxtx, sf);
176 span_mirror (vm, node, sw_if_index1, b1, mirror_frames, rxtx, sf);
183 next0 = vnet_l2_feature_next (b0, sm->l2_input_next,
185 next1 = vnet_l2_feature_next (b1, sm->l2_input_next,
190 next0 = vnet_l2_feature_next (b0, sm->l2_output_next,
192 next1 = vnet_l2_feature_next (b1, sm->l2_output_next,
196 case SPAN_FEAT_DEVICE:
198 vnet_feature_next (&next0, b0);
199 vnet_feature_next (&next1, b1);
203 /* verify speculative enqueue, maybe switch current next frame */
204 vlib_validate_buffer_enqueue_x2 (vm, node, next_index,
205 to_next, n_left_to_next,
206 bi0, bi1, next0, next1);
208 while (n_left_from > 0 && n_left_to_next > 0)
215 /* speculatively enqueue b0 to the current next frame */
216 to_next[0] = bi0 = from[0];
222 b0 = vlib_get_buffer (vm, bi0);
223 sw_if_index0 = vnet_buffer (b0)->sw_if_index[rxtx];
225 span_mirror (vm, node, sw_if_index0, b0, mirror_frames, rxtx, sf);
231 next0 = vnet_l2_feature_next (b0, sm->l2_input_next,
234 next0 = vnet_l2_feature_next (b0, sm->l2_output_next,
237 case SPAN_FEAT_DEVICE:
239 vnet_feature_next (&next0, b0);
243 /* verify speculative enqueue, maybe switch current next frame */
244 vlib_validate_buffer_enqueue_x1 (vm, node, next_index, to_next,
245 n_left_to_next, bi0, next0);
248 vlib_put_next_frame (vm, node, next_index, n_left_to_next);
252 for (sw_if_index = 0; sw_if_index < vec_len (mirror_frames); sw_if_index++)
254 vlib_frame_t *f = mirror_frames[sw_if_index];
258 if (sf == SPAN_FEAT_L2)
259 vlib_put_frame_to_node (vm, l2output_node.index, f);
261 vnet_put_frame_to_sw_interface (vnm, sw_if_index, f);
262 mirror_frames[sw_if_index] = 0;
265 return frame->n_vectors;
268 VLIB_NODE_FN (span_input_node) (vlib_main_t * vm, vlib_node_runtime_t * node,
269 vlib_frame_t * frame)
271 return span_node_inline_fn (vm, node, frame, VLIB_RX, SPAN_FEAT_DEVICE);
274 VLIB_NODE_FN (span_output_node) (vlib_main_t * vm, vlib_node_runtime_t * node,
275 vlib_frame_t * frame)
277 return span_node_inline_fn (vm, node, frame, VLIB_TX, SPAN_FEAT_DEVICE);
280 VLIB_NODE_FN (span_l2_input_node) (vlib_main_t * vm,
281 vlib_node_runtime_t * node,
282 vlib_frame_t * frame)
284 return span_node_inline_fn (vm, node, frame, VLIB_RX, SPAN_FEAT_L2);
287 VLIB_NODE_FN (span_l2_output_node) (vlib_main_t * vm,
288 vlib_node_runtime_t * node,
289 vlib_frame_t * frame)
291 return span_node_inline_fn (vm, node, frame, VLIB_TX, SPAN_FEAT_L2);
/* Registration fields shared by all four SPAN graph nodes. */
#define span_node_defs                           \
  .vector_size = sizeof (u32),                   \
  .format_trace = format_span_trace,             \
  .type = VLIB_NODE_TYPE_INTERNAL,               \
  .n_errors = ARRAY_LEN (span_error_strings),    \
  .error_strings = span_error_strings,           \
  .n_next_nodes = 0,                             \
  .next_nodes =                                  \
  {                                              \
    [0] = "error-drop"                           \
  }
305 VLIB_REGISTER_NODE (span_input_node) = {
307 .name = "span-input",
310 VLIB_REGISTER_NODE (span_output_node) = {
312 .name = "span-output",
315 VLIB_REGISTER_NODE (span_l2_input_node) = {
317 .name = "span-l2-input",
320 VLIB_REGISTER_NODE (span_l2_output_node) = {
322 .name = "span-l2-output",
325 #ifndef CLIB_MARCH_VARIANT
326 clib_error_t *span_init (vlib_main_t * vm)
328 span_main_t *sm = &span_main;
331 sm->vnet_main = vnet_get_main ();
333 /* Initialize the feature next-node indexes */
334 feat_bitmap_init_next_nodes (vm,
335 span_l2_input_node.index,
337 l2input_get_feat_names (),
340 feat_bitmap_init_next_nodes (vm,
341 span_l2_output_node.index,
343 l2output_get_feat_names (),
348 VLIB_INIT_FUNCTION (span_init);
349 #endif /* CLIB_MARCH_VARIANT */
#undef span_node_defs
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */