/*
 * Copyright (c) 2016 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16 #include <vlib/vlib.h>
17 #include <vnet/vnet.h>
18 #include <vppinfra/error.h>
20 #include <vnet/span/span.h>
21 #include <vnet/l2/l2_input.h>
22 #include <vnet/l2/l2_output.h>
23 #include <vnet/l2/feat_bitmap.h>
25 #include <vppinfra/error.h>
26 #include <vppinfra/elog.h>
28 /* packet trace format function */
/* Formats one span_trace_t record for "show trace" output.
 * NOTE(review): truncated extract — the function's return-type line,
 * braces and the trailing "return s;" are not visible here. */
30 format_span_trace (u8 * s, va_list * args)
/* Standard vlib format-function prologue: consume and ignore vm/node. */
32 CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
33 CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
34 span_trace_t *t = va_arg (*args, span_trace_t *);
36 vnet_main_t *vnm = &vnet_main;
/* Render "SPAN: mirrored <src-if-name> -> <mirror-if-name>". */
37 s = format (s, "SPAN: mirrored %U -> %U",
38 format_vnet_sw_if_index_name, vnm, t->src_sw_if_index,
39 format_vnet_sw_if_index_name, vnm, t->mirror_sw_if_index);
/* X-macro list of per-node error counters: _(symbol, description).
 * (No comment may be inserted after the next line: it ends in a
 * backslash continuation.) */
44 #define foreach_span_error \
45 _(HITS, "SPAN incoming packets processed")
/* Expands each entry into a SPAN_ERROR_<sym> enum member.
 * NOTE(review): truncated extract — the enclosing "typedef enum { ... }"
 * and the matching "#undef _" lines are not visible here. */
49 #define _(sym,str) SPAN_ERROR_##sym,
/* Parallel table of human-readable counter strings, referenced by the
 * node registrations below (.error_strings). */
55 static char *span_error_strings[] = {
56 #define _(sym,string) string,
/*
 * span_mirror: clone buffer b0 once per mirror port configured for
 * (sw_if_index0, direction rxtx, span feature sf), batching the clones
 * into per-destination frames in mirror_frames[].
 * NOTE(review): truncated extract — braces, the early "return"s, and the
 * declarations of sm0, i and c0 from the original are not visible here.
 */
61 static_always_inline void
62 span_mirror (vlib_main_t * vm, vlib_node_runtime_t * node, u32 sw_if_index0,
63 vlib_buffer_t * b0, vlib_frame_t ** mirror_frames,
64 vlib_rx_or_tx_t rxtx, span_feat_t sf)
67 span_main_t *sm = &span_main;
68 vnet_main_t *vnm = &vnet_main;
69 u32 *to_mirror_next = 0;
71 span_interface_t *si0;
/* Bail out early when this interface has no SPAN state at all. */
74 if (sw_if_index0 >= vec_len (sm->interfaces))
77 si0 = vec_elt_at_index (sm->interfaces, sw_if_index0);
/* Mirror configuration for this (feature, direction) pair. */
78 sm0 = &si0->mirror_rxtx[sf][rxtx];
/* Nothing to do when no mirror ports are configured. */
80 if (sm0->num_mirror_ports == 0)
83 /* Don't do it again */
/* i.e. never mirror a packet that is itself a SPAN clone. */
84 if (PREDICT_FALSE (b0->flags & VNET_BUFFER_F_SPAN_CLONE))
/* For each destination sw_if_index i set in the mirror bitmap... */
88 clib_bitmap_foreach (i, sm0->mirror_ports, (
/* Lazily allocate the outgoing frame for destination i: L2 SPAN clones
 * go through l2-output, device SPAN clones straight to the destination
 * interface's output path. */
90 if (mirror_frames[i] == 0)
92 if (sf == SPAN_FEAT_L2)
93 mirror_frames[i] = vlib_get_frame_to_node (vnm->vlib_main,
96 mirror_frames[i] = vnet_get_frame_to_sw_interface (vnm, i);
/* Compute the append position inside the destination frame. */
98 to_mirror_next = vlib_frame_vector_args (mirror_frames[i]);
99 to_mirror_next += mirror_frames[i]->n_vectors;
/* Copy the buffer; can fail (returns 0) under buffer exhaustion. */
101 c0 = vlib_buffer_copy (vm, b0);
102 if (PREDICT_TRUE(c0 != 0))
/* Steer the clone to destination i and mark it so it is never
 * re-mirrored (see the VNET_BUFFER_F_SPAN_CLONE check above). */
104 vnet_buffer (c0)->sw_if_index[VLIB_TX] = i;
105 c0->flags |= VNET_BUFFER_F_SPAN_CLONE;
/* For L2 SPAN, restrict the clone's remaining l2 features to output. */
106 if (sf == SPAN_FEAT_L2)
107 vnet_buffer (c0)->l2.feature_bitmap = L2OUTPUT_FEAT_OUTPUT;
108 to_mirror_next[0] = vlib_get_buffer_index (vm, c0);
109 mirror_frames[i]->n_vectors++;
/* Record a trace entry against the ORIGINAL buffer. */
110 if (PREDICT_FALSE (b0->flags & VLIB_BUFFER_IS_TRACED))
112 span_trace_t *t = vlib_add_trace (vm, node, b0, sizeof (*t));
113 t->src_sw_if_index = sw_if_index0;
114 t->mirror_sw_if_index = i;
/* NOTE(review): the two statements after this comment are normally
 * compiled out (#if 0 upstream); the #if/#endif lines are not visible
 * in this truncated extract — confirm before assuming they execute. */
116 /* Enable this path to allow packet trace of SPAN packets.
117 Note that all SPAN packets will show up on the trace output
118 with the first SPAN packet (since they are in the same frame)
119 thus making trace output of the original packet confusing */
120 mirror_frames[i]->flags |= VLIB_FRAME_TRACE;
121 c0->flags |= VLIB_BUFFER_IS_TRACED;
/*
 * span_node_inline_fn: shared worker for all four SPAN graph nodes.
 * Walks the input frame (two packets per iteration on the fast path,
 * then singles), mirrors each packet via span_mirror(), selects the
 * next node per packet, and finally flushes every non-empty
 * per-destination mirror frame.  Returns the number of packets seen.
 * NOTE(review): truncated extract — braces, local declarations
 * (next_index, sw_if_index, n_left_to_next, bi0/bi1, b0/b1, next0/next1,
 * the sf parameter line) and the "switch (sf)" skeletons are not
 * visible here.
 */
129 static_always_inline uword
130 span_node_inline_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
131 vlib_frame_t * frame, vlib_rx_or_tx_t rxtx,
134 span_main_t *sm = &span_main;
135 vnet_main_t *vnm = &vnet_main;
136 u32 n_left_from, *from, *to_next;
/* Per-thread scratch: one pending mirror frame per destination
 * sw_if_index; allocated once per thread and reset at end of each run. */
139 static __thread vlib_frame_t **mirror_frames = 0;
141 from = vlib_frame_vector_args (frame);
142 n_left_from = frame->n_vectors;
143 next_index = node->cached_next_index;
/* Ensure the scratch vector can index any existing interface. */
145 vec_validate_aligned (mirror_frames, sm->max_sw_if_index,
146 CLIB_CACHE_LINE_BYTES);
148 while (n_left_from > 0)
152 vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
/* Dual loop: handle two packets per iteration while enough remain. */
154 while (n_left_from >= 4 && n_left_to_next >= 2)
165 /* speculatively enqueue b0, b1 to the current next frame */
166 to_next[0] = bi0 = from[0];
167 to_next[1] = bi1 = from[1];
173 b0 = vlib_get_buffer (vm, bi0);
174 b1 = vlib_get_buffer (vm, bi1);
/* rxtx selects whether the RX or TX interface of the buffer is the
 * one being mirrored. */
175 sw_if_index0 = vnet_buffer (b0)->sw_if_index[rxtx];
176 sw_if_index1 = vnet_buffer (b1)->sw_if_index[rxtx];
178 span_mirror (vm, node, sw_if_index0, b0, mirror_frames, rxtx, sf);
179 span_mirror (vm, node, sw_if_index1, b1, mirror_frames, rxtx, sf);
/* L2 SPAN: continue along the l2 input/output feature chain. */
186 next0 = vnet_l2_feature_next (b0, sm->l2_input_next,
188 next1 = vnet_l2_feature_next (b1, sm->l2_input_next,
193 next0 = vnet_l2_feature_next (b0, sm->l2_output_next,
195 next1 = vnet_l2_feature_next (b1, sm->l2_output_next,
/* Device SPAN: continue along the device feature arc. */
199 case SPAN_FEAT_DEVICE:
201 vnet_feature_next (&next0, b0);
202 vnet_feature_next (&next1, b1);
206 /* verify speculative enqueue, maybe switch current next frame */
207 vlib_validate_buffer_enqueue_x2 (vm, node, next_index,
208 to_next, n_left_to_next,
209 bi0, bi1, next0, next1);
/* Single-packet tail loop: same steps as the dual loop above. */
211 while (n_left_from > 0 && n_left_to_next > 0)
218 /* speculatively enqueue b0 to the current next frame */
219 to_next[0] = bi0 = from[0];
225 b0 = vlib_get_buffer (vm, bi0);
226 sw_if_index0 = vnet_buffer (b0)->sw_if_index[rxtx];
228 span_mirror (vm, node, sw_if_index0, b0, mirror_frames, rxtx, sf);
234 next0 = vnet_l2_feature_next (b0, sm->l2_input_next,
237 next0 = vnet_l2_feature_next (b0, sm->l2_output_next,
240 case SPAN_FEAT_DEVICE:
242 vnet_feature_next (&next0, b0);
246 /* verify speculative enqueue, maybe switch current next frame */
247 vlib_validate_buffer_enqueue_x1 (vm, node, next_index, to_next,
248 n_left_to_next, bi0, next0);
251 vlib_put_next_frame (vm, node, next_index, n_left_to_next);
/* Flush every mirror frame filled by span_mirror() during this run,
 * then clear the slot for the next invocation. */
255 for (sw_if_index = 0; sw_if_index < vec_len (mirror_frames); sw_if_index++)
257 vlib_frame_t *f = mirror_frames[sw_if_index];
/* L2 clones go to l2-output; device clones to the interface tx path
 * (mirrors the frame-allocation choice inside span_mirror). */
261 if (sf == SPAN_FEAT_L2)
262 vlib_put_frame_to_node (vnm->vlib_main, l2output_node.index, f);
264 vnet_put_frame_to_sw_interface (vnm, sw_if_index, f);
265 mirror_frames[sw_if_index] = 0;
268 return frame->n_vectors;
/* Device-level SPAN, RX direction: mirror packets arriving on an
 * interface.  Thin wrapper over span_node_inline_fn.
 * NOTE(review): truncated extract — the body's braces are not visible. */
271 VLIB_NODE_FN (span_input_node) (vlib_main_t * vm, vlib_node_runtime_t * node,
272 vlib_frame_t * frame)
274 return span_node_inline_fn (vm, node, frame, VLIB_RX, SPAN_FEAT_DEVICE);
/* Device-level SPAN, TX direction: mirror packets leaving an interface.
 * NOTE(review): truncated extract — the body's braces are not visible. */
277 VLIB_NODE_FN (span_output_node) (vlib_main_t * vm, vlib_node_runtime_t * node,
278 vlib_frame_t * frame)
280 return span_node_inline_fn (vm, node, frame, VLIB_TX, SPAN_FEAT_DEVICE);
/* L2 (bridged) SPAN, RX direction: runs as an l2-input feature.
 * NOTE(review): truncated extract — the body's braces are not visible. */
283 VLIB_NODE_FN (span_l2_input_node) (vlib_main_t * vm,
284 vlib_node_runtime_t * node,
285 vlib_frame_t * frame)
287 return span_node_inline_fn (vm, node, frame, VLIB_RX, SPAN_FEAT_L2);
/* L2 (bridged) SPAN, TX direction: runs as an l2-output feature.
 * NOTE(review): truncated extract — the body's braces are not visible. */
290 VLIB_NODE_FN (span_l2_output_node) (vlib_main_t * vm,
291 vlib_node_runtime_t * node,
292 vlib_frame_t * frame)
294 return span_node_inline_fn (vm, node, frame, VLIB_TX, SPAN_FEAT_L2);
/* Initializer fields shared by the four SPAN node registrations below.
 * NOTE(review): truncated extract — the last visible line still ends in
 * a backslash, so the macro continues past what is shown (presumably
 * the next-node table); no comment may be inserted between the
 * continued lines without terminating the macro early. */
297 #define span_node_defs \
298 .vector_size = sizeof (u32), \
299 .format_trace = format_span_trace, \
300 .type = VLIB_NODE_TYPE_INTERNAL, \
301 .n_errors = ARRAY_LEN(span_error_strings), \
302 .error_strings = span_error_strings, \
/* Graph-node registrations for the four SPAN nodes.
 * NOTE(review): truncated extract — the shared span_node_defs field list
 * and each registration's closing "};" are not visible here. */
/* Device SPAN, RX (VLIB_RX / SPAN_FEAT_DEVICE above). */
309 VLIB_REGISTER_NODE (span_input_node) = {
311 .name = "span-input",
/* Device SPAN, TX (VLIB_TX / SPAN_FEAT_DEVICE above). */
314 VLIB_REGISTER_NODE (span_output_node) = {
316 .name = "span-output",
/* L2 SPAN, RX (VLIB_RX / SPAN_FEAT_L2 above). */
319 VLIB_REGISTER_NODE (span_l2_input_node) = {
321 .name = "span-l2-input",
/* L2 SPAN, TX (VLIB_TX / SPAN_FEAT_L2 above). */
324 VLIB_REGISTER_NODE (span_l2_output_node) = {
326 .name = "span-l2-output",
/* Init code is compiled once, not per CPU-march variant. */
329 #ifndef CLIB_MARCH_VARIANT
/*
 * span_init: plugin initialization — caches vnet_main and precomputes
 * the l2 input/output feature next-node tables for the two L2 SPAN nodes.
 * NOTE(review): truncated extract — braces, the remaining arguments of
 * both feat_bitmap_init_next_nodes() calls (presumably the feature count
 * and the sm->l2_*_next output tables; confirm against upstream) and the
 * final return statement are not visible here.
 */
330 clib_error_t *span_init (vlib_main_t * vm)
332 span_main_t *sm = &span_main;
335 sm->vnet_main = vnet_get_main ();
337 /* Initialize the feature next-node indexes */
338 feat_bitmap_init_next_nodes (vm,
339 span_l2_input_node.index,
341 l2input_get_feat_names (),
/* Same, for the l2-output feature arc. */
344 feat_bitmap_init_next_nodes (vm,
345 span_l2_output_node.index,
347 l2output_get_feat_names (),
/* Run span_init at vlib startup. */
352 VLIB_INIT_FUNCTION (span_init);
354 #endif /* CLIB_MARCH_VARIANT */
356 #undef span_node_defs
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */