/*
 * Copyright (c) 2016 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16 #include <vlib/vlib.h>
17 #include <vnet/vnet.h>
18 #include <vppinfra/error.h>
20 #include <vnet/span/span.h>
21 #include <vnet/l2/l2_input.h>
22 #include <vnet/l2/l2_output.h>
23 #include <vnet/l2/feat_bitmap.h>
25 #include <vppinfra/error.h>
26 #include <vppinfra/elog.h>
28 /* packet trace format function */
30 format_span_trace (u8 * s, va_list * args)
32 CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
33 CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
34 span_trace_t *t = va_arg (*args, span_trace_t *);
36 vnet_main_t *vnm = vnet_get_main ();
37 s = format (s, "SPAN: mirrored %U -> %U",
38 format_vnet_sw_if_index_name, vnm, t->src_sw_if_index,
39 format_vnet_sw_if_index_name, vnm, t->mirror_sw_if_index);
/* Per-node error counters: one _(SYMBOL, "description") entry per counter. */
#define foreach_span_error                      \
_(HITS, "SPAN incoming packets processed")

typedef enum
{
#define _(sym,str) SPAN_ERROR_##sym,
  foreach_span_error
#undef _
    SPAN_N_ERROR,		/* number of error counters */
} span_error_t;

/* Counter description strings, index-matched to span_error_t. */
static char *span_error_strings[] = {
#define _(sym,string) string,
  foreach_span_error
#undef _
};
61 static_always_inline void
62 span_mirror (vlib_main_t * vm, vlib_node_runtime_t * node, u32 sw_if_index0,
63 vlib_buffer_t * b0, vlib_frame_t ** mirror_frames,
64 vlib_rx_or_tx_t rxtx, span_feat_t sf)
67 span_main_t *sm = &span_main;
68 vnet_main_t *vnm = vnet_get_main ();
69 u32 *to_mirror_next = 0;
71 span_interface_t *si0;
74 if (sw_if_index0 >= vec_len (sm->interfaces))
77 si0 = vec_elt_at_index (sm->interfaces, sw_if_index0);
78 sm0 = &si0->mirror_rxtx[sf][rxtx];
80 if (sm0->num_mirror_ports == 0)
83 /* Don't do it again */
84 if (PREDICT_FALSE (b0->flags & VNET_BUFFER_F_SPAN_CLONE))
88 clib_bitmap_foreach (i, sm0->mirror_ports)
90 if (mirror_frames[i] == 0)
92 if (sf == SPAN_FEAT_L2)
93 mirror_frames[i] = vlib_get_frame_to_node (vm, l2output_node.index);
95 mirror_frames[i] = vnet_get_frame_to_sw_interface (vnm, i);
97 to_mirror_next = vlib_frame_vector_args (mirror_frames[i]);
98 to_mirror_next += mirror_frames[i]->n_vectors;
100 c0 = vlib_buffer_copy (vm, b0);
101 if (PREDICT_TRUE(c0 != 0))
103 vnet_buffer (c0)->sw_if_index[VLIB_TX] = i;
104 c0->flags |= VNET_BUFFER_F_SPAN_CLONE;
105 if (sf == SPAN_FEAT_L2)
106 vnet_buffer (c0)->l2.feature_bitmap = L2OUTPUT_FEAT_OUTPUT;
107 to_mirror_next[0] = vlib_get_buffer_index (vm, c0);
108 mirror_frames[i]->n_vectors++;
109 if (PREDICT_FALSE (b0->flags & VLIB_BUFFER_IS_TRACED))
111 span_trace_t *t = vlib_add_trace (vm, node, b0, sizeof (*t));
112 t->src_sw_if_index = sw_if_index0;
113 t->mirror_sw_if_index = i;
115 /* Enable this path to allow packet trace of SPAN packets.
116 Note that all SPAN packets will show up on the trace output
117 with the first SPAN packet (since they are in the same frame)
118 thus making trace output of the original packet confusing */
119 mirror_frames[i]->flags |= VLIB_FRAME_TRACE;
120 c0->flags |= VLIB_BUFFER_IS_TRACED;
128 static_always_inline uword
129 span_node_inline_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
130 vlib_frame_t * frame, vlib_rx_or_tx_t rxtx,
133 span_main_t *sm = &span_main;
134 vnet_main_t *vnm = vnet_get_main ();
135 u32 n_left_from, *from, *to_next;
138 static __thread vlib_frame_t **mirror_frames = 0;
140 from = vlib_frame_vector_args (frame);
141 n_left_from = frame->n_vectors;
142 next_index = node->cached_next_index;
144 vec_validate_aligned (mirror_frames, sm->max_sw_if_index,
145 CLIB_CACHE_LINE_BYTES);
147 while (n_left_from > 0)
151 vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
153 while (n_left_from >= 4 && n_left_to_next >= 2)
164 /* speculatively enqueue b0, b1 to the current next frame */
165 to_next[0] = bi0 = from[0];
166 to_next[1] = bi1 = from[1];
172 b0 = vlib_get_buffer (vm, bi0);
173 b1 = vlib_get_buffer (vm, bi1);
174 sw_if_index0 = vnet_buffer (b0)->sw_if_index[rxtx];
175 sw_if_index1 = vnet_buffer (b1)->sw_if_index[rxtx];
177 span_mirror (vm, node, sw_if_index0, b0, mirror_frames, rxtx, sf);
178 span_mirror (vm, node, sw_if_index1, b1, mirror_frames, rxtx, sf);
185 next0 = vnet_l2_feature_next (b0, sm->l2_input_next,
187 next1 = vnet_l2_feature_next (b1, sm->l2_input_next,
192 next0 = vnet_l2_feature_next (b0, sm->l2_output_next,
194 next1 = vnet_l2_feature_next (b1, sm->l2_output_next,
198 case SPAN_FEAT_DEVICE:
200 vnet_feature_next (&next0, b0);
201 vnet_feature_next (&next1, b1);
205 /* verify speculative enqueue, maybe switch current next frame */
206 vlib_validate_buffer_enqueue_x2 (vm, node, next_index,
207 to_next, n_left_to_next,
208 bi0, bi1, next0, next1);
210 while (n_left_from > 0 && n_left_to_next > 0)
217 /* speculatively enqueue b0 to the current next frame */
218 to_next[0] = bi0 = from[0];
224 b0 = vlib_get_buffer (vm, bi0);
225 sw_if_index0 = vnet_buffer (b0)->sw_if_index[rxtx];
227 span_mirror (vm, node, sw_if_index0, b0, mirror_frames, rxtx, sf);
233 next0 = vnet_l2_feature_next (b0, sm->l2_input_next,
236 next0 = vnet_l2_feature_next (b0, sm->l2_output_next,
239 case SPAN_FEAT_DEVICE:
241 vnet_feature_next (&next0, b0);
245 /* verify speculative enqueue, maybe switch current next frame */
246 vlib_validate_buffer_enqueue_x1 (vm, node, next_index, to_next,
247 n_left_to_next, bi0, next0);
250 vlib_put_next_frame (vm, node, next_index, n_left_to_next);
254 for (sw_if_index = 0; sw_if_index < vec_len (mirror_frames); sw_if_index++)
256 vlib_frame_t *f = mirror_frames[sw_if_index];
260 if (sf == SPAN_FEAT_L2)
261 vlib_put_frame_to_node (vm, l2output_node.index, f);
263 vnet_put_frame_to_sw_interface (vnm, sw_if_index, f);
264 mirror_frames[sw_if_index] = 0;
267 return frame->n_vectors;
270 VLIB_NODE_FN (span_input_node) (vlib_main_t * vm, vlib_node_runtime_t * node,
271 vlib_frame_t * frame)
273 return span_node_inline_fn (vm, node, frame, VLIB_RX, SPAN_FEAT_DEVICE);
276 VLIB_NODE_FN (span_output_node) (vlib_main_t * vm, vlib_node_runtime_t * node,
277 vlib_frame_t * frame)
279 return span_node_inline_fn (vm, node, frame, VLIB_TX, SPAN_FEAT_DEVICE);
282 VLIB_NODE_FN (span_l2_input_node) (vlib_main_t * vm,
283 vlib_node_runtime_t * node,
284 vlib_frame_t * frame)
286 return span_node_inline_fn (vm, node, frame, VLIB_RX, SPAN_FEAT_L2);
289 VLIB_NODE_FN (span_l2_output_node) (vlib_main_t * vm,
290 vlib_node_runtime_t * node,
291 vlib_frame_t * frame)
293 return span_node_inline_fn (vm, node, frame, VLIB_TX, SPAN_FEAT_L2);
/* Registration fields shared by all four SPAN nodes (no comments inside:
   a line comment within a continued macro would swallow the spliced text) */
#define span_node_defs                          \
  .vector_size = sizeof (u32),                  \
  .format_trace = format_span_trace,            \
  .type = VLIB_NODE_TYPE_INTERNAL,              \
  .n_errors = ARRAY_LEN(span_error_strings),    \
  .error_strings = span_error_strings,          \
  .n_next_nodes = 0,                            \
  .next_nodes =                                 \
  {                                             \
    [0] = "error-drop"                          \
  }
308 VLIB_REGISTER_NODE (span_input_node) = {
310 .name = "span-input",
313 VLIB_REGISTER_NODE (span_output_node) = {
315 .name = "span-output",
318 VLIB_REGISTER_NODE (span_l2_input_node) = {
320 .name = "span-l2-input",
323 VLIB_REGISTER_NODE (span_l2_output_node) = {
325 .name = "span-l2-output",
328 #ifndef CLIB_MARCH_VARIANT
329 clib_error_t *span_init (vlib_main_t * vm)
331 span_main_t *sm = &span_main;
334 sm->vnet_main = vnet_get_main ();
336 /* Initialize the feature next-node indexes */
337 feat_bitmap_init_next_nodes (vm,
338 span_l2_input_node.index,
340 l2input_get_feat_names (),
343 feat_bitmap_init_next_nodes (vm,
344 span_l2_output_node.index,
346 l2output_get_feat_names (),
351 VLIB_INIT_FUNCTION (span_init);
353 #endif /* CLIB_MARCH_VARIANT */
355 #undef span_node_defs
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */