X-Git-Url: https://gerrit.fd.io/r/gitweb?a=blobdiff_plain;f=src%2Fvnet%2Fspan%2Fnode.c;h=1a9d1bae7240dc99a7fe83f31fda9d95711134c4;hb=240dcb24a00004017ec2d1b23b93d9ae8d3c9f65;hp=50d642c2f8b4542c81aa1f3bc80f74d835a395a2;hpb=7cd468a3d7dee7d6c92f69a0bb7061ae208ec727;p=vpp.git

diff --git a/src/vnet/span/node.c b/src/vnet/span/node.c
index 50d642c2f8b..1a9d1bae724 100644
--- a/src/vnet/span/node.c
+++ b/src/vnet/span/node.c
@@ -18,21 +18,22 @@
 #include 
 #include 
+#include 
+#include 
+#include 
 #include 
 #include 
 
-vlib_node_registration_t span_node;
-
 /* packet trace format function */
-u8 *
+static u8 *
 format_span_trace (u8 * s, va_list * args)
 {
   CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
   CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
   span_trace_t *t = va_arg (*args, span_trace_t *);
-  vnet_main_t *vnm = &vnet_main;
+  vnet_main_t *vnm = vnet_get_main ();
   s = format (s, "SPAN: mirrored %U -> %U",
              format_vnet_sw_if_index_name, vnm, t->src_sw_if_index,
              format_vnet_sw_if_index_name, vnm, t->mirror_sw_if_index);
@@ -41,7 +42,7 @@ format_span_trace (u8 * s, va_list * args)
 }
 
 #define foreach_span_error \
-_(HITS, "SPAN incomming packets processed")
+_(HITS, "SPAN incoming packets processed")
 
 typedef enum
 {
@@ -58,52 +59,83 @@ static char *span_error_strings[] = {
 };
 
 static_always_inline void
-span_mirror (vlib_main_t * vm, span_interface_t * si0, vlib_buffer_t * b0,
-             vlib_frame_t ** mirror_frames, int is_rx)
+span_mirror (vlib_main_t * vm, vlib_node_runtime_t * node, u32 sw_if_index0,
+             vlib_buffer_t * b0, vlib_frame_t ** mirror_frames,
+             vlib_rx_or_tx_t rxtx, span_feat_t sf)
 {
   vlib_buffer_t *c0;
-  vnet_main_t *vnm = &vnet_main;
+  span_main_t *sm = &span_main;
+  vnet_main_t *vnm = vnet_get_main ();
   u32 *to_mirror_next = 0;
   u32 i;
+  span_interface_t *si0;
+  span_mirror_t *sm0;
 
-  if (is_rx != 0 && si0->num_rx_mirror_ports == 0)
+  if (sw_if_index0 >= vec_len (sm->interfaces))
     return;
 
-  if (is_rx == 0 && si0->num_tx_mirror_ports == 0)
+  si0 = vec_elt_at_index (sm->interfaces, sw_if_index0);
+  sm0 = &si0->mirror_rxtx[sf][rxtx];
+
+  if (sm0->num_mirror_ports == 0)
     return;
 
   /* Don't do it again */
-  if (PREDICT_FALSE (b0->flags & VNET_BUFFER_SPAN_CLONE))
+  if (PREDICT_FALSE (b0->flags & VNET_BUFFER_F_SPAN_CLONE))
    return;
 
   /* *INDENT-OFF* */
-  clib_bitmap_foreach (i, is_rx ? si0->rx_mirror_ports : si0->tx_mirror_ports, (
+  clib_bitmap_foreach (i, sm0->mirror_ports, (
    {
      if (mirror_frames[i] == 0)
-       mirror_frames[i] = vnet_get_frame_to_sw_interface (vnm, i);
+       {
+         if (sf == SPAN_FEAT_L2)
+           mirror_frames[i] = vlib_get_frame_to_node (vm, l2output_node.index);
+         else
+           mirror_frames[i] = vnet_get_frame_to_sw_interface (vnm, i);
+       }
      to_mirror_next = vlib_frame_vector_args (mirror_frames[i]);
      to_mirror_next += mirror_frames[i]->n_vectors;
+     /* This can fail */
      c0 = vlib_buffer_copy (vm, b0);
-     vnet_buffer (c0)->sw_if_index[VLIB_TX] = i;
-     c0->flags |= VNET_BUFFER_SPAN_CLONE;
-     to_mirror_next[0] = vlib_get_buffer_index (vm, c0);
-     mirror_frames[i]->n_vectors++;
+     if (PREDICT_TRUE(c0 != 0))
+       {
+         vnet_buffer (c0)->sw_if_index[VLIB_TX] = i;
+         c0->flags |= VNET_BUFFER_F_SPAN_CLONE;
+         if (sf == SPAN_FEAT_L2)
+           vnet_buffer (c0)->l2.feature_bitmap = L2OUTPUT_FEAT_OUTPUT;
+         to_mirror_next[0] = vlib_get_buffer_index (vm, c0);
+         mirror_frames[i]->n_vectors++;
+         if (PREDICT_FALSE (b0->flags & VLIB_BUFFER_IS_TRACED))
+           {
+             span_trace_t *t = vlib_add_trace (vm, node, b0, sizeof (*t));
+             t->src_sw_if_index = sw_if_index0;
+             t->mirror_sw_if_index = i;
+#if 0
+             /* Enable this path to allow packet trace of SPAN packets.
+                Note that all SPAN packets will show up on the trace output
+                with the first SPAN packet (since they are in the same frame)
+                thus making trace output of the original packet confusing */
+             mirror_frames[i]->flags |= VLIB_FRAME_TRACE;
+             c0->flags |= VLIB_BUFFER_IS_TRACED;
+#endif
+           }
+       }
    }));
  /* *INDENT-ON* */
 }
 
 static_always_inline uword
 span_node_inline_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
-                     vlib_frame_t * frame, int is_rx)
+                     vlib_frame_t * frame, vlib_rx_or_tx_t rxtx,
+                     span_feat_t sf)
 {
   span_main_t *sm = &span_main;
-  vnet_main_t *vnm = &vnet_main;
+  vnet_main_t *vnm = vnet_get_main ();
   u32 n_left_from, *from, *to_next;
-  u32 n_span_packets = 0;
   u32 next_index;
   u32 sw_if_index;
   static __thread vlib_frame_t **mirror_frames = 0;
-  vlib_rx_or_tx_t rxtx = is_rx ? VLIB_RX : VLIB_TX;
 
   from = vlib_frame_vector_args (frame);
   n_left_from = frame->n_vectors;
@@ -124,7 +156,6 @@ span_node_inline_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
          u32 bi1;
          vlib_buffer_t *b0;
          vlib_buffer_t *b1;
-         span_interface_t *si0, *si1;
          u32 sw_if_index0;
          u32 next0 = 0;
          u32 sw_if_index1;
@@ -142,28 +173,35 @@ span_node_inline_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
          b1 = vlib_get_buffer (vm, bi1);
          sw_if_index0 = vnet_buffer (b0)->sw_if_index[rxtx];
          sw_if_index1 = vnet_buffer (b1)->sw_if_index[rxtx];
-         si0 = vec_elt_at_index (sm->interfaces, sw_if_index0);
-         si1 = vec_elt_at_index (sm->interfaces, sw_if_index1);
-
-         span_mirror (vm, si0, b0, mirror_frames, is_rx);
-         span_mirror (vm, si1, b1, mirror_frames, is_rx);
 
-         vnet_feature_next (sw_if_index0, &next0, b0);
-         vnet_feature_next (sw_if_index1, &next1, b1);
+         span_mirror (vm, node, sw_if_index0, b0, mirror_frames, rxtx, sf);
+         span_mirror (vm, node, sw_if_index1, b1, mirror_frames, rxtx, sf);
 
-         if (PREDICT_FALSE (b0->flags & VLIB_BUFFER_IS_TRACED))
+         switch (sf)
            {
-             span_trace_t *t = vlib_add_trace (vm, node, b0, sizeof (*t));
-             t->src_sw_if_index = sw_if_index0;
-             //t->mirror_sw_if_index = si0->mirror_sw_if_index;
+           case SPAN_FEAT_L2:
+             if (rxtx == VLIB_RX)
+               {
+                 next0 = vnet_l2_feature_next (b0, sm->l2_input_next,
+                                               L2INPUT_FEAT_SPAN);
+                 next1 = vnet_l2_feature_next (b1, sm->l2_input_next,
+                                               L2INPUT_FEAT_SPAN);
+               }
+             else
+               {
+                 next0 = vnet_l2_feature_next (b0, sm->l2_output_next,
+                                               L2OUTPUT_FEAT_SPAN);
+                 next1 = vnet_l2_feature_next (b1, sm->l2_output_next,
+                                               L2OUTPUT_FEAT_SPAN);
+               }
+             break;
+           case SPAN_FEAT_DEVICE:
+           default:
+             vnet_feature_next (&next0, b0);
+             vnet_feature_next (&next1, b1);
+             break;
            }
-         if (PREDICT_FALSE (b0->flags & VLIB_BUFFER_IS_TRACED))
-           {
-             span_trace_t *t = vlib_add_trace (vm, node, b1, sizeof (*t));
-             t->src_sw_if_index = sw_if_index1;
-             //t->mirror_sw_if_index = si1->mirror_sw_if_index;
-           }
          /* verify speculative enqueue, maybe switch current next frame */
          vlib_validate_buffer_enqueue_x2 (vm, node, next_index, to_next,
                                           n_left_to_next,
@@ -173,7 +211,6 @@ span_node_inline_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
        {
          u32 bi0;
          vlib_buffer_t *b0;
-         span_interface_t *si0;
          u32 sw_if_index0;
          u32 next0 = 0;
 
@@ -186,16 +223,25 @@ span_node_inline_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
          b0 = vlib_get_buffer (vm, bi0);
          sw_if_index0 = vnet_buffer (b0)->sw_if_index[rxtx];
-         si0 = vec_elt_at_index (sm->interfaces, sw_if_index0);
 
-         span_mirror (vm, si0, b0, mirror_frames, is_rx);
-         vnet_feature_next (sw_if_index0, &next0, b0);
+         span_mirror (vm, node, sw_if_index0, b0, mirror_frames, rxtx, sf);
 
-         if (PREDICT_FALSE (b0->flags & VLIB_BUFFER_IS_TRACED))
+         switch (sf)
            {
-             span_trace_t *t = vlib_add_trace (vm, node, b0, sizeof (*t));
-             t->src_sw_if_index = sw_if_index0;
+           case SPAN_FEAT_L2:
+             if (rxtx == VLIB_RX)
+               next0 = vnet_l2_feature_next (b0, sm->l2_input_next,
+                                             L2INPUT_FEAT_SPAN);
+             else
+               next0 = vnet_l2_feature_next (b0, sm->l2_output_next,
+                                             L2OUTPUT_FEAT_SPAN);
+             break;
+           case SPAN_FEAT_DEVICE:
+           default:
+             vnet_feature_next (&next0, b0);
+             break;
            }
+
          /* verify speculative enqueue, maybe switch current next frame */
          vlib_validate_buffer_enqueue_x1 (vm, node, next_index, to_next,
                                           n_left_to_next, bi0, next0);
@@ -207,76 +253,106 @@ span_node_inline_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
   for (sw_if_index = 0; sw_if_index < vec_len (mirror_frames); sw_if_index++)
     {
-      if (mirror_frames[sw_if_index] == 0)
+      vlib_frame_t *f = mirror_frames[sw_if_index];
+      if (f == 0)
        continue;
 
-      vnet_put_frame_to_sw_interface (vnm, sw_if_index,
-                                      mirror_frames[sw_if_index]);
+      if (sf == SPAN_FEAT_L2)
+        vlib_put_frame_to_node (vm, l2output_node.index, f);
+      else
+        vnet_put_frame_to_sw_interface (vnm, sw_if_index, f);
       mirror_frames[sw_if_index] = 0;
     }
-  vlib_node_increment_counter (vm, span_node.index, SPAN_ERROR_HITS,
-                               n_span_packets);
 
   return frame->n_vectors;
 }
 
-static uword
-span_input_node_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
-                    vlib_frame_t * frame)
+VLIB_NODE_FN (span_input_node) (vlib_main_t * vm, vlib_node_runtime_t * node,
+                                vlib_frame_t * frame)
 {
-  return span_node_inline_fn (vm, node, frame, 1);
+  return span_node_inline_fn (vm, node, frame, VLIB_RX, SPAN_FEAT_DEVICE);
 }
 
-static uword
-span_output_node_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
-                     vlib_frame_t * frame)
+VLIB_NODE_FN (span_output_node) (vlib_main_t * vm, vlib_node_runtime_t * node,
+                                 vlib_frame_t * frame)
 {
-  return span_node_inline_fn (vm, node, frame, 0);
+  return span_node_inline_fn (vm, node, frame, VLIB_TX, SPAN_FEAT_DEVICE);
 }
 
-/* *INDENT-OFF* */
-VLIB_REGISTER_NODE (span_input_node) = {
-  .function = span_input_node_fn,
-  .name = "span-input",
-  .vector_size = sizeof (u32),
-  .format_trace = format_span_trace,
-  .type = VLIB_NODE_TYPE_INTERNAL,
+VLIB_NODE_FN (span_l2_input_node) (vlib_main_t * vm,
+                                   vlib_node_runtime_t * node,
+                                   vlib_frame_t * frame)
+{
+  return span_node_inline_fn (vm, node, frame, VLIB_RX, SPAN_FEAT_L2);
+}
 
-  .n_errors = ARRAY_LEN(span_error_strings),
-  .error_strings = span_error_strings,
+VLIB_NODE_FN (span_l2_output_node) (vlib_main_t * vm,
+                                    vlib_node_runtime_t * node,
+                                    vlib_frame_t * frame)
+{
+  return span_node_inline_fn (vm, node, frame, VLIB_TX, SPAN_FEAT_L2);
+}
 
-  .n_next_nodes = 0,
+#define span_node_defs \
+  .vector_size = sizeof (u32), \
+  .format_trace = format_span_trace, \
+  .type = VLIB_NODE_TYPE_INTERNAL, \
+  .n_errors = ARRAY_LEN(span_error_strings), \
+  .error_strings = span_error_strings, \
+  .n_next_nodes = 0, \
+  .next_nodes = { \
+    [0] = "error-drop" \
+  }
 
-  /* edit / add dispositions here */
-  .next_nodes = {
-    [0] = "error-drop",
-  },
+/* *INDENT-OFF* */
+VLIB_REGISTER_NODE (span_input_node) = {
+  span_node_defs,
+  .name = "span-input",
 };
-VLIB_NODE_FUNCTION_MULTIARCH (span_input_node, span_input_node_fn)
-
 VLIB_REGISTER_NODE (span_output_node) = {
-  .function = span_output_node_fn,
+  span_node_defs,
   .name = "span-output",
-  .vector_size = sizeof (u32),
-  .format_trace = format_span_trace,
-  .type = VLIB_NODE_TYPE_INTERNAL,
-
-  .n_errors = ARRAY_LEN(span_error_strings),
-  .error_strings = span_error_strings,
+};
 
-  .n_next_nodes = 0,
+VLIB_REGISTER_NODE (span_l2_input_node) = {
+  span_node_defs,
+  .name = "span-l2-input",
+};
 
-  /* edit / add dispositions here */
-  .next_nodes = {
-    [0] = "error-drop",
-  },
+VLIB_REGISTER_NODE (span_l2_output_node) = {
+  span_node_defs,
+  .name = "span-l2-output",
 };
-VLIB_NODE_FUNCTION_MULTIARCH (span_output_node, span_output_node_fn)
+
+#ifndef CLIB_MARCH_VARIANT
+clib_error_t *span_init (vlib_main_t * vm)
+{
+  span_main_t *sm = &span_main;
+
+  sm->vlib_main = vm;
+  sm->vnet_main = vnet_get_main ();
+
+  /* Initialize the feature next-node indexes */
+  feat_bitmap_init_next_nodes (vm,
+                               span_l2_input_node.index,
+                               L2INPUT_N_FEAT,
+                               l2input_get_feat_names (),
+                               sm->l2_input_next);
+
+  feat_bitmap_init_next_nodes (vm,
+                               span_l2_output_node.index,
+                               L2OUTPUT_N_FEAT,
+                               l2output_get_feat_names (),
+                               sm->l2_output_next);
+  return 0;
+}
+VLIB_INIT_FUNCTION (span_init);
 /* *INDENT-ON* */
+#endif /* CLIB_MARCH_VARIANT */
+#undef span_node_defs
 
 /*
  * fd.io coding-style-patch-verification: ON
  *