Remove C11 memcpy checks from perf-critical code
[vpp.git] / src / vnet / l2 / l2_input_vtr.c
1 /*
2  * l2_input_vtr.c : layer 2 input vlan tag rewrite processing
3  *
4  * Copyright (c) 2013 Cisco and/or its affiliates.
5  * Licensed under the Apache License, Version 2.0 (the "License");
6  * you may not use this file except in compliance with the License.
7  * You may obtain a copy of the License at:
8  *
9  *     http://www.apache.org/licenses/LICENSE-2.0
10  *
11  * Unless required by applicable law or agreed to in writing, software
12  * distributed under the License is distributed on an "AS IS" BASIS,
13  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14  * See the License for the specific language governing permissions and
15  * limitations under the License.
16  */
17
18 #include <vlib/vlib.h>
19 #include <vnet/vnet.h>
20 #include <vnet/ethernet/ethernet.h>
21 #include <vnet/ethernet/packet.h>
22 #include <vnet/l2/l2_input.h>
23 #include <vnet/l2/feat_bitmap.h>
24 #include <vnet/l2/l2_vtr.h>
25 #include <vnet/l2/l2_input_vtr.h>
26 #include <vnet/l2/l2_output.h>
27
28 #include <vppinfra/error.h>
29 #include <vppinfra/cache.h>
30
31
/* Per-packet trace record captured by l2-input-vtr when tracing is on */
typedef struct
{
  /* per-pkt trace data */
  u8 src[6];                    /* ethernet source address */
  u8 dst[6];                    /* ethernet destination address */
  u8 raw[12];                   /* raw data (vlans) - bytes starting at the ethertype */
  u32 sw_if_index;              /* RX software interface index */
} l2_invtr_trace_t;
40
41 /* packet trace format function */
42 static u8 *
43 format_l2_invtr_trace (u8 * s, va_list * args)
44 {
45   CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
46   CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
47   l2_invtr_trace_t *t = va_arg (*args, l2_invtr_trace_t *);
48
49   s = format (s, "l2-input-vtr: sw_if_index %d dst %U src %U data "
50               "%02x %02x %02x %02x %02x %02x %02x %02x %02x %02x %02x %02x",
51               t->sw_if_index,
52               format_ethernet_address, t->dst,
53               format_ethernet_address, t->src,
54               t->raw[0], t->raw[1], t->raw[2], t->raw[3], t->raw[4],
55               t->raw[5], t->raw[6], t->raw[7], t->raw[8], t->raw[9],
56               t->raw[10], t->raw[11]);
57   return s;
58 }
59
/* Single global instance of this node's state */
l2_invtr_main_t l2_invtr_main;

static vlib_node_registration_t l2_invtr_node;

/* Error counters: _(symbol, counter description string) */
#define foreach_l2_invtr_error                  \
_(L2_INVTR,    "L2 inverter packets")           \
_(DROP,        "L2 input tag rewrite drops")

typedef enum
{
#define _(sym,str) L2_INVTR_ERROR_##sym,
  foreach_l2_invtr_error
#undef _
    L2_INVTR_N_ERROR,
} l2_invtr_error_t;

static char *l2_invtr_error_strings[] = {
#define _(sym,string) string,
  foreach_l2_invtr_error
#undef _
};

/* Next-node dispositions; non-drop nexts come from the feature bitmap */
typedef enum
{
  L2_INVTR_NEXT_DROP,
  L2_INVTR_N_NEXT,
} l2_invtr_next_t;
87
88
89 static uword
90 l2_invtr_node_fn (vlib_main_t * vm,
91                   vlib_node_runtime_t * node, vlib_frame_t * frame)
92 {
93   u32 n_left_from, *from, *to_next;
94   l2_invtr_next_t next_index;
95   l2_invtr_main_t *msm = &l2_invtr_main;
96
97   from = vlib_frame_vector_args (frame);
98   n_left_from = frame->n_vectors;       /* number of packets to process */
99   next_index = node->cached_next_index;
100
101   while (n_left_from > 0)
102     {
103       u32 n_left_to_next;
104
105       /* get space to enqueue frame to graph node "next_index" */
106       vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
107
108       while (n_left_from >= 6 && n_left_to_next >= 2)
109         {
110           u32 bi0, bi1;
111           vlib_buffer_t *b0, *b1;
112           u32 next0, next1;
113           u32 sw_if_index0, sw_if_index1;
114
115           /* Prefetch next iteration. */
116           {
117             vlib_buffer_t *p2, *p3, *p4, *p5;
118             u32 sw_if_index2, sw_if_index3;
119
120             p2 = vlib_get_buffer (vm, from[2]);
121             p3 = vlib_get_buffer (vm, from[3]);
122             p4 = vlib_get_buffer (vm, from[4]);
123             p5 = vlib_get_buffer (vm, from[5]);
124
125             /* Prefetch the buffer header and packet for the N+2 loop iteration */
126             vlib_prefetch_buffer_header (p4, LOAD);
127             vlib_prefetch_buffer_header (p5, LOAD);
128
129             CLIB_PREFETCH (p4->data, CLIB_CACHE_LINE_BYTES, STORE);
130             CLIB_PREFETCH (p5->data, CLIB_CACHE_LINE_BYTES, STORE);
131
132             /*
133              * Prefetch the input config for the N+1 loop iteration
134              * This depends on the buffer header above
135              */
136             sw_if_index2 = vnet_buffer (p2)->sw_if_index[VLIB_RX];
137             sw_if_index3 = vnet_buffer (p3)->sw_if_index[VLIB_RX];
138             CLIB_PREFETCH (vec_elt_at_index
139                            (l2output_main.configs, sw_if_index2),
140                            CLIB_CACHE_LINE_BYTES, LOAD);
141             CLIB_PREFETCH (vec_elt_at_index
142                            (l2output_main.configs, sw_if_index3),
143                            CLIB_CACHE_LINE_BYTES, LOAD);
144           }
145
146           /* speculatively enqueue b0 and b1 to the current next frame */
147           /* bi is "buffer index", b is pointer to the buffer */
148           to_next[0] = bi0 = from[0];
149           to_next[1] = bi1 = from[1];
150           from += 2;
151           to_next += 2;
152           n_left_from -= 2;
153           n_left_to_next -= 2;
154
155           b0 = vlib_get_buffer (vm, bi0);
156           b1 = vlib_get_buffer (vm, bi1);
157
158           /* RX interface handles */
159           sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
160           sw_if_index1 = vnet_buffer (b1)->sw_if_index[VLIB_RX];
161
162           /* Determine the next node */
163           next0 = vnet_l2_feature_next (b0, msm->feat_next_node_index,
164                                         L2INPUT_FEAT_VTR);
165           next1 = vnet_l2_feature_next (b1, msm->feat_next_node_index,
166                                         L2INPUT_FEAT_VTR);
167
168           l2_output_config_t *config0;
169           l2_output_config_t *config1;
170           config0 = vec_elt_at_index (l2output_main.configs, sw_if_index0);
171           config1 = vec_elt_at_index (l2output_main.configs, sw_if_index1);
172
173           if (PREDICT_FALSE (config0->out_vtr_flag))
174             {
175               if (config0->output_vtr.push_and_pop_bytes)
176                 {
177                   /* perform the tag rewrite on two packets */
178                   if (l2_vtr_process (b0, &config0->input_vtr))
179                     {
180                       /* Drop packet */
181                       next0 = L2_INVTR_NEXT_DROP;
182                       b0->error = node->errors[L2_INVTR_ERROR_DROP];
183                     }
184                 }
185               else if (config0->output_pbb_vtr.push_and_pop_bytes)
186                 {
187                   if (l2_pbb_process (b0, &(config0->input_pbb_vtr)))
188                     {
189                       /* Drop packet */
190                       next0 = L2_INVTR_NEXT_DROP;
191                       b0->error = node->errors[L2_INVTR_ERROR_DROP];
192                     }
193                 }
194             }
195           if (PREDICT_FALSE (config1->out_vtr_flag))
196             {
197               if (config1->output_vtr.push_and_pop_bytes)
198                 {
199                   if (l2_vtr_process (b1, &config1->input_vtr))
200                     {
201                       /* Drop packet */
202                       next1 = L2_INVTR_NEXT_DROP;
203                       b1->error = node->errors[L2_INVTR_ERROR_DROP];
204                     }
205                 }
206               else if (config1->output_pbb_vtr.push_and_pop_bytes)
207                 {
208                   if (l2_pbb_process (b1, &(config1->input_pbb_vtr)))
209                     {
210                       /* Drop packet */
211                       next1 = L2_INVTR_NEXT_DROP;
212                       b1->error = node->errors[L2_INVTR_ERROR_DROP];
213                     }
214                 }
215             }
216
217           if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)))
218             {
219               if (b0->flags & VLIB_BUFFER_IS_TRACED)
220                 {
221                   l2_invtr_trace_t *t =
222                     vlib_add_trace (vm, node, b0, sizeof (*t));
223                   ethernet_header_t *h0 = vlib_buffer_get_current (b0);
224                   t->sw_if_index = sw_if_index0;
225                   clib_memcpy_fast (t->src, h0->src_address, 6);
226                   clib_memcpy_fast (t->dst, h0->dst_address, 6);
227                   clib_memcpy_fast (t->raw, &h0->type, sizeof (t->raw));
228                 }
229               if (b1->flags & VLIB_BUFFER_IS_TRACED)
230                 {
231                   l2_invtr_trace_t *t =
232                     vlib_add_trace (vm, node, b1, sizeof (*t));
233                   ethernet_header_t *h1 = vlib_buffer_get_current (b1);
234                   t->sw_if_index = sw_if_index0;
235                   clib_memcpy_fast (t->src, h1->src_address, 6);
236                   clib_memcpy_fast (t->dst, h1->dst_address, 6);
237                   clib_memcpy_fast (t->raw, &h1->type, sizeof (t->raw));
238                 }
239             }
240
241           /* verify speculative enqueues, maybe switch current next frame */
242           /* if next0==next1==next_index then nothing special needs to be done */
243           vlib_validate_buffer_enqueue_x2 (vm, node, next_index,
244                                            to_next, n_left_to_next,
245                                            bi0, bi1, next0, next1);
246         }
247
248       while (n_left_from > 0 && n_left_to_next > 0)
249         {
250           u32 bi0;
251           vlib_buffer_t *b0;
252           u32 next0;
253           u32 sw_if_index0;
254
255           /* speculatively enqueue b0 to the current next frame */
256           bi0 = from[0];
257           to_next[0] = bi0;
258           from += 1;
259           to_next += 1;
260           n_left_from -= 1;
261           n_left_to_next -= 1;
262
263           b0 = vlib_get_buffer (vm, bi0);
264
265           sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
266
267           /* Determine the next node */
268           next0 = vnet_l2_feature_next (b0, msm->feat_next_node_index,
269                                         L2INPUT_FEAT_VTR);
270
271           l2_output_config_t *config0;
272           config0 = vec_elt_at_index (l2output_main.configs, sw_if_index0);
273
274           if (PREDICT_FALSE (config0->out_vtr_flag))
275             {
276               if (config0->output_vtr.push_and_pop_bytes)
277                 {
278                   /* perform the tag rewrite on one packet */
279                   if (l2_vtr_process (b0, &config0->input_vtr))
280                     {
281                       /* Drop packet */
282                       next0 = L2_INVTR_NEXT_DROP;
283                       b0->error = node->errors[L2_INVTR_ERROR_DROP];
284                     }
285                 }
286               else if (config0->output_pbb_vtr.push_and_pop_bytes)
287                 {
288                   if (l2_pbb_process (b0, &(config0->input_pbb_vtr)))
289                     {
290                       /* Drop packet */
291                       next0 = L2_INVTR_NEXT_DROP;
292                       b0->error = node->errors[L2_INVTR_ERROR_DROP];
293                     }
294                 }
295             }
296
297           if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
298                              && (b0->flags & VLIB_BUFFER_IS_TRACED)))
299             {
300               l2_invtr_trace_t *t =
301                 vlib_add_trace (vm, node, b0, sizeof (*t));
302               ethernet_header_t *h0 = vlib_buffer_get_current (b0);
303               t->sw_if_index = sw_if_index0;
304               clib_memcpy_fast (t->src, h0->src_address, 6);
305               clib_memcpy_fast (t->dst, h0->dst_address, 6);
306               clib_memcpy_fast (t->raw, &h0->type, sizeof (t->raw));
307             }
308
309           /* verify speculative enqueue, maybe switch current next frame */
310           vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
311                                            to_next, n_left_to_next,
312                                            bi0, next0);
313         }
314
315       vlib_put_next_frame (vm, node, next_index, n_left_to_next);
316     }
317
318   return frame->n_vectors;
319 }
320
321
/* *INDENT-OFF* */
/* Graph node registration for the l2 input vlan tag rewrite node */
VLIB_REGISTER_NODE (l2_invtr_node,static) = {
  .function = l2_invtr_node_fn,
  .name = "l2-input-vtr",
  .vector_size = sizeof (u32),
  .format_trace = format_l2_invtr_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(l2_invtr_error_strings),
  .error_strings = l2_invtr_error_strings,

  .n_next_nodes = L2_INVTR_N_NEXT,

  /* edit / add dispositions here */
  .next_nodes = {
       [L2_INVTR_NEXT_DROP]  = "error-drop",
  },
};
/* *INDENT-ON* */
341
342 VLIB_NODE_FUNCTION_MULTIARCH (l2_invtr_node, l2_invtr_node_fn)
343      clib_error_t *l2_invtr_init (vlib_main_t * vm)
344 {
345   l2_invtr_main_t *mp = &l2_invtr_main;
346
347   mp->vlib_main = vm;
348   mp->vnet_main = vnet_get_main ();
349
350   /* Initialize the feature next-node indexes */
351   feat_bitmap_init_next_nodes (vm,
352                                l2_invtr_node.index,
353                                L2INPUT_N_FEAT,
354                                l2input_get_feat_names (),
355                                mp->feat_next_node_index);
356
357   return 0;
358 }
359
360 VLIB_INIT_FUNCTION (l2_invtr_init);
361
362
363 /*
364  * fd.io coding-style-patch-verification: ON
365  *
366  * Local Variables:
367  * eval: (c-set-style "gnu")
368  * End:
369  */