src/vnet/l2/l2_input_vtr.c
/*
 * l2_input_vtr.c : layer 2 input vlan tag rewrite processing
 *
 * Copyright (c) 2013 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <vlib/vlib.h>
#include <vnet/vnet.h>
#include <vnet/ethernet/ethernet.h>
#include <vnet/ethernet/packet.h>
#include <vnet/l2/l2_input.h>
#include <vnet/l2/feat_bitmap.h>
#include <vnet/l2/l2_vtr.h>
#include <vnet/l2/l2_input_vtr.h>
#include <vnet/l2/l2_output.h>

#include <vppinfra/error.h>
#include <vppinfra/cache.h>


typedef struct
{
  /* per-pkt trace data */
  u8 src[6];
  u8 dst[6];
  u8 raw[12];			/* raw data (vlans) */
  u32 sw_if_index;
} l2_invtr_trace_t;

/* packet trace format function */
static u8 *
format_l2_invtr_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  l2_invtr_trace_t *t = va_arg (*args, l2_invtr_trace_t *);

  s = format (s, "l2-input-vtr: sw_if_index %d dst %U src %U data "
              "%02x %02x %02x %02x %02x %02x %02x %02x %02x %02x %02x %02x",
              t->sw_if_index,
              format_ethernet_address, t->dst,
              format_ethernet_address, t->src,
              t->raw[0], t->raw[1], t->raw[2], t->raw[3], t->raw[4],
              t->raw[5], t->raw[6], t->raw[7], t->raw[8], t->raw[9],
              t->raw[10], t->raw[11]);
  return s;
}

l2_invtr_main_t l2_invtr_main;

static vlib_node_registration_t l2_invtr_node;

#define foreach_l2_invtr_error                  \
_(L2_INVTR,    "L2 input VTR packets")          \
_(DROP,        "L2 input tag rewrite drops")

typedef enum
{
#define _(sym,str) L2_INVTR_ERROR_##sym,
  foreach_l2_invtr_error
#undef _
    L2_INVTR_N_ERROR,
} l2_invtr_error_t;

static char *l2_invtr_error_strings[] = {
#define _(sym,string) string,
  foreach_l2_invtr_error
#undef _
};

typedef enum
{
  L2_INVTR_NEXT_DROP,
  L2_INVTR_N_NEXT,
} l2_invtr_next_t;

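/**
 * The l2-input-vtr node.
 *
 * Clears the L2INPUT_FEAT_VTR bit from each buffer's L2 feature bitmap,
 * applies the interface's input tag rewrite (802.1q/802.1ad via
 * l2_vtr_process, or PBB via l2_pbb_process) if one is configured, and
 * enqueues the packet to the next enabled L2 input feature node.  Packets
 * whose rewrite fails are sent to error-drop.
 */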
static uword
l2_invtr_node_fn (vlib_main_t * vm,
                  vlib_node_runtime_t * node, vlib_frame_t * frame)
{
  u32 n_left_from, *from, *to_next;
  l2_invtr_next_t next_index;
  l2_invtr_main_t *msm = &l2_invtr_main;

  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors;	/* number of packets to process */
  next_index = node->cached_next_index;

  while (n_left_from > 0)
    {
      u32 n_left_to_next;

      /* get space to enqueue frame to graph node "next_index" */
      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

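      /*
       * Dual loop: two packets are processed per iteration.  At least six
       * buffers are required so that from[2]..from[5] can be prefetched
       * for the following iterations.
       */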
      while (n_left_from >= 6 && n_left_to_next >= 2)
        {
          u32 bi0, bi1;
          vlib_buffer_t *b0, *b1;
          u32 next0, next1;
          u32 sw_if_index0, sw_if_index1;
          u32 feature_bitmap0, feature_bitmap1;

          /* Prefetch next iteration. */
          {
            vlib_buffer_t *p2, *p3, *p4, *p5;
            u32 sw_if_index2, sw_if_index3;

            p2 = vlib_get_buffer (vm, from[2]);
            p3 = vlib_get_buffer (vm, from[3]);
            p4 = vlib_get_buffer (vm, from[4]);
            p5 = vlib_get_buffer (vm, from[5]);

            /* Prefetch the buffer header and packet for the N+2 loop iteration */
            vlib_prefetch_buffer_header (p4, LOAD);
            vlib_prefetch_buffer_header (p5, LOAD);

            CLIB_PREFETCH (p4->data, CLIB_CACHE_LINE_BYTES, STORE);
            CLIB_PREFETCH (p5->data, CLIB_CACHE_LINE_BYTES, STORE);

            /*
             * Prefetch the input config for the N+1 loop iteration
             * This depends on the buffer header above
             */
            sw_if_index2 = vnet_buffer (p2)->sw_if_index[VLIB_RX];
            sw_if_index3 = vnet_buffer (p3)->sw_if_index[VLIB_RX];
            CLIB_PREFETCH (vec_elt_at_index
                           (l2output_main.configs, sw_if_index2),
                           CLIB_CACHE_LINE_BYTES, LOAD);
            CLIB_PREFETCH (vec_elt_at_index
                           (l2output_main.configs, sw_if_index3),
                           CLIB_CACHE_LINE_BYTES, LOAD);
          }

          /* speculatively enqueue b0 and b1 to the current next frame */
          /* bi is "buffer index", b is pointer to the buffer */
          to_next[0] = bi0 = from[0];
          to_next[1] = bi1 = from[1];
          from += 2;
          to_next += 2;
          n_left_from -= 2;
          n_left_to_next -= 2;

          b0 = vlib_get_buffer (vm, bi0);
          b1 = vlib_get_buffer (vm, bi1);

          /* RX interface handles */
          sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
          sw_if_index1 = vnet_buffer (b1)->sw_if_index[VLIB_RX];

          /* process 2 packets */

          /* Remove ourself from the feature bitmap */
          feature_bitmap0 =
            vnet_buffer (b0)->l2.feature_bitmap & ~L2INPUT_FEAT_VTR;
          feature_bitmap1 =
            vnet_buffer (b1)->l2.feature_bitmap & ~L2INPUT_FEAT_VTR;

          /* save for next feature graph nodes */
          vnet_buffer (b0)->l2.feature_bitmap = feature_bitmap0;
          vnet_buffer (b1)->l2.feature_bitmap = feature_bitmap1;

          /* Determine the next node */
          next0 = feat_bitmap_get_next_node_index (msm->feat_next_node_index,
                                                   feature_bitmap0);
          next1 = feat_bitmap_get_next_node_index (msm->feat_next_node_index,
                                                   feature_bitmap1);

          l2_output_config_t *config0;
          l2_output_config_t *config1;
          config0 = vec_elt_at_index (l2output_main.configs, sw_if_index0);
          config1 = vec_elt_at_index (l2output_main.configs, sw_if_index1);

          if (PREDICT_FALSE (config0->out_vtr_flag))
            {
              if (config0->output_vtr.push_and_pop_bytes)
                {
                  /* perform the tag rewrite on two packets */
                  if (l2_vtr_process
                      (b0,
                       &(vec_elt_at_index
                         (l2output_main.configs, sw_if_index0)->input_vtr)))
                    {
                      /* Drop packet */
                      next0 = L2_INVTR_NEXT_DROP;
                      b0->error = node->errors[L2_INVTR_ERROR_DROP];
                    }
                }
              else if (config0->output_pbb_vtr.push_and_pop_bytes)
                {
                  if (l2_pbb_process (b0, &(config0->input_pbb_vtr)))
                    {
                      /* Drop packet */
                      next0 = L2_INVTR_NEXT_DROP;
                      b0->error = node->errors[L2_INVTR_ERROR_DROP];
                    }
                }
            }
          if (PREDICT_FALSE (config1->out_vtr_flag))
            {
              if (config1->output_vtr.push_and_pop_bytes)
                {
                  if (l2_vtr_process
                      (b1,
                       &(vec_elt_at_index
                         (l2output_main.configs, sw_if_index1)->input_vtr)))
                    {
                      /* Drop packet */
                      next1 = L2_INVTR_NEXT_DROP;
                      b1->error = node->errors[L2_INVTR_ERROR_DROP];
                    }
                }
              else if (config1->output_pbb_vtr.push_and_pop_bytes)
                {
                  if (l2_pbb_process (b1, &(config1->input_pbb_vtr)))
                    {
                      /* Drop packet */
                      next1 = L2_INVTR_NEXT_DROP;
                      b1->error = node->errors[L2_INVTR_ERROR_DROP];
                    }
                }
            }

          if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)))
            {
              if (b0->flags & VLIB_BUFFER_IS_TRACED)
                {
                  l2_invtr_trace_t *t =
                    vlib_add_trace (vm, node, b0, sizeof (*t));
                  ethernet_header_t *h0 = vlib_buffer_get_current (b0);
                  t->sw_if_index = sw_if_index0;
                  clib_memcpy (t->src, h0->src_address, 6);
                  clib_memcpy (t->dst, h0->dst_address, 6);
                  clib_memcpy (t->raw, &h0->type, sizeof (t->raw));
                }
              if (b1->flags & VLIB_BUFFER_IS_TRACED)
                {
                  l2_invtr_trace_t *t =
                    vlib_add_trace (vm, node, b1, sizeof (*t));
                  ethernet_header_t *h1 = vlib_buffer_get_current (b1);
                  t->sw_if_index = sw_if_index1;
                  clib_memcpy (t->src, h1->src_address, 6);
                  clib_memcpy (t->dst, h1->dst_address, 6);
                  clib_memcpy (t->raw, &h1->type, sizeof (t->raw));
                }
            }

          /* verify speculative enqueues, maybe switch current next frame */
          /* if next0==next1==next_index then nothing special needs to be done */
          vlib_validate_buffer_enqueue_x2 (vm, node, next_index,
                                           to_next, n_left_to_next,
                                           bi0, bi1, next0, next1);
        }

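      /* Single loop: process any remaining packets one at a time. */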
      while (n_left_from > 0 && n_left_to_next > 0)
        {
          u32 bi0;
          vlib_buffer_t *b0;
          u32 next0;
          u32 sw_if_index0;
          u32 feature_bitmap0;

          /* speculatively enqueue b0 to the current next frame */
          bi0 = from[0];
          to_next[0] = bi0;
          from += 1;
          to_next += 1;
          n_left_from -= 1;
          n_left_to_next -= 1;

          b0 = vlib_get_buffer (vm, bi0);

          sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];

          /* process 1 packet */

          /* Remove ourself from the feature bitmap */
          feature_bitmap0 =
            vnet_buffer (b0)->l2.feature_bitmap & ~L2INPUT_FEAT_VTR;

          /* save for next feature graph nodes */
          vnet_buffer (b0)->l2.feature_bitmap = feature_bitmap0;

          /* Determine the next node */
          next0 = feat_bitmap_get_next_node_index (msm->feat_next_node_index,
                                                   feature_bitmap0);

          l2_output_config_t *config0;
          config0 = vec_elt_at_index (l2output_main.configs, sw_if_index0);

          if (PREDICT_FALSE (config0->out_vtr_flag))
            {
              if (config0->output_vtr.push_and_pop_bytes)
                {
                  /* perform the tag rewrite on one packet */
                  if (l2_vtr_process
                      (b0,
                       &(vec_elt_at_index
                         (l2output_main.configs, sw_if_index0)->input_vtr)))
                    {
                      /* Drop packet */
                      next0 = L2_INVTR_NEXT_DROP;
                      b0->error = node->errors[L2_INVTR_ERROR_DROP];
                    }
                }
              else if (config0->output_pbb_vtr.push_and_pop_bytes)
                {
                  if (l2_pbb_process (b0, &(config0->input_pbb_vtr)))
                    {
                      /* Drop packet */
                      next0 = L2_INVTR_NEXT_DROP;
                      b0->error = node->errors[L2_INVTR_ERROR_DROP];
                    }
                }
            }

          if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
                             && (b0->flags & VLIB_BUFFER_IS_TRACED)))
            {
              l2_invtr_trace_t *t =
                vlib_add_trace (vm, node, b0, sizeof (*t));
              ethernet_header_t *h0 = vlib_buffer_get_current (b0);
              t->sw_if_index = sw_if_index0;
              clib_memcpy (t->src, h0->src_address, 6);
              clib_memcpy (t->dst, h0->dst_address, 6);
              clib_memcpy (t->raw, &h0->type, sizeof (t->raw));
            }

          /* verify speculative enqueue, maybe switch current next frame */
          vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                           to_next, n_left_to_next,
                                           bi0, next0);
        }

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

  return frame->n_vectors;
}


/* *INDENT-OFF* */
VLIB_REGISTER_NODE (l2_invtr_node,static) = {
  .function = l2_invtr_node_fn,
  .name = "l2-input-vtr",
  .vector_size = sizeof (u32),
  .format_trace = format_l2_invtr_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(l2_invtr_error_strings),
  .error_strings = l2_invtr_error_strings,

  .n_next_nodes = L2_INVTR_N_NEXT,

  /* edit / add dispositions here */
  .next_nodes = {
       [L2_INVTR_NEXT_DROP]  = "error-drop",
  },
};
/* *INDENT-ON* */

VLIB_NODE_FUNCTION_MULTIARCH (l2_invtr_node, l2_invtr_node_fn)
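
/** Initialize the input vlan tag rewrite node. */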
clib_error_t *
l2_invtr_init (vlib_main_t * vm)
{
  l2_invtr_main_t *mp = &l2_invtr_main;

  mp->vlib_main = vm;
  mp->vnet_main = vnet_get_main ();

  /* Initialize the feature next-node indexes */
  feat_bitmap_init_next_nodes (vm,
                               l2_invtr_node.index,
                               L2INPUT_N_FEAT,
                               l2input_get_feat_names (),
                               mp->feat_next_node_index);

  return 0;
}

VLIB_INIT_FUNCTION (l2_invtr_init);
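
/*
 * Usage note: this node runs only when the L2INPUT_FEAT_VTR bit is set in
 * an interface's L2 input feature bitmap.  That bit is set when a tag
 * rewrite is configured on the interface, e.g. via the
 * "set interface l2 tag-rewrite" CLI (see l2_vtr.c).  A minimal sketch,
 * assuming the l2vtr_configure() helper declared in l2_vtr.h and a valid
 * sw_if_index:
 *
 *   // push one 802.1q tag (VLAN 100) onto packets received on sw_if_index;
 *   // the "1" is assumed here to select dot1q (rather than dot1ad) tagging
 *   l2vtr_configure (vm, vnet_get_main (), sw_if_index,
 *                    L2_VTR_PUSH_1, 1, 100, 0);
 */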

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */