/*
 * l2_in_out_feat_arc.c : layer 2 input/output feature arc processing
 *
 * Copyright (c) 2013,2018 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <vlib/vlib.h>
#include <vnet/ethernet/ethernet.h>
#include <vnet/ip/ip_packet.h>
#include <vnet/l2/l2_input.h>
#include <vnet/l2/l2_output.h>
#include <vnet/l2/l2_in_out_feat_arc.h>

#include <vppinfra/error.h>
#include <vppinfra/hash.h>
#include <vppinfra/cache.h>


typedef struct
{

  /* Next nodes for each feature */
  u32 feat_next_node_index[IN_OUT_FEAT_ARC_N_TABLE_GROUPS][32];
  u8 ip4_feat_arc_index[IN_OUT_FEAT_ARC_N_TABLE_GROUPS];
  u8 ip6_feat_arc_index[IN_OUT_FEAT_ARC_N_TABLE_GROUPS];
  u8 nonip_feat_arc_index[IN_OUT_FEAT_ARC_N_TABLE_GROUPS];
  u32 next_slot[IN_OUT_FEAT_ARC_N_TABLE_GROUPS];
} l2_in_out_feat_arc_main_t __attribute__ ((aligned (CLIB_CACHE_LINE_BYTES)));

typedef struct
{
  u32 sw_if_index;
  u32 next_index;
  u32 feature_bitmap;
  u16 ethertype;
  u8 arc_head;
} l2_in_out_feat_arc_trace_t;

/* packet trace format function */
static u8 *
format_l2_in_out_feat_arc_trace (u8 * s, u32 is_output, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  l2_in_out_feat_arc_trace_t *t =
    va_arg (*args, l2_in_out_feat_arc_trace_t *);

  s =
    format (s,
            "%s: head %d feature_bitmap %x ethertype %x sw_if_index %d, next_index %d",
            is_output ? "OUT-FEAT-ARC" : "IN-FEAT-ARC", t->arc_head,
            t->feature_bitmap, t->ethertype, t->sw_if_index, t->next_index);
  return s;
}

static u8 *
format_l2_in_feat_arc_trace (u8 * s, va_list * args)
{
  return format_l2_in_out_feat_arc_trace (s,
                                          IN_OUT_FEAT_ARC_INPUT_TABLE_GROUP,
                                          args);
}

static u8 *
format_l2_out_feat_arc_trace (u8 * s, va_list * args)
{
  return format_l2_in_out_feat_arc_trace (s,
                                          IN_OUT_FEAT_ARC_OUTPUT_TABLE_GROUP,
                                          args);
}


#define foreach_l2_in_feat_arc_error                   \
_(DEFAULT, "in default")                         \


#define foreach_l2_out_feat_arc_error                   \
_(DEFAULT, "out default")                         \


typedef enum
{
#define _(sym,str) L2_IN_FEAT_ARC_ERROR_##sym,
  foreach_l2_in_feat_arc_error
#undef _
    L2_IN_FEAT_ARC_N_ERROR,
} l2_in_feat_arc_error_t;

static char *l2_in_feat_arc_error_strings[] = {
#define _(sym,string) string,
  foreach_l2_in_feat_arc_error
#undef _
};

typedef enum
{
#define _(sym,str) L2_OUT_FEAT_ARC_ERROR_##sym,
  foreach_l2_out_feat_arc_error
#undef _
    L2_OUT_FEAT_ARC_N_ERROR,
} l2_out_feat_arc_error_t;

static char *l2_out_feat_arc_error_strings[] = {
#define _(sym,string) string,
  foreach_l2_out_feat_arc_error
#undef _
};

extern l2_in_out_feat_arc_main_t l2_in_out_feat_arc_main;

#ifndef CLIB_MARCH_VARIANT
l2_in_out_feat_arc_main_t l2_in_out_feat_arc_main;
#endif /* CLIB_MARCH_VARIANT */

#define get_u16(addr) ( *((u16 *)(addr)) )
#define L2_FEAT_ARC_VEC_SIZE 2

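/* Prefetch the vlib_buffer_t metadata of vector_sz buffers (store hint,
 * since the feature code will update the buffer metadata). */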
static_always_inline void
buffer_prefetch_xN (int vector_sz, vlib_buffer_t ** b)
{
  int ii;
  for (ii = 0; ii < vector_sz; ii++)
    clib_prefetch_store (b[ii]);
}

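/* Pick up the sw_if_index that selects the feature configuration:
 * the TX interface on the output path, the RX interface on the input path. */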
static_always_inline void
get_sw_if_index_xN (int vector_sz, int is_output, vlib_buffer_t ** b,
                    u32 * out_sw_if_index)
{
  int ii;
  for (ii = 0; ii < vector_sz; ii++)
    if (is_output)
      out_sw_if_index[ii] = vnet_buffer (b[ii])->sw_if_index[VLIB_TX];
    else
      out_sw_if_index[ii] = vnet_buffer (b[ii])->sw_if_index[VLIB_RX];
}

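/* Extract the ethertype of each packet: l2.l2_len is the length of the
 * L2 header(s), so the two bytes immediately preceding the L3 header hold
 * the (innermost) ethertype. */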
static_always_inline void
get_ethertype_xN (int vector_sz, int is_output, vlib_buffer_t ** b,
                  u16 * out_ethertype)
{
  int ii;
  for (ii = 0; ii < vector_sz; ii++)
    {
      ethernet_header_t *h0 = vlib_buffer_get_current (b[ii]);
      u8 *l3h0 = (u8 *) h0 + vnet_buffer (b[ii])->l2.l2_len;
      out_ethertype[ii] = clib_net_to_host_u16 (get_u16 (l3h0 - 2));
    }
}

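/* Arc-head dispatch: choose the per-protocol feature arc from the
 * ethertype.  If the interface has features enabled on that arc, start the
 * arc; otherwise fall through to the next enabled L2 input/output feature. */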
static_always_inline void
set_next_in_arc_head_xN (int vector_sz, int is_output, u32 * next_nodes,
                         vlib_buffer_t ** b, u32 * sw_if_index,
                         u16 * ethertype, u8 ip4_arc, u8 ip6_arc,
                         u8 nonip_arc, u16 * out_next)
{
  int ii;
  for (ii = 0; ii < vector_sz; ii++)
    {
      u32 next_index = 0;
      u8 feature_arc;
      switch (ethertype[ii])
        {
        case ETHERNET_TYPE_IP4:
          feature_arc = ip4_arc;
          break;
        case ETHERNET_TYPE_IP6:
          feature_arc = ip6_arc;
          break;
        default:
          feature_arc = nonip_arc;
        }
      if (PREDICT_TRUE (vnet_have_features (feature_arc, sw_if_index[ii])))
        vnet_feature_arc_start (feature_arc,
                                sw_if_index[ii], &next_index, b[ii]);
      else
        next_index =
          vnet_l2_feature_next (b[ii], next_nodes,
                                is_output ? L2OUTPUT_FEAT_OUTPUT_FEAT_ARC :
                                L2INPUT_FEAT_INPUT_FEAT_ARC);

      out_next[ii] = next_index;
    }
}

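/* Arc-end dispatch: resume the regular L2 feature bitmap walk after the
 * per-protocol arc has been traversed. */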
static_always_inline void
set_next_in_arc_tail_xN (int vector_sz, int is_output, u32 * next_nodes,
                         vlib_buffer_t ** b, u16 * out_next)
{
  int ii;
  for (ii = 0; ii < vector_sz; ii++)
    {
      out_next[ii] =
        vnet_l2_feature_next (b[ii], next_nodes,
                              is_output ? L2OUTPUT_FEAT_OUTPUT_FEAT_ARC :
                              L2INPUT_FEAT_INPUT_FEAT_ARC);
    }
}

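/* Add a packet trace record for traced buffers.  The arc-head nodes record
 * the sw_if_index and ethertype; the arc-end nodes record placeholders. */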
static_always_inline void
maybe_trace_xN (int vector_sz, int arc_head, vlib_main_t * vm,
                vlib_node_runtime_t * node, vlib_buffer_t ** b,
                u32 * sw_if_index, u16 * ethertype, u16 * next)
{
  int ii;
  for (ii = 0; ii < vector_sz; ii++)
    if (PREDICT_FALSE (b[ii]->flags & VLIB_BUFFER_IS_TRACED))
      {
        l2_in_out_feat_arc_trace_t *t =
          vlib_add_trace (vm, node, b[ii], sizeof (*t));
        t->arc_head = arc_head;
        t->sw_if_index = arc_head ? sw_if_index[ii] : ~0;
        t->feature_bitmap = vnet_buffer (b[ii])->l2.feature_bitmap;
        t->ethertype = arc_head ? ethertype[ii] : 0;
        t->next_index = next[ii];
      }
}

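/* Frame processing shared by the four graph nodes (input/output arc head
 * and arc end).  Packets are handled L2_FEAT_ARC_VEC_SIZE at a time with
 * look-ahead prefetch, then one at a time for the remainder. */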
always_inline uword
l2_in_out_feat_arc_node_fn (vlib_main_t * vm,
                            vlib_node_runtime_t * node, vlib_frame_t * frame,
                            int is_output, vlib_node_registration_t * fa_node,
                            int arc_head, int do_trace)
{
  u32 n_left, *from;
  u16 nexts[VLIB_FRAME_SIZE], *next;
  u16 ethertypes[VLIB_FRAME_SIZE], *ethertype;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b;
  u32 sw_if_indices[VLIB_FRAME_SIZE], *sw_if_index;
  l2_in_out_feat_arc_main_t *fam = &l2_in_out_feat_arc_main;

  u8 ip4_arc_index = fam->ip4_feat_arc_index[is_output];
  u8 ip6_arc_index = fam->ip6_feat_arc_index[is_output];
  u8 nonip_arc_index = fam->nonip_feat_arc_index[is_output];
  u32 *next_node_indices = fam->feat_next_node_index[is_output];

  from = vlib_frame_vector_args (frame);
  vlib_get_buffers (vm, from, bufs, frame->n_vectors);
  /* set the initial values for the current buffer and next pointers */
  b = bufs;
  next = nexts;
  ethertype = ethertypes;
  sw_if_index = sw_if_indices;
  n_left = frame->n_vectors;

  CLIB_PREFETCH (next_node_indices,
                 sizeof (fam->feat_next_node_index[is_output]), LOAD);

  while (n_left > 3 * L2_FEAT_ARC_VEC_SIZE)
    {
      const int vec_sz = L2_FEAT_ARC_VEC_SIZE;
      /* prefetch the buffers two blocks (2 * vec_sz) ahead */
      buffer_prefetch_xN (vec_sz, b + 2 * vec_sz);

      if (arc_head)
        {
          get_sw_if_index_xN (vec_sz, is_output, b, sw_if_index);
          get_ethertype_xN (vec_sz, is_output, b, ethertype);
          set_next_in_arc_head_xN (vec_sz, is_output, next_node_indices, b,
                                   sw_if_index, ethertype, ip4_arc_index,
                                   ip6_arc_index, nonip_arc_index, next);
        }
      else
        {
          set_next_in_arc_tail_xN (vec_sz, is_output, next_node_indices, b,
                                   next);
        }
      if (do_trace)
        maybe_trace_xN (vec_sz, arc_head, vm, node, b, sw_if_index, ethertype,
                        next);

      next += vec_sz;
      b += vec_sz;
      sw_if_index += vec_sz;
      ethertype += vec_sz;

      n_left -= vec_sz;
    }

  while (n_left > 0)
    {
      const int vec_sz = 1;

      if (arc_head)
        {
          get_sw_if_index_xN (vec_sz, is_output, b, sw_if_index);
          get_ethertype_xN (vec_sz, is_output, b, ethertype);
          set_next_in_arc_head_xN (vec_sz, is_output, next_node_indices, b,
                                   sw_if_index, ethertype, ip4_arc_index,
                                   ip6_arc_index, nonip_arc_index, next);
        }
      else
        {
          set_next_in_arc_tail_xN (vec_sz, is_output, next_node_indices, b,
                                   next);
        }
      if (do_trace)
        maybe_trace_xN (vec_sz, arc_head, vm, node, b, sw_if_index, ethertype,
                        next);

      next += vec_sz;
      b += vec_sz;
      sw_if_index += vec_sz;
      ethertype += vec_sz;

      n_left -= vec_sz;
    }

  vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);

  return frame->n_vectors;
}

VLIB_NODE_FN (l2_in_feat_arc_node) (vlib_main_t * vm,
                                    vlib_node_runtime_t * node,
                                    vlib_frame_t * frame)
{
  if (PREDICT_FALSE (node->flags & VLIB_NODE_FLAG_TRACE))
    return l2_in_out_feat_arc_node_fn (vm, node, frame,
                                       IN_OUT_FEAT_ARC_INPUT_TABLE_GROUP,
                                       &l2_in_feat_arc_node, 1, 1);
  else
    return l2_in_out_feat_arc_node_fn (vm, node, frame,
                                       IN_OUT_FEAT_ARC_INPUT_TABLE_GROUP,
                                       &l2_in_feat_arc_node, 1, 0);
}

VLIB_NODE_FN (l2_out_feat_arc_node) (vlib_main_t * vm,
                                     vlib_node_runtime_t * node,
                                     vlib_frame_t * frame)
{
  if (PREDICT_FALSE (node->flags & VLIB_NODE_FLAG_TRACE))
    return l2_in_out_feat_arc_node_fn (vm, node, frame,
                                       IN_OUT_FEAT_ARC_OUTPUT_TABLE_GROUP,
                                       &l2_out_feat_arc_node, 1, 1);
  else
    return l2_in_out_feat_arc_node_fn (vm, node, frame,
                                       IN_OUT_FEAT_ARC_OUTPUT_TABLE_GROUP,
                                       &l2_out_feat_arc_node, 1, 0);
}

VLIB_NODE_FN (l2_in_feat_arc_end_node) (vlib_main_t * vm,
                                        vlib_node_runtime_t * node,
                                        vlib_frame_t * frame)
{
  if (PREDICT_FALSE (node->flags & VLIB_NODE_FLAG_TRACE))
    return l2_in_out_feat_arc_node_fn (vm, node, frame,
                                       IN_OUT_FEAT_ARC_INPUT_TABLE_GROUP,
                                       &l2_in_feat_arc_end_node, 0, 1);
  else
    return l2_in_out_feat_arc_node_fn (vm, node, frame,
                                       IN_OUT_FEAT_ARC_INPUT_TABLE_GROUP,
                                       &l2_in_feat_arc_end_node, 0, 0);
}

VLIB_NODE_FN (l2_out_feat_arc_end_node) (vlib_main_t * vm,
                                         vlib_node_runtime_t * node,
                                         vlib_frame_t * frame)
{
  if (PREDICT_FALSE (node->flags & VLIB_NODE_FLAG_TRACE))
    return l2_in_out_feat_arc_node_fn (vm, node, frame,
                                       IN_OUT_FEAT_ARC_OUTPUT_TABLE_GROUP,
                                       &l2_out_feat_arc_end_node, 0, 1);
  else
    return l2_in_out_feat_arc_node_fn (vm, node, frame,
                                       IN_OUT_FEAT_ARC_OUTPUT_TABLE_GROUP,
                                       &l2_out_feat_arc_end_node, 0, 0);
}


#ifndef CLIB_MARCH_VARIANT
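/* Toggle the L2 input/output feature bit that steers packets from the
 * given interface into the corresponding arc-head node. */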
void
vnet_l2_in_out_feat_arc_enable_disable (u32 sw_if_index, int is_output,
                                        int enable_disable)
{
  if (is_output)
    l2output_intf_bitmap_enable (sw_if_index, L2OUTPUT_FEAT_OUTPUT_FEAT_ARC,
                                 (u32) enable_disable);
  else
    l2input_intf_bitmap_enable (sw_if_index, L2INPUT_FEAT_INPUT_FEAT_ARC,
                                (u32) enable_disable);
}
#endif /* CLIB_MARCH_VARIANT */

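/* Per-direction, per-protocol feature arcs.  Each arc starts at the
 * corresponding L2 arc-head node; the assigned arc indices are stored in
 * l2_in_out_feat_arc_main for use by the data-plane nodes above. */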
VNET_FEATURE_ARC_INIT (l2_in_ip4_arc, static) =
{
  .arc_name  = "l2-input-ip4",
  .start_nodes = VNET_FEATURES ("l2-input-feat-arc"),
  .arc_index_ptr = &l2_in_out_feat_arc_main.ip4_feat_arc_index[IN_OUT_FEAT_ARC_INPUT_TABLE_GROUP],
};

VNET_FEATURE_ARC_INIT (l2_out_ip4_arc, static) =
{
  .arc_name  = "l2-output-ip4",
  .start_nodes = VNET_FEATURES ("l2-output-feat-arc"),
  .arc_index_ptr = &l2_in_out_feat_arc_main.ip4_feat_arc_index[IN_OUT_FEAT_ARC_OUTPUT_TABLE_GROUP],
};

VNET_FEATURE_ARC_INIT (l2_in_ip6_arc, static) =
{
  .arc_name  = "l2-input-ip6",
  .start_nodes = VNET_FEATURES ("l2-input-feat-arc"),
  .arc_index_ptr = &l2_in_out_feat_arc_main.ip6_feat_arc_index[IN_OUT_FEAT_ARC_INPUT_TABLE_GROUP],
};

VNET_FEATURE_ARC_INIT (l2_out_ip6_arc, static) =
{
  .arc_name  = "l2-output-ip6",
  .start_nodes = VNET_FEATURES ("l2-output-feat-arc"),
  .arc_index_ptr = &l2_in_out_feat_arc_main.ip6_feat_arc_index[IN_OUT_FEAT_ARC_OUTPUT_TABLE_GROUP],
};

VNET_FEATURE_ARC_INIT (l2_in_nonip_arc, static) =
{
  .arc_name  = "l2-input-nonip",
  .start_nodes = VNET_FEATURES ("l2-input-feat-arc"),
  .arc_index_ptr = &l2_in_out_feat_arc_main.nonip_feat_arc_index[IN_OUT_FEAT_ARC_INPUT_TABLE_GROUP],
};

VNET_FEATURE_ARC_INIT (l2_out_nonip_arc, static) =
{
  .arc_name  = "l2-output-nonip",
  .start_nodes = VNET_FEATURES ("l2-output-feat-arc"),
  .arc_index_ptr = &l2_in_out_feat_arc_main.nonip_feat_arc_index[IN_OUT_FEAT_ARC_OUTPUT_TABLE_GROUP],
};

VLIB_REGISTER_NODE (l2_in_feat_arc_node) = {
  .name = "l2-input-feat-arc",
  .vector_size = sizeof (u32),
  .format_trace = format_l2_in_feat_arc_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(l2_in_feat_arc_error_strings),
  .error_strings = l2_in_feat_arc_error_strings,
};

VLIB_REGISTER_NODE (l2_out_feat_arc_node) = {
  .name = "l2-output-feat-arc",
  .vector_size = sizeof (u32),
  .format_trace = format_l2_out_feat_arc_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(l2_out_feat_arc_error_strings),
  .error_strings = l2_out_feat_arc_error_strings,
};

VLIB_REGISTER_NODE (l2_in_feat_arc_end_node) = {
  .name = "l2-input-feat-arc-end",
  .vector_size = sizeof (u32),
  .format_trace = format_l2_in_feat_arc_trace,
  .sibling_of = "l2-input-feat-arc",
};

VLIB_REGISTER_NODE (l2_out_feat_arc_end_node) = {
  .name = "l2-output-feat-arc-end",
  .vector_size = sizeof (u32),
  .format_trace = format_l2_out_feat_arc_trace,
  .sibling_of = "l2-output-feat-arc",
};

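/* Register the arc-end nodes as the last feature on every arc
 * (runs_before = 0), so packets leaving an arc return to the normal L2
 * feature processing. */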
VNET_FEATURE_INIT (l2_in_ip4_arc_end, static) =
{
  .arc_name = "l2-input-ip4",
  .node_name = "l2-input-feat-arc-end",
  .runs_before = 0,     /* not before any other features */
};

VNET_FEATURE_INIT (l2_out_ip4_arc_end, static) =
{
  .arc_name = "l2-output-ip4",
  .node_name = "l2-output-feat-arc-end",
  .runs_before = 0,     /* not before any other features */
};

VNET_FEATURE_INIT (l2_in_ip6_arc_end, static) =
{
  .arc_name = "l2-input-ip6",
  .node_name = "l2-input-feat-arc-end",
  .runs_before = 0,     /* not before any other features */
};

VNET_FEATURE_INIT (l2_out_ip6_arc_end, static) =
{
  .arc_name = "l2-output-ip6",
  .node_name = "l2-output-feat-arc-end",
  .runs_before = 0,     /* not before any other features */
};

VNET_FEATURE_INIT (l2_in_nonip_arc_end, static) =
{
  .arc_name = "l2-input-nonip",
  .node_name = "l2-input-feat-arc-end",
  .runs_before = 0,     /* not before any other features */
};

VNET_FEATURE_INIT (l2_out_nonip_arc_end, static) =
{
  .arc_name = "l2-output-nonip",
  .node_name = "l2-output-feat-arc-end",
  .runs_before = 0,     /* not before any other features */
};

#ifndef CLIB_MARCH_VARIANT
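/* Build the tables of next-node indices used to resume the L2 input and
 * output feature bitmap walk from the arc nodes. */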
clib_error_t *
l2_in_out_feat_arc_init (vlib_main_t * vm)
{
  l2_in_out_feat_arc_main_t *mp = &l2_in_out_feat_arc_main;

  /* Initialize the feature next-node indexes */
  feat_bitmap_init_next_nodes (vm,
                               l2_in_feat_arc_end_node.index,
                               L2INPUT_N_FEAT,
                               l2input_get_feat_names (),
                               mp->feat_next_node_index
                               [IN_OUT_FEAT_ARC_INPUT_TABLE_GROUP]);
  feat_bitmap_init_next_nodes (vm, l2_out_feat_arc_end_node.index,
                               L2OUTPUT_N_FEAT, l2output_get_feat_names (),
                               mp->feat_next_node_index
                               [IN_OUT_FEAT_ARC_OUTPUT_TABLE_GROUP]);
  return 0;
}

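/* Return non-zero if any of the three per-protocol arcs in the given
 * direction has at least one feature enabled on the interface. */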
static int
l2_has_features (u32 sw_if_index, int is_output)
{
  int has_features = 0;
  l2_in_out_feat_arc_main_t *mp = &l2_in_out_feat_arc_main;
  has_features +=
    vnet_have_features (mp->ip4_feat_arc_index[is_output], sw_if_index);
  has_features +=
    vnet_have_features (mp->ip6_feat_arc_index[is_output], sw_if_index);
  has_features +=
    vnet_have_features (mp->nonip_feat_arc_index[is_output], sw_if_index);
  return has_features > 0;
}

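/* Map a feature arc index back to the direction (output or input) it
 * belongs to. */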
static int
l2_is_output_arc (u8 arc_index)
{
  l2_in_out_feat_arc_main_t *mp = &l2_in_out_feat_arc_main;
  int idx = IN_OUT_FEAT_ARC_OUTPUT_TABLE_GROUP;
  return (mp->ip4_feat_arc_index[idx] == arc_index
          || mp->ip6_feat_arc_index[idx] == arc_index
          || mp->nonip_feat_arc_index[idx] == arc_index);
}

static int
l2_is_input_arc (u8 arc_index)
{
  l2_in_out_feat_arc_main_t *mp = &l2_in_out_feat_arc_main;
  int idx = IN_OUT_FEAT_ARC_INPUT_TABLE_GROUP;
  return (mp->ip4_feat_arc_index[idx] == arc_index
          || mp->ip6_feat_arc_index[idx] == arc_index
          || mp->nonip_feat_arc_index[idx] == arc_index);
}

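/* Enable/disable a feature on one of the L2 arcs above, keeping the
 * arc-head L2 feature bit in sync: it is set when the first feature in a
 * direction is enabled and cleared when the last one is removed.
 *
 * Illustrative call (the feature node name "my-l2-feature" is hypothetical,
 * not part of this file):
 *
 *   vnet_l2_feature_enable_disable ("l2-input-ip4", "my-l2-feature",
 *                                   sw_if_index, 1, 0, 0);
 */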
int
vnet_l2_feature_enable_disable (const char *arc_name, const char *node_name,
                                u32 sw_if_index, int enable_disable,
                                void *feature_config,
                                u32 n_feature_config_bytes)
{
  u8 arc_index = vnet_get_feature_arc_index (arc_name);
  if (arc_index == (u8) ~ 0)
    return VNET_API_ERROR_INVALID_VALUE;

  /* check the state before we try to enable/disable */
  int had_features = vnet_have_features (arc_index, sw_if_index);

  int ret = vnet_feature_enable_disable (arc_name, node_name, sw_if_index,
                                         enable_disable, feature_config,
                                         n_feature_config_bytes);
  if (ret)
    return ret;

  int has_features = vnet_have_features (arc_index, sw_if_index);

  if (had_features != has_features)
    {
      if (l2_is_output_arc (arc_index))
        {
          vnet_l2_in_out_feat_arc_enable_disable (sw_if_index, 1,
                                                  l2_has_features
                                                  (sw_if_index, 1));
        }
      if (l2_is_input_arc (arc_index))
        {
          vnet_l2_in_out_feat_arc_enable_disable (sw_if_index, 0,
                                                  l2_has_features
                                                  (sw_if_index, 0));
        }
    }
  return 0;
}


VLIB_INIT_FUNCTION (l2_in_out_feat_arc_init);
#endif /* CLIB_MARCH_VARIANT */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */