virtio: enable the interrupt support for uio_pci_generic
[vpp.git] / src / plugins / nat / nat44_classify.c
1 /*
2  * Copyright (c) 2018 Cisco and/or its affiliates.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at:
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 /**
16  * @file
17  * @brief Classify for one armed NAT44 (in+out interface)
18  */
19
20 #include <vlib/vlib.h>
21 #include <vnet/vnet.h>
22 #include <vnet/fib/ip4_fib.h>
23 #include <nat/nat.h>
24 #include <nat/nat_inlines.h>
25 #include <nat/nat44/ed_inlines.h>
26
/* X-macro list of this node's counters: each _(sym, str) entry expands
 * to an enum member (below) and its display string. */
#define foreach_nat44_classify_error                      \
_(NEXT_IN2OUT, "next in2out")                             \
_(NEXT_OUT2IN, "next out2in")                             \
_(FRAG_CACHED, "fragment cached")
31
/* Counter indices generated from foreach_nat44_classify_error;
 * NAT44_CLASSIFY_N_ERROR is the total number of counters. */
typedef enum
{
#define _(sym,str) NAT44_CLASSIFY_ERROR_##sym,
  foreach_nat44_classify_error
#undef _
    NAT44_CLASSIFY_N_ERROR,
} nat44_classify_error_t;
39
/* Human-readable counter names, in the same order as the enum above
 * (referenced by the node registration's .error_strings). */
static char *nat44_classify_error_strings[] = {
#define _(sym,string) string,
  foreach_nat44_classify_error
#undef _
};
45
/* Next-node indices for the standalone nat44-classify node; they map
 * to the .next_nodes table in its VLIB_REGISTER_NODE below. */
typedef enum
{
  NAT44_CLASSIFY_NEXT_IN2OUT,
  NAT44_CLASSIFY_NEXT_OUT2IN,
  NAT44_CLASSIFY_NEXT_DROP,
  NAT44_CLASSIFY_N_NEXT,
} nat44_classify_next_t;
53
/* Per-packet trace record shared by all three classify node variants. */
typedef struct
{
  u8 next_in2out;		/* 1 if the packet went to the in2out path */
  u8 cached;			/* 1 if a fragment was cached (always 0 in this file) */
} nat44_classify_trace_t;
59
60 static u8 *
61 format_nat44_classify_trace (u8 * s, va_list * args)
62 {
63   CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
64   CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
65   nat44_classify_trace_t *t = va_arg (*args, nat44_classify_trace_t *);
66   char *next;
67
68   if (t->cached)
69     s = format (s, "nat44-classify: fragment cached");
70   else
71     {
72       next = t->next_in2out ? "nat44-in2out" : "nat44-out2in";
73       s = format (s, "nat44-classify: next %s", next);
74     }
75
76   return s;
77 }
78
/**
 * @brief Classify packets as in2out or out2in for plain (non-ED) NAT44.
 *
 * Direction is decided from the destination address alone: packets whose
 * destination matches a NAT pool address, or the external side of a
 * non-identity static mapping, are sent to out2in; everything else
 * defaults to in2out.  Per-direction packet counters are bumped once at
 * the end of the frame.
 *
 * @param vm     vlib main
 * @param node   this node's runtime
 * @param frame  frame of buffer indices to classify
 * @return number of packets processed (frame->n_vectors)
 */
static inline uword
nat44_classify_node_fn_inline (vlib_main_t * vm,
                               vlib_node_runtime_t * node,
                               vlib_frame_t * frame)
{
  u32 n_left_from, *from, *to_next;
  nat44_classify_next_t next_index;
  snat_main_t *sm = &snat_main;
  snat_static_mapping_t *m;
  u32 next_in2out = 0, next_out2in = 0;	/* per-frame direction counters */

  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors;
  next_index = node->cached_next_index;

  while (n_left_from > 0)
    {
      u32 n_left_to_next;

      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

      while (n_left_from > 0 && n_left_to_next > 0)
        {
          u32 bi0;
          vlib_buffer_t *b0;
          u32 next0 = NAT44_CLASSIFY_NEXT_IN2OUT;	/* default direction */
          ip4_header_t *ip0;
          snat_address_t *ap;
          clib_bihash_kv_8_8_t kv0, value0;

          /* speculatively enqueue b0 to the current next frame */
          bi0 = from[0];
          to_next[0] = bi0;
          from += 1;
          to_next += 1;
          n_left_from -= 1;
          n_left_to_next -= 1;

          b0 = vlib_get_buffer (vm, bi0);
          ip0 = vlib_buffer_get_current (b0);

          /* dst matches a NAT pool address => traffic from outside */
          /* *INDENT-OFF* */
          vec_foreach (ap, sm->addresses)
            {
              if (ip0->dst_address.as_u32 == ap->addr.as_u32)
                {
                  next0 = NAT44_CLASSIFY_NEXT_OUT2IN;
                  goto enqueue0;
                }
            }
          /* *INDENT-ON* */

          if (PREDICT_FALSE (pool_elts (sm->static_mappings)))
            {
              /* address-only lookup first (port/proto zeroed) */
              init_nat_k (&kv0, ip0->dst_address, 0, 0, 0);
              /* try to classify the fragment based on IP header alone */
              if (!clib_bihash_search_8_8 (&sm->static_mapping_by_external,
                                           &kv0, &value0))
                {
                  m = pool_elt_at_index (sm->static_mappings, value0.value);
                  /* local == external means identity-style mapping:
                     keep the default in2out direction */
                  if (m->local_addr.as_u32 != m->external_addr.as_u32)
                    next0 = NAT44_CLASSIFY_NEXT_OUT2IN;
                  goto enqueue0;
                }
              /* fall back to an address+port+proto static-mapping lookup */
              init_nat_k (&kv0, ip0->dst_address,
                          vnet_buffer (b0)->ip.reass.l4_dst_port, 0,
                          ip_proto_to_nat_proto (ip0->protocol));
              if (!clib_bihash_search_8_8
                  (&sm->static_mapping_by_external, &kv0, &value0))
                {
                  m = pool_elt_at_index (sm->static_mappings, value0.value);
                  if (m->local_addr.as_u32 != m->external_addr.as_u32)
                    next0 = NAT44_CLASSIFY_NEXT_OUT2IN;
                }
            }

        enqueue0:
          if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
                             && (b0->flags & VLIB_BUFFER_IS_TRACED)))
            {
              nat44_classify_trace_t *t =
                vlib_add_trace (vm, node, b0, sizeof (*t));
              t->cached = 0;
              t->next_in2out = next0 == NAT44_CLASSIFY_NEXT_IN2OUT ? 1 : 0;
            }

          next_in2out += next0 == NAT44_CLASSIFY_NEXT_IN2OUT;
          next_out2in += next0 == NAT44_CLASSIFY_NEXT_OUT2IN;

          /* verify speculative enqueue, maybe switch current next frame */
          vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                           to_next, n_left_to_next,
                                           bi0, next0);
        }

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

  /* publish per-direction packet counts via the node's error counters */
  vlib_node_increment_counter (vm, node->node_index,
                               NAT44_CLASSIFY_ERROR_NEXT_IN2OUT, next_in2out);
  vlib_node_increment_counter (vm, node->node_index,
                               NAT44_CLASSIFY_ERROR_NEXT_OUT2IN, next_out2in);
  return frame->n_vectors;
}
183
/**
 * @brief Classify packets for the handoff path of NAT44.
 *
 * Same destination-address classification as
 * nat44_classify_node_fn_inline, but the chosen next indices are the
 * NAT_NEXT_*_CLASSIFY arcs of the shared "nat-default" node (this node
 * is registered as its sibling below).
 *
 * @param vm     vlib main
 * @param node   this node's runtime
 * @param frame  frame of buffer indices to classify
 * @return number of packets processed (frame->n_vectors)
 */
static inline uword
nat44_handoff_classify_node_fn_inline (vlib_main_t * vm,
                                       vlib_node_runtime_t * node,
                                       vlib_frame_t * frame)
{
  u32 n_left_from, *from, *to_next;
  nat44_classify_next_t next_index;
  snat_main_t *sm = &snat_main;
  snat_static_mapping_t *m;
  u32 next_in2out = 0, next_out2in = 0;	/* per-frame direction counters */

  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors;
  next_index = node->cached_next_index;

  while (n_left_from > 0)
    {
      u32 n_left_to_next;

      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

      while (n_left_from > 0 && n_left_to_next > 0)
        {
          u32 bi0;
          vlib_buffer_t *b0;
          u32 next0 = NAT_NEXT_IN2OUT_CLASSIFY;	/* default direction */
          ip4_header_t *ip0;
          snat_address_t *ap;
          clib_bihash_kv_8_8_t kv0, value0;

          /* speculatively enqueue b0 to the current next frame */
          bi0 = from[0];
          to_next[0] = bi0;
          from += 1;
          to_next += 1;
          n_left_from -= 1;
          n_left_to_next -= 1;

          b0 = vlib_get_buffer (vm, bi0);
          ip0 = vlib_buffer_get_current (b0);

          /* dst matches a NAT pool address => traffic from outside */
          /* *INDENT-OFF* */
          vec_foreach (ap, sm->addresses)
            {
              if (ip0->dst_address.as_u32 == ap->addr.as_u32)
                {
                  next0 = NAT_NEXT_OUT2IN_CLASSIFY;
                  goto enqueue0;
                }
            }
          /* *INDENT-ON* */

          if (PREDICT_FALSE (pool_elts (sm->static_mappings)))
            {
              /* address-only lookup first (port/proto zeroed) */
              init_nat_k (&kv0, ip0->dst_address, 0, 0, 0);
              /* try to classify the fragment based on IP header alone */
              if (!clib_bihash_search_8_8 (&sm->static_mapping_by_external,
                                           &kv0, &value0))
                {
                  m = pool_elt_at_index (sm->static_mappings, value0.value);
                  /* local == external means identity-style mapping:
                     keep the default in2out direction */
                  if (m->local_addr.as_u32 != m->external_addr.as_u32)
                    next0 = NAT_NEXT_OUT2IN_CLASSIFY;
                  goto enqueue0;
                }
              /* fall back to an address+port+proto static-mapping lookup */
              init_nat_k (&kv0, ip0->dst_address,
                          vnet_buffer (b0)->ip.reass.l4_dst_port, 0,
                          ip_proto_to_nat_proto (ip0->protocol));
              if (!clib_bihash_search_8_8
                  (&sm->static_mapping_by_external, &kv0, &value0))
                {
                  m = pool_elt_at_index (sm->static_mappings, value0.value);
                  if (m->local_addr.as_u32 != m->external_addr.as_u32)
                    next0 = NAT_NEXT_OUT2IN_CLASSIFY;
                }
            }

        enqueue0:
          if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
                             && (b0->flags & VLIB_BUFFER_IS_TRACED)))
            {
              nat44_classify_trace_t *t =
                vlib_add_trace (vm, node, b0, sizeof (*t));
              t->cached = 0;
              t->next_in2out = next0 == NAT_NEXT_IN2OUT_CLASSIFY ? 1 : 0;
            }

          next_in2out += next0 == NAT_NEXT_IN2OUT_CLASSIFY;
          next_out2in += next0 == NAT_NEXT_OUT2IN_CLASSIFY;

          /* verify speculative enqueue, maybe switch current next frame */
          vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                           to_next, n_left_to_next,
                                           bi0, next0);
        }

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

  /* publish per-direction packet counts via the node's error counters */
  vlib_node_increment_counter (vm, node->node_index,
                               NAT44_CLASSIFY_ERROR_NEXT_IN2OUT, next_in2out);
  vlib_node_increment_counter (vm, node->node_index,
                               NAT44_CLASSIFY_ERROR_NEXT_OUT2IN, next_out2in);
  return frame->n_vectors;
}
288
/**
 * @brief Classify packets for endpoint-dependent (ED) NAT44.
 *
 * First records the next feature-arc node in the buffer's NAT metadata.
 * For non-ICMP packets the 6-tuple (src/dst addr+port, fib, proto) is
 * looked up in the ED flow hash; when a session exists, the session
 * index is cached in the buffer and the session's in2out flow key
 * decides the direction.  With no session hit, the packet falls back to
 * the same pool-address / static-mapping classification used by the
 * other variants.
 *
 * @param vm     vlib main
 * @param node   this node's runtime
 * @param frame  frame of buffer indices to classify
 * @return number of packets processed (frame->n_vectors)
 */
static inline uword
nat44_ed_classify_node_fn_inline (vlib_main_t * vm,
                                  vlib_node_runtime_t * node,
                                  vlib_frame_t * frame)
{
  u32 n_left_from, *from, *to_next;
  nat44_classify_next_t next_index;
  snat_main_t *sm = &snat_main;
  snat_static_mapping_t *m;
  u32 next_in2out = 0, next_out2in = 0;	/* per-frame direction counters */

  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors;
  next_index = node->cached_next_index;

  while (n_left_from > 0)
    {
      u32 n_left_to_next;

      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

      while (n_left_from > 0 && n_left_to_next > 0)
        {
          u32 bi0;
          vlib_buffer_t *b0;
          u32 next0 = NAT_NEXT_IN2OUT_ED_FAST_PATH;	/* default direction */
          u32 sw_if_index0, rx_fib_index0;
          ip4_header_t *ip0;
          snat_address_t *ap;
          clib_bihash_kv_8_8_t kv0, value0;
          clib_bihash_kv_16_8_t ed_kv0, ed_value0;

          /* speculatively enqueue b0 to the current next frame */
          bi0 = from[0];
          to_next[0] = bi0;
          from += 1;
          to_next += 1;
          n_left_from -= 1;
          n_left_to_next -= 1;

          b0 = vlib_get_buffer (vm, bi0);
          ip0 = vlib_buffer_get_current (b0);

          /* remember where the feature arc would go next so the NAT
             nodes can hand the packet back to the arc later */
          u32 arc_next;
          vnet_feature_next (&arc_next, b0);
          vnet_buffer2 (b0)->nat.arc_next = arc_next;

          if (ip0->protocol != IP_PROTOCOL_ICMP)
            {
              /* process leading fragment/whole packet (with L4 header) */
              sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
              rx_fib_index0 =
                fib_table_get_index_for_sw_if_index (FIB_PROTOCOL_IP4,
                                                     sw_if_index0);
              init_ed_k (&ed_kv0, ip0->src_address,
                         vnet_buffer (b0)->ip.reass.l4_src_port,
                         ip0->dst_address,
                         vnet_buffer (b0)->ip.reass.l4_dst_port,
                         rx_fib_index0, ip0->protocol);
              /* process whole packet */
              if (!clib_bihash_search_16_8 (&sm->flow_hash, &ed_kv0,
                                            &ed_value0))
                {
                  /* session found: it must live on this worker thread */
                  ASSERT (vm->thread_index ==
                          ed_value_get_thread_index (&ed_value0));
                  snat_main_per_thread_data_t *tsm =
                    &sm->per_thread_data[vm->thread_index];
                  snat_session_t *s = pool_elt_at_index (
                    tsm->sessions, ed_value_get_session_index (&ed_value0));
                  clib_bihash_kv_16_8_t i2o_kv;
                  nat_6t_flow_to_ed_k (&i2o_kv, &s->i2o);
                  /* cache the session index so downstream nodes can
                     skip the lookup */
                  vnet_buffer2 (b0)->nat.cached_session_index =
                    ed_value_get_session_index (&ed_value0);
                  /* packet key == session's in2out key => in2out,
                     otherwise it matched the out2in side */
                  if (i2o_kv.key[0] == ed_kv0.key[0] &&
                      i2o_kv.key[1] == ed_kv0.key[1])
                    {
                      next0 = NAT_NEXT_IN2OUT_ED_FAST_PATH;
                    }
                  else
                    {
                      next0 = NAT_NEXT_OUT2IN_ED_FAST_PATH;
                    }

                  goto enqueue0;
                }
              /* session doesn't exist so continue in code */
            }

          /* dst matches a NAT pool address => traffic from outside */
          /* *INDENT-OFF* */
          vec_foreach (ap, sm->addresses)
            {
              if (ip0->dst_address.as_u32 == ap->addr.as_u32)
                {
                  next0 = NAT_NEXT_OUT2IN_ED_FAST_PATH;
                  goto enqueue0;
                }
            }
          /* *INDENT-ON* */

          if (PREDICT_FALSE (pool_elts (sm->static_mappings)))
            {
              /* address-only lookup first (port/proto zeroed) */
              init_nat_k (&kv0, ip0->dst_address, 0, 0, 0);
              /* try to classify the fragment based on IP header alone */
              if (!clib_bihash_search_8_8 (&sm->static_mapping_by_external,
                                           &kv0, &value0))
                {
                  m = pool_elt_at_index (sm->static_mappings, value0.value);
                  /* local == external means identity-style mapping:
                     keep the default in2out direction */
                  if (m->local_addr.as_u32 != m->external_addr.as_u32)
                    next0 = NAT_NEXT_OUT2IN_ED_FAST_PATH;
                  goto enqueue0;
                }
              /* fall back to an address+port+proto static-mapping lookup */
              init_nat_k (&kv0, ip0->dst_address,
                          vnet_buffer (b0)->ip.reass.l4_dst_port, 0,
                          ip_proto_to_nat_proto (ip0->protocol));
              if (!clib_bihash_search_8_8
                  (&sm->static_mapping_by_external, &kv0, &value0))
                {
                  m = pool_elt_at_index (sm->static_mappings, value0.value);
                  if (m->local_addr.as_u32 != m->external_addr.as_u32)
                    next0 = NAT_NEXT_OUT2IN_ED_FAST_PATH;
                }
            }

        enqueue0:
          if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
                             && (b0->flags & VLIB_BUFFER_IS_TRACED)))
            {
              nat44_classify_trace_t *t =
                vlib_add_trace (vm, node, b0, sizeof (*t));
              t->cached = 0;
              t->next_in2out = next0 == NAT_NEXT_IN2OUT_ED_FAST_PATH ? 1 : 0;
            }

          next_in2out += next0 == NAT_NEXT_IN2OUT_ED_FAST_PATH;
          next_out2in += next0 == NAT_NEXT_OUT2IN_ED_FAST_PATH;

          /* verify speculative enqueue, maybe switch current next frame */
          vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                           to_next, n_left_to_next,
                                           bi0, next0);
        }

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

  /* publish per-direction packet counts via the node's error counters */
  vlib_node_increment_counter (vm, node->node_index,
                               NAT44_CLASSIFY_ERROR_NEXT_IN2OUT, next_in2out);
  vlib_node_increment_counter (vm, node->node_index,
                               NAT44_CLASSIFY_ERROR_NEXT_OUT2IN, next_out2in);
  return frame->n_vectors;
}
440
/* Graph-node entry point: thin wrapper around the inline worker. */
VLIB_NODE_FN (nat44_classify_node) (vlib_main_t * vm,
                                    vlib_node_runtime_t * node,
                                    vlib_frame_t * frame)
{
  return nat44_classify_node_fn_inline (vm, node, frame);
}
447
/* *INDENT-OFF* */
/* Standalone classifier: owns its own next nodes (in2out/out2in/drop)
 * and exposes the counters defined above. */
VLIB_REGISTER_NODE (nat44_classify_node) = {
  .name = "nat44-classify",
  .vector_size = sizeof (u32),
  .format_trace = format_nat44_classify_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .n_errors = ARRAY_LEN(nat44_classify_error_strings),
  .error_strings = nat44_classify_error_strings,
  .n_next_nodes = NAT44_CLASSIFY_N_NEXT,
  .next_nodes = {
    [NAT44_CLASSIFY_NEXT_IN2OUT] = "nat44-in2out",
    [NAT44_CLASSIFY_NEXT_OUT2IN] = "nat44-out2in",
    [NAT44_CLASSIFY_NEXT_DROP] = "error-drop",
  },
};
/* *INDENT-ON* */
464
/* Graph-node entry point for the ED classifier. */
VLIB_NODE_FN (nat44_ed_classify_node) (vlib_main_t * vm,
                                       vlib_node_runtime_t * node,
                                       vlib_frame_t * frame)
{
  return nat44_ed_classify_node_fn_inline (vm, node, frame);
}
471
/* *INDENT-OFF* */
/* ED classifier: sibling of "nat-default", so it shares that node's
 * next-node table (the NAT_NEXT_* indices used above). */
VLIB_REGISTER_NODE (nat44_ed_classify_node) = {
  .name = "nat44-ed-classify",
  .vector_size = sizeof (u32),
  .sibling_of = "nat-default",
  .format_trace = format_nat44_classify_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
};
/* *INDENT-ON* */
481
/* Graph-node entry point for the handoff classifier. */
VLIB_NODE_FN (nat44_handoff_classify_node) (vlib_main_t * vm,
                                            vlib_node_runtime_t * node,
                                            vlib_frame_t * frame)
{
  return nat44_handoff_classify_node_fn_inline (vm, node, frame);
}
488
/* *INDENT-OFF* */
/* Handoff classifier: also a sibling of "nat-default", sharing its
 * next-node table. */
VLIB_REGISTER_NODE (nat44_handoff_classify_node) = {
  .name = "nat44-handoff-classify",
  .vector_size = sizeof (u32),
  .sibling_of = "nat-default",
  .format_trace = format_nat44_classify_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
};

/* *INDENT-ON* */
499
500 /*
501  * fd.io coding-style-patch-verification: ON
502  *
503  * Local Variables:
504  * eval: (c-set-style "gnu")
505  * End:
506  */