nat: use SVR
src/plugins/nat/nat44_classify.c
/*
 * Copyright (c) 2018 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * @file
 * @brief Classify for one-armed NAT44 (in+out interface)
 */

#include <vlib/vlib.h>
#include <vnet/vnet.h>
#include <vnet/fib/ip4_fib.h>
#include <nat/nat.h>
#include <nat/nat_inlines.h>

#define foreach_nat44_classify_error                      \
_(NEXT_IN2OUT, "next in2out")                             \
_(NEXT_OUT2IN, "next out2in")                             \
_(FRAG_CACHED, "fragment cached")

typedef enum
{
#define _(sym,str) NAT44_CLASSIFY_ERROR_##sym,
  foreach_nat44_classify_error
#undef _
    NAT44_CLASSIFY_N_ERROR,
} nat44_classify_error_t;

static char *nat44_classify_error_strings[] = {
#define _(sym,string) string,
  foreach_nat44_classify_error
#undef _
};

typedef enum
{
  NAT44_CLASSIFY_NEXT_IN2OUT,
  NAT44_CLASSIFY_NEXT_OUT2IN,
  NAT44_CLASSIFY_NEXT_DROP,
  NAT44_CLASSIFY_N_NEXT,
} nat44_classify_next_t;

typedef struct
{
  u8 next_in2out;
  u8 cached;
} nat44_classify_trace_t;

static u8 *
format_nat44_classify_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  nat44_classify_trace_t *t = va_arg (*args, nat44_classify_trace_t *);
  char *next;

  if (t->cached)
    s = format (s, "nat44-classify: fragment cached");
  else
    {
      next = t->next_in2out ? "nat44-in2out" : "nat44-out2in";
      s = format (s, "nat44-classify: next %s", next);
    }

  return s;
}

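/*
 * Classifier shared by the plain, deterministic and handoff classify
 * nodes: every packet defaults to the in2out next node and is redirected
 * to out2in when its destination address matches a NAT pool address or
 * the external address of a non-identity static mapping (looked up first
 * by address only, then by address, protocol and destination port).
 */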
static inline uword
nat44_classify_node_fn_inline (vlib_main_t * vm,
                               vlib_node_runtime_t * node,
                               vlib_frame_t * frame)
{
  u32 n_left_from, *from, *to_next;
  nat44_classify_next_t next_index;
  snat_main_t *sm = &snat_main;
  snat_static_mapping_t *m;
  u32 *fragments_to_drop = 0;
  u32 *fragments_to_loopback = 0;
  u32 next_in2out = 0, next_out2in = 0, frag_cached = 0;

  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors;
  next_index = node->cached_next_index;

  while (n_left_from > 0)
    {
      u32 n_left_to_next;

      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

      while (n_left_from > 0 && n_left_to_next > 0)
        {
          u32 bi0;
          vlib_buffer_t *b0;
          u32 next0 = NAT44_CLASSIFY_NEXT_IN2OUT;
          ip4_header_t *ip0;
          snat_address_t *ap;
          snat_session_key_t m_key0;
          clib_bihash_kv_8_8_t kv0, value0;
          u8 cached0 = 0;

          /* speculatively enqueue b0 to the current next frame */
          bi0 = from[0];
          to_next[0] = bi0;
          from += 1;
          to_next += 1;
          n_left_from -= 1;
          n_left_to_next -= 1;

          b0 = vlib_get_buffer (vm, bi0);
          ip0 = vlib_buffer_get_current (b0);

          /* *INDENT-OFF* */
          vec_foreach (ap, sm->addresses)
            {
              if (ip0->dst_address.as_u32 == ap->addr.as_u32)
                {
                  next0 = NAT44_CLASSIFY_NEXT_OUT2IN;
                  goto enqueue0;
                }
            }
          /* *INDENT-ON* */

          if (PREDICT_FALSE (pool_elts (sm->static_mappings)))
            {
              m_key0.addr = ip0->dst_address;
              m_key0.port = 0;
              m_key0.protocol = 0;
              m_key0.fib_index = 0;
              kv0.key = m_key0.as_u64;
              /* try to classify the fragment based on IP header alone */
              if (!clib_bihash_search_8_8 (&sm->static_mapping_by_external,
                                           &kv0, &value0))
                {
                  m = pool_elt_at_index (sm->static_mappings, value0.value);
                  if (m->local_addr.as_u32 != m->external_addr.as_u32)
                    next0 = NAT44_CLASSIFY_NEXT_OUT2IN;
                  goto enqueue0;
                }
              m_key0.port =
                clib_net_to_host_u16 (vnet_buffer (b0)->ip.reass.l4_dst_port);
              m_key0.protocol = ip_proto_to_snat_proto (ip0->protocol);
              kv0.key = m_key0.as_u64;
              if (!clib_bihash_search_8_8
                  (&sm->static_mapping_by_external, &kv0, &value0))
                {
                  m = pool_elt_at_index (sm->static_mappings, value0.value);
                  if (m->local_addr.as_u32 != m->external_addr.as_u32)
                    next0 = NAT44_CLASSIFY_NEXT_OUT2IN;
                }
            }

        enqueue0:
          if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
                             && (b0->flags & VLIB_BUFFER_IS_TRACED)))
            {
              nat44_classify_trace_t *t =
                vlib_add_trace (vm, node, b0, sizeof (*t));
              t->cached = cached0;
              if (!cached0)
                t->next_in2out = next0 == NAT44_CLASSIFY_NEXT_IN2OUT ? 1 : 0;
            }

          if (cached0)
            {
              n_left_to_next++;
              to_next--;
              frag_cached++;
            }
          else
            {
              next_in2out += next0 == NAT44_CLASSIFY_NEXT_IN2OUT;
              next_out2in += next0 == NAT44_CLASSIFY_NEXT_OUT2IN;

              /* verify speculative enqueue, maybe switch current next frame */
              vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                               to_next, n_left_to_next,
                                               bi0, next0);
            }

          if (n_left_from == 0 && vec_len (fragments_to_loopback))
            {
              from = vlib_frame_vector_args (frame);
              u32 len = vec_len (fragments_to_loopback);
              if (len <= VLIB_FRAME_SIZE)
                {
                  clib_memcpy_fast (from, fragments_to_loopback,
                                    sizeof (u32) * len);
                  n_left_from = len;
                  vec_reset_length (fragments_to_loopback);
                }
              else
                {
                  clib_memcpy_fast (from, fragments_to_loopback +
                                    (len - VLIB_FRAME_SIZE),
                                    sizeof (u32) * VLIB_FRAME_SIZE);
                  n_left_from = VLIB_FRAME_SIZE;
                  _vec_len (fragments_to_loopback) = len - VLIB_FRAME_SIZE;
                }
            }
        }

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

  nat_send_all_to_node (vm, fragments_to_drop, node, 0,
                        NAT44_CLASSIFY_NEXT_DROP);

  vec_free (fragments_to_drop);

  vlib_node_increment_counter (vm, node->node_index,
                               NAT44_CLASSIFY_ERROR_NEXT_IN2OUT, next_in2out);
  vlib_node_increment_counter (vm, node->node_index,
                               NAT44_CLASSIFY_ERROR_NEXT_OUT2IN, next_out2in);
  vlib_node_increment_counter (vm, node->node_index,
                               NAT44_CLASSIFY_ERROR_FRAG_CACHED, frag_cached);

  return frame->n_vectors;
}

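/*
 * Endpoint-dependent (ED) classifier: the feature-arc next index is saved
 * in the buffer opaque, and non-ICMP packets that already match an in2out
 * session in the per-thread ED table are kept on the in2out fast path.
 * Everything else falls back to the same pool-address / static-mapping
 * checks as above, with the result expressed as NAT_NEXT_* indices shared
 * with the nat-default node.
 */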
static inline uword
nat44_ed_classify_node_fn_inline (vlib_main_t * vm,
                                  vlib_node_runtime_t * node,
                                  vlib_frame_t * frame)
{
  u32 n_left_from, *from, *to_next;
  nat44_classify_next_t next_index;
  snat_main_t *sm = &snat_main;
  snat_static_mapping_t *m;
  u32 thread_index = vm->thread_index;
  snat_main_per_thread_data_t *tsm = &sm->per_thread_data[thread_index];
  u32 *fragments_to_drop = 0;
  u32 *fragments_to_loopback = 0;
  u32 next_in2out = 0, next_out2in = 0, frag_cached = 0;
  u8 in_loopback = 0;

  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors;
  next_index = node->cached_next_index;

  while (n_left_from > 0)
    {
      u32 n_left_to_next;

      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

      while (n_left_from > 0 && n_left_to_next > 0)
        {
          u32 bi0;
          vlib_buffer_t *b0;
          u32 next0 =
            NAT_NEXT_IN2OUT_ED_FAST_PATH, sw_if_index0, rx_fib_index0;
          ip4_header_t *ip0;
          snat_address_t *ap;
          snat_session_key_t m_key0;
          clib_bihash_kv_8_8_t kv0, value0;
          clib_bihash_kv_16_8_t ed_kv0, ed_value0;
          u8 cached0 = 0;

          /* speculatively enqueue b0 to the current next frame */
          bi0 = from[0];
          to_next[0] = bi0;
          from += 1;
          to_next += 1;
          n_left_from -= 1;
          n_left_to_next -= 1;

          b0 = vlib_get_buffer (vm, bi0);
          ip0 = vlib_buffer_get_current (b0);

          if (!in_loopback)
            {
              u32 arc_next = 0;

              vnet_feature_next (&arc_next, b0);
              nat_buffer_opaque (b0)->arc_next = arc_next;
            }

          if (ip0->protocol != IP_PROTOCOL_ICMP)
            {
              /* process leading fragment/whole packet (with L4 header) */
              sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
              rx_fib_index0 =
                fib_table_get_index_for_sw_if_index (FIB_PROTOCOL_IP4,
                                                     sw_if_index0);
              make_ed_kv (&ed_kv0, &ip0->src_address,
                          &ip0->dst_address, ip0->protocol,
                          rx_fib_index0,
                          vnet_buffer (b0)->ip.reass.l4_src_port,
                          vnet_buffer (b0)->ip.reass.l4_dst_port);
              /* process whole packet */
              if (!clib_bihash_search_16_8
                  (&tsm->in2out_ed, &ed_kv0, &ed_value0))
                goto enqueue0;
              /* session doesn't exist so continue in code */
            }

          /* *INDENT-OFF* */
          vec_foreach (ap, sm->addresses)
            {
              if (ip0->dst_address.as_u32 == ap->addr.as_u32)
                {
                  next0 = NAT_NEXT_OUT2IN_ED_FAST_PATH;
                  goto enqueue0;
                }
            }
          /* *INDENT-ON* */

          if (PREDICT_FALSE (pool_elts (sm->static_mappings)))
            {
              m_key0.addr = ip0->dst_address;
              m_key0.port = 0;
              m_key0.protocol = 0;
              m_key0.fib_index = 0;
              kv0.key = m_key0.as_u64;
              /* try to classify the fragment based on IP header alone */
              if (!clib_bihash_search_8_8 (&sm->static_mapping_by_external,
                                           &kv0, &value0))
                {
                  m = pool_elt_at_index (sm->static_mappings, value0.value);
                  if (m->local_addr.as_u32 != m->external_addr.as_u32)
                    next0 = NAT_NEXT_OUT2IN_ED_FAST_PATH;
                  goto enqueue0;
                }
              m_key0.port =
                clib_net_to_host_u16 (vnet_buffer (b0)->ip.reass.l4_dst_port);
              m_key0.protocol = ip_proto_to_snat_proto (ip0->protocol);
              kv0.key = m_key0.as_u64;
              if (!clib_bihash_search_8_8
                  (&sm->static_mapping_by_external, &kv0, &value0))
                {
                  m = pool_elt_at_index (sm->static_mappings, value0.value);
                  if (m->local_addr.as_u32 != m->external_addr.as_u32)
                    next0 = NAT_NEXT_OUT2IN_ED_FAST_PATH;
                }
            }

        enqueue0:
          if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
                             && (b0->flags & VLIB_BUFFER_IS_TRACED)))
            {
              nat44_classify_trace_t *t =
                vlib_add_trace (vm, node, b0, sizeof (*t));
              t->cached = cached0;
              if (!cached0)
                t->next_in2out =
                  next0 == NAT_NEXT_IN2OUT_ED_FAST_PATH ? 1 : 0;
            }

          if (cached0)
            {
              n_left_to_next++;
              to_next--;
              frag_cached++;
            }
          else
            {
              next_in2out += next0 == NAT_NEXT_IN2OUT_ED_FAST_PATH;
              next_out2in += next0 == NAT_NEXT_OUT2IN_ED_FAST_PATH;

              /* verify speculative enqueue, maybe switch current next frame */
              vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                               to_next, n_left_to_next,
                                               bi0, next0);
            }

          if (n_left_from == 0 && vec_len (fragments_to_loopback))
            {
              in_loopback = 1;
              from = vlib_frame_vector_args (frame);
              u32 len = vec_len (fragments_to_loopback);
              if (len <= VLIB_FRAME_SIZE)
                {
                  clib_memcpy_fast (from, fragments_to_loopback,
                                    sizeof (u32) * len);
                  n_left_from = len;
                  vec_reset_length (fragments_to_loopback);
                }
              else
                {
                  clib_memcpy_fast (from, fragments_to_loopback +
                                    (len - VLIB_FRAME_SIZE),
                                    sizeof (u32) * VLIB_FRAME_SIZE);
                  n_left_from = VLIB_FRAME_SIZE;
                  _vec_len (fragments_to_loopback) = len - VLIB_FRAME_SIZE;
                }
            }
        }

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

  nat_send_all_to_node (vm, fragments_to_drop, node, 0,
                        NAT44_CLASSIFY_NEXT_DROP);

  vec_free (fragments_to_drop);

  vlib_node_increment_counter (vm, node->node_index,
                               NAT44_CLASSIFY_ERROR_NEXT_IN2OUT, next_in2out);
  vlib_node_increment_counter (vm, node->node_index,
                               NAT44_CLASSIFY_ERROR_NEXT_OUT2IN, next_out2in);
  vlib_node_increment_counter (vm, node->node_index,
                               NAT44_CLASSIFY_ERROR_FRAG_CACHED, frag_cached);

  return frame->n_vectors;
}

VLIB_NODE_FN (nat44_classify_node) (vlib_main_t * vm,
                                    vlib_node_runtime_t * node,
                                    vlib_frame_t * frame)
{
  return nat44_classify_node_fn_inline (vm, node, frame);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (nat44_classify_node) = {
  .name = "nat44-classify",
  .vector_size = sizeof (u32),
  .format_trace = format_nat44_classify_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .n_errors = ARRAY_LEN(nat44_classify_error_strings),
  .error_strings = nat44_classify_error_strings,
  .n_next_nodes = NAT44_CLASSIFY_N_NEXT,
  .next_nodes = {
    [NAT44_CLASSIFY_NEXT_IN2OUT] = "nat44-in2out",
    [NAT44_CLASSIFY_NEXT_OUT2IN] = "nat44-out2in",
    [NAT44_CLASSIFY_NEXT_DROP] = "error-drop",
  },
};
/* *INDENT-ON* */

VLIB_NODE_FN (nat44_ed_classify_node) (vlib_main_t * vm,
                                       vlib_node_runtime_t * node,
                                       vlib_frame_t * frame)
{
  return nat44_ed_classify_node_fn_inline (vm, node, frame);
}

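/*
 * The ED classify node is registered as a sibling of nat-default so it
 * shares that node's next-node indices (the NAT_NEXT_* values used above);
 * the remaining classify nodes carry their own next-node tables.
 */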
/* *INDENT-OFF* */
VLIB_REGISTER_NODE (nat44_ed_classify_node) = {
  .name = "nat44-ed-classify",
  .vector_size = sizeof (u32),
  .sibling_of = "nat-default",
  .format_trace = format_nat44_classify_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
};
/* *INDENT-ON* */

VLIB_NODE_FN (nat44_det_classify_node) (vlib_main_t * vm,
                                        vlib_node_runtime_t * node,
                                        vlib_frame_t * frame)
{
  return nat44_classify_node_fn_inline (vm, node, frame);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (nat44_det_classify_node) = {
  .name = "nat44-det-classify",
  .vector_size = sizeof (u32),
  .format_trace = format_nat44_classify_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .n_next_nodes = NAT44_CLASSIFY_N_NEXT,
  .next_nodes = {
    [NAT44_CLASSIFY_NEXT_IN2OUT] = "nat44-det-in2out",
    [NAT44_CLASSIFY_NEXT_OUT2IN] = "nat44-det-out2in",
    [NAT44_CLASSIFY_NEXT_DROP] = "error-drop",
  },
};
/* *INDENT-ON* */

VLIB_NODE_FN (nat44_handoff_classify_node) (vlib_main_t * vm,
                                            vlib_node_runtime_t * node,
                                            vlib_frame_t * frame)
{
  return nat44_classify_node_fn_inline (vm, node, frame);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (nat44_handoff_classify_node) = {
  .name = "nat44-handoff-classify",
  .vector_size = sizeof (u32),
  .format_trace = format_nat44_classify_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .n_next_nodes = NAT44_CLASSIFY_N_NEXT,
  .next_nodes = {
    [NAT44_CLASSIFY_NEXT_IN2OUT] = "nat44-in2out-worker-handoff",
    [NAT44_CLASSIFY_NEXT_OUT2IN] = "nat44-out2in-worker-handoff",
    [NAT44_CLASSIFY_NEXT_DROP] = "error-drop",
  },
};

/* *INDENT-ON* */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */