/*
 * Copyright (c) 2018 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * @file
 * @brief Classify for one armed NAT44 (in+out interface)
 */
20 #include <vlib/vlib.h>
21 #include <vnet/vnet.h>
22 #include <vnet/fib/ip4_fib.h>
24 #include <nat/nat_inlines.h>
/* Error/counter list for the classify nodes: _(symbol, description). */
#define foreach_nat44_classify_error            \
_(NEXT_IN2OUT, "next in2out")                   \
_(NEXT_OUT2IN, "next out2in")                   \
_(FRAG_CACHED, "fragment cached")

/* Counter indices generated from the list above. */
typedef enum
{
#define _(sym,str) NAT44_CLASSIFY_ERROR_##sym,
  foreach_nat44_classify_error
#undef _
  NAT44_CLASSIFY_N_ERROR,
} nat44_classify_error_t;
39 static char *nat44_classify_error_strings[] = {
40 #define _(sym,string) string,
41 foreach_nat44_classify_error
/* Next-node indices used by the plain (non-ED) classify nodes. */
typedef enum
{
  NAT44_CLASSIFY_NEXT_IN2OUT,
  NAT44_CLASSIFY_NEXT_OUT2IN,
  NAT44_CLASSIFY_NEXT_DROP,
  NAT44_CLASSIFY_N_NEXT,
} nat44_classify_next_t;
57 } nat44_classify_trace_t;
60 format_nat44_classify_trace (u8 * s, va_list * args)
62 CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
63 CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
64 nat44_classify_trace_t *t = va_arg (*args, nat44_classify_trace_t *);
68 s = format (s, "nat44-classify: fragment cached");
71 next = t->next_in2out ? "nat44-in2out" : "nat44-out2in";
72 s = format (s, "nat44-classify: next %s", next);
79 nat44_classify_node_fn_inline (vlib_main_t * vm,
80 vlib_node_runtime_t * node,
83 u32 n_left_from, *from, *to_next;
84 nat44_classify_next_t next_index;
85 snat_main_t *sm = &snat_main;
86 snat_static_mapping_t *m;
87 u32 *fragments_to_drop = 0;
88 u32 next_in2out = 0, next_out2in = 0;
90 from = vlib_frame_vector_args (frame);
91 n_left_from = frame->n_vectors;
92 next_index = node->cached_next_index;
94 while (n_left_from > 0)
98 vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
100 while (n_left_from > 0 && n_left_to_next > 0)
104 u32 next0 = NAT44_CLASSIFY_NEXT_IN2OUT;
107 clib_bihash_kv_8_8_t kv0, value0;
109 /* speculatively enqueue b0 to the current next frame */
117 b0 = vlib_get_buffer (vm, bi0);
118 ip0 = vlib_buffer_get_current (b0);
121 vec_foreach (ap, sm->addresses)
123 if (ip0->dst_address.as_u32 == ap->addr.as_u32)
125 next0 = NAT44_CLASSIFY_NEXT_OUT2IN;
131 if (PREDICT_FALSE (pool_elts (sm->static_mappings)))
133 init_nat_k (&kv0, ip0->dst_address, 0, 0, 0);
134 /* try to classify the fragment based on IP header alone */
135 if (!clib_bihash_search_8_8 (&sm->static_mapping_by_external,
138 m = pool_elt_at_index (sm->static_mappings, value0.value);
139 if (m->local_addr.as_u32 != m->external_addr.as_u32)
140 next0 = NAT44_CLASSIFY_NEXT_OUT2IN;
143 init_nat_k (&kv0, ip0->dst_address,
144 vnet_buffer (b0)->ip.reass.l4_dst_port, 0,
145 ip_proto_to_nat_proto (ip0->protocol));
146 if (!clib_bihash_search_8_8
147 (&sm->static_mapping_by_external, &kv0, &value0))
149 m = pool_elt_at_index (sm->static_mappings, value0.value);
150 if (m->local_addr.as_u32 != m->external_addr.as_u32)
151 next0 = NAT44_CLASSIFY_NEXT_OUT2IN;
156 if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
157 && (b0->flags & VLIB_BUFFER_IS_TRACED)))
159 nat44_classify_trace_t *t =
160 vlib_add_trace (vm, node, b0, sizeof (*t));
162 t->next_in2out = next0 == NAT44_CLASSIFY_NEXT_IN2OUT ? 1 : 0;
165 next_in2out += next0 == NAT44_CLASSIFY_NEXT_IN2OUT;
166 next_out2in += next0 == NAT44_CLASSIFY_NEXT_OUT2IN;
168 /* verify speculative enqueue, maybe switch current next frame */
169 vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
170 to_next, n_left_to_next,
174 vlib_put_next_frame (vm, node, next_index, n_left_to_next);
177 nat_send_all_to_node (vm, fragments_to_drop, node, 0,
178 NAT44_CLASSIFY_NEXT_DROP);
180 vec_free (fragments_to_drop);
182 vlib_node_increment_counter (vm, node->node_index,
183 NAT44_CLASSIFY_ERROR_NEXT_IN2OUT, next_in2out);
184 vlib_node_increment_counter (vm, node->node_index,
185 NAT44_CLASSIFY_ERROR_NEXT_OUT2IN, next_out2in);
186 return frame->n_vectors;
190 nat44_handoff_classify_node_fn_inline (vlib_main_t * vm,
191 vlib_node_runtime_t * node,
192 vlib_frame_t * frame)
194 u32 n_left_from, *from, *to_next;
195 nat44_classify_next_t next_index;
196 snat_main_t *sm = &snat_main;
197 snat_static_mapping_t *m;
198 u32 *fragments_to_drop = 0;
199 u32 next_in2out = 0, next_out2in = 0;
201 from = vlib_frame_vector_args (frame);
202 n_left_from = frame->n_vectors;
203 next_index = node->cached_next_index;
205 while (n_left_from > 0)
209 vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
211 while (n_left_from > 0 && n_left_to_next > 0)
215 u32 next0 = NAT_NEXT_IN2OUT_CLASSIFY;
218 clib_bihash_kv_8_8_t kv0, value0;
220 /* speculatively enqueue b0 to the current next frame */
228 b0 = vlib_get_buffer (vm, bi0);
229 ip0 = vlib_buffer_get_current (b0);
232 vec_foreach (ap, sm->addresses)
234 if (ip0->dst_address.as_u32 == ap->addr.as_u32)
236 next0 = NAT_NEXT_OUT2IN_CLASSIFY;
242 if (PREDICT_FALSE (pool_elts (sm->static_mappings)))
244 init_nat_k (&kv0, ip0->dst_address, 0, 0, 0);
245 /* try to classify the fragment based on IP header alone */
246 if (!clib_bihash_search_8_8 (&sm->static_mapping_by_external,
249 m = pool_elt_at_index (sm->static_mappings, value0.value);
250 if (m->local_addr.as_u32 != m->external_addr.as_u32)
251 next0 = NAT_NEXT_OUT2IN_CLASSIFY;
254 init_nat_k (&kv0, ip0->dst_address,
255 vnet_buffer (b0)->ip.reass.l4_dst_port, 0,
256 ip_proto_to_nat_proto (ip0->protocol));
257 if (!clib_bihash_search_8_8
258 (&sm->static_mapping_by_external, &kv0, &value0))
260 m = pool_elt_at_index (sm->static_mappings, value0.value);
261 if (m->local_addr.as_u32 != m->external_addr.as_u32)
262 next0 = NAT_NEXT_OUT2IN_CLASSIFY;
267 if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
268 && (b0->flags & VLIB_BUFFER_IS_TRACED)))
270 nat44_classify_trace_t *t =
271 vlib_add_trace (vm, node, b0, sizeof (*t));
273 t->next_in2out = next0 == NAT_NEXT_IN2OUT_CLASSIFY ? 1 : 0;
276 next_in2out += next0 == NAT_NEXT_IN2OUT_CLASSIFY;
277 next_out2in += next0 == NAT_NEXT_OUT2IN_CLASSIFY;
279 /* verify speculative enqueue, maybe switch current next frame */
280 vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
281 to_next, n_left_to_next,
285 vlib_put_next_frame (vm, node, next_index, n_left_to_next);
288 nat_send_all_to_node (vm, fragments_to_drop, node, 0, NAT_NEXT_DROP);
290 vec_free (fragments_to_drop);
292 vlib_node_increment_counter (vm, node->node_index,
293 NAT44_CLASSIFY_ERROR_NEXT_IN2OUT, next_in2out);
294 vlib_node_increment_counter (vm, node->node_index,
295 NAT44_CLASSIFY_ERROR_NEXT_OUT2IN, next_out2in);
296 return frame->n_vectors;
300 nat44_ed_classify_node_fn_inline (vlib_main_t * vm,
301 vlib_node_runtime_t * node,
302 vlib_frame_t * frame)
304 u32 n_left_from, *from, *to_next;
305 nat44_classify_next_t next_index;
306 snat_main_t *sm = &snat_main;
307 snat_static_mapping_t *m;
308 u32 thread_index = vm->thread_index;
309 snat_main_per_thread_data_t *tsm = &sm->per_thread_data[thread_index];
310 u32 *fragments_to_drop = 0;
311 u32 next_in2out = 0, next_out2in = 0;
313 from = vlib_frame_vector_args (frame);
314 n_left_from = frame->n_vectors;
315 next_index = node->cached_next_index;
317 while (n_left_from > 0)
321 vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
323 while (n_left_from > 0 && n_left_to_next > 0)
327 u32 next0 = NAT_NEXT_IN2OUT_ED_FAST_PATH;
328 u32 sw_if_index0, rx_fib_index0;
331 clib_bihash_kv_8_8_t kv0, value0;
332 clib_bihash_kv_16_8_t ed_kv0, ed_value0;
334 /* speculatively enqueue b0 to the current next frame */
342 b0 = vlib_get_buffer (vm, bi0);
343 ip0 = vlib_buffer_get_current (b0);
346 vnet_feature_next (&arc_next, b0);
347 vnet_buffer2 (b0)->nat.arc_next = arc_next;
349 if (ip0->protocol != IP_PROTOCOL_ICMP)
351 /* process leading fragment/whole packet (with L4 header) */
352 sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
354 fib_table_get_index_for_sw_if_index (FIB_PROTOCOL_IP4,
356 init_ed_k (&ed_kv0, ip0->src_address,
357 vnet_buffer (b0)->ip.reass.l4_src_port,
359 vnet_buffer (b0)->ip.reass.l4_dst_port,
360 rx_fib_index0, ip0->protocol);
361 /* process whole packet */
362 if (!clib_bihash_search_16_8
363 (&tsm->in2out_ed, &ed_kv0, &ed_value0))
365 /* session doesn't exist so continue in code */
369 vec_foreach (ap, sm->addresses)
371 if (ip0->dst_address.as_u32 == ap->addr.as_u32)
373 next0 = NAT_NEXT_OUT2IN_ED_FAST_PATH;
379 if (PREDICT_FALSE (pool_elts (sm->static_mappings)))
381 init_nat_k (&kv0, ip0->dst_address, 0, 0, 0);
382 /* try to classify the fragment based on IP header alone */
383 if (!clib_bihash_search_8_8 (&sm->static_mapping_by_external,
386 m = pool_elt_at_index (sm->static_mappings, value0.value);
387 if (m->local_addr.as_u32 != m->external_addr.as_u32)
388 next0 = NAT_NEXT_OUT2IN_ED_FAST_PATH;
391 init_nat_k (&kv0, ip0->dst_address,
392 vnet_buffer (b0)->ip.reass.l4_dst_port, 0,
393 ip_proto_to_nat_proto (ip0->protocol));
394 if (!clib_bihash_search_8_8
395 (&sm->static_mapping_by_external, &kv0, &value0))
397 m = pool_elt_at_index (sm->static_mappings, value0.value);
398 if (m->local_addr.as_u32 != m->external_addr.as_u32)
399 next0 = NAT_NEXT_OUT2IN_ED_FAST_PATH;
404 if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
405 && (b0->flags & VLIB_BUFFER_IS_TRACED)))
407 nat44_classify_trace_t *t =
408 vlib_add_trace (vm, node, b0, sizeof (*t));
410 t->next_in2out = next0 == NAT_NEXT_IN2OUT_ED_FAST_PATH ? 1 : 0;
413 next_in2out += next0 == NAT_NEXT_IN2OUT_ED_FAST_PATH;
414 next_out2in += next0 == NAT_NEXT_OUT2IN_ED_FAST_PATH;
416 /* verify speculative enqueue, maybe switch current next frame */
417 vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
418 to_next, n_left_to_next,
422 vlib_put_next_frame (vm, node, next_index, n_left_to_next);
425 nat_send_all_to_node (vm, fragments_to_drop, node, 0,
426 NAT44_CLASSIFY_NEXT_DROP);
428 vec_free (fragments_to_drop);
430 vlib_node_increment_counter (vm, node->node_index,
431 NAT44_CLASSIFY_ERROR_NEXT_IN2OUT, next_in2out);
432 vlib_node_increment_counter (vm, node->node_index,
433 NAT44_CLASSIFY_ERROR_NEXT_OUT2IN, next_out2in);
434 return frame->n_vectors;
437 VLIB_NODE_FN (nat44_classify_node) (vlib_main_t * vm,
438 vlib_node_runtime_t * node,
439 vlib_frame_t * frame)
441 return nat44_classify_node_fn_inline (vm, node, frame);
445 VLIB_REGISTER_NODE (nat44_classify_node) = {
446 .name = "nat44-classify",
447 .vector_size = sizeof (u32),
448 .format_trace = format_nat44_classify_trace,
449 .type = VLIB_NODE_TYPE_INTERNAL,
450 .n_errors = ARRAY_LEN(nat44_classify_error_strings),
451 .error_strings = nat44_classify_error_strings,
452 .n_next_nodes = NAT44_CLASSIFY_N_NEXT,
454 [NAT44_CLASSIFY_NEXT_IN2OUT] = "nat44-in2out",
455 [NAT44_CLASSIFY_NEXT_OUT2IN] = "nat44-out2in",
456 [NAT44_CLASSIFY_NEXT_DROP] = "error-drop",
461 VLIB_NODE_FN (nat44_ed_classify_node) (vlib_main_t * vm,
462 vlib_node_runtime_t * node,
463 vlib_frame_t * frame)
465 return nat44_ed_classify_node_fn_inline (vm, node, frame);
469 VLIB_REGISTER_NODE (nat44_ed_classify_node) = {
470 .name = "nat44-ed-classify",
471 .vector_size = sizeof (u32),
472 .sibling_of = "nat-default",
473 .format_trace = format_nat44_classify_trace,
474 .type = VLIB_NODE_TYPE_INTERNAL,
478 VLIB_NODE_FN (nat44_det_classify_node) (vlib_main_t * vm,
479 vlib_node_runtime_t * node,
480 vlib_frame_t * frame)
482 return nat44_classify_node_fn_inline (vm, node, frame);
486 VLIB_REGISTER_NODE (nat44_det_classify_node) = {
487 .name = "nat44-det-classify",
488 .vector_size = sizeof (u32),
489 .format_trace = format_nat44_classify_trace,
490 .type = VLIB_NODE_TYPE_INTERNAL,
491 .n_next_nodes = NAT44_CLASSIFY_N_NEXT,
493 [NAT44_CLASSIFY_NEXT_IN2OUT] = "nat44-det-in2out",
494 [NAT44_CLASSIFY_NEXT_OUT2IN] = "nat44-det-out2in",
495 [NAT44_CLASSIFY_NEXT_DROP] = "error-drop",
500 VLIB_NODE_FN (nat44_handoff_classify_node) (vlib_main_t * vm,
501 vlib_node_runtime_t * node,
502 vlib_frame_t * frame)
504 return nat44_handoff_classify_node_fn_inline (vm, node, frame);
508 VLIB_REGISTER_NODE (nat44_handoff_classify_node) = {
509 .name = "nat44-handoff-classify",
510 .vector_size = sizeof (u32),
511 .sibling_of = "nat-default",
512 .format_trace = format_nat44_classify_trace,
513 .type = VLIB_NODE_TYPE_INTERNAL,
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */