2 * Copyright (c) 2018 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
17 * @brief Classify for one armed NAT44 (in+out interface)
20 #include <vlib/vlib.h>
21 #include <vnet/vnet.h>
22 #include <vnet/fib/ip4_fib.h>
24 #include <nat/nat_inlines.h>
/* Error/counter table for the classify nodes: each _() entry expands to
 * both an enum value (NAT44_CLASSIFY_ERROR_<sym>) and, below, its
 * human-readable counter string. */
26 #define foreach_nat44_classify_error \
27 _(NEXT_IN2OUT, "next in2out") \
28 _(NEXT_OUT2IN, "next out2in") \
29 _(FRAG_CACHED, "fragment cached")
/* Expand the table into the nat44_classify_error_t enum. */
33 #define _(sym,str) NAT44_CLASSIFY_ERROR_##sym,
34 foreach_nat44_classify_error
36 NAT44_CLASSIFY_N_ERROR,
37 } nat44_classify_error_t;
/* Counter display strings, indexed by nat44_classify_error_t; used by
 * the .error_strings field of the node registrations below. */
39 static char *nat44_classify_error_strings[] = {
40 #define _(sym,string) string,
41 foreach_nat44_classify_error
/* Next-node indices for the plain (non-endpoint-dependent) classify
 * nodes; must stay in sync with the .next_nodes arrays in the
 * nat44-classify / nat44-det-classify registrations. */
47 NAT44_CLASSIFY_NEXT_IN2OUT,
48 NAT44_CLASSIFY_NEXT_OUT2IN,
49 NAT44_CLASSIFY_NEXT_DROP,
50 NAT44_CLASSIFY_N_NEXT,
51 } nat44_classify_next_t;
57 } nat44_classify_trace_t;
/* Packet-trace formatter shared by all classify nodes.
 * Prints either "fragment cached" or the chosen direction, based on the
 * nat44_classify_trace_t recorded at classification time. */
60 format_nat44_classify_trace (u8 * s, va_list * args)
62 CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
63 CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
64 nat44_classify_trace_t *t = va_arg (*args, nat44_classify_trace_t *);
/* cached-fragment case */
68 s = format (s, "nat44-classify: fragment cached");
/* otherwise report which direction the packet was classified into */
71 next = t->next_in2out ? "nat44-in2out" : "nat44-out2in";
72 s = format (s, "nat44-classify: next %s", next);
/* Decide in2out vs out2in for every packet of the frame (one-armed
 * NAT44, single in+out interface).  Default is in2out; a packet is
 * reclassified out2in when its destination is one of the NAT pool
 * addresses, or matches a non-identity static mapping looked up by
 * external address alone (fragment case) or by external
 * address + L4 dst port + protocol.  Returns frame->n_vectors. */
79 nat44_classify_node_fn_inline (vlib_main_t * vm,
80 vlib_node_runtime_t * node,
83 u32 n_left_from, *from, *to_next;
84 nat44_classify_next_t next_index;
85 snat_main_t *sm = &snat_main;
86 snat_static_mapping_t *m;
87 u32 *fragments_to_drop = 0;
/* per-frame tallies for the NEXT_IN2OUT / NEXT_OUT2IN counters */
88 u32 next_in2out = 0, next_out2in = 0;
90 from = vlib_frame_vector_args (frame);
91 n_left_from = frame->n_vectors;
92 next_index = node->cached_next_index;
94 while (n_left_from > 0)
98 vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
100 while (n_left_from > 0 && n_left_to_next > 0)
/* default classification: in2out unless a check below flips it */
104 u32 next0 = NAT44_CLASSIFY_NEXT_IN2OUT;
107 snat_session_key_t m_key0;
108 clib_bihash_kv_8_8_t kv0, value0;
110 /* speculatively enqueue b0 to the current next frame */
118 b0 = vlib_get_buffer (vm, bi0);
119 ip0 = vlib_buffer_get_current (b0);
/* destination is one of our NAT pool addresses -> out2in */
122 vec_foreach (ap, sm->addresses)
124 if (ip0->dst_address.as_u32 == ap->addr.as_u32)
126 next0 = NAT44_CLASSIFY_NEXT_OUT2IN;
/* only consult the static-mapping table when mappings exist */
132 if (PREDICT_FALSE (pool_elts (sm->static_mappings)))
134 m_key0.addr = ip0->dst_address;
137 m_key0.fib_index = 0;
138 kv0.key = m_key0.as_u64;
139 /* try to classify the fragment based on IP header alone */
140 if (!clib_bihash_search_8_8 (&sm->static_mapping_by_external,
143 m = pool_elt_at_index (sm->static_mappings, value0.value);
/* identity mapping (local == external) keeps the in2out default */
144 if (m->local_addr.as_u32 != m->external_addr.as_u32)
145 next0 = NAT44_CLASSIFY_NEXT_OUT2IN;
/* full key: external address + L4 dst port + NAT protocol */
149 clib_net_to_host_u16 (vnet_buffer (b0)->ip.reass.l4_dst_port);
150 m_key0.protocol = ip_proto_to_nat_proto (ip0->protocol);
151 kv0.key = m_key0.as_u64;
152 if (!clib_bihash_search_8_8
153 (&sm->static_mapping_by_external, &kv0, &value0))
155 m = pool_elt_at_index (sm->static_mappings, value0.value);
156 if (m->local_addr.as_u32 != m->external_addr.as_u32)
157 next0 = NAT44_CLASSIFY_NEXT_OUT2IN;
/* record a trace entry when tracing is enabled for this buffer */
162 if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
163 && (b0->flags & VLIB_BUFFER_IS_TRACED)))
165 nat44_classify_trace_t *t =
166 vlib_add_trace (vm, node, b0, sizeof (*t));
168 t->next_in2out = next0 == NAT44_CLASSIFY_NEXT_IN2OUT ? 1 : 0;
171 next_in2out += next0 == NAT44_CLASSIFY_NEXT_IN2OUT;
172 next_out2in += next0 == NAT44_CLASSIFY_NEXT_OUT2IN;
174 /* verify speculative enqueue, maybe switch current next frame */
175 vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
176 to_next, n_left_to_next,
180 vlib_put_next_frame (vm, node, next_index, n_left_to_next);
/* send any collected fragments to the drop next and free the vector
 * (NOTE(review): the code that appends to fragments_to_drop is not
 * visible in this view — confirm against full source) */
183 nat_send_all_to_node (vm, fragments_to_drop, node, 0,
184 NAT44_CLASSIFY_NEXT_DROP);
186 vec_free (fragments_to_drop);
/* publish the per-direction tallies as node counters */
188 vlib_node_increment_counter (vm, node->node_index,
189 NAT44_CLASSIFY_ERROR_NEXT_IN2OUT, next_in2out);
190 vlib_node_increment_counter (vm, node->node_index,
191 NAT44_CLASSIFY_ERROR_NEXT_OUT2IN, next_out2in);
192 return frame->n_vectors;
/* Same classification rules as nat44_classify_node_fn_inline (pool
 * address match, then static-mapping lookups), but emits the shared
 * NAT_NEXT_* next indices (NAT_NEXT_IN2OUT_CLASSIFY /
 * NAT_NEXT_OUT2IN_CLASSIFY / NAT_NEXT_DROP) because this node is a
 * sibling of "nat-default".  Returns frame->n_vectors. */
196 nat44_handoff_classify_node_fn_inline (vlib_main_t * vm,
197 vlib_node_runtime_t * node,
198 vlib_frame_t * frame)
200 u32 n_left_from, *from, *to_next;
201 nat44_classify_next_t next_index;
202 snat_main_t *sm = &snat_main;
203 snat_static_mapping_t *m;
204 u32 *fragments_to_drop = 0;
/* per-frame tallies for the NEXT_IN2OUT / NEXT_OUT2IN counters */
205 u32 next_in2out = 0, next_out2in = 0;
207 from = vlib_frame_vector_args (frame);
208 n_left_from = frame->n_vectors;
209 next_index = node->cached_next_index;
211 while (n_left_from > 0)
215 vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
217 while (n_left_from > 0 && n_left_to_next > 0)
/* default classification: in2out unless a check below flips it */
221 u32 next0 = NAT_NEXT_IN2OUT_CLASSIFY;
224 snat_session_key_t m_key0;
225 clib_bihash_kv_8_8_t kv0, value0;
227 /* speculatively enqueue b0 to the current next frame */
235 b0 = vlib_get_buffer (vm, bi0);
236 ip0 = vlib_buffer_get_current (b0);
/* destination is one of our NAT pool addresses -> out2in */
239 vec_foreach (ap, sm->addresses)
241 if (ip0->dst_address.as_u32 == ap->addr.as_u32)
243 next0 = NAT_NEXT_OUT2IN_CLASSIFY;
/* only consult the static-mapping table when mappings exist */
249 if (PREDICT_FALSE (pool_elts (sm->static_mappings)))
251 m_key0.addr = ip0->dst_address;
254 m_key0.fib_index = 0;
255 kv0.key = m_key0.as_u64;
256 /* try to classify the fragment based on IP header alone */
257 if (!clib_bihash_search_8_8 (&sm->static_mapping_by_external,
260 m = pool_elt_at_index (sm->static_mappings, value0.value);
/* identity mapping (local == external) keeps the in2out default */
261 if (m->local_addr.as_u32 != m->external_addr.as_u32)
262 next0 = NAT_NEXT_OUT2IN_CLASSIFY;
/* full key: external address + L4 dst port + NAT protocol */
266 clib_net_to_host_u16 (vnet_buffer (b0)->ip.reass.l4_dst_port);
267 m_key0.protocol = ip_proto_to_nat_proto (ip0->protocol);
268 kv0.key = m_key0.as_u64;
269 if (!clib_bihash_search_8_8
270 (&sm->static_mapping_by_external, &kv0, &value0))
272 m = pool_elt_at_index (sm->static_mappings, value0.value);
273 if (m->local_addr.as_u32 != m->external_addr.as_u32)
274 next0 = NAT_NEXT_OUT2IN_CLASSIFY;
/* record a trace entry when tracing is enabled for this buffer */
279 if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
280 && (b0->flags & VLIB_BUFFER_IS_TRACED)))
282 nat44_classify_trace_t *t =
283 vlib_add_trace (vm, node, b0, sizeof (*t));
285 t->next_in2out = next0 == NAT_NEXT_IN2OUT_CLASSIFY ? 1 : 0;
288 next_in2out += next0 == NAT_NEXT_IN2OUT_CLASSIFY;
289 next_out2in += next0 == NAT_NEXT_OUT2IN_CLASSIFY;
291 /* verify speculative enqueue, maybe switch current next frame */
292 vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
293 to_next, n_left_to_next,
297 vlib_put_next_frame (vm, node, next_index, n_left_to_next);
/* send any collected fragments to the drop next and free the vector
 * (NOTE(review): the code that appends to fragments_to_drop is not
 * visible in this view — confirm against full source) */
300 nat_send_all_to_node (vm, fragments_to_drop, node, 0, NAT_NEXT_DROP);
302 vec_free (fragments_to_drop);
/* publish the per-direction tallies as node counters */
304 vlib_node_increment_counter (vm, node->node_index,
305 NAT44_CLASSIFY_ERROR_NEXT_IN2OUT, next_in2out);
306 vlib_node_increment_counter (vm, node->node_index,
307 NAT44_CLASSIFY_ERROR_NEXT_OUT2IN, next_out2in);
308 return frame->n_vectors;
/* Endpoint-dependent (ED) NAT44 classification.  In addition to the
 * pool-address and static-mapping checks, this variant first saves the
 * feature-arc next index into buffer metadata and, for non-ICMP
 * packets, probes the per-thread in2out ED session table with the
 * full 6-tuple so packets of an existing session can be steered to the
 * in2out fast path.  Emits NAT_NEXT_IN2OUT_ED_FAST_PATH /
 * NAT_NEXT_OUT2IN_ED_FAST_PATH.  Returns frame->n_vectors. */
312 nat44_ed_classify_node_fn_inline (vlib_main_t * vm,
313 vlib_node_runtime_t * node,
314 vlib_frame_t * frame)
316 u32 n_left_from, *from, *to_next;
317 nat44_classify_next_t next_index;
318 snat_main_t *sm = &snat_main;
319 snat_static_mapping_t *m;
/* per-thread data holds this worker's ED session tables */
320 u32 thread_index = vm->thread_index;
321 snat_main_per_thread_data_t *tsm = &sm->per_thread_data[thread_index];
322 u32 *fragments_to_drop = 0;
323 u32 next_in2out = 0, next_out2in = 0;
325 from = vlib_frame_vector_args (frame);
326 n_left_from = frame->n_vectors;
327 next_index = node->cached_next_index;
329 while (n_left_from > 0)
333 vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
335 while (n_left_from > 0 && n_left_to_next > 0)
/* default classification: in2out fast path unless flipped below */
339 u32 next0 = NAT_NEXT_IN2OUT_ED_FAST_PATH;
340 u32 sw_if_index0, rx_fib_index0;
343 snat_session_key_t m_key0;
344 clib_bihash_kv_8_8_t kv0, value0;
345 clib_bihash_kv_16_8_t ed_kv0, ed_value0;
347 /* speculatively enqueue b0 to the current next frame */
355 b0 = vlib_get_buffer (vm, bi0);
356 ip0 = vlib_buffer_get_current (b0);
/* stash the feature-arc continuation for the downstream NAT nodes */
359 vnet_feature_next (&arc_next, b0);
360 vnet_buffer2 (b0)->nat.arc_next = arc_next;
/* non-ICMP: probe the in2out ED session table by 6-tuple
 * (src, dst, proto, rx fib, l4 sport, l4 dport) */
362 if (ip0->protocol != IP_PROTOCOL_ICMP)
364 /* process leading fragment/whole packet (with L4 header) */
365 sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
367 fib_table_get_index_for_sw_if_index (FIB_PROTOCOL_IP4,
369 make_ed_kv (&ip0->src_address, &ip0->dst_address,
370 ip0->protocol, rx_fib_index0,
371 vnet_buffer (b0)->ip.reass.l4_src_port,
372 vnet_buffer (b0)->ip.reass.l4_dst_port, ~0, ~0,
374 /* process whole packet */
375 if (!clib_bihash_search_16_8
376 (&tsm->in2out_ed, &ed_kv0, &ed_value0))
378 /* session doesn't exist so continue in code */
/* destination is one of our NAT pool addresses -> out2in */
382 vec_foreach (ap, sm->addresses)
384 if (ip0->dst_address.as_u32 == ap->addr.as_u32)
386 next0 = NAT_NEXT_OUT2IN_ED_FAST_PATH;
/* only consult the static-mapping table when mappings exist */
392 if (PREDICT_FALSE (pool_elts (sm->static_mappings)))
394 m_key0.addr = ip0->dst_address;
397 m_key0.fib_index = 0;
398 kv0.key = m_key0.as_u64;
399 /* try to classify the fragment based on IP header alone */
400 if (!clib_bihash_search_8_8 (&sm->static_mapping_by_external,
403 m = pool_elt_at_index (sm->static_mappings, value0.value);
/* identity mapping (local == external) keeps the in2out default */
404 if (m->local_addr.as_u32 != m->external_addr.as_u32)
405 next0 = NAT_NEXT_OUT2IN_ED_FAST_PATH;
/* full key: external address + L4 dst port + NAT protocol */
409 clib_net_to_host_u16 (vnet_buffer (b0)->ip.reass.l4_dst_port);
410 m_key0.protocol = ip_proto_to_nat_proto (ip0->protocol);
411 kv0.key = m_key0.as_u64;
412 if (!clib_bihash_search_8_8
413 (&sm->static_mapping_by_external, &kv0, &value0))
415 m = pool_elt_at_index (sm->static_mappings, value0.value);
416 if (m->local_addr.as_u32 != m->external_addr.as_u32)
417 next0 = NAT_NEXT_OUT2IN_ED_FAST_PATH;
/* record a trace entry when tracing is enabled for this buffer */
422 if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
423 && (b0->flags & VLIB_BUFFER_IS_TRACED)))
425 nat44_classify_trace_t *t =
426 vlib_add_trace (vm, node, b0, sizeof (*t));
428 t->next_in2out = next0 == NAT_NEXT_IN2OUT_ED_FAST_PATH ? 1 : 0;
431 next_in2out += next0 == NAT_NEXT_IN2OUT_ED_FAST_PATH;
432 next_out2in += next0 == NAT_NEXT_OUT2IN_ED_FAST_PATH;
434 /* verify speculative enqueue, maybe switch current next frame */
435 vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
436 to_next, n_left_to_next,
440 vlib_put_next_frame (vm, node, next_index, n_left_to_next);
/* send any collected fragments to the drop next and free the vector
 * (NOTE(review): the code that appends to fragments_to_drop is not
 * visible in this view — confirm against full source) */
443 nat_send_all_to_node (vm, fragments_to_drop, node, 0,
444 NAT44_CLASSIFY_NEXT_DROP);
446 vec_free (fragments_to_drop);
/* publish the per-direction tallies as node counters */
448 vlib_node_increment_counter (vm, node->node_index,
449 NAT44_CLASSIFY_ERROR_NEXT_IN2OUT, next_in2out);
450 vlib_node_increment_counter (vm, node->node_index,
451 NAT44_CLASSIFY_ERROR_NEXT_OUT2IN, next_out2in);
452 return frame->n_vectors;
/* Graph-node entry point for "nat44-classify": thin wrapper over the
 * shared inline classification function. */
455 VLIB_NODE_FN (nat44_classify_node) (vlib_main_t * vm,
456 vlib_node_runtime_t * node,
457 vlib_frame_t * frame)
459 return nat44_classify_node_fn_inline (vm, node, frame);
/* Registration for "nat44-classify": standalone node with explicit
 * next nodes matching nat44_classify_next_t. */
463 VLIB_REGISTER_NODE (nat44_classify_node) = {
464 .name = "nat44-classify",
465 .vector_size = sizeof (u32),
466 .format_trace = format_nat44_classify_trace,
467 .type = VLIB_NODE_TYPE_INTERNAL,
468 .n_errors = ARRAY_LEN(nat44_classify_error_strings),
469 .error_strings = nat44_classify_error_strings,
470 .n_next_nodes = NAT44_CLASSIFY_N_NEXT,
472 [NAT44_CLASSIFY_NEXT_IN2OUT] = "nat44-in2out",
473 [NAT44_CLASSIFY_NEXT_OUT2IN] = "nat44-out2in",
474 [NAT44_CLASSIFY_NEXT_DROP] = "error-drop",
/* Graph-node entry point for "nat44-ed-classify" (endpoint-dependent
 * variant). */
479 VLIB_NODE_FN (nat44_ed_classify_node) (vlib_main_t * vm,
480 vlib_node_runtime_t * node,
481 vlib_frame_t * frame)
483 return nat44_ed_classify_node_fn_inline (vm, node, frame);
/* Registration for "nat44-ed-classify": sibling of "nat-default", so
 * its next-node indices (NAT_NEXT_*) come from that node rather than a
 * local next_nodes array. */
487 VLIB_REGISTER_NODE (nat44_ed_classify_node) = {
488 .name = "nat44-ed-classify",
489 .vector_size = sizeof (u32),
490 .sibling_of = "nat-default",
491 .format_trace = format_nat44_classify_trace,
492 .type = VLIB_NODE_TYPE_INTERNAL,
/* Graph-node entry point for "nat44-det-classify": reuses the plain
 * classification inline; only the registration's next nodes differ. */
496 VLIB_NODE_FN (nat44_det_classify_node) (vlib_main_t * vm,
497 vlib_node_runtime_t * node,
498 vlib_frame_t * frame)
500 return nat44_classify_node_fn_inline (vm, node, frame);
/* Registration for "nat44-det-classify": same next-index layout as
 * nat44-classify but steering into the deterministic NAT nodes. */
504 VLIB_REGISTER_NODE (nat44_det_classify_node) = {
505 .name = "nat44-det-classify",
506 .vector_size = sizeof (u32),
507 .format_trace = format_nat44_classify_trace,
508 .type = VLIB_NODE_TYPE_INTERNAL,
509 .n_next_nodes = NAT44_CLASSIFY_N_NEXT,
511 [NAT44_CLASSIFY_NEXT_IN2OUT] = "nat44-det-in2out",
512 [NAT44_CLASSIFY_NEXT_OUT2IN] = "nat44-det-out2in",
513 [NAT44_CLASSIFY_NEXT_DROP] = "error-drop",
/* Graph-node entry point for "nat44-handoff-classify" (classification
 * performed before worker handoff). */
518 VLIB_NODE_FN (nat44_handoff_classify_node) (vlib_main_t * vm,
519 vlib_node_runtime_t * node,
520 vlib_frame_t * frame)
522 return nat44_handoff_classify_node_fn_inline (vm, node, frame);
/* Registration for "nat44-handoff-classify": sibling of "nat-default",
 * so it shares that node's NAT_NEXT_* next-node indices. */
526 VLIB_REGISTER_NODE (nat44_handoff_classify_node) = {
527 .name = "nat44-handoff-classify",
528 .vector_size = sizeof (u32),
529 .sibling_of = "nat-default",
530 .format_trace = format_nat44_classify_trace,
531 .type = VLIB_NODE_TYPE_INTERNAL,
537 * fd.io coding-style-patch-verification: ON
540 * eval: (c-set-style "gnu")