/*
 * Copyright (c) 2018 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * @file
 * @brief Classify for one armed NAT44 (in+out interface)
 */
20 #include <vlib/vlib.h>
21 #include <vnet/vnet.h>
22 #include <vnet/fib/ip4_fib.h>
24 #include <nat/nat_inlines.h>
26 #define foreach_nat44_classify_error \
27 _(NEXT_IN2OUT, "next in2out") \
28 _(NEXT_OUT2IN, "next out2in") \
29 _(FRAG_CACHED, "fragment cached")
33 #define _(sym,str) NAT44_CLASSIFY_ERROR_##sym,
34 foreach_nat44_classify_error
36 NAT44_CLASSIFY_N_ERROR,
37 } nat44_classify_error_t;
39 static char *nat44_classify_error_strings[] = {
40 #define _(sym,string) string,
41 foreach_nat44_classify_error
47 NAT44_CLASSIFY_NEXT_IN2OUT,
48 NAT44_CLASSIFY_NEXT_OUT2IN,
49 NAT44_CLASSIFY_NEXT_DROP,
50 NAT44_CLASSIFY_N_NEXT,
51 } nat44_classify_next_t;
57 } nat44_classify_trace_t;
60 format_nat44_classify_trace (u8 * s, va_list * args)
62 CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
63 CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
64 nat44_classify_trace_t *t = va_arg (*args, nat44_classify_trace_t *);
68 s = format (s, "nat44-classify: fragment cached");
71 next = t->next_in2out ? "nat44-in2out" : "nat44-out2in";
72 s = format (s, "nat44-classify: next %s", next);
79 nat44_classify_node_fn_inline (vlib_main_t * vm,
80 vlib_node_runtime_t * node,
83 u32 n_left_from, *from, *to_next;
84 nat44_classify_next_t next_index;
85 snat_main_t *sm = &snat_main;
86 snat_static_mapping_t *m;
87 u32 *fragments_to_drop = 0;
88 u32 *fragments_to_loopback = 0;
89 u32 next_in2out = 0, next_out2in = 0, frag_cached = 0;
91 from = vlib_frame_vector_args (frame);
92 n_left_from = frame->n_vectors;
93 next_index = node->cached_next_index;
95 while (n_left_from > 0)
99 vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
101 while (n_left_from > 0 && n_left_to_next > 0)
105 u32 next0 = NAT44_CLASSIFY_NEXT_IN2OUT;
108 snat_session_key_t m_key0;
109 clib_bihash_kv_8_8_t kv0, value0;
112 /* speculatively enqueue b0 to the current next frame */
120 b0 = vlib_get_buffer (vm, bi0);
121 ip0 = vlib_buffer_get_current (b0);
124 vec_foreach (ap, sm->addresses)
126 if (ip0->dst_address.as_u32 == ap->addr.as_u32)
128 next0 = NAT44_CLASSIFY_NEXT_OUT2IN;
134 if (PREDICT_FALSE (pool_elts (sm->static_mappings)))
136 m_key0.addr = ip0->dst_address;
139 m_key0.fib_index = 0;
140 kv0.key = m_key0.as_u64;
141 /* try to classify the fragment based on IP header alone */
142 if (!clib_bihash_search_8_8 (&sm->static_mapping_by_external,
145 m = pool_elt_at_index (sm->static_mappings, value0.value);
146 if (m->local_addr.as_u32 != m->external_addr.as_u32)
147 next0 = NAT44_CLASSIFY_NEXT_OUT2IN;
151 clib_net_to_host_u16 (vnet_buffer (b0)->ip.reass.l4_dst_port);
152 m_key0.protocol = ip_proto_to_snat_proto (ip0->protocol);
153 kv0.key = m_key0.as_u64;
154 if (!clib_bihash_search_8_8
155 (&sm->static_mapping_by_external, &kv0, &value0))
157 m = pool_elt_at_index (sm->static_mappings, value0.value);
158 if (m->local_addr.as_u32 != m->external_addr.as_u32)
159 next0 = NAT44_CLASSIFY_NEXT_OUT2IN;
164 if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
165 && (b0->flags & VLIB_BUFFER_IS_TRACED)))
167 nat44_classify_trace_t *t =
168 vlib_add_trace (vm, node, b0, sizeof (*t));
171 t->next_in2out = next0 == NAT44_CLASSIFY_NEXT_IN2OUT ? 1 : 0;
182 next_in2out += next0 == NAT44_CLASSIFY_NEXT_IN2OUT;
183 next_out2in += next0 == NAT44_CLASSIFY_NEXT_OUT2IN;
185 /* verify speculative enqueue, maybe switch current next frame */
186 vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
187 to_next, n_left_to_next,
191 if (n_left_from == 0 && vec_len (fragments_to_loopback))
193 from = vlib_frame_vector_args (frame);
194 u32 len = vec_len (fragments_to_loopback);
195 if (len <= VLIB_FRAME_SIZE)
197 clib_memcpy_fast (from, fragments_to_loopback,
200 vec_reset_length (fragments_to_loopback);
204 clib_memcpy_fast (from, fragments_to_loopback +
205 (len - VLIB_FRAME_SIZE),
206 sizeof (u32) * VLIB_FRAME_SIZE);
207 n_left_from = VLIB_FRAME_SIZE;
208 _vec_len (fragments_to_loopback) = len - VLIB_FRAME_SIZE;
213 vlib_put_next_frame (vm, node, next_index, n_left_to_next);
216 nat_send_all_to_node (vm, fragments_to_drop, node, 0,
217 NAT44_CLASSIFY_NEXT_DROP);
219 vec_free (fragments_to_drop);
221 vlib_node_increment_counter (vm, node->node_index,
222 NAT44_CLASSIFY_ERROR_NEXT_IN2OUT, next_in2out);
223 vlib_node_increment_counter (vm, node->node_index,
224 NAT44_CLASSIFY_ERROR_NEXT_OUT2IN, next_out2in);
225 vlib_node_increment_counter (vm, node->node_index,
226 NAT44_CLASSIFY_ERROR_FRAG_CACHED, frag_cached);
228 return frame->n_vectors;
232 nat44_ed_classify_node_fn_inline (vlib_main_t * vm,
233 vlib_node_runtime_t * node,
234 vlib_frame_t * frame)
236 u32 n_left_from, *from, *to_next;
237 nat44_classify_next_t next_index;
238 snat_main_t *sm = &snat_main;
239 snat_static_mapping_t *m;
240 u32 thread_index = vm->thread_index;
241 snat_main_per_thread_data_t *tsm = &sm->per_thread_data[thread_index];
242 u32 *fragments_to_drop = 0;
243 u32 *fragments_to_loopback = 0;
244 u32 next_in2out = 0, next_out2in = 0, frag_cached = 0;
247 from = vlib_frame_vector_args (frame);
248 n_left_from = frame->n_vectors;
249 next_index = node->cached_next_index;
251 while (n_left_from > 0)
255 vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
257 while (n_left_from > 0 && n_left_to_next > 0)
262 NAT_NEXT_IN2OUT_ED_FAST_PATH, sw_if_index0, rx_fib_index0;
265 snat_session_key_t m_key0;
266 clib_bihash_kv_8_8_t kv0, value0;
267 clib_bihash_kv_16_8_t ed_kv0, ed_value0;
270 /* speculatively enqueue b0 to the current next frame */
278 b0 = vlib_get_buffer (vm, bi0);
279 ip0 = vlib_buffer_get_current (b0);
285 vnet_feature_next (&arc_next, b0);
286 nat_buffer_opaque (b0)->arc_next = arc_next;
289 if (ip0->protocol != IP_PROTOCOL_ICMP)
291 /* process leading fragment/whole packet (with L4 header) */
292 sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
294 fib_table_get_index_for_sw_if_index (FIB_PROTOCOL_IP4,
296 make_ed_kv (&ed_kv0, &ip0->src_address,
297 &ip0->dst_address, ip0->protocol,
299 vnet_buffer (b0)->ip.reass.l4_src_port,
300 vnet_buffer (b0)->ip.reass.l4_dst_port);
301 /* process whole packet */
302 if (!clib_bihash_search_16_8
303 (&tsm->in2out_ed, &ed_kv0, &ed_value0))
305 /* session doesn't exist so continue in code */
309 vec_foreach (ap, sm->addresses)
311 if (ip0->dst_address.as_u32 == ap->addr.as_u32)
313 next0 = NAT_NEXT_OUT2IN_ED_FAST_PATH;
319 if (PREDICT_FALSE (pool_elts (sm->static_mappings)))
321 m_key0.addr = ip0->dst_address;
324 m_key0.fib_index = 0;
325 kv0.key = m_key0.as_u64;
326 /* try to classify the fragment based on IP header alone */
327 if (!clib_bihash_search_8_8 (&sm->static_mapping_by_external,
330 m = pool_elt_at_index (sm->static_mappings, value0.value);
331 if (m->local_addr.as_u32 != m->external_addr.as_u32)
332 next0 = NAT_NEXT_OUT2IN_ED_FAST_PATH;
336 clib_net_to_host_u16 (vnet_buffer (b0)->ip.reass.l4_dst_port);
337 m_key0.protocol = ip_proto_to_snat_proto (ip0->protocol);
338 kv0.key = m_key0.as_u64;
339 if (!clib_bihash_search_8_8
340 (&sm->static_mapping_by_external, &kv0, &value0))
342 m = pool_elt_at_index (sm->static_mappings, value0.value);
343 if (m->local_addr.as_u32 != m->external_addr.as_u32)
344 next0 = NAT_NEXT_OUT2IN_ED_FAST_PATH;
349 if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
350 && (b0->flags & VLIB_BUFFER_IS_TRACED)))
352 nat44_classify_trace_t *t =
353 vlib_add_trace (vm, node, b0, sizeof (*t));
357 next0 == NAT_NEXT_IN2OUT_ED_FAST_PATH ? 1 : 0;
368 next_in2out += next0 == NAT_NEXT_IN2OUT_ED_FAST_PATH;
369 next_out2in += next0 == NAT_NEXT_OUT2IN_ED_FAST_PATH;
371 /* verify speculative enqueue, maybe switch current next frame */
372 vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
373 to_next, n_left_to_next,
377 if (n_left_from == 0 && vec_len (fragments_to_loopback))
380 from = vlib_frame_vector_args (frame);
381 u32 len = vec_len (fragments_to_loopback);
382 if (len <= VLIB_FRAME_SIZE)
384 clib_memcpy_fast (from, fragments_to_loopback,
387 vec_reset_length (fragments_to_loopback);
391 clib_memcpy_fast (from, fragments_to_loopback +
392 (len - VLIB_FRAME_SIZE),
393 sizeof (u32) * VLIB_FRAME_SIZE);
394 n_left_from = VLIB_FRAME_SIZE;
395 _vec_len (fragments_to_loopback) = len - VLIB_FRAME_SIZE;
400 vlib_put_next_frame (vm, node, next_index, n_left_to_next);
403 nat_send_all_to_node (vm, fragments_to_drop, node, 0,
404 NAT44_CLASSIFY_NEXT_DROP);
406 vec_free (fragments_to_drop);
408 vlib_node_increment_counter (vm, node->node_index,
409 NAT44_CLASSIFY_ERROR_NEXT_IN2OUT, next_in2out);
410 vlib_node_increment_counter (vm, node->node_index,
411 NAT44_CLASSIFY_ERROR_NEXT_OUT2IN, next_out2in);
412 vlib_node_increment_counter (vm, node->node_index,
413 NAT44_CLASSIFY_ERROR_FRAG_CACHED, frag_cached);
415 return frame->n_vectors;
418 VLIB_NODE_FN (nat44_classify_node) (vlib_main_t * vm,
419 vlib_node_runtime_t * node,
420 vlib_frame_t * frame)
422 return nat44_classify_node_fn_inline (vm, node, frame);
426 VLIB_REGISTER_NODE (nat44_classify_node) = {
427 .name = "nat44-classify",
428 .vector_size = sizeof (u32),
429 .format_trace = format_nat44_classify_trace,
430 .type = VLIB_NODE_TYPE_INTERNAL,
431 .n_errors = ARRAY_LEN(nat44_classify_error_strings),
432 .error_strings = nat44_classify_error_strings,
433 .n_next_nodes = NAT44_CLASSIFY_N_NEXT,
435 [NAT44_CLASSIFY_NEXT_IN2OUT] = "nat44-in2out",
436 [NAT44_CLASSIFY_NEXT_OUT2IN] = "nat44-out2in",
437 [NAT44_CLASSIFY_NEXT_DROP] = "error-drop",
442 VLIB_NODE_FN (nat44_ed_classify_node) (vlib_main_t * vm,
443 vlib_node_runtime_t * node,
444 vlib_frame_t * frame)
446 return nat44_ed_classify_node_fn_inline (vm, node, frame);
450 VLIB_REGISTER_NODE (nat44_ed_classify_node) = {
451 .name = "nat44-ed-classify",
452 .vector_size = sizeof (u32),
453 .sibling_of = "nat-default",
454 .format_trace = format_nat44_classify_trace,
455 .type = VLIB_NODE_TYPE_INTERNAL,
459 VLIB_NODE_FN (nat44_det_classify_node) (vlib_main_t * vm,
460 vlib_node_runtime_t * node,
461 vlib_frame_t * frame)
463 return nat44_classify_node_fn_inline (vm, node, frame);
467 VLIB_REGISTER_NODE (nat44_det_classify_node) = {
468 .name = "nat44-det-classify",
469 .vector_size = sizeof (u32),
470 .format_trace = format_nat44_classify_trace,
471 .type = VLIB_NODE_TYPE_INTERNAL,
472 .n_next_nodes = NAT44_CLASSIFY_N_NEXT,
474 [NAT44_CLASSIFY_NEXT_IN2OUT] = "nat44-det-in2out",
475 [NAT44_CLASSIFY_NEXT_OUT2IN] = "nat44-det-out2in",
476 [NAT44_CLASSIFY_NEXT_DROP] = "error-drop",
481 VLIB_NODE_FN (nat44_handoff_classify_node) (vlib_main_t * vm,
482 vlib_node_runtime_t * node,
483 vlib_frame_t * frame)
485 return nat44_classify_node_fn_inline (vm, node, frame);
489 VLIB_REGISTER_NODE (nat44_handoff_classify_node) = {
490 .name = "nat44-handoff-classify",
491 .vector_size = sizeof (u32),
492 .format_trace = format_nat44_classify_trace,
493 .type = VLIB_NODE_TYPE_INTERNAL,
494 .n_next_nodes = NAT44_CLASSIFY_N_NEXT,
496 [NAT44_CLASSIFY_NEXT_IN2OUT] = "nat44-in2out-worker-handoff",
497 [NAT44_CLASSIFY_NEXT_OUT2IN] = "nat44-out2in-worker-handoff",
498 [NAT44_CLASSIFY_NEXT_DROP] = "error-drop",
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */