X-Git-Url: https://gerrit.fd.io/r/gitweb?a=blobdiff_plain;f=src%2Fplugins%2Fnat%2Fnat44_classify.c;h=54c52e3ee4b8272f0030ac43e9dd9d018a9d3ac9;hb=e3621518046ad7f37ccf77c549a93375ab89da19;hp=f339770d8f594f20b16fca30d54a8c557e45f8c0;hpb=f126e746fc01c75bc99329d10ce9127b26b23814;p=vpp.git diff --git a/src/plugins/nat/nat44_classify.c b/src/plugins/nat/nat44_classify.c index f339770d8f5..54c52e3ee4b 100644 --- a/src/plugins/nat/nat44_classify.c +++ b/src/plugins/nat/nat44_classify.c @@ -85,8 +85,7 @@ nat44_classify_node_fn_inline (vlib_main_t * vm, snat_main_t *sm = &snat_main; snat_static_mapping_t *m; u32 *fragments_to_drop = 0; - u32 *fragments_to_loopback = 0; - u32 next_in2out = 0, next_out2in = 0, frag_cached = 0; + u32 next_in2out = 0, next_out2in = 0; from = vlib_frame_vector_args (frame); n_left_from = frame->n_vectors; @@ -105,9 +104,7 @@ nat44_classify_node_fn_inline (vlib_main_t * vm, u32 next0 = NAT44_CLASSIFY_NEXT_IN2OUT; ip4_header_t *ip0; snat_address_t *ap; - snat_session_key_t m_key0; clib_bihash_kv_8_8_t kv0, value0; - u8 cached0 = 0; /* speculatively enqueue b0 to the current next frame */ bi0 = from[0]; @@ -133,11 +130,7 @@ nat44_classify_node_fn_inline (vlib_main_t * vm, if (PREDICT_FALSE (pool_elts (sm->static_mappings))) { - m_key0.addr = ip0->dst_address; - m_key0.port = 0; - m_key0.protocol = 0; - m_key0.fib_index = 0; - kv0.key = m_key0.as_u64; + init_nat_k (&kv0, ip0->dst_address, 0, 0, 0); /* try to classify the fragment based on IP header alone */ if (!clib_bihash_search_8_8 (&sm->static_mapping_by_external, &kv0, &value0)) @@ -147,10 +140,9 @@ nat44_classify_node_fn_inline (vlib_main_t * vm, next0 = NAT44_CLASSIFY_NEXT_OUT2IN; goto enqueue0; } - m_key0.port = - clib_net_to_host_u16 (vnet_buffer (b0)->ip.reass.l4_dst_port); - m_key0.protocol = ip_proto_to_snat_proto (ip0->protocol); - kv0.key = m_key0.as_u64; + init_nat_k (&kv0, ip0->dst_address, + vnet_buffer (b0)->ip.reass.l4_dst_port, 0, + ip_proto_to_nat_proto (ip0->protocol)); if (!clib_bihash_search_8_8 (&sm->static_mapping_by_external, &kv0, &value0)) { @@ -166,55 +158,134 @@ nat44_classify_node_fn_inline (vlib_main_t * vm, { nat44_classify_trace_t *t = vlib_add_trace (vm, node, b0, sizeof (*t)); - t->cached = cached0; - if (!cached0) - t->next_in2out = next0 == NAT44_CLASSIFY_NEXT_IN2OUT ? 1 : 0; + t->cached = 0; + t->next_in2out = next0 == NAT44_CLASSIFY_NEXT_IN2OUT ? 
1 : 0; } - if (cached0) - { - n_left_to_next++; - to_next--; - frag_cached++; - } - else - { - next_in2out += next0 == NAT44_CLASSIFY_NEXT_IN2OUT; - next_out2in += next0 == NAT44_CLASSIFY_NEXT_OUT2IN; + next_in2out += next0 == NAT44_CLASSIFY_NEXT_IN2OUT; + next_out2in += next0 == NAT44_CLASSIFY_NEXT_OUT2IN; - /* verify speculative enqueue, maybe switch current next frame */ - vlib_validate_buffer_enqueue_x1 (vm, node, next_index, - to_next, n_left_to_next, - bi0, next0); - } + /* verify speculative enqueue, maybe switch current next frame */ + vlib_validate_buffer_enqueue_x1 (vm, node, next_index, + to_next, n_left_to_next, + bi0, next0); + } + + vlib_put_next_frame (vm, node, next_index, n_left_to_next); + } + + nat_send_all_to_node (vm, fragments_to_drop, node, 0, + NAT44_CLASSIFY_NEXT_DROP); + + vec_free (fragments_to_drop); + + vlib_node_increment_counter (vm, node->node_index, + NAT44_CLASSIFY_ERROR_NEXT_IN2OUT, next_in2out); + vlib_node_increment_counter (vm, node->node_index, + NAT44_CLASSIFY_ERROR_NEXT_OUT2IN, next_out2in); + return frame->n_vectors; +} + +static inline uword +nat44_handoff_classify_node_fn_inline (vlib_main_t * vm, + vlib_node_runtime_t * node, + vlib_frame_t * frame) +{ + u32 n_left_from, *from, *to_next; + nat44_classify_next_t next_index; + snat_main_t *sm = &snat_main; + snat_static_mapping_t *m; + u32 *fragments_to_drop = 0; + u32 next_in2out = 0, next_out2in = 0; + + from = vlib_frame_vector_args (frame); + n_left_from = frame->n_vectors; + next_index = node->cached_next_index; + + while (n_left_from > 0) + { + u32 n_left_to_next; + + vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next); + + while (n_left_from > 0 && n_left_to_next > 0) + { + u32 bi0; + vlib_buffer_t *b0; + u32 next0 = NAT_NEXT_IN2OUT_CLASSIFY; + ip4_header_t *ip0; + snat_address_t *ap; + clib_bihash_kv_8_8_t kv0, value0; + + /* speculatively enqueue b0 to the current next frame */ + bi0 = from[0]; + to_next[0] = bi0; + from += 1; + to_next += 1; + n_left_from -= 1; + n_left_to_next -= 1; + + b0 = vlib_get_buffer (vm, bi0); + ip0 = vlib_buffer_get_current (b0); - if (n_left_from == 0 && vec_len (fragments_to_loopback)) + /* *INDENT-OFF* */ + vec_foreach (ap, sm->addresses) + { + if (ip0->dst_address.as_u32 == ap->addr.as_u32) + { + next0 = NAT_NEXT_OUT2IN_CLASSIFY; + goto enqueue0; + } + } + /* *INDENT-ON* */ + + if (PREDICT_FALSE (pool_elts (sm->static_mappings))) { - from = vlib_frame_vector_args (frame); - u32 len = vec_len (fragments_to_loopback); - if (len <= VLIB_FRAME_SIZE) + init_nat_k (&kv0, ip0->dst_address, 0, 0, 0); + /* try to classify the fragment based on IP header alone */ + if (!clib_bihash_search_8_8 (&sm->static_mapping_by_external, + &kv0, &value0)) { - clib_memcpy_fast (from, fragments_to_loopback, - sizeof (u32) * len); - n_left_from = len; - vec_reset_length (fragments_to_loopback); + m = pool_elt_at_index (sm->static_mappings, value0.value); + if (m->local_addr.as_u32 != m->external_addr.as_u32) + next0 = NAT_NEXT_OUT2IN_CLASSIFY; + goto enqueue0; } - else + init_nat_k (&kv0, ip0->dst_address, + vnet_buffer (b0)->ip.reass.l4_dst_port, 0, + ip_proto_to_nat_proto (ip0->protocol)); + if (!clib_bihash_search_8_8 + (&sm->static_mapping_by_external, &kv0, &value0)) { - clib_memcpy_fast (from, fragments_to_loopback + - (len - VLIB_FRAME_SIZE), - sizeof (u32) * VLIB_FRAME_SIZE); - n_left_from = VLIB_FRAME_SIZE; - _vec_len (fragments_to_loopback) = len - VLIB_FRAME_SIZE; + m = pool_elt_at_index (sm->static_mappings, value0.value); + if (m->local_addr.as_u32 != 
m->external_addr.as_u32) + next0 = NAT_NEXT_OUT2IN_CLASSIFY; } } + + enqueue0: + if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE) + && (b0->flags & VLIB_BUFFER_IS_TRACED))) + { + nat44_classify_trace_t *t = + vlib_add_trace (vm, node, b0, sizeof (*t)); + t->cached = 0; + t->next_in2out = next0 == NAT_NEXT_IN2OUT_CLASSIFY ? 1 : 0; + } + + next_in2out += next0 == NAT_NEXT_IN2OUT_CLASSIFY; + next_out2in += next0 == NAT_NEXT_OUT2IN_CLASSIFY; + + /* verify speculative enqueue, maybe switch current next frame */ + vlib_validate_buffer_enqueue_x1 (vm, node, next_index, + to_next, n_left_to_next, + bi0, next0); } vlib_put_next_frame (vm, node, next_index, n_left_to_next); } - nat_send_all_to_node (vm, fragments_to_drop, node, 0, - NAT44_CLASSIFY_NEXT_DROP); + nat_send_all_to_node (vm, fragments_to_drop, node, 0, NAT_NEXT_DROP); vec_free (fragments_to_drop); @@ -222,9 +293,6 @@ nat44_classify_node_fn_inline (vlib_main_t * vm, NAT44_CLASSIFY_ERROR_NEXT_IN2OUT, next_in2out); vlib_node_increment_counter (vm, node->node_index, NAT44_CLASSIFY_ERROR_NEXT_OUT2IN, next_out2in); - vlib_node_increment_counter (vm, node->node_index, - NAT44_CLASSIFY_ERROR_FRAG_CACHED, frag_cached); - return frame->n_vectors; } @@ -240,9 +308,7 @@ nat44_ed_classify_node_fn_inline (vlib_main_t * vm, u32 thread_index = vm->thread_index; snat_main_per_thread_data_t *tsm = &sm->per_thread_data[thread_index]; u32 *fragments_to_drop = 0; - u32 *fragments_to_loopback = 0; - u32 next_in2out = 0, next_out2in = 0, frag_cached = 0; - u8 in_loopback = 0; + u32 next_in2out = 0, next_out2in = 0; from = vlib_frame_vector_args (frame); n_left_from = frame->n_vectors; @@ -258,14 +324,12 @@ nat44_ed_classify_node_fn_inline (vlib_main_t * vm, { u32 bi0; vlib_buffer_t *b0; - u32 next0 = - NAT_NEXT_IN2OUT_ED_FAST_PATH, sw_if_index0, rx_fib_index0; + u32 next0 = NAT_NEXT_IN2OUT_ED_FAST_PATH; + u32 sw_if_index0, rx_fib_index0; ip4_header_t *ip0; snat_address_t *ap; - snat_session_key_t m_key0; clib_bihash_kv_8_8_t kv0, value0; clib_bihash_kv_16_8_t ed_kv0, ed_value0; - u8 cached0 = 0; /* speculatively enqueue b0 to the current next frame */ bi0 = from[0]; @@ -278,13 +342,9 @@ nat44_ed_classify_node_fn_inline (vlib_main_t * vm, b0 = vlib_get_buffer (vm, bi0); ip0 = vlib_buffer_get_current (b0); - if (!in_loopback) - { - u32 arc_next = 0; - - vnet_feature_next (&arc_next, b0); - nat_buffer_opaque (b0)->arc_next = arc_next; - } + u32 arc_next; + vnet_feature_next (&arc_next, b0); + vnet_buffer2 (b0)->nat.arc_next = arc_next; if (ip0->protocol != IP_PROTOCOL_ICMP) { @@ -293,11 +353,11 @@ nat44_ed_classify_node_fn_inline (vlib_main_t * vm, rx_fib_index0 = fib_table_get_index_for_sw_if_index (FIB_PROTOCOL_IP4, sw_if_index0); - make_ed_kv (&ed_kv0, &ip0->src_address, - &ip0->dst_address, ip0->protocol, - rx_fib_index0, - vnet_buffer (b0)->ip.reass.l4_src_port, - vnet_buffer (b0)->ip.reass.l4_dst_port); + init_ed_k (&ed_kv0, ip0->src_address, + vnet_buffer (b0)->ip.reass.l4_src_port, + ip0->dst_address, + vnet_buffer (b0)->ip.reass.l4_dst_port, + rx_fib_index0, ip0->protocol); /* process whole packet */ if (!clib_bihash_search_16_8 (&tsm->in2out_ed, &ed_kv0, &ed_value0)) @@ -318,11 +378,7 @@ nat44_ed_classify_node_fn_inline (vlib_main_t * vm, if (PREDICT_FALSE (pool_elts (sm->static_mappings))) { - m_key0.addr = ip0->dst_address; - m_key0.port = 0; - m_key0.protocol = 0; - m_key0.fib_index = 0; - kv0.key = m_key0.as_u64; + init_nat_k (&kv0, ip0->dst_address, 0, 0, 0); /* try to classify the fragment based on IP header alone */ if 
(!clib_bihash_search_8_8 (&sm->static_mapping_by_external, &kv0, &value0)) @@ -332,10 +388,9 @@ nat44_ed_classify_node_fn_inline (vlib_main_t * vm, next0 = NAT_NEXT_OUT2IN_ED_FAST_PATH; goto enqueue0; } - m_key0.port = - clib_net_to_host_u16 (vnet_buffer (b0)->ip.reass.l4_dst_port); - m_key0.protocol = ip_proto_to_snat_proto (ip0->protocol); - kv0.key = m_key0.as_u64; + init_nat_k (&kv0, ip0->dst_address, + vnet_buffer (b0)->ip.reass.l4_dst_port, 0, + ip_proto_to_nat_proto (ip0->protocol)); if (!clib_bihash_search_8_8 (&sm->static_mapping_by_external, &kv0, &value0)) { @@ -351,50 +406,17 @@ nat44_ed_classify_node_fn_inline (vlib_main_t * vm, { nat44_classify_trace_t *t = vlib_add_trace (vm, node, b0, sizeof (*t)); - t->cached = cached0; - if (!cached0) - t->next_in2out = - next0 == NAT_NEXT_IN2OUT_ED_FAST_PATH ? 1 : 0; + t->cached = 0; + t->next_in2out = next0 == NAT_NEXT_IN2OUT_ED_FAST_PATH ? 1 : 0; } - if (cached0) - { - n_left_to_next++; - to_next--; - frag_cached++; - } - else - { - next_in2out += next0 == NAT_NEXT_IN2OUT_ED_FAST_PATH; - next_out2in += next0 == NAT_NEXT_OUT2IN_ED_FAST_PATH; - - /* verify speculative enqueue, maybe switch current next frame */ - vlib_validate_buffer_enqueue_x1 (vm, node, next_index, - to_next, n_left_to_next, - bi0, next0); - } + next_in2out += next0 == NAT_NEXT_IN2OUT_ED_FAST_PATH; + next_out2in += next0 == NAT_NEXT_OUT2IN_ED_FAST_PATH; - if (n_left_from == 0 && vec_len (fragments_to_loopback)) - { - in_loopback = 1; - from = vlib_frame_vector_args (frame); - u32 len = vec_len (fragments_to_loopback); - if (len <= VLIB_FRAME_SIZE) - { - clib_memcpy_fast (from, fragments_to_loopback, - sizeof (u32) * len); - n_left_from = len; - vec_reset_length (fragments_to_loopback); - } - else - { - clib_memcpy_fast (from, fragments_to_loopback + - (len - VLIB_FRAME_SIZE), - sizeof (u32) * VLIB_FRAME_SIZE); - n_left_from = VLIB_FRAME_SIZE; - _vec_len (fragments_to_loopback) = len - VLIB_FRAME_SIZE; - } - } + /* verify speculative enqueue, maybe switch current next frame */ + vlib_validate_buffer_enqueue_x1 (vm, node, next_index, + to_next, n_left_to_next, + bi0, next0); } vlib_put_next_frame (vm, node, next_index, n_left_to_next); @@ -409,9 +431,6 @@ nat44_ed_classify_node_fn_inline (vlib_main_t * vm, NAT44_CLASSIFY_ERROR_NEXT_IN2OUT, next_in2out); vlib_node_increment_counter (vm, node->node_index, NAT44_CLASSIFY_ERROR_NEXT_OUT2IN, next_out2in); - vlib_node_increment_counter (vm, node->node_index, - NAT44_CLASSIFY_ERROR_FRAG_CACHED, frag_cached); - return frame->n_vectors; } @@ -482,21 +501,16 @@ VLIB_NODE_FN (nat44_handoff_classify_node) (vlib_main_t * vm, vlib_node_runtime_t * node, vlib_frame_t * frame) { - return nat44_classify_node_fn_inline (vm, node, frame); + return nat44_handoff_classify_node_fn_inline (vm, node, frame); } /* *INDENT-OFF* */ VLIB_REGISTER_NODE (nat44_handoff_classify_node) = { .name = "nat44-handoff-classify", .vector_size = sizeof (u32), + .sibling_of = "nat-default", .format_trace = format_nat44_classify_trace, .type = VLIB_NODE_TYPE_INTERNAL, - .n_next_nodes = NAT44_CLASSIFY_N_NEXT, - .next_nodes = { - [NAT44_CLASSIFY_NEXT_IN2OUT] = "nat44-in2out-worker-handoff", - [NAT44_CLASSIFY_NEXT_OUT2IN] = "nat44-out2in-worker-handoff", - [NAT44_CLASSIFY_NEXT_DROP] = "error-drop", - }, }; /* *INDENT-ON* */
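For context on the refactor in this diff: the open-coded snat_session_key_t packing (addr, port, protocol and fib_index folded into a single u64 and assigned to kv0.key) is replaced throughout by the init_nat_k() helper, and make_ed_kv() by init_ed_k(). What follows is a minimal, stand-alone sketch of what init_nat_k() presumably does, reconstructed from the field assignments this diff removes. It is not the actual VPP definition: the real helper lives in the NAT plugin headers, and its exact key layout, the nat_protocol_t argument type, and any byte-order handling (the explicit clib_net_to_host_u16 at the old call site disappears here) may differ.

/* Hypothetical stand-alone sketch of init_nat_k(), reconstructed from the
   removed open-coded snat_session_key_t packing.  Not the actual VPP
   definition; the real helper may use a different bit layout. */
#include <stdint.h>

typedef union
{
  uint32_t as_u32;
  uint8_t as_u8[4];
} ip4_address_t;

/* 8-byte key/value pair as used with the clib bihash_8_8 tables. */
typedef struct
{
  uint64_t key;
  uint64_t value;
} clib_bihash_kv_8_8_t;

/* Mirrors the removed snat_session_key_t: address, port, 3-bit protocol
   and 13-bit fib index overlaid on a single u64. */
typedef union
{
  struct
  {
    ip4_address_t addr;
    uint16_t port;
    uint16_t protocol : 3, fib_index : 13;
  };
  uint64_t as_u64;
} snat_session_key_t;

static inline void
init_nat_k (clib_bihash_kv_8_8_t *kv, ip4_address_t addr, uint16_t port,
            uint32_t fib_index, uint32_t proto)
{
  snat_session_key_t k = {
    .addr = addr,
    .port = port,
    .protocol = proto & 0x7,
    .fib_index = fib_index & 0x1fff,
  };
  kv->key = k.as_u64;
  kv->value = 0;                /* overwritten by bihash search/add */
}

At the call sites above, the helper is first invoked with port, fib_index and protocol all zero (init_nat_k (&kv0, ip0->dst_address, 0, 0, 0)) so a fragment can be classified from the IP header alone, and then again with the reassembled L4 destination port and the protocol for the full static-mapping lookup. init_ed_k() plays the analogous role for the 16-byte endpoint-dependent session key used against tsm->in2out_ed.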