1 #ifndef __included_nat44_hairpinning_h__
2 #define __included_nat44_hairpinning_h__
/* X-macro error table for the hairpinning handoff node. Each _(sym, str)
 * entry expands once into the error enum below and once into the parallel
 * counter-description string array. Currently the only error is the
 * congestion drop recorded when the handoff frame queue is full. */
6 #define foreach_nat44_hairpinning_handoff_error \
7 _ (CONGESTION_DROP, "congestion drop")
/* Expand the error table into enum members named
 * NAT44_HAIRPINNING_HANDOFF_ERROR_<sym>; the trailing
 * NAT44_HAIRPINNING_HANDOFF_N_ERROR gives the total number of errors. */
11 #define _(sym, str) NAT44_HAIRPINNING_HANDOFF_ERROR_##sym,
12 foreach_nat44_hairpinning_handoff_error
14 NAT44_HAIRPINNING_HANDOFF_N_ERROR,
15 } nat44_hairpinning_handoff_error_t;
/* Human-readable counter strings, generated from the same X-macro table so
 * the array stays index-aligned with nat44_hairpinning_handoff_error_t. */
17 static char *nat44_hairpinning_handoff_error_strings[] = {
18 #define _(sym, string) string,
19 foreach_nat44_hairpinning_handoff_error
/* Per-packet trace record for the handoff node: the index of the worker
 * thread the buffer was handed off to. */
25 u32 next_worker_index;
26 } nat44_hairpinning_handoff_trace_t;
/* vlib trace formatter: renders a nat44_hairpinning_handoff_trace_t as
 * "nat-hairpinning-handoff: next-worker <n>". Follows the standard vlib
 * format convention — the va_list carries (vlib_main_t *, vlib_node_t *,
 * trace record *); the first two are unused here. */
29 format_nat44_hairpinning_handoff_trace (u8 *s, va_list *args)
31 CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
32 CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
33 nat44_hairpinning_handoff_trace_t *t =
34 va_arg (*args, nat44_hairpinning_handoff_trace_t *);
/* Append the chosen worker index to the format buffer and return it. */
36 s = format (s, "nat-hairpinning-handoff: next-worker %d",
37 t->next_worker_index);
/* Shared implementation for the hairpinning handoff node(s): hands every
 * buffer in the frame off to the worker thread pre-selected by the NAT
 * code (vnet_buffer(b)->snat.required_thread_index), via the frame queue
 * identified by fq_index.
 *
 * Returns the number of vectors processed (frame->n_vectors); buffers that
 * could not be enqueued are counted as CONGESTION_DROP. */
43 nat44_hairpinning_handoff_fn_inline (vlib_main_t *vm,
44 vlib_node_runtime_t *node,
45 vlib_frame_t *frame, u32 fq_index)
47 vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b;
48 u32 n_enq, n_left_from, *from;
49 u16 thread_indices[VLIB_FRAME_SIZE], *ti;
/* Translate the frame's buffer indices into buffer pointers. */
51 from = vlib_frame_vector_args (frame);
52 n_left_from = frame->n_vectors;
53 vlib_get_buffers (vm, from, bufs, n_left_from);
/* Per-buffer loop: record the destination worker for each packet.
 * NOTE(review): b/ti presumably start at bufs/thread_indices and are
 * advanced each iteration — initialization not visible here; confirm. */
58 while (n_left_from > 0)
60 ti[0] = vnet_buffer (b[0])->snat.required_thread_index;
/* Double check both node-level and buffer-level trace flags before
 * paying the cost of adding a trace record. */
62 if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE) &&
63 (b[0]->flags & VLIB_BUFFER_IS_TRACED)))
65 nat44_hairpinning_handoff_trace_t *t =
66 vlib_add_trace (vm, node, b[0], sizeof (*t));
67 t->next_worker_index = ti[0];
/* Bulk-enqueue all buffers to their target workers; n_enq is how many
 * were actually accepted by the frame queues. */
74 n_enq = vlib_buffer_enqueue_to_thread (vm, fq_index, from, thread_indices,
/* Anything not enqueued was dropped due to congestion — count it. */
77 if (n_enq < frame->n_vectors)
78 vlib_node_increment_counter (
79 vm, node->node_index, NAT44_HAIRPINNING_HANDOFF_ERROR_CONGESTION_DROP,
80 frame->n_vectors - n_enq);
81 return frame->n_vectors;
84 #endif // __included_nat44_hairpinning_h__
87 * fd.io coding-style-patch-verification: ON
90 * eval: (c-set-style "gnu")