u8 packet_data[64];
} ip4_input_trace_t;
-#ifndef CLIB_MARCH_VARIANT
/* Trace formatter for the ip4-input node (registered as .format_trace).
 * Called by the vlib trace infrastructure with a format string buffer `s`
 * and a va_list carrying the trace record.
 * NOTE(review): as visible in this excerpt the function returns `s`
 * unchanged; the actual formatting body appears to have been elided from
 * this chunk of the patch -- confirm against the full source before
 * treating this as a stub. */
static u8 *
format_ip4_input_trace (u8 * s, va_list * va)
{
return s;
}
-#endif
static_always_inline u32
ip4_input_set_next (u32 sw_if_index, vlib_buffer_t * b, int arc_enabled)
}
static_always_inline void
-ip4_input_check_sw_if_index (vlib_simple_counter_main_t * cm, u32 sw_if_index,
+ip4_input_check_sw_if_index (vlib_main_t * vm,
+ vlib_simple_counter_main_t * cm, u32 sw_if_index,
u32 * last_sw_if_index, u32 * cnt,
int *arc_enabled)
{
return;
}
- thread_index = vlib_get_thread_index ();
+ thread_index = vm->thread_index;
if (*cnt)
vlib_increment_simple_counter (cm, thread_index, *last_sw_if_index, *cnt);
*cnt = 1;
{
vnet_main_t *vnm = vnet_get_main ();
u32 n_left_from, *from;
- u32 thread_index = vlib_get_thread_index ();
+ u32 thread_index = vm->thread_index;
vlib_node_runtime_t *error_node =
vlib_node_get_runtime (vm, ip4_input_node.index);
vlib_simple_counter_main_t *cm;
}
else
{
- ip4_input_check_sw_if_index (cm, sw_if_index[0], &last_sw_if_index,
- &cnt, &arc_enabled);
- ip4_input_check_sw_if_index (cm, sw_if_index[1], &last_sw_if_index,
- &cnt, &arc_enabled);
- ip4_input_check_sw_if_index (cm, sw_if_index[2], &last_sw_if_index,
- &cnt, &arc_enabled);
- ip4_input_check_sw_if_index (cm, sw_if_index[3], &last_sw_if_index,
- &cnt, &arc_enabled);
+ ip4_input_check_sw_if_index (vm, cm, sw_if_index[0],
+ &last_sw_if_index, &cnt, &arc_enabled);
+ ip4_input_check_sw_if_index (vm, cm, sw_if_index[1],
+ &last_sw_if_index, &cnt, &arc_enabled);
+ ip4_input_check_sw_if_index (vm, cm, sw_if_index[2],
+ &last_sw_if_index, &cnt, &arc_enabled);
+ ip4_input_check_sw_if_index (vm, cm, sw_if_index[3],
+ &last_sw_if_index, &cnt, &arc_enabled);
next[0] = ip4_input_set_next (sw_if_index[0], b[0], 1);
next[1] = ip4_input_set_next (sw_if_index[1], b[1], 1);
u32 next0;
vnet_buffer (b[0])->ip.adj_index[VLIB_RX] = ~0;
sw_if_index[0] = vnet_buffer (b[0])->sw_if_index[VLIB_RX];
- ip4_input_check_sw_if_index (cm, sw_if_index[0], &last_sw_if_index,
+ ip4_input_check_sw_if_index (vm, cm, sw_if_index[0], &last_sw_if_index,
&cnt, &arc_enabled);
next0 = ip4_input_set_next (sw_if_index[0], b[0], arc_enabled);
ip[0] = vlib_buffer_get_current (b[0]);
foreach_ip4_error
#undef _
};
+#endif
/* *INDENT-OFF* */
VLIB_REGISTER_NODE (ip4_input_node) = {
.next_nodes = {
[IP4_INPUT_NEXT_DROP] = "error-drop",
[IP4_INPUT_NEXT_PUNT] = "error-punt",
+ [IP4_INPUT_NEXT_OPTIONS] = "ip4-options",
[IP4_INPUT_NEXT_LOOKUP] = "ip4-lookup",
[IP4_INPUT_NEXT_LOOKUP_MULTICAST] = "ip4-mfib-forward-lookup",
[IP4_INPUT_NEXT_ICMP_ERROR] = "ip4-icmp-error",
.name = "ip4-input-no-checksum",
.vector_size = sizeof (u32),
- .n_next_nodes = IP4_INPUT_N_NEXT,
- .next_nodes = {
- [IP4_INPUT_NEXT_DROP] = "error-drop",
- [IP4_INPUT_NEXT_PUNT] = "error-punt",
- [IP4_INPUT_NEXT_LOOKUP] = "ip4-lookup",
- [IP4_INPUT_NEXT_LOOKUP_MULTICAST] = "ip4-mfib-forward-lookup",
- [IP4_INPUT_NEXT_ICMP_ERROR] = "ip4-icmp-error",
- [IP4_INPUT_NEXT_REASSEMBLY] = "ip4-reassembly",
- },
-
+ .sibling_of = "ip4-input",
.format_buffer = format_ip4_header,
.format_trace = format_ip4_input_trace,
};
hdlc_register_input_protocol (vm, HDLC_PROTOCOL_ip4, ip4_input_node.index);
{
+ extern vlib_node_registration_t ip4_input_no_checksum_node;
pg_node_t *pn;
pn = pg_get_node (ip4_input_node.index);
pn->unformat_edit = unformat_pg_ip4_header;
}
VLIB_INIT_FUNCTION (ip4_init);
-#endif
+
+/* Main-loop-enter hook: sizes the per-thread ARP throttle state in
+ * ip4_main before the graph dispatch loop starts.  Runs once, after all
+ * worker threads (tm->n_vlib_mains) are known.
+ * NOTE(review): vec_validate (v, n_vlib_mains) validates an *index*, not
+ * a length, so each of these vectors gets n_vlib_mains + 1 entries --
+ * confirm whether the extra slot is intentional or whether
+ * n_vlib_mains - 1 was meant. */
+static clib_error_t *
+ip4_main_loop_enter (vlib_main_t * vm)
+{
+ ip4_main_t *im = &ip4_main;
+ vlib_thread_main_t *tm = &vlib_thread_main;
+ u32 n_vlib_mains = tm->n_vlib_mains;
+ int i;
+
+
+ /* One throttle bitmap / seed / timestamp slot per vlib thread. */
+ vec_validate (im->arp_throttle_bitmaps, n_vlib_mains);
+ vec_validate (im->arp_throttle_seeds, n_vlib_mains);
+ vec_validate (im->arp_throttle_last_seed_change_time, n_vlib_mains);
+
+ /* Size each per-thread bitmap to hold ARP_THROTTLE_BITS bits,
+ * expressed as uword-sized chunks (hence the highest-index form). */
+ for (i = 0; i < n_vlib_mains; i++)
+ vec_validate (im->arp_throttle_bitmaps[i],
+ (ARP_THROTTLE_BITS / BITS (uword)) - 1);
+ return 0;
+}
+
+/* Register the hook with vlib's main-loop-enter callback list. */
+VLIB_MAIN_LOOP_ENTER_FUNCTION (ip4_main_loop_enter);
/*
* fd.io coding-style-patch-verification: ON