#ifndef included_vlib_trace_funcs_h
#define included_vlib_trace_funcs_h
+extern u8 *vnet_trace_dummy;
+
always_inline void
vlib_validate_trace (vlib_trace_main_t * tm, vlib_buffer_t * b)
{
  /* The buffer must point at a live trace record. */
  ASSERT (!pool_is_free_index (tm->trace_buffer_pool, b->trace_index));
}

always_inline void *
vlib_add_trace (vlib_main_t * vm,
                vlib_node_runtime_t * r, vlib_buffer_t * b, u32 n_data_bytes)
{
  vlib_trace_main_t *tm = &vm->trace_main;
  vlib_trace_header_t *h;
  u32 n_data_words;
+  ASSERT (vnet_trace_dummy);
+
+  if (PREDICT_FALSE (tm->add_trace_callback != 0))
+    {
+      return tm->add_trace_callback ((struct vlib_main_t *) vm,
+                                     (struct vlib_node_runtime_t *) r,
+                                     (struct vlib_buffer_t *) b,
+                                     n_data_bytes);
+    }
+  else if (PREDICT_FALSE (tm->trace_enable == 0))
+    {
+      ASSERT (vec_len (vnet_trace_dummy) >= n_data_bytes + sizeof (*h));
+      return vnet_trace_dummy;
+    }
+
  vlib_validate_trace (tm, b);

  n_data_bytes = round_pow2 (n_data_bytes, sizeof (h[0]));
  n_data_words = n_data_bytes / sizeof (h[0]);

  /* Reserve one header plus n_data_words of payload in this
     buffer's trace record. */
  vec_add2_aligned (tm->trace_buffer_pool[b->trace_index], h,
                    1 + n_data_words, sizeof (h[0]));

  h->time = vm->cpu_time_last_node_dispatch;
  h->n_data = n_data_words;
  h->node_index = r->node_index;

  return h->data;
}
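/*
 * Usage sketch (illustrative, not part of this change): when
 * add_trace_callback is set, the early return above delegates trace
 * record allocation to the callback.  A plugin might register one as
 * below; my_add_trace_cb and my_trace_space are hypothetical names,
 * not VPP APIs.
 *
 *   static u8 *my_trace_space;
 *
 *   static void *
 *   my_add_trace_cb (struct vlib_main_t *vm,
 *                    struct vlib_node_runtime_t *r,
 *                    struct vlib_buffer_t *b, u32 n_data_bytes)
 *   {
 *     vec_validate (my_trace_space, n_data_bytes - 1);
 *     return my_trace_space;   // caller writes n_data_bytes here
 *   }
 *
 *   vm->trace_main.add_trace_callback = my_add_trace_cb;
 */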
always_inline void
vlib_trace_buffer (vlib_main_t * vm,
                   vlib_node_runtime_t * r,
                   u32 next_index, vlib_buffer_t * b, int follow_chain)
{
  vlib_trace_main_t *tm = &vm->trace_main;
  vlib_trace_header_t **h;
+  if (PREDICT_FALSE (tm->trace_enable == 0))
+    return;
+
  /*
   * Apply filter to existing traces to keep number of allocated traces low.
   * Performed each time around the main loop.
   */
  if (tm->last_main_loop_count != vm->main_loop_count)
    {
      tm->last_main_loop_count = vm->main_loop_count;
      trace_apply_filter (vm);
+
+      if (tm->trace_buffer_callback)
+        (tm->trace_buffer_callback) ((struct vlib_main_t *) vm,
+                                     (struct vlib_trace_main_t *) tm);
    }

  vlib_trace_next_frame (vm, r, next_index);
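/*
 * Usage sketch (illustrative, not part of this change):
 * trace_buffer_callback gives a consumer a once-per-main-loop hook,
 * invoked just after trace_apply_filter above.  my_trace_buffer_cb is
 * a hypothetical name.
 *
 *   static void
 *   my_trace_buffer_cb (struct vlib_main_t *vm,
 *                       struct vlib_trace_main_t *tm)
 *   {
 *     // e.g. export accumulated trace records before the filter
 *     // reclaims them
 *   }
 *
 *   vm->trace_main.trace_buffer_callback = my_trace_buffer_cb;
 */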