always_inline void
vlib_validate_trace (vlib_trace_main_t * tm, vlib_buffer_t * b)
{
- /*
- * this assert seems right, but goes off constantly.
- * disabling it appears to make the pain go away
- */
- ASSERT (1 || b->flags & VLIB_BUFFER_IS_TRACED);
- ASSERT (!pool_is_free_index (tm->trace_buffer_pool, b->trace_index));
+ ASSERT (!pool_is_free_index (tm->trace_buffer_pool,
+ vlib_buffer_get_trace_index (b)));
}
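+
+/* Out-of-line helper: records a handoff event when a traced buffer
+ * shows up on a thread other than the one that started tracing it. */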
+void vlib_add_handoff_trace (vlib_main_t * vm, vlib_buffer_t * b);
+
always_inline void *
vlib_add_trace (vlib_main_t * vm,
vlib_node_runtime_t * r, vlib_buffer_t * b, u32 n_data_bytes)
{
  vlib_trace_main_t *tm = &vm->trace_main;
  vlib_trace_header_t *h;
  u32 n_data_words;

  ASSERT (vnet_trace_dummy);
- if (PREDICT_FALSE (tm->trace_enable == 0))
+ if (PREDICT_FALSE ((b->flags & VLIB_BUFFER_IS_TRACED) == 0))
+ return vnet_trace_dummy;
+
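+  /* A registered add_trace_callback (e.g. from a tracing plugin) takes
+   * over trace capture entirely: hand it the buffer and return whatever
+   * record it supplies. */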
+ if (PREDICT_FALSE (tm->add_trace_callback != 0))
+ {
+ return tm->add_trace_callback ((struct vlib_main_t *) vm,
+ (struct vlib_node_runtime_t *) r,
+ (struct vlib_buffer_t *) b,
+ n_data_bytes);
+ }
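+  /* No callback and tracing disabled: return the shared scratch area so
+   * the caller's trace writes land somewhere harmless. */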
+ else if (PREDICT_FALSE (tm->trace_enable == 0))
{
ASSERT (vec_len (vnet_trace_dummy) >= n_data_bytes + sizeof (*h));
return vnet_trace_dummy;
}
+ /* Are we trying to trace a handoff case? */
+ if (PREDICT_FALSE (vlib_buffer_get_trace_thread (b) != vm->thread_index))
+ vlib_add_handoff_trace (vm, b);
+
vlib_validate_trace (tm, b);
n_data_bytes = round_pow2 (n_data_bytes, sizeof (h[0]));
n_data_words = n_data_bytes / sizeof (h[0]);
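+  /* Reserve one header element plus the rounded-up data words in this
+   * packet's trace record vector. */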
- vec_add2_aligned (tm->trace_buffer_pool[b->trace_index], h,
+ vec_add2_aligned (tm->trace_buffer_pool[vlib_buffer_get_trace_index (b)], h,
1 + n_data_words, sizeof (h[0]));
h->time = vm->cpu_time_last_node_dispatch;
  h->n_data = n_data_words;
  h->node_index = r->node_index;

  return h->data;
}

always_inline void
vlib_free_trace (vlib_main_t * vm, vlib_buffer_t * b)
{
vlib_trace_main_t *tm = &vm->trace_main;
+ u32 trace_index = vlib_buffer_get_trace_index (b);
vlib_validate_trace (tm, b);
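+  /* Reset the per-packet trace vector (keeping its memory) and return
+   * its slot to the pool for reuse. */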
- _vec_len (tm->trace_buffer_pool[b->trace_index]) = 0;
- pool_put_index (tm->trace_buffer_pool, b->trace_index);
+ _vec_len (tm->trace_buffer_pool[trace_index]) = 0;
+ pool_put_index (tm->trace_buffer_pool, trace_index);
}
always_inline void
vlib_trace_next_frame (vlib_main_t * vm,
		       vlib_node_runtime_t * r, u32 next_index)
{
  vlib_next_frame_t *nf;
  nf = vlib_node_runtime_get_next_frame (vm, r, next_index);
  nf->flags |= VLIB_FRAME_TRACE;
}
void trace_apply_filter (vlib_main_t * vm);
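+
+/* Trace filter hook: returns 1 when the packet should be traced per the
+ * classify table chain rooted at classify_table_index; the call in
+ * vlib_trace_buffer below passes func = 0 for a full classify. */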
+int vnet_is_packet_traced (vlib_buffer_t * b,
+ u32 classify_table_index, int func);
+
/* Mark buffer as traced and allocate trace buffer. */
always_inline void
vlib_trace_buffer (vlib_main_t * vm,
		   vlib_node_runtime_t * r,
		   u32 next_index, vlib_buffer_t * b, int follow_chain)
{
  vlib_trace_main_t *tm = &vm->trace_main;
  vlib_trace_header_t **h;

  if (PREDICT_FALSE (tm->trace_enable == 0))
return;
+ /* Classifier filter in use? */
+ if (PREDICT_FALSE (vlib_global_main.trace_filter.trace_filter_enable))
+ {
+ /* See if we're supposed to trace this packet... */
+ if (vnet_is_packet_traced
+ (b, vlib_global_main.trace_filter.trace_classify_table_index,
+ 0 /* full classify */ ) != 1)
+ return;
+ }
+
/*
* Apply filter to existing traces to keep number of allocated traces low.
   * Performed each time around the main loop.
   */
  if (tm->last_main_loop_count != vm->main_loop_count)
    {
tm->last_main_loop_count = vm->main_loop_count;
trace_apply_filter (vm);
+
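+      /* Give any registered observer a look at the trace buffer pool
+       * after the periodic filter pass. */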
+ if (tm->trace_buffer_callback)
+ (tm->trace_buffer_callback) ((struct vlib_main_t *) vm,
+ (struct vlib_trace_main_t *) tm);
}
  vlib_trace_next_frame (vm, r, next_index);

  pool_get (tm->trace_buffer_pool, h);
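+  /* Stamp the buffer (and, if follow_chain, the rest of its chain) with
+   * a handle encoding <tracing thread, pool index>, so consumers can
+   * find the owning thread's trace_buffer_pool later. */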
do
{
b->flags |= VLIB_BUFFER_IS_TRACED;
- b->trace_index = h - tm->trace_buffer_pool;
+ b->trace_handle = vlib_buffer_make_trace_handle
+ (vm->thread_index, h - tm->trace_buffer_pool);
}
while (follow_chain && (b = vlib_get_next_buffer (vm, b)));
}
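+
+/* Propagate the trace flag and trace handle from one buffer to another,
+ * e.g. when a packet is copied or duplicated. */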
always_inline void
vlib_buffer_copy_trace_flag (vlib_main_t * vm, vlib_buffer_t * b,
			     u32 bi_target)
{
vlib_buffer_t *b_target = vlib_get_buffer (vm, bi_target);
b_target->flags |= b->flags & VLIB_BUFFER_IS_TRACED;
- b_target->trace_index = b->trace_index;
+ b_target->trace_handle = b->trace_handle;
}
always_inline u32