#ifndef included_vlib_trace_funcs_h
#define included_vlib_trace_funcs_h
-extern u8 *vnet_trace_dummy;
+extern u8 *vnet_trace_placeholder;
/*
 * Sanity-check that the buffer's trace record still exists: its
 * trace index must name a live entry in the trace buffer pool.
 * Debug-build assertion only; compiles to nothing when ASSERTs are off.
 */
always_inline void
vlib_validate_trace (vlib_trace_main_t * tm, vlib_buffer_t * b)
{
  u32 ti = vlib_buffer_get_trace_index (b);
  ASSERT (!pool_is_free_index (tm->trace_buffer_pool, ti));
}
+int vlib_add_handoff_trace (vlib_main_t * vm, vlib_buffer_t * b);
+
always_inline void *
-vlib_add_trace (vlib_main_t * vm,
- vlib_node_runtime_t * r, vlib_buffer_t * b, u32 n_data_bytes)
+vlib_add_trace_inline (vlib_main_t * vm,
+ vlib_node_runtime_t * r, vlib_buffer_t * b,
+ u32 n_data_bytes)
{
vlib_trace_main_t *tm = &vm->trace_main;
vlib_trace_header_t *h;
- u32 n_data_words;
+ u32 n_data_words, trace_index;
- ASSERT (vnet_trace_dummy);
+ ASSERT (vnet_trace_placeholder);
+
+ if (PREDICT_FALSE ((b->flags & VLIB_BUFFER_IS_TRACED) == 0))
+ return vnet_trace_placeholder;
if (PREDICT_FALSE (tm->add_trace_callback != 0))
{
}
else if (PREDICT_FALSE (tm->trace_enable == 0))
{
- ASSERT (vec_len (vnet_trace_dummy) >= n_data_bytes + sizeof (*h));
- return vnet_trace_dummy;
+ ASSERT (vec_len (vnet_trace_placeholder) >= n_data_bytes + sizeof (*h));
+ return vnet_trace_placeholder;
}
- vlib_validate_trace (tm, b);
+ /* Are we trying to trace a handoff case? */
+ if (PREDICT_FALSE (vlib_buffer_get_trace_thread (b) != vm->thread_index))
+ if (PREDICT_FALSE (!vlib_add_handoff_trace (vm, b)))
+ return vnet_trace_placeholder;
+
+ /*
+ * there is a small chance of a race condition with 'clear trace' here: if a
+ * buffer was set to be traced before the 'clear trace' and is still going
+ * through the graph after the 'clear trace', its trace_index is staled as
+ * the pool was destroyed.
+ * The pool may have been re-allocated because of a new traced buffer, and
+ * the trace_index might be valid by pure (bad) luck. In that case the trace
+ * will be a mix of both buffer traces, but this should be acceptable.
+ */
+ trace_index = vlib_buffer_get_trace_index (b);
+ if (PREDICT_FALSE (pool_is_free_index (tm->trace_buffer_pool, trace_index)))
+ return vnet_trace_placeholder;
n_data_bytes = round_pow2 (n_data_bytes, sizeof (h[0]));
n_data_words = n_data_bytes / sizeof (h[0]);
- vec_add2_aligned (tm->trace_buffer_pool[b->trace_index], h,
- 1 + n_data_words, sizeof (h[0]));
+ vec_add2_aligned (tm->trace_buffer_pool[trace_index], h, 1 + n_data_words,
+ sizeof (h[0]));
h->time = vm->cpu_time_last_node_dispatch;
h->n_data = n_data_words;
return h->data;
}
+/* Non-inline (typical use-case) version of the above */
+void *vlib_add_trace (vlib_main_t * vm,
+ vlib_node_runtime_t * r, vlib_buffer_t * b,
+ u32 n_data_bytes);
+
always_inline vlib_trace_header_t *
vlib_trace_header_next (vlib_trace_header_t * h)
{
vlib_free_trace (vlib_main_t * vm, vlib_buffer_t * b)
{
  /* Return the buffer's trace record to the pool, emptying it first
   * so a future reuse of the slot starts from a zero-length vector. */
  vlib_trace_main_t *tm = &vm->trace_main;
  u32 ti;

  vlib_validate_trace (tm, b);
  ti = vlib_buffer_get_trace_index (b);
  vec_set_len (tm->trace_buffer_pool[ti], 0);
  pool_put_index (tm->trace_buffer_pool, ti);
}
always_inline void
nf->flags |= VLIB_FRAME_TRACE;
}
-void trace_apply_filter (vlib_main_t * vm);
+void trace_apply_filter (vlib_main_t *vm);
-/* Mark buffer as traced and allocate trace buffer. */
-always_inline void
+/*
+ * Mark buffer as traced and allocate trace buffer.
+ * return 1 if the buffer is successfully traced, 0 if not
+ * A buffer might not be traced if tracing is off or if the packet did not
+ * match the filter.
+ */
+always_inline __clib_warn_unused_result int
vlib_trace_buffer (vlib_main_t * vm,
vlib_node_runtime_t * r,
u32 next_index, vlib_buffer_t * b, int follow_chain)
vlib_trace_header_t **h;
if (PREDICT_FALSE (tm->trace_enable == 0))
- return;
+ return 0;
+
+ /* Classifier filter in use? */
+ if (PREDICT_FALSE (vlib_global_main.trace_filter.trace_filter_enable))
+ {
+ /* See if we're supposed to trace this packet... */
+ if (tm->current_trace_filter_function (
+ b, vlib_global_main.trace_filter.classify_table_index,
+ 0 /* full classify */) != 1)
+ return 0;
+ }
/*
* Apply filter to existing traces to keep number of allocated traces low.
do
{
b->flags |= VLIB_BUFFER_IS_TRACED;
- b->trace_index = h - tm->trace_buffer_pool;
+ b->trace_handle = vlib_buffer_make_trace_handle
+ (vm->thread_index, h - tm->trace_buffer_pool);
}
while (follow_chain && (b = vlib_get_next_buffer (vm, b)));
+
+ return 1;
}
always_inline void
{
vlib_buffer_t *b_target = vlib_get_buffer (vm, bi_target);
b_target->flags |= b->flags & VLIB_BUFFER_IS_TRACED;
- b_target->trace_index = b->trace_index;
+ b_target->trace_handle = b->trace_handle;
}
always_inline u32
{
vlib_trace_main_t *tm = &vm->trace_main;
vlib_trace_node_t *tn;
- int n;
if (rt->node_index >= vec_len (tm->nodes))
return 0;
tn = tm->nodes + rt->node_index;
- n = tn->limit - tn->count;
- ASSERT (n >= 0);
+ ASSERT (tn->count <= tn->limit);
- return n;
+ return tn->limit - tn->count;
}
always_inline void