/* SPDX-License-Identifier: Apache-2.0
 * Copyright(c) 2021 Cisco Systems, Inc.
 */

#include <vppinfra/clib.h>
#include <vlib/vlib.h>
#include <vppinfra/vector/mask_compare.h>
#include <vppinfra/vector/compress.h>
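
/* Extract all buffers whose next index matches next_index into the next
 * frame. A per-element match bitmap is built with clib_mask_compare_u16 and
 * the matching buffer (and optional aux) indices are gathered with
 * clib_compress_u32; used_elt_bmp accumulates which elements have already
 * been consumed across successive calls. */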
static_always_inline u32
enqueue_one (vlib_main_t *vm, vlib_node_runtime_t *node,
	     vlib_frame_bitmap_t used_elt_bmp, u16 next_index, u32 *buffers,
	     u16 *nexts, u32 n_buffers, u32 n_left, u32 *tmp, u8 maybe_aux,
	     u32 *aux_data, u32 *tmp_aux)
{
  vlib_frame_bitmap_t match_bmp;
  vlib_frame_t *f;
  u32 n_extracted, n_free;
  u32 *to, *to_aux = 0;

  f = vlib_get_next_frame_internal (vm, node, next_index, 0);

  maybe_aux = maybe_aux && f->aux_offset;

  n_free = VLIB_FRAME_SIZE - f->n_vectors;

  /* if frame contains enough space for worst case scenario, we can avoid
   * use of tmp */
  if (n_free >= n_left)
    {
      to = (u32 *) vlib_frame_vector_args (f) + f->n_vectors;
      if (maybe_aux)
	to_aux = (u32 *) vlib_frame_aux_args (f) + f->n_vectors;
    }
  else
    {
      to = tmp;
      if (maybe_aux)
	to_aux = tmp_aux;
    }
  clib_mask_compare_u16 (next_index, nexts, match_bmp, n_buffers);
  n_extracted = clib_compress_u32 (to, buffers, match_bmp, n_buffers);
  if (maybe_aux)
    clib_compress_u32 (to_aux, aux_data, match_bmp, n_buffers);
  vlib_frame_bitmap_or (used_elt_bmp, match_bmp);

  if (to != tmp)
    {
      /* indices already written to frame, just close it */
      vlib_put_next_frame (vm, node, next_index, n_free - n_extracted);
    }
  else if (n_free >= n_extracted)
    {
      /* enough space in the existing frame */
      to = (u32 *) vlib_frame_vector_args (f) + f->n_vectors;
      vlib_buffer_copy_indices (to, tmp, n_extracted);
      if (maybe_aux)
	{
	  to_aux = (u32 *) vlib_frame_aux_args (f) + f->n_vectors;
	  vlib_buffer_copy_indices (to_aux, tmp_aux, n_extracted);
	}
      vlib_put_next_frame (vm, node, next_index, n_free - n_extracted);
    }
  else
    {
      /* not enough space in the existing frame, fill it and spill the rest
       * into a second frame */
      to = (u32 *) vlib_frame_vector_args (f) + f->n_vectors;
      vlib_buffer_copy_indices (to, tmp, n_free);
      if (maybe_aux)
	{
	  to_aux = (u32 *) vlib_frame_aux_args (f) + f->n_vectors;
	  vlib_buffer_copy_indices (to_aux, tmp_aux, n_free);
	}
      vlib_put_next_frame (vm, node, next_index, 0);

      u32 n_2nd_frame = n_extracted - n_free;
      f = vlib_get_next_frame_internal (vm, node, next_index, 1);
      to = vlib_frame_vector_args (f);
      vlib_buffer_copy_indices (to, tmp + n_free, n_2nd_frame);
      if (maybe_aux)
	{
	  to_aux = vlib_frame_aux_args (f);
	  vlib_buffer_copy_indices (to_aux, tmp_aux + n_free, n_2nd_frame);
	}
      vlib_put_next_frame (vm, node, next_index,
			   VLIB_FRAME_SIZE - n_2nd_frame);
    }

  return n_left - n_extracted;
}
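
/* Process buffers in VLIB_FRAME_SIZE chunks: take the next index of the
 * first unconsumed element, extract everything with that next index via
 * enqueue_one, then use the used-element bitmap to locate the next
 * unconsumed element, until the whole chunk has been enqueued. */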
static_always_inline void
vlib_buffer_enqueue_to_next_fn_inline (vlib_main_t *vm,
				       vlib_node_runtime_t *node, u32 *buffers,
				       u32 *aux_data, u16 *nexts, uword count,
				       u8 maybe_aux)
{
  u32 tmp[VLIB_FRAME_SIZE];
  u32 tmp_aux[VLIB_FRAME_SIZE];
  u32 n_left;
  u16 next_index;

  while (count >= VLIB_FRAME_SIZE)
    {
      vlib_frame_bitmap_t used_elt_bmp = {};
      u32 off = 0;
      n_left = VLIB_FRAME_SIZE;

      next_index = nexts[0];
      n_left = enqueue_one (vm, node, used_elt_bmp, next_index, buffers, nexts,
			    VLIB_FRAME_SIZE, n_left, tmp, maybe_aux, aux_data,
			    tmp_aux);

      while (n_left)
	{
	  while (PREDICT_FALSE (used_elt_bmp[off] == ~0))
	    {
	      off++;
	      ASSERT (off < ARRAY_LEN (used_elt_bmp));
	    }

	  next_index =
	    nexts[off * 64 + count_trailing_zeros (~used_elt_bmp[off])];
	  n_left = enqueue_one (vm, node, used_elt_bmp, next_index, buffers,
				nexts, VLIB_FRAME_SIZE, n_left, tmp, maybe_aux,
				aux_data, tmp_aux);
	}

      buffers += VLIB_FRAME_SIZE;
      if (maybe_aux)
	aux_data += VLIB_FRAME_SIZE;
      nexts += VLIB_FRAME_SIZE;
      count -= VLIB_FRAME_SIZE;
    }

  if (count)
    {
      vlib_frame_bitmap_t used_elt_bmp = {};
      u32 off = 0;
      next_index = nexts[0];
      n_left = count;

      n_left = enqueue_one (vm, node, used_elt_bmp, next_index, buffers, nexts,
			    count, n_left, tmp, maybe_aux, aux_data, tmp_aux);

      while (n_left)
	{
	  while (PREDICT_FALSE (used_elt_bmp[off] == ~0))
	    {
	      off++;
	      ASSERT (off < ARRAY_LEN (used_elt_bmp));
	    }

	  next_index =
	    nexts[off * 64 + count_trailing_zeros (~used_elt_bmp[off])];
	  n_left =
	    enqueue_one (vm, node, used_elt_bmp, next_index, buffers, nexts,
			 count, n_left, tmp, maybe_aux, aux_data, tmp_aux);
	}
    }
}

void __clib_section (".vlib_buffer_enqueue_to_next_fn")
CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_next_fn)
(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u16 *nexts,
 uword count)
{
  vlib_buffer_enqueue_to_next_fn_inline (vm, node, buffers, NULL, nexts, count,
					 0 /* maybe_aux */);
}

CLIB_MARCH_FN_REGISTRATION (vlib_buffer_enqueue_to_next_fn);

void __clib_section (".vlib_buffer_enqueue_to_next_with_aux_fn")
CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_next_with_aux_fn)
(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u32 *aux_data,
 u16 *nexts, uword count)
{
  vlib_buffer_enqueue_to_next_fn_inline (vm, node, buffers, aux_data, nexts,
					 count, 1 /* maybe_aux */);
}

CLIB_MARCH_FN_REGISTRATION (vlib_buffer_enqueue_to_next_with_aux_fn);
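
/* Enqueue all buffers to a single next index. The common case of the whole
 * vector fitting in the current frame is a single copy; otherwise frames are
 * filled and re-acquired until every index has been copied. */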
static_always_inline void
vlib_buffer_enqueue_to_single_next_fn_inline (vlib_main_t *vm,
					      vlib_node_runtime_t *node,
					      u32 *buffers, u32 *aux_data,
					      u16 next_index, u32 count,
					      u8 with_aux)
{
  u32 *to_next, *to_next_aux, n_left_to_next, n_enq;

  if (with_aux)
    vlib_get_next_frame_with_aux (vm, node, next_index, to_next, to_next_aux,
				  n_left_to_next);
  else
    vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

  if (PREDICT_TRUE (n_left_to_next >= count))
    {
      vlib_buffer_copy_indices (to_next, buffers, count);
      if (with_aux)
	vlib_buffer_copy_indices (to_next_aux, aux_data, count);
      n_left_to_next -= count;
      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
      return;
    }

  n_enq = n_left_to_next;
next:
  vlib_buffer_copy_indices (to_next, buffers, n_enq);
  if (with_aux)
    vlib_buffer_copy_indices (to_next_aux, aux_data, n_enq);
  n_left_to_next -= n_enq;

  if (PREDICT_FALSE (count > n_enq))
    {
      count -= n_enq;
      buffers += n_enq;
      if (with_aux)
	aux_data += n_enq;

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
      if (with_aux)
	vlib_get_next_frame_with_aux (vm, node, next_index, to_next,
				      to_next_aux, n_left_to_next);
      else
	vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
      n_enq = clib_min (n_left_to_next, count);
      goto next;
    }
  vlib_put_next_frame (vm, node, next_index, n_left_to_next);
}

void __clib_section (".vlib_buffer_enqueue_to_single_next_fn")
CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_single_next_fn)
(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u16 next_index,
 u32 count)
{
  vlib_buffer_enqueue_to_single_next_fn_inline (
    vm, node, buffers, NULL, next_index, count, 0 /* with_aux */);
}
CLIB_MARCH_FN_REGISTRATION (vlib_buffer_enqueue_to_single_next_fn);

void __clib_section (".vlib_buffer_enqueue_to_single_next_with_aux_fn")
CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_single_next_with_aux_fn)
(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u32 *aux_data,
 u16 next_index, u32 count)
{
  vlib_buffer_enqueue_to_single_next_fn_inline (
    vm, node, buffers, aux_data, next_index, count, 1 /* with_aux */);
}
CLIB_MARCH_FN_REGISTRATION (vlib_buffer_enqueue_to_single_next_with_aux_fn);
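
/* Reserve a slot in the destination thread's frame queue by advancing the
 * tail with a compare-and-swap. If the ring is full, either give up (so the
 * caller can drop on congestion) or spin until a slot frees up, checking the
 * worker barrier while waiting. */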
static inline vlib_frame_queue_elt_t *
vlib_get_frame_queue_elt (vlib_frame_queue_main_t *fqm, u32 index,
			  int dont_wait)
{
  vlib_frame_queue_t *fq;
  u64 nelts, tail, new_tail;

  fq = vec_elt (fqm->vlib_frame_queues, index);
  nelts = fq->nelts;

retry:
  tail = __atomic_load_n (&fq->tail, __ATOMIC_ACQUIRE);
  new_tail = tail + 1;

  if (new_tail >= fq->head + nelts)
    {
      if (dont_wait)
	return 0;

      /* Wait until a ring slot is available */
      while (new_tail >= fq->head + nelts)
	vlib_worker_thread_barrier_check ();
    }

  if (!__atomic_compare_exchange_n (&fq->tail, &tail, new_tail, 0 /* weak */,
				    __ATOMIC_RELAXED, __ATOMIC_RELAXED))
    goto retry;

  return fq->elts + (new_tail & (nelts - 1));
}
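
/* Distribute up to one frame worth of buffers to their destination threads.
 * For each distinct thread index, a match bitmap selects the buffers headed
 * to that thread; they are compressed either into a frame queue element for
 * that thread or, when the queue is congested and drops are allowed, into a
 * local drop list. */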
static_always_inline u32
vlib_buffer_enqueue_to_thread_inline (vlib_main_t *vm,
				      vlib_node_runtime_t *node,
				      vlib_frame_queue_main_t *fqm,
				      u32 *buffer_indices, u16 *thread_indices,
				      u32 n_packets, int drop_on_congestion,
				      int with_aux, u32 *aux_data)
{
  u32 drop_list[VLIB_FRAME_SIZE], n_drop = 0;
  vlib_frame_bitmap_t mask, used_elts = {};
  vlib_frame_queue_elt_t *hf = 0;
  u16 thread_index;
  u32 n_comp, off = 0, n_left = n_packets;

  thread_index = thread_indices[0];

more:
  clib_mask_compare_u16 (thread_index, thread_indices, mask, n_packets);
  hf = vlib_get_frame_queue_elt (fqm, thread_index, drop_on_congestion);

  n_comp = clib_compress_u32 (hf ? hf->buffer_index : drop_list + n_drop,
			      buffer_indices, mask, n_packets);
  if (with_aux)
    clib_compress_u32 (hf ? hf->aux_data : drop_list + n_drop, aux_data, mask,
		       n_packets);

  if (hf)
    {
      if (node->flags & VLIB_NODE_FLAG_TRACE)
	hf->maybe_trace = 1;
      hf->n_vectors = n_comp;
      __atomic_store_n (&hf->valid, 1, __ATOMIC_RELEASE);
      vlib_get_main_by_index (thread_index)->check_frame_queues = 1;
    }
  else
    n_drop += n_comp;

  n_left -= n_comp;

  if (n_left)
    {
      vlib_frame_bitmap_or (used_elts, mask);

      while (PREDICT_FALSE (used_elts[off] == ~0))
	{
	  off++;
	  ASSERT (off < ARRAY_LEN (used_elts));
	}

      thread_index =
	thread_indices[off * 64 + count_trailing_zeros (~used_elts[off])];
      goto more;
    }

  if (drop_on_congestion && n_drop)
    vlib_buffer_free (vm, drop_list, n_drop);

  return n_packets - n_drop;
}
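
/* External entry points: split the input into VLIB_FRAME_SIZE chunks so the
 * inline worker can use fixed-size bitmaps and a stack-allocated drop list. */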
u32 __clib_section (".vlib_buffer_enqueue_to_thread_fn")
CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_thread_fn)
(vlib_main_t *vm, vlib_node_runtime_t *node, u32 frame_queue_index,
 u32 *buffer_indices, u16 *thread_indices, u32 n_packets,
 int drop_on_congestion)
{
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vlib_frame_queue_main_t *fqm;
  u32 n_enq = 0;

  fqm = vec_elt_at_index (tm->frame_queue_mains, frame_queue_index);

  while (n_packets >= VLIB_FRAME_SIZE)
    {
      n_enq += vlib_buffer_enqueue_to_thread_inline (
	vm, node, fqm, buffer_indices, thread_indices, VLIB_FRAME_SIZE,
	drop_on_congestion, 0 /* with_aux */, NULL);
      buffer_indices += VLIB_FRAME_SIZE;
      thread_indices += VLIB_FRAME_SIZE;
      n_packets -= VLIB_FRAME_SIZE;
    }

  if (n_packets == 0)
    return n_enq;

  n_enq += vlib_buffer_enqueue_to_thread_inline (
    vm, node, fqm, buffer_indices, thread_indices, n_packets,
    drop_on_congestion, 0 /* with_aux */, NULL);

  return n_enq;
}

u32 __clib_section (".vlib_buffer_enqueue_to_thread_with_aux_fn")
CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_thread_with_aux_fn)
(vlib_main_t *vm, vlib_node_runtime_t *node, u32 frame_queue_index,
 u32 *buffer_indices, u32 *aux, u16 *thread_indices, u32 n_packets,
 int drop_on_congestion)
{
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vlib_frame_queue_main_t *fqm;
  u32 n_enq = 0;

  fqm = vec_elt_at_index (tm->frame_queue_mains, frame_queue_index);

  while (n_packets >= VLIB_FRAME_SIZE)
    {
      n_enq += vlib_buffer_enqueue_to_thread_inline (
	vm, node, fqm, buffer_indices, thread_indices, VLIB_FRAME_SIZE,
	drop_on_congestion, 1 /* with_aux */, aux);
      buffer_indices += VLIB_FRAME_SIZE;
      thread_indices += VLIB_FRAME_SIZE;
      n_packets -= VLIB_FRAME_SIZE;
      aux += VLIB_FRAME_SIZE;
    }

  if (n_packets == 0)
    return n_enq;

  n_enq += vlib_buffer_enqueue_to_thread_inline (
    vm, node, fqm, buffer_indices, thread_indices, n_packets,
    drop_on_congestion, 1 /* with_aux */, aux);

  return n_enq;
}

CLIB_MARCH_FN_REGISTRATION (vlib_buffer_enqueue_to_thread_fn);
CLIB_MARCH_FN_REGISTRATION (vlib_buffer_enqueue_to_thread_with_aux_fn);
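
/* Drain this thread's frame queue: copy buffer (and optional aux) indices
 * out of valid ring elements into frames for fqm->node_index, stopping once
 * fq->vector_threshold vectors have been pushed into the graph in one call. */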
static_always_inline u32
vlib_frame_queue_dequeue_inline (vlib_main_t *vm, vlib_frame_queue_main_t *fqm,
				 u8 with_aux)
{
  u32 thread_id = vm->thread_index;
  vlib_frame_queue_t *fq = fqm->vlib_frame_queues[thread_id];
  u32 mask = fq->nelts - 1;
  vlib_frame_queue_elt_t *elt;
  u32 n_free, n_copy, *from, *from_aux, *to = 0, *to_aux = 0, processed = 0,
      vectors = 0;
  vlib_frame_t *f = 0;

  ASSERT (vm == vlib_global_main.vlib_mains[thread_id]);

  if (PREDICT_FALSE (fqm->node_index == ~0))
    return 0;
  /*
   * Gather trace data for frame queues
   */
  if (PREDICT_FALSE (fq->trace))
    {
      frame_queue_trace_t *fqt;
      frame_queue_nelt_counter_t *fqh;
      u32 elix;

      fqt = &fqm->frame_queue_traces[thread_id];

      fqt->nelts = fq->nelts;
      fqt->head = fq->head;
      fqt->tail = fq->tail;
      fqt->threshold = fq->vector_threshold;
      fqt->n_in_use = fqt->tail - fqt->head;
      if (fqt->n_in_use >= fqt->nelts)
	{
	  /* if beyond max then use max */
	  fqt->n_in_use = fqt->nelts - 1;
	}

      /* Record the number of elements in use in the histogram */
      fqh = &fqm->frame_queue_histogram[thread_id];
      fqh->count[fqt->n_in_use]++;

      /* Record a snapshot of the elements in use */
      for (elix = 0; elix < fqt->nelts; elix++)
	{
	  elt = fq->elts + ((fq->head + 1 + elix) & (mask));
	  fqt->n_vectors[elix] = elt->n_vectors;
	}
      fqt->written = 1;
    }
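
  /* Dequeue elements until the ring is empty, an element is not yet valid,
   * or the vector threshold is reached. A partially copied element stays on
   * the ring with its offset advanced. */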
  while (1)
    {
      if (fq->head == fq->tail)
	break;

      elt = fq->elts + ((fq->head + 1) & mask);

      if (!__atomic_load_n (&elt->valid, __ATOMIC_ACQUIRE))
	break;

      from = elt->buffer_index + elt->offset;
      if (with_aux)
	from_aux = elt->aux_data + elt->offset;
      ASSERT (elt->offset + elt->n_vectors <= VLIB_FRAME_SIZE);

      if (f == 0)
	{
	  f = vlib_get_frame_to_node (vm, fqm->node_index);
	  to = vlib_frame_vector_args (f);
	  if (with_aux)
	    to_aux = vlib_frame_aux_args (f);
	  n_free = VLIB_FRAME_SIZE;
	}

      if (elt->maybe_trace)
	f->frame_flags |= VLIB_NODE_FLAG_TRACE;

      n_copy = clib_min (n_free, elt->n_vectors);

      vlib_buffer_copy_indices (to, from, n_copy);
      to += n_copy;
      if (with_aux)
	{
	  vlib_buffer_copy_indices (to_aux, from_aux, n_copy);
	  to_aux += n_copy;
	}

      n_free -= n_copy;
      vectors += n_copy;

      if (n_free == 0)
	{
	  f->n_vectors = VLIB_FRAME_SIZE;
	  vlib_put_frame_to_node (vm, fqm->node_index, f);
	  f = 0;
	}

      if (n_copy < elt->n_vectors)
	{
	  /* not empty - leave it on the ring */
	  elt->n_vectors -= n_copy;
	  elt->offset += n_copy;
	}
      else
	{
	  /* empty - reset and bump head */
	  u32 sz = STRUCT_OFFSET_OF (vlib_frame_queue_elt_t, end_of_reset);
	  clib_memset (elt, 0, sz);
	  __atomic_store_n (&fq->head, fq->head + 1, __ATOMIC_RELEASE);
	  processed++;
	}

      /* Limit the number of packets pushed into the graph */
      if (vectors >= fq->vector_threshold)
	break;
    }

  if (f)
    {
      f->n_vectors = VLIB_FRAME_SIZE - n_free;
      vlib_put_frame_to_node (vm, fqm->node_index, f);
    }

  return processed;
}

u32 __clib_section (".vlib_frame_queue_dequeue_fn")
CLIB_MULTIARCH_FN (vlib_frame_queue_dequeue_fn)
(vlib_main_t *vm, vlib_frame_queue_main_t *fqm)
{
  return vlib_frame_queue_dequeue_inline (vm, fqm, 0 /* with_aux */);
}

CLIB_MARCH_FN_REGISTRATION (vlib_frame_queue_dequeue_fn);

u32 __clib_section (".vlib_frame_queue_dequeue_with_aux_fn")
CLIB_MULTIARCH_FN (vlib_frame_queue_dequeue_with_aux_fn)
(vlib_main_t *vm, vlib_frame_queue_main_t *fqm)
{
  return vlib_frame_queue_dequeue_inline (vm, fqm, 1 /* with_aux */);
}

CLIB_MARCH_FN_REGISTRATION (vlib_frame_queue_dequeue_with_aux_fn);

#ifndef CLIB_MARCH_VARIANT
vlib_buffer_func_main_t vlib_buffer_func_main;

static clib_error_t *
vlib_buffer_funcs_init (vlib_main_t *vm)
{
  vlib_buffer_func_main_t *bfm = &vlib_buffer_func_main;
  bfm->buffer_enqueue_to_next_fn =
    CLIB_MARCH_FN_POINTER (vlib_buffer_enqueue_to_next_fn);
  bfm->buffer_enqueue_to_next_with_aux_fn =
    CLIB_MARCH_FN_POINTER (vlib_buffer_enqueue_to_next_with_aux_fn);
  bfm->buffer_enqueue_to_single_next_fn =
    CLIB_MARCH_FN_POINTER (vlib_buffer_enqueue_to_single_next_fn);
  bfm->buffer_enqueue_to_single_next_with_aux_fn =
    CLIB_MARCH_FN_POINTER (vlib_buffer_enqueue_to_single_next_with_aux_fn);
  bfm->buffer_enqueue_to_thread_fn =
    CLIB_MARCH_FN_POINTER (vlib_buffer_enqueue_to_thread_fn);
  bfm->buffer_enqueue_to_thread_with_aux_fn =
    CLIB_MARCH_FN_POINTER (vlib_buffer_enqueue_to_thread_with_aux_fn);
  return 0;
}

VLIB_INIT_FUNCTION (vlib_buffer_funcs_init);
#endif