2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
44 #include <vppinfra/fifo.h>
45 #include <vppinfra/vector/index_to_ptr.h>
46 #include <vlib/buffer.h>
47 #include <vlib/physmem_funcs.h>
48 #include <vlib/main.h>
49 #include <vlib/node.h>
52 vlib buffer access methods.
55 typedef void (vlib_buffer_enqueue_to_next_fn_t) (vlib_main_t *vm,
56 vlib_node_runtime_t *node,
57 u32 *buffers, u16 *nexts,
59 typedef void (vlib_buffer_enqueue_to_next_with_aux_fn_t) (
60 vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u32 *aux_data,
61 u16 *nexts, uword count);
62 typedef void (vlib_buffer_enqueue_to_single_next_fn_t) (
63 vlib_main_t *vm, vlib_node_runtime_t *node, u32 *ers, u16 next_index,
66 typedef void (vlib_buffer_enqueue_to_single_next_with_aux_fn_t) (
67 vlib_main_t *vm, vlib_node_runtime_t *node, u32 *ers, u32 *aux_data,
68 u16 next_index, u32 count);
70 typedef u32 (vlib_buffer_enqueue_to_thread_fn_t) (
71 vlib_main_t *vm, vlib_node_runtime_t *node, u32 frame_queue_index,
72 u32 *buffer_indices, u16 *thread_indices, u32 n_packets,
73 int drop_on_congestion);
75 typedef u32 (vlib_buffer_enqueue_to_thread_with_aux_fn_t) (
76 vlib_main_t *vm, vlib_node_runtime_t *node, u32 frame_queue_index,
77 u32 *buffer_indices, u32 *aux, u16 *thread_indices, u32 n_packets,
78 int drop_on_congestion);
82 vlib_buffer_enqueue_to_next_fn_t *buffer_enqueue_to_next_fn;
83 vlib_buffer_enqueue_to_next_with_aux_fn_t
84 *buffer_enqueue_to_next_with_aux_fn;
85 vlib_buffer_enqueue_to_single_next_fn_t *buffer_enqueue_to_single_next_fn;
86 vlib_buffer_enqueue_to_single_next_with_aux_fn_t
87 *buffer_enqueue_to_single_next_with_aux_fn;
88 vlib_buffer_enqueue_to_thread_fn_t *buffer_enqueue_to_thread_fn;
89 vlib_buffer_enqueue_to_thread_with_aux_fn_t
90 *buffer_enqueue_to_thread_with_aux_fn;
91 } vlib_buffer_func_main_t;
93 extern vlib_buffer_func_main_t vlib_buffer_func_main;
96 vlib_buffer_validate (vlib_main_t * vm, vlib_buffer_t * b)
98 vlib_buffer_main_t *bm = vm->buffer_main;
99 vlib_buffer_pool_t *bp;
101 /* reference count in allocated buffer always must be 1 or higher */
102 ASSERT (b->ref_count > 0);
104 /* verify that buffer pool index is valid */
105 bp = vec_elt_at_index (bm->buffer_pools, b->buffer_pool_index);
106 ASSERT (pointer_to_uword (b) >= bp->start);
107 ASSERT (pointer_to_uword (b) < bp->start + bp->size -
108 (bp->data_size + sizeof (vlib_buffer_t)));
112 vlib_buffer_ptr_from_index (uword buffer_mem_start, u32 buffer_index,
115 offset += ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
116 return uword_to_pointer (buffer_mem_start + offset, vlib_buffer_t *);
119 /** \brief Translate buffer index into buffer pointer
121 @param vm - (vlib_main_t *) vlib main data structure pointer
122 @param buffer_index - (u32) buffer index
123 @return - (vlib_buffer_t *) buffer pointer
125 always_inline vlib_buffer_t *
126 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
128 vlib_buffer_main_t *bm = vm->buffer_main;
131 b = vlib_buffer_ptr_from_index (bm->buffer_mem_start, buffer_index, 0);
132 vlib_buffer_validate (vm, b);
136 static_always_inline u32
137 vlib_buffer_get_default_data_size (vlib_main_t * vm)
139 return vm->buffer_main->default_data_size;
142 static_always_inline void
143 vlib_buffer_copy_indices (u32 * dst, u32 * src, u32 n_indices)
145 clib_memcpy_u32 (dst, src, n_indices);
149 vlib_buffer_copy_indices_from_ring (u32 * dst, u32 * ring, u32 start,
150 u32 ring_size, u32 n_buffers)
152 ASSERT (n_buffers <= ring_size);
154 if (PREDICT_TRUE (start + n_buffers <= ring_size))
156 vlib_buffer_copy_indices (dst, ring + start, n_buffers);
160 u32 n = ring_size - start;
161 vlib_buffer_copy_indices (dst, ring + start, n);
162 vlib_buffer_copy_indices (dst + n, ring, n_buffers - n);
167 vlib_buffer_copy_indices_to_ring (u32 * ring, u32 * src, u32 start,
168 u32 ring_size, u32 n_buffers)
170 ASSERT (n_buffers <= ring_size);
172 if (PREDICT_TRUE (start + n_buffers <= ring_size))
174 vlib_buffer_copy_indices (ring + start, src, n_buffers);
178 u32 n = ring_size - start;
179 vlib_buffer_copy_indices (ring + start, src, n);
180 vlib_buffer_copy_indices (ring, src + n, n_buffers - n);
184 static_always_inline void
185 vlib_buffer_copy_template (vlib_buffer_t * b, vlib_buffer_t * bt)
187 #if defined CLIB_HAVE_VEC512
188 b->as_u8x64[0] = bt->as_u8x64[0];
189 #elif defined (CLIB_HAVE_VEC256)
190 b->as_u8x32[0] = bt->as_u8x32[0];
191 b->as_u8x32[1] = bt->as_u8x32[1];
192 #elif defined (CLIB_HAVE_VEC128)
193 b->as_u8x16[0] = bt->as_u8x16[0];
194 b->as_u8x16[1] = bt->as_u8x16[1];
195 b->as_u8x16[2] = bt->as_u8x16[2];
196 b->as_u8x16[3] = bt->as_u8x16[3];
198 clib_memcpy_fast (b, bt, 64);
203 vlib_buffer_pool_get_default_for_numa (vlib_main_t * vm, u32 numa_node)
205 ASSERT (numa_node < VLIB_BUFFER_MAX_NUMA_NODES);
206 return vm->buffer_main->default_buffer_pool_index_for_numa[numa_node];
/** \brief Translate array of buffer indices into buffer pointers with offset

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32 *) array of buffer indices
    @param b - (void **) array to store buffer pointers
    @param count - (uword) number of elements
    @param offset - (i32) offset applied to each pointer

    NOTE(review): extraction dropped several structural lines of this
    function (the closing of the parameter list, braces, and the
    unrolled while-loops around the clib_index_to_ptr_u32 calls).
    Code below is byte-identical to the surviving lines; restore the
    missing structure from upstream vlib before compiling. */
static_always_inline void
vlib_get_buffers_with_offset (vlib_main_t *vm, u32 *bi, void **b, u32 count,
  /* all indices resolve relative to buffer memory base plus caller offset */
  uword buffer_mem_start = vm->buffer_main->buffer_mem_start;
  void *base = (void *) (buffer_mem_start + offset);
  /* objsize lets the compiler prove the destination can hold whole
     64-entry batches, enabling the unrolled fast path */
  int objsize = __builtin_object_size (b, 0);
  const int sh = CLIB_LOG2_CACHE_LINE_BYTES;

  if (COMPILE_TIME_CONST (count) == 0 && objsize >= 64 * sizeof (b[0]) &&
      (objsize & ((8 * sizeof (b[0])) - 1)) == 0)
      /* fast path: round count up and convert in big batches */
      u32 n = round_pow2 (count, 8);
      ASSERT (objsize >= count);
      CLIB_ASSUME (objsize >= count);
	  clib_index_to_ptr_u32 (bi, base, sh, b, 64);
	  clib_index_to_ptr_u32 (bi, base, sh, b, 8);
  /* generic tail / small-count path */
  clib_index_to_ptr_u32 (bi, base, sh, b, count);
252 /** \brief Translate array of buffer indices into buffer pointers
254 @param vm - (vlib_main_t *) vlib main data structure pointer
255 @param bi - (u32 *) array of buffer indices
256 @param b - (vlib_buffer_t **) array to store buffer pointers
257 @param count - (uword) number of elements
260 static_always_inline void
261 vlib_get_buffers (vlib_main_t *vm, u32 *bi, vlib_buffer_t **b, u32 count)
263 vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
266 /** \brief Translate buffer pointer into buffer index
268 @param vm - (vlib_main_t *) vlib main data structure pointer
269 @param p - (void *) buffer pointer
270 @return - (u32) buffer index
274 vlib_get_buffer_index (vlib_main_t * vm, void *p)
276 vlib_buffer_main_t *bm = vm->buffer_main;
277 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
278 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
279 ASSERT (offset < bm->buffer_mem_size);
280 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
281 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
/** \brief Translate array of buffer pointers into buffer indices with offset

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param b - (void **) array of buffer pointers
    @param bi - (u32 *) array to store buffer indices
    @param count - (uword) number of elements
    @param offset - (i32) offset applied to each pointer

    NOTE(review): extraction dropped this function's opening brace, the
    v2/v3 declarations, the off4 subtraction, the while-loop headers and
    the scalar tail loop. Code below is byte-identical to the surviving
    lines; restore the missing structure from upstream vlib. */
static_always_inline void
vlib_get_buffer_indices_with_offset (vlib_main_t * vm, void **b, u32 * bi,
				     uword count, i32 offset)
#ifdef CLIB_HAVE_VEC256
  /* permutation that gathers the low u32 of each 64-bit lane */
  u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
  /* folding the caller offset into the base lets the loop use raw loads */
  u64x4 off4 = u64x4_splat (vm->buffer_main->buffer_mem_start - offset);

      /* load 4 pointers into 256-bit register */
      u64x4 v0 = u64x4_load_unaligned (b);
      u64x4 v1 = u64x4_load_unaligned (b + 4);

      v0 >>= CLIB_LOG2_CACHE_LINE_BYTES;
      v1 >>= CLIB_LOG2_CACHE_LINE_BYTES;

      /* permute 256-bit register so lower u32s of each buffer index are
       * placed into lower 128-bits */
      v2 = u32x8_permute ((u32x8) v0, mask);
      v3 = u32x8_permute ((u32x8) v1, mask);

      /* extract lower 128-bits and save them to the array of buffer indices */
      u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
      u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);

      /* equivalent non-nector implementation */
      bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
      bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
      bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
      bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
      /* scalar tail: one pointer at a time */
      bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
346 /** \brief Translate array of buffer pointers into buffer indices
348 @param vm - (vlib_main_t *) vlib main data structure pointer
349 @param b - (vlib_buffer_t **) array of buffer pointers
350 @param bi - (u32 *) array to store buffer indices
351 @param count - (uword) number of elements
353 static_always_inline void
354 vlib_get_buffer_indices (vlib_main_t * vm, vlib_buffer_t ** b, u32 * bi,
357 vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
360 /** \brief Get next buffer in buffer linklist, or zero for end of list.
362 @param vm - (vlib_main_t *) vlib main data structure pointer
363 @param b - (void *) buffer pointer
364 @return - (vlib_buffer_t *) next buffer, or NULL
366 always_inline vlib_buffer_t *
367 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
369 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
370 ? vlib_get_buffer (vm, b->next_buffer) : 0);
373 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
374 vlib_buffer_t * b_first);
376 /** \brief Get length in bytes of the buffer chain
378 @param vm - (vlib_main_t *) vlib main data structure pointer
379 @param b - (void *) buffer pointer
380 @return - (uword) length of buffer chain
383 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
385 uword len = b->current_length;
387 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
390 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
391 return len + b->total_length_not_including_first_buffer;
393 return vlib_buffer_length_in_chain_slow_path (vm, b);
396 /** \brief Get length in bytes of the buffer index buffer chain
398 @param vm - (vlib_main_t *) vlib main data structure pointer
399 @param bi - (u32) buffer index
400 @return - (uword) length of buffer chain
403 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
405 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
406 return vlib_buffer_length_in_chain (vm, b);
409 /** \brief Copy buffer contents to memory
411 @param vm - (vlib_main_t *) vlib main data structure pointer
412 @param buffer_index - (u32) buffer index
413 @param contents - (u8 *) memory, <strong>must be large enough</strong>
414 @return - (uword) length of buffer chain
417 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
419 uword content_len = 0;
425 b = vlib_get_buffer (vm, buffer_index);
426 l = b->current_length;
427 clib_memcpy_fast (contents + content_len, b->data + b->current_data, l);
429 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
431 buffer_index = b->next_buffer;
438 vlib_buffer_get_pa (vlib_main_t * vm, vlib_buffer_t * b)
440 return vlib_physmem_get_pa (vm, b->data);
444 vlib_buffer_get_current_pa (vlib_main_t * vm, vlib_buffer_t * b)
446 return vlib_buffer_get_pa (vm, b) + b->current_data;
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)	\
  do {							\
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);	\
    vlib_prefetch_buffer_header (_b, type);		\
  } while (0)
/* Tracked allocation state of a buffer index (used by the debug
 * known-buffer hash). Extraction dropped the "typedef enum" opener and
 * the VLIB_BUFFER_UNKNOWN member (referenced by vlib_buffer_is_known);
 * restored here. */
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
473 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
475 vlib_buffer_known_state_t
478 always_inline vlib_buffer_known_state_t
479 vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
481 vlib_buffer_main_t *bm = vm->buffer_main;
483 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
484 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
485 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
486 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
489 /* Validates sanity of a single buffer.
490 Returns format'ed vector with error message if any. */
491 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
494 u8 *vlib_validate_buffers (vlib_main_t * vm,
496 uword next_buffer_stride,
498 vlib_buffer_known_state_t known_state,
499 uword follow_buffer_next);
501 static_always_inline vlib_buffer_pool_t *
502 vlib_get_buffer_pool (vlib_main_t * vm, u8 buffer_pool_index)
504 vlib_buffer_main_t *bm = vm->buffer_main;
505 return vec_elt_at_index (bm->buffer_pools, buffer_pool_index);
508 static_always_inline __clib_warn_unused_result uword
509 vlib_buffer_pool_get (vlib_main_t * vm, u8 buffer_pool_index, u32 * buffers,
512 vlib_buffer_pool_t *bp = vlib_get_buffer_pool (vm, buffer_pool_index);
515 ASSERT (bp->buffers);
517 clib_spinlock_lock (&bp->lock);
519 if (PREDICT_TRUE (n_buffers < len))
522 vlib_buffer_copy_indices (buffers, bp->buffers + len, n_buffers);
524 clib_spinlock_unlock (&bp->lock);
529 vlib_buffer_copy_indices (buffers, bp->buffers, len);
531 clib_spinlock_unlock (&bp->lock);
537 /** \brief Allocate buffers from specific pool into supplied array
539 @param vm - (vlib_main_t *) vlib main data structure pointer
540 @param buffers - (u32 * ) buffer index array
541 @param n_buffers - (u32) number of buffers requested
542 @return - (u32) number of buffers actually allocated, may be
543 less than the number requested or zero
546 always_inline __clib_warn_unused_result u32
547 vlib_buffer_alloc_from_pool (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
548 u8 buffer_pool_index)
550 vlib_buffer_main_t *bm = vm->buffer_main;
551 vlib_buffer_pool_t *bp;
552 vlib_buffer_pool_thread_t *bpt;
553 u32 *src, *dst, len, n_left;
555 /* If buffer allocation fault injection is configured */
556 if (VLIB_BUFFER_ALLOC_FAULT_INJECTOR > 0)
558 u32 vlib_buffer_alloc_may_fail (vlib_main_t *, u32);
560 /* See how many buffers we're willing to allocate */
561 n_buffers = vlib_buffer_alloc_may_fail (vm, n_buffers);
566 bp = vec_elt_at_index (bm->buffer_pools, buffer_pool_index);
567 bpt = vec_elt_at_index (bp->threads, vm->thread_index);
573 /* per-thread cache contains enough buffers */
574 if (len >= n_buffers)
576 src = bpt->cached_buffers + len - n_buffers;
577 vlib_buffer_copy_indices (dst, src, n_buffers);
578 bpt->n_cached -= n_buffers;
582 /* alloc bigger than cache - take buffers directly from main pool */
583 if (n_buffers >= VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ)
585 n_buffers = vlib_buffer_pool_get (vm, buffer_pool_index, buffers,
590 /* take everything available in the cache */
593 vlib_buffer_copy_indices (dst, bpt->cached_buffers, len);
599 len = round_pow2 (n_left, 32);
600 len = vlib_buffer_pool_get (vm, buffer_pool_index, bpt->cached_buffers,
606 u32 n_copy = clib_min (len, n_left);
607 src = bpt->cached_buffers + len - n_copy;
608 vlib_buffer_copy_indices (dst, src, n_copy);
609 bpt->n_cached -= n_copy;
616 /* Verify that buffers are known free. */
618 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
619 VLIB_BUFFER_KNOWN_FREE);
620 if (PREDICT_FALSE (bm->alloc_callback_fn != 0))
621 bm->alloc_callback_fn (vm, buffer_pool_index, buffers, n_buffers);
625 /** \brief Allocate buffers from specific numa node into supplied array
627 @param vm - (vlib_main_t *) vlib main data structure pointer
628 @param buffers - (u32 * ) buffer index array
629 @param n_buffers - (u32) number of buffers requested
630 @param numa_node - (u32) numa node
631 @return - (u32) number of buffers actually allocated, may be
632 less than the number requested or zero
634 always_inline __clib_warn_unused_result u32
635 vlib_buffer_alloc_on_numa (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
638 u8 index = vlib_buffer_pool_get_default_for_numa (vm, numa_node);
639 return vlib_buffer_alloc_from_pool (vm, buffers, n_buffers, index);
642 /** \brief Allocate buffers into supplied array
644 @param vm - (vlib_main_t *) vlib main data structure pointer
645 @param buffers - (u32 * ) buffer index array
646 @param n_buffers - (u32) number of buffers requested
647 @return - (u32) number of buffers actually allocated, may be
648 less than the number requested or zero
651 always_inline __clib_warn_unused_result u32
652 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
654 return vlib_buffer_alloc_on_numa (vm, buffers, n_buffers, vm->numa_node);
657 /** \brief Allocate buffers into ring
659 @param vm - (vlib_main_t *) vlib main data structure pointer
660 @param buffers - (u32 * ) buffer index ring
661 @param start - (u32) first slot in the ring
662 @param ring_size - (u32) ring size
663 @param n_buffers - (u32) number of buffers requested
664 @return - (u32) number of buffers actually allocated, may be
665 less than the number requested or zero
667 always_inline __clib_warn_unused_result u32
668 vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
669 u32 ring_size, u32 n_buffers)
673 ASSERT (n_buffers <= ring_size);
675 if (PREDICT_TRUE (start + n_buffers <= ring_size))
676 return vlib_buffer_alloc (vm, ring + start, n_buffers);
678 n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
680 if (PREDICT_TRUE (n_alloc == ring_size - start))
681 n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
686 /** \brief Allocate buffers into ring from specific buffer pool
688 @param vm - (vlib_main_t *) vlib main data structure pointer
689 @param buffers - (u32 * ) buffer index ring
690 @param start - (u32) first slot in the ring
691 @param ring_size - (u32) ring size
692 @param n_buffers - (u32) number of buffers requested
693 @return - (u32) number of buffers actually allocated, may be
694 less than the number requested or zero
696 always_inline __clib_warn_unused_result u32
697 vlib_buffer_alloc_to_ring_from_pool (vlib_main_t * vm, u32 * ring, u32 start,
698 u32 ring_size, u32 n_buffers,
699 u8 buffer_pool_index)
703 ASSERT (n_buffers <= ring_size);
705 if (PREDICT_TRUE (start + n_buffers <= ring_size))
706 return vlib_buffer_alloc_from_pool (vm, ring + start, n_buffers,
709 n_alloc = vlib_buffer_alloc_from_pool (vm, ring + start, ring_size - start,
712 if (PREDICT_TRUE (n_alloc == ring_size - start))
713 n_alloc += vlib_buffer_alloc_from_pool (vm, ring, n_buffers - n_alloc,
719 static_always_inline void
720 vlib_buffer_pool_put (vlib_main_t * vm, u8 buffer_pool_index,
721 u32 * buffers, u32 n_buffers)
723 vlib_buffer_main_t *bm = vm->buffer_main;
724 vlib_buffer_pool_t *bp = vlib_get_buffer_pool (vm, buffer_pool_index);
725 vlib_buffer_pool_thread_t *bpt = vec_elt_at_index (bp->threads,
727 u32 n_cached, n_empty;
730 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
731 VLIB_BUFFER_KNOWN_ALLOCATED);
732 if (PREDICT_FALSE (bm->free_callback_fn != 0))
733 bm->free_callback_fn (vm, buffer_pool_index, buffers, n_buffers);
735 n_cached = bpt->n_cached;
736 n_empty = VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ - n_cached;
737 if (n_buffers <= n_empty)
739 vlib_buffer_copy_indices (bpt->cached_buffers + n_cached,
741 bpt->n_cached = n_cached + n_buffers;
745 vlib_buffer_copy_indices (bpt->cached_buffers + n_cached,
746 buffers + n_buffers - n_empty, n_empty);
747 bpt->n_cached = VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ;
749 clib_spinlock_lock (&bp->lock);
750 vlib_buffer_copy_indices (bp->buffers + bp->n_avail, buffers,
751 n_buffers - n_empty);
752 bp->n_avail += n_buffers - n_empty;
753 clib_spinlock_unlock (&bp->lock);
756 /** \brief return unused buffers back to pool
757 This function can be used to return buffers back to pool without going
758 through vlib_buffer_free. Buffer metadata must not be modified in any
759 way before buffers are returned.
761 @param vm - (vlib_main_t *) vlib main data structure pointer
762 @param buffers - (u32 * ) buffer index array
763 @param n_buffers - (u32) number of buffers to free
764 @param buffer_pool_index - (u8) buffer pool index
767 vlib_buffer_unalloc_to_pool (vlib_main_t *vm, u32 *buffers, u32 n_buffers,
768 u8 buffer_pool_index)
770 vlib_buffer_pool_put (vm, buffer_pool_index, buffers, n_buffers);
/* Core free path shared by vlib_buffer_free / _no_next / _one.
 * Batches buffers belonging to one pool into 'queue' and returns them
 * with vlib_buffer_pool_put; falls back to a one-at-a-time path for
 * buffers that are chained, shared (ref_count > 1) or from a different
 * pool.
 *
 * NOTE(review): this chunk was extracted with many structural lines
 * missing (braces, loop headers, #else/#endif, returns). All code lines
 * below are byte-identical to the surviving lines; restore the missing
 * structure from upstream vlib before compiling. */
static_always_inline void
vlib_buffer_free_inline (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
  /* local queue of indices accumulated for a single pool_put call */
  const int queue_size = 128;
  vlib_buffer_pool_t *bp = 0;
  u8 buffer_pool_index = ~0;
  u32 n_queue = 0, queue[queue_size + 8];
  /* template used to reset buffer metadata on free */
  vlib_buffer_template_t bt = {};
#if defined(CLIB_HAVE_VEC128)
  /* vector masks for comparing pool index / flags / ref_count of several
     buffer headers at once */
  vlib_buffer_t bpi_mask = {.buffer_pool_index = ~0 };
  vlib_buffer_t bpi_vec = {};
  vlib_buffer_t flags_refs_mask = {
    .flags = VLIB_BUFFER_NEXT_PRESENT,
  if (PREDICT_FALSE (n_buffers == 0))
  /* seed pool state from the first buffer */
  vlib_buffer_t *b = vlib_get_buffer (vm, buffers[0]);
  buffer_pool_index = b->buffer_pool_index;
  bp = vlib_get_buffer_pool (vm, buffer_pool_index);
  bt = bp->buffer_template;
#if defined(CLIB_HAVE_VEC128)
  bpi_vec.buffer_pool_index = buffer_pool_index;
  u32 bi, sum = 0, flags, next;
#if defined(CLIB_HAVE_VEC512)
#if defined(CLIB_HAVE_VEC512)
  vlib_get_buffers (vm, buffers, b, 8);
  vlib_get_buffers (vm, buffers, b, 4);
  /* prefetch the second half of the batch */
  vlib_get_buffers (vm, buffers + 8, b + 4, 4);
  vlib_prefetch_buffer_header (b[4], LOAD);
  vlib_prefetch_buffer_header (b[5], LOAD);
  vlib_prefetch_buffer_header (b[6], LOAD);
  vlib_prefetch_buffer_header (b[7], LOAD);
  /* fast-path eligibility check: XOR against the expected pool index and
     OR in chain/ref-count bits; any nonzero lane forces the slow path */
#if defined(CLIB_HAVE_VEC512)
  u8x16 p0, p1, p2, p3, p4, p5, p6, p7, r;
  p0 = u8x16_load_unaligned (b[0]);
  p1 = u8x16_load_unaligned (b[1]);
  p2 = u8x16_load_unaligned (b[2]);
  p3 = u8x16_load_unaligned (b[3]);
  p4 = u8x16_load_unaligned (b[4]);
  p5 = u8x16_load_unaligned (b[5]);
  p6 = u8x16_load_unaligned (b[6]);
  p7 = u8x16_load_unaligned (b[7]);

  r = p0 ^ bpi_vec.as_u8x16[0];
  r |= p1 ^ bpi_vec.as_u8x16[0];
  r |= p2 ^ bpi_vec.as_u8x16[0];
  r |= p3 ^ bpi_vec.as_u8x16[0];
  r |= p4 ^ bpi_vec.as_u8x16[0];
  r |= p5 ^ bpi_vec.as_u8x16[0];
  r |= p6 ^ bpi_vec.as_u8x16[0];
  r |= p7 ^ bpi_vec.as_u8x16[0];
  r &= bpi_mask.as_u8x16[0];
  (p0 | p1 | p2 | p3 | p4 | p5 | p6 | p7) & flags_refs_mask.as_u8x16[0];

  sum = !u8x16_is_all_zero (r);
#elif defined(CLIB_HAVE_VEC128)
  u8x16 p0, p1, p2, p3, r;
  p0 = u8x16_load_unaligned (b[0]);
  p1 = u8x16_load_unaligned (b[1]);
  p2 = u8x16_load_unaligned (b[2]);
  p3 = u8x16_load_unaligned (b[3]);

  r = p0 ^ bpi_vec.as_u8x16[0];
  r |= p1 ^ bpi_vec.as_u8x16[0];
  r |= p2 ^ bpi_vec.as_u8x16[0];
  r |= p3 ^ bpi_vec.as_u8x16[0];
  r &= bpi_mask.as_u8x16[0];
  r |= (p0 | p1 | p2 | p3) & flags_refs_mask.as_u8x16[0];

  sum = !u8x16_is_all_zero (r);
  /* scalar equivalent of the eligibility check */
  sum &= VLIB_BUFFER_NEXT_PRESENT;
  sum += b[0]->ref_count - 1;
  sum += b[1]->ref_count - 1;
  sum += b[2]->ref_count - 1;
  sum += b[3]->ref_count - 1;
  sum |= b[0]->buffer_pool_index ^ buffer_pool_index;
  sum |= b[1]->buffer_pool_index ^ buffer_pool_index;
  sum |= b[2]->buffer_pool_index ^ buffer_pool_index;
  sum |= b[3]->buffer_pool_index ^ buffer_pool_index;
  /* fast path: whole batch is simple - enqueue indices directly */
#if defined(CLIB_HAVE_VEC512)
  vlib_buffer_copy_indices (queue + n_queue, buffers, 8);
  vlib_buffer_validate (vm, b[0]);
  vlib_buffer_validate (vm, b[1]);
  vlib_buffer_validate (vm, b[2]);
  vlib_buffer_validate (vm, b[3]);
  vlib_buffer_validate (vm, b[4]);
  vlib_buffer_validate (vm, b[5]);
  vlib_buffer_validate (vm, b[6]);
  vlib_buffer_validate (vm, b[7]);

  VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[0]);
  VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[1]);
  VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[2]);
  VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[3]);
  VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[4]);
  VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[5]);
  VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[6]);
  VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[7]);
  vlib_buffer_copy_indices (queue + n_queue, buffers, 4);
  vlib_buffer_validate (vm, b[0]);
  vlib_buffer_validate (vm, b[1]);
  vlib_buffer_validate (vm, b[2]);
  vlib_buffer_validate (vm, b[3]);

  VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[0]);
  VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[1]);
  VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[2]);
  VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[3]);
  /* flush the queue when full */
  if (n_queue >= queue_size)
  vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
#if defined(CLIB_HAVE_VEC512)
  /* slow path: one buffer at a time, following chains and handling
     pool changes / shared buffers */
  b[0] = vlib_get_buffer (vm, bi);
  next = b[0]->next_buffer;
  if (PREDICT_FALSE (buffer_pool_index != b[0]->buffer_pool_index))
      /* pool changed mid-stream: flush queued indices to the old pool */
      vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
      buffer_pool_index = b[0]->buffer_pool_index;
#if defined(CLIB_HAVE_VEC128)
      bpi_vec.buffer_pool_index = buffer_pool_index;
      bp = vlib_get_buffer_pool (vm, buffer_pool_index);
      bt = bp->buffer_template;
  vlib_buffer_validate (vm, b[0]);

  VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[0]);

  /* only the last owner actually frees the buffer */
  if (clib_atomic_sub_fetch (&b[0]->ref_count, 1) == 0)
      queue[n_queue++] = bi;
  if (n_queue == queue_size)
      vlib_buffer_pool_put (vm, buffer_pool_index, queue, queue_size);
  /* follow the chain when freeing whole chains was requested */
  if (maybe_next && (flags & VLIB_BUFFER_NEXT_PRESENT))
  /* final flush of any remaining queued indices */
  vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
1005 /** \brief Free buffers
1006 Frees the entire buffer chain for each buffer
1008 @param vm - (vlib_main_t *) vlib main data structure pointer
1009 @param buffers - (u32 * ) buffer index array
1010 @param n_buffers - (u32) number of buffers to free
1014 vlib_buffer_free (vlib_main_t * vm,
1015 /* pointer to first buffer */
1017 /* number of buffers to free */
1020 vlib_buffer_free_inline (vm, buffers, n_buffers, /* maybe next */ 1);
1023 /** \brief Free buffers, does not free the buffer chain for each buffer
1025 @param vm - (vlib_main_t *) vlib main data structure pointer
1026 @param buffers - (u32 * ) buffer index array
1027 @param n_buffers - (u32) number of buffers to free
1031 vlib_buffer_free_no_next (vlib_main_t * vm,
1032 /* pointer to first buffer */
1034 /* number of buffers to free */
1037 vlib_buffer_free_inline (vm, buffers, n_buffers, /* maybe next */ 0);
1040 /** \brief Free one buffer
1041 Shorthand to free a single buffer chain.
1043 @param vm - (vlib_main_t *) vlib main data structure pointer
1044 @param buffer_index - (u32) buffer index to free
1047 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
1049 vlib_buffer_free_inline (vm, &buffer_index, 1, /* maybe next */ 1);
1052 /** \brief Free buffers from ring
1054 @param vm - (vlib_main_t *) vlib main data structure pointer
1055 @param buffers - (u32 * ) buffer index ring
1056 @param start - (u32) first slot in the ring
1057 @param ring_size - (u32) ring size
1058 @param n_buffers - (u32) number of buffers
1061 vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
1062 u32 ring_size, u32 n_buffers)
1064 ASSERT (n_buffers <= ring_size);
1066 if (PREDICT_TRUE (start + n_buffers <= ring_size))
1068 vlib_buffer_free (vm, ring + start, n_buffers);
1072 vlib_buffer_free (vm, ring + start, ring_size - start);
1073 vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
1077 /** \brief Free buffers from ring without freeing tail buffers
1079 @param vm - (vlib_main_t *) vlib main data structure pointer
1080 @param buffers - (u32 * ) buffer index ring
1081 @param start - (u32) first slot in the ring
1082 @param ring_size - (u32) ring size
1083 @param n_buffers - (u32) number of buffers
1086 vlib_buffer_free_from_ring_no_next (vlib_main_t * vm, u32 * ring, u32 start,
1087 u32 ring_size, u32 n_buffers)
1089 ASSERT (n_buffers <= ring_size);
1091 if (PREDICT_TRUE (start + n_buffers <= ring_size))
1093 vlib_buffer_free_no_next (vm, ring + start, n_buffers);
1097 vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
1098 vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
1102 /* Append given data to end of buffer, possibly allocating new buffers. */
1103 int vlib_buffer_add_data (vlib_main_t * vm, u32 * buffer_index, void *data,
/* Define vlib_buffer and vnet_buffer flags bits preserved for copy/clone:
 * chain-structure bits, the trace flag, and every user (vnet) flag bit
 * (~VLIB_BUFFER_FLAGS_ALL covers all bits not owned by vlib). */
#define VLIB_BUFFER_COPY_CLONE_FLAGS_MASK \
  (VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID | \
   VLIB_BUFFER_IS_TRACED | ~VLIB_BUFFER_FLAGS_ALL)
1111 /* duplicate all buffers in chain */
1112 always_inline vlib_buffer_t *
1113 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
1115 vlib_buffer_t *s, *d, *fd;
1116 uword n_alloc, n_buffers = 1;
1117 u32 flag_mask = VLIB_BUFFER_COPY_CLONE_FLAGS_MASK;
1121 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
1124 s = vlib_get_buffer (vm, s->next_buffer);
1126 u32 new_buffers[n_buffers];
1128 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
1130 /* No guarantee that we'll get all the buffers we asked for */
1131 if (PREDICT_FALSE (n_alloc < n_buffers))
1134 vlib_buffer_free (vm, new_buffers, n_alloc);
1140 fd = d = vlib_get_buffer (vm, new_buffers[0]);
1141 d->current_data = s->current_data;
1142 d->current_length = s->current_length;
1143 d->flags = s->flags & flag_mask;
1144 d->trace_handle = s->trace_handle;
1145 d->total_length_not_including_first_buffer =
1146 s->total_length_not_including_first_buffer;
1147 clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
1148 clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
1149 clib_memcpy_fast (vlib_buffer_get_current (d),
1150 vlib_buffer_get_current (s), s->current_length);
1153 for (i = 1; i < n_buffers; i++)
1156 d->next_buffer = new_buffers[i];
1158 s = vlib_get_buffer (vm, s->next_buffer);
1159 d = vlib_get_buffer (vm, new_buffers[i]);
1160 d->current_data = s->current_data;
1161 d->current_length = s->current_length;
1162 clib_memcpy_fast (vlib_buffer_get_current (d),
1163 vlib_buffer_get_current (s), s->current_length);
1164 d->flags = s->flags & flag_mask;
1170 /* duplicate first buffer in chain */
1171 always_inline vlib_buffer_t *
1172 vlib_buffer_copy_no_chain (vlib_main_t * vm, vlib_buffer_t * b, u32 * di)
1176 if ((vlib_buffer_alloc (vm, di, 1)) != 1)
1179 d = vlib_get_buffer (vm, *di);
1181 d->current_data = b->current_data;
1182 d->current_length = b->current_length;
1183 clib_memcpy_fast (d->opaque, b->opaque, sizeof (b->opaque));
1184 clib_memcpy_fast (d->opaque2, b->opaque2, sizeof (b->opaque2));
1185 clib_memcpy_fast (vlib_buffer_get_current (d),
1186 vlib_buffer_get_current (b), b->current_length);
1191 /* \brief Move packet from current position to offset position in buffer.
1192 Only work for small packet using one buffer with room to fit the move
1193 @param vm - (vlib_main_t *) vlib main data structure pointer
1194 @param b - (vlib_buffer_t *) pointer to buffer
1195 @param offset - (i16) position to move the packet in buffer
1198 vlib_buffer_move (vlib_main_t * vm, vlib_buffer_t * b, i16 offset)
1200 ASSERT ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
1201 ASSERT (offset + VLIB_BUFFER_PRE_DATA_SIZE >= 0);
1202 ASSERT (offset + b->current_length <
1203 vlib_buffer_get_default_data_size (vm));
1205 u8 *source = vlib_buffer_get_current (b);
1206 b->current_data = offset;
1207 u8 *destination = vlib_buffer_get_current (b);
1208 u16 length = b->current_length;
1210 if (source + length <= destination) /* no overlap */
1211 clib_memcpy_fast (destination, source, length);
1213 memmove (destination, source, length);
1216 /** \brief Create a maximum of 256 clones of buffer and store them
1217 in the supplied array
1219 @param vm - (vlib_main_t *) vlib main data structure pointer
1220 @param src_buffer - (u32) source buffer index
1221 @param buffers - (u32 * ) buffer index array
1222 @param n_buffers - (u16) number of buffer clones requested (<=256)
1223 @param head_end_offset - (u16) offset relative to current position
1224 where packet head ends
1225 @param offset - (i16) copy packet head at current position if 0,
1226 else at offset position to change headroom space as specified
1227 @return - (u16) number of buffers actually cloned, may be
1228 less than the number requested or zero
1231 vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
1232 u16 n_buffers, u16 head_end_offset, i16 offset)
1235 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
1237 ASSERT (s->ref_count == 1);
1239 ASSERT (n_buffers <= 256);
1240 ASSERT (offset + VLIB_BUFFER_PRE_DATA_SIZE >= 0);
1241 ASSERT ((offset + head_end_offset) <
1242 vlib_buffer_get_default_data_size (vm));
1244 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
1246 buffers[0] = src_buffer;
1248 vlib_buffer_move (vm, s, offset);
1250 for (i = 1; i < n_buffers; i++)
1253 d = vlib_buffer_copy (vm, s);
1256 buffers[i] = vlib_get_buffer_index (vm, d);
1262 if (PREDICT_FALSE ((n_buffers == 1) && (offset == 0)))
1264 buffers[0] = src_buffer;
1268 n_buffers = vlib_buffer_alloc_from_pool (vm, buffers, n_buffers,
1269 s->buffer_pool_index);
1271 for (i = 0; i < n_buffers; i++)
1273 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
1275 d->current_data = offset;
1277 d->current_data = s->current_data;
1279 d->current_length = head_end_offset;
1280 ASSERT (d->buffer_pool_index == s->buffer_pool_index);
1282 d->total_length_not_including_first_buffer = s->current_length -
1284 if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
1286 d->total_length_not_including_first_buffer +=
1287 s->total_length_not_including_first_buffer;
1289 d->flags = (s->flags & VLIB_BUFFER_COPY_CLONE_FLAGS_MASK) |
1290 VLIB_BUFFER_NEXT_PRESENT;
1291 d->trace_handle = s->trace_handle;
1292 clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
1293 clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
1294 clib_memcpy_fast (vlib_buffer_get_current (d),
1295 vlib_buffer_get_current (s), head_end_offset);
1296 d->next_buffer = src_buffer;
1298 vlib_buffer_advance (s, head_end_offset);
1299 s->ref_count = n_buffers ? n_buffers : s->ref_count;
1300 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
1302 s = vlib_get_buffer (vm, s->next_buffer);
1303 s->ref_count = n_buffers ? n_buffers : s->ref_count;
1309 /** \brief Create multiple clones of buffer and store them
1310 in the supplied array
1312 @param vm - (vlib_main_t *) vlib main data structure pointer
1313 @param src_buffer - (u32) source buffer index
1314 @param buffers - (u32 * ) buffer index array
1315 @param n_buffers - (u16) number of buffer clones requested (<=256)
1316 @param head_end_offset - (u16) offset relative to current position
1317 where packet head ends
1318 @param offset - (i16) copy packet head at current position if 0,
1319 else at offset position to change headroom space as specified
1320 @return - (u16) number of buffers actually cloned, may be
1321 less than the number requested or zero
1324 vlib_buffer_clone_at_offset (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
1325 u16 n_buffers, u16 head_end_offset, i16 offset)
1327 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
1330 while (n_buffers > 256)
1332 vlib_buffer_t *copy;
1333 copy = vlib_buffer_copy (vm, s);
1334 n_cloned += vlib_buffer_clone_256 (vm,
1335 vlib_get_buffer_index (vm, copy),
1336 (buffers + n_cloned),
1337 256, head_end_offset, offset);
1340 n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
1342 n_buffers, head_end_offset, offset);
1347 /** \brief Create multiple clones of buffer and store them
1348 in the supplied array
1350 @param vm - (vlib_main_t *) vlib main data structure pointer
1351 @param src_buffer - (u32) source buffer index
1352 @param buffers - (u32 * ) buffer index array
1353 @param n_buffers - (u16) number of buffer clones requested (<=256)
1354 @param head_end_offset - (u16) offset relative to current position
1355 where packet head ends
1356 @return - (u16) number of buffers actually cloned, may be
1357 less than the number requested or zero
1360 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
1361 u16 n_buffers, u16 head_end_offset)
1363 return vlib_buffer_clone_at_offset (vm, src_buffer, buffers, n_buffers,
1364 head_end_offset, 0);
1367 /** \brief Attach cloned tail to the buffer
1369 @param vm - (vlib_main_t *) vlib main data structure pointer
1370 @param head - (vlib_buffer_t *) head buffer
1371 @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
1375 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
1376 vlib_buffer_t * tail)
1378 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
1379 ASSERT (head->buffer_pool_index == tail->buffer_pool_index);
1381 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
1382 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1383 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
1384 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
1385 head->next_buffer = vlib_get_buffer_index (vm, tail);
1386 head->total_length_not_including_first_buffer = tail->current_length +
1387 tail->total_length_not_including_first_buffer;
1390 clib_atomic_add_fetch (&tail->ref_count, 1);
1392 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
1394 tail = vlib_get_buffer (vm, tail->next_buffer);
1399 /* Initializes the buffer as an empty packet with no chained buffers. */
1401 vlib_buffer_chain_init (vlib_buffer_t * first)
1403 first->total_length_not_including_first_buffer = 0;
1404 first->current_length = 0;
1405 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1406 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
1409 /* The provided next_bi buffer index is appended to the end of the packet. */
1410 always_inline vlib_buffer_t *
1411 vlib_buffer_chain_buffer (vlib_main_t * vm, vlib_buffer_t * last, u32 next_bi)
1413 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
1414 last->next_buffer = next_bi;
1415 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
1416 next_buffer->current_length = 0;
1417 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1421 /* Increases or decreases the packet length.
1422 * It does not allocate or deallocate new buffers.
1423 * Therefore, the added length must be compatible
1424 * with the last buffer. */
1426 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
1427 vlib_buffer_t * last, i32 len)
1429 last->current_length += len;
1431 first->total_length_not_including_first_buffer += len;
1434 /* Copy data to the end of the packet and increases its length.
1435 * It does not allocate new buffers.
1436 * Returns the number of copied bytes. */
1438 vlib_buffer_chain_append_data (vlib_main_t * vm,
1439 vlib_buffer_t * first,
1440 vlib_buffer_t * last, void *data, u16 data_len)
1442 u32 n_buffer_bytes = vlib_buffer_get_default_data_size (vm);
1443 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
1444 u16 len = clib_min (data_len,
1445 n_buffer_bytes - last->current_length -
1446 last->current_data);
1447 clib_memcpy_fast (vlib_buffer_get_current (last) + last->current_length,
1449 vlib_buffer_chain_increase_length (first, last, len);
1453 /* Copy data to the end of the packet and increases its length.
1454 * Allocates additional buffers from the free list if necessary.
1455 * Returns the number of copied bytes.
1456 * 'last' value is modified whenever new buffers are allocated and
1457 * chained and points to the last buffer in the chain. */
1459 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
1460 vlib_buffer_t * first,
1461 vlib_buffer_t ** last, void *data,
1463 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
1465 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
1466 format_vlib_buffer_contents, format_vlib_buffer_no_chain;
1470 /* Vector of packet data. */
1473 /* Number of buffers to allocate in each call to allocator. */
1474 u32 min_n_buffers_each_alloc;
1477 } vlib_packet_template_t;
1479 void vlib_packet_template_init (vlib_main_t * vm,
1480 vlib_packet_template_t * t,
1482 uword n_packet_data_bytes,
1483 uword min_n_buffers_each_alloc,
1486 void *vlib_packet_template_get_packet (vlib_main_t * vm,
1487 vlib_packet_template_t * t,
1491 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
1493 vec_free (t->packet_data);
1497 vlib_buffer_space_left_at_end (vlib_main_t * vm, vlib_buffer_t * b)
1499 return b->data + vlib_buffer_get_default_data_size (vm) -
1500 ((u8 *) vlib_buffer_get_current (b) + b->current_length);
1503 #define VLIB_BUFFER_LINEARIZE_MAX 64
/* Collapse a chained buffer into as few segments as possible by copying
   payload forward into the free space of earlier segments. Returns the
   resulting segment count (1 on the single-segment fast path, 0 on
   failure). NOTE(review): several interior lines of this routine are not
   visible in this extract; comments below describe only the visible code. */
1506 vlib_buffer_chain_linearize (vlib_main_t * vm, vlib_buffer_t * b)
1508 vlib_buffer_t *dst_b;
1509 u32 n_buffers = 1, to_free = 0;
1510 u16 rem_len, dst_len, data_size, src_len = 0;
/* fast path: already a single segment, nothing to linearize */
1513 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
/* only un-cloned heads can be rewritten in place */
1516 ASSERT (1 == b->ref_count);
1517 if (PREDICT_FALSE (1 != b->ref_count))
1520 data_size = vlib_buffer_get_default_data_size (vm);
/* bytes still to be pulled forward from the tail segments */
1521 rem_len = vlib_buffer_length_in_chain (vm, b) - b->current_length;
1524 dst = vlib_buffer_get_tail (dst_b);
1525 dst_len = vlib_buffer_space_left_at_end (vm, dst_b);
1527 b->total_length_not_including_first_buffer -= dst_len;
/* refill source cursor from the next source segment when exhausted */
1533 while (0 == src_len)
1535 ASSERT (b->flags & VLIB_BUFFER_NEXT_PRESENT);
1536 if (PREDICT_FALSE (!(b->flags & VLIB_BUFFER_NEXT_PRESENT)))
1537 break; /* malformed chained buffer */
1539 b = vlib_get_buffer (vm, b->next_buffer);
1540 src = vlib_buffer_get_current (b);
1541 src_len = b->current_length;
/* advance destination cursor to the next writable segment */
1546 ASSERT (dst_b->flags & VLIB_BUFFER_NEXT_PRESENT);
1547 if (PREDICT_FALSE (!(dst_b->flags & VLIB_BUFFER_NEXT_PRESENT)))
1548 break; /* malformed chained buffer */
1550 vlib_buffer_t *next_dst_b = vlib_get_buffer (vm, dst_b->next_buffer);
1552 if (PREDICT_TRUE (1 == next_dst_b->ref_count))
1554 /* normal case: buffer is not cloned, just use it */
1559 /* cloned buffer, build a new dest chain from there */
1560 vlib_buffer_t *bufs[VLIB_BUFFER_LINEARIZE_MAX];
1561 u32 bis[VLIB_BUFFER_LINEARIZE_MAX + 1];
/* segments needed to hold the remaining bytes, rounded up */
1562 const int n = (rem_len + data_size - 1) / data_size;
1566 ASSERT (n <= VLIB_BUFFER_LINEARIZE_MAX);
1567 if (PREDICT_FALSE (n > VLIB_BUFFER_LINEARIZE_MAX))
1570 n_alloc = vlib_buffer_alloc (vm, bis, n);
/* all-or-nothing: release partial allocation on failure */
1571 if (PREDICT_FALSE (n_alloc != n))
1573 vlib_buffer_free (vm, bis, n_alloc);
1577 vlib_get_buffers (vm, bis, bufs, n);
/* pre-link the freshly allocated replacement chain */
1579 for (i = 0; i < n - 1; i++)
1581 bufs[i]->flags |= VLIB_BUFFER_NEXT_PRESENT;
1582 bufs[i]->next_buffer = bis[i + 1];
/* remember the displaced (cloned) tail so it can be freed later */
1585 to_free = dst_b->next_buffer;
1586 dst_b->next_buffer = bis[0];
/* reset the new destination segment; keep any negative headroom */
1592 dst_b->current_data = clib_min (0, dst_b->current_data);
1593 dst_b->current_length = 0;
1595 dst = dst_b->data + dst_b->current_data;
1596 dst_len = data_size - dst_b->current_data;
1599 copy_len = clib_min (src_len, dst_len);
/* choose copy strategy based on how src and dst ranges relate */
1601 if (PREDICT_TRUE (src == dst))
1605 else if (src + copy_len > dst && dst + copy_len > src)
1607 /* src and dst overlap */
1608 ASSERT (b == dst_b);
1609 memmove (dst, src, copy_len);
1613 clib_memcpy_fast (dst, src, copy_len);
1616 dst_b->current_length += copy_len;
/* advance all three cursors by the amount copied */
1620 dst_len -= copy_len;
1621 src_len -= copy_len;
1622 rem_len -= copy_len;
1625 /* in case of a malformed chain buffer, we'll exit early from the loop. */
1626 ASSERT (0 == rem_len);
1627 b->total_length_not_including_first_buffer -= rem_len;
/* free the displaced cloned tail, if one was cut loose above */
1630 vlib_buffer_free_one (vm, to_free);
1632 if (dst_b->flags & VLIB_BUFFER_NEXT_PRESENT)
1634 /* the resulting chain is smaller than the original, cut it there */
1635 dst_b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1636 vlib_buffer_free_one (vm, dst_b->next_buffer);
1639 /* no longer a chained buffer */
1640 dst_b->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1641 dst_b->total_length_not_including_first_buffer = 0;
1648 #endif /* included_vlib_buffer_funcs_h */
1651 * fd.io coding-style-patch-verification: ON
1654 * eval: (c-set-style "gnu")