2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
44 #include <vppinfra/fifo.h>
45 #include <vlib/buffer.h>
46 #include <vlib/physmem_funcs.h>
47 #include <vlib/main.h>
48 #include <vlib/node.h>
51 vlib buffer access methods.
54 typedef void (vlib_buffer_enqueue_to_next_fn_t) (vlib_main_t *vm,
55 vlib_node_runtime_t *node,
56 u32 *buffers, u16 *nexts,
58 typedef void (vlib_buffer_enqueue_to_single_next_fn_t) (
59 vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u16 next_index,
62 typedef u32 (vlib_buffer_enqueue_to_thread_fn_t) (
63 vlib_main_t *vm, vlib_node_runtime_t *node, u32 frame_queue_index,
64 u32 *buffer_indices, u16 *thread_indices, u32 n_packets,
65 int drop_on_congestion);
67 typedef u32 (vlib_frame_queue_dequeue_fn_t) (vlib_main_t *vm,
68 vlib_frame_queue_main_t *fqm);
72 vlib_buffer_enqueue_to_next_fn_t *buffer_enqueue_to_next_fn;
73 vlib_buffer_enqueue_to_single_next_fn_t *buffer_enqueue_to_single_next_fn;
74 vlib_buffer_enqueue_to_thread_fn_t *buffer_enqueue_to_thread_fn;
75 vlib_frame_queue_dequeue_fn_t *frame_queue_dequeue_fn;
76 } vlib_buffer_func_main_t;
78 extern vlib_buffer_func_main_t vlib_buffer_func_main;
81 vlib_buffer_validate (vlib_main_t * vm, vlib_buffer_t * b)
83 vlib_buffer_main_t *bm = vm->buffer_main;
84 vlib_buffer_pool_t *bp;
86 /* reference count in an allocated buffer must always be 1 or higher */
87 ASSERT (b->ref_count > 0);
89 /* verify that buffer pool index is valid */
90 bp = vec_elt_at_index (bm->buffer_pools, b->buffer_pool_index);
91 ASSERT (pointer_to_uword (b) >= bp->start);
92 ASSERT (pointer_to_uword (b) < bp->start + bp->size -
93 (bp->data_size + sizeof (vlib_buffer_t)));
97 vlib_buffer_ptr_from_index (uword buffer_mem_start, u32 buffer_index,
100 offset += ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
101 return uword_to_pointer (buffer_mem_start + offset, vlib_buffer_t *);
104 /** \brief Translate buffer index into buffer pointer
106 @param vm - (vlib_main_t *) vlib main data structure pointer
107 @param buffer_index - (u32) buffer index
108 @return - (vlib_buffer_t *) buffer pointer
110 always_inline vlib_buffer_t *
111 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
113 vlib_buffer_main_t *bm = vm->buffer_main;
116 b = vlib_buffer_ptr_from_index (bm->buffer_mem_start, buffer_index, 0);
117 vlib_buffer_validate (vm, b);
121 static_always_inline u32
122 vlib_buffer_get_default_data_size (vlib_main_t * vm)
124 return vm->buffer_main->default_data_size;
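/* Illustrative usage sketch, not part of the original header: translate one
   buffer index into a pointer and bound a copy by the pool's default data
   size.  `vm`, `bi` and the destination `scratch` area are assumed to come
   from the caller.

     vlib_buffer_t *b = vlib_get_buffer (vm, bi);
     u32 max = vlib_buffer_get_default_data_size (vm);
     u32 n = clib_min (b->current_length, max);
     clib_memcpy_fast (scratch, vlib_buffer_get_current (b), n);
*/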
127 static_always_inline void
128 vlib_buffer_copy_indices (u32 * dst, u32 * src, u32 n_indices)
130 clib_memcpy_u32 (dst, src, n_indices);
134 vlib_buffer_copy_indices_from_ring (u32 * dst, u32 * ring, u32 start,
135 u32 ring_size, u32 n_buffers)
137 ASSERT (n_buffers <= ring_size);
139 if (PREDICT_TRUE (start + n_buffers <= ring_size))
141 vlib_buffer_copy_indices (dst, ring + start, n_buffers);
145 u32 n = ring_size - start;
146 vlib_buffer_copy_indices (dst, ring + start, n);
147 vlib_buffer_copy_indices (dst + n, ring, n_buffers - n);
152 vlib_buffer_copy_indices_to_ring (u32 * ring, u32 * src, u32 start,
153 u32 ring_size, u32 n_buffers)
155 ASSERT (n_buffers <= ring_size);
157 if (PREDICT_TRUE (start + n_buffers <= ring_size))
159 vlib_buffer_copy_indices (ring + start, src, n_buffers);
163 u32 n = ring_size - start;
164 vlib_buffer_copy_indices (ring + start, src, n);
165 vlib_buffer_copy_indices (ring, src + n, n_buffers - n);
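/* Illustrative sketch, not part of the original header: drain `n` buffer
   indices out of a fixed-size ring into a linear array and later return
   them, letting the helpers deal with the wrap-around.  `ring`, `head` and
   `RING_SZ` are hypothetical driver-side names.

     u32 tmp[RING_SZ];
     vlib_buffer_copy_indices_from_ring (tmp, ring, head, RING_SZ, n);
     // ... process tmp[0..n-1] ...
     vlib_buffer_copy_indices_to_ring (ring, tmp, head, RING_SZ, n);
*/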
169 STATIC_ASSERT_OFFSET_OF (vlib_buffer_t, template_end, 64);
170 static_always_inline void
171 vlib_buffer_copy_template (vlib_buffer_t * b, vlib_buffer_t * bt)
173 #if defined CLIB_HAVE_VEC512
174 b->as_u8x64[0] = bt->as_u8x64[0];
175 #elif defined (CLIB_HAVE_VEC256)
176 b->as_u8x32[0] = bt->as_u8x32[0];
177 b->as_u8x32[1] = bt->as_u8x32[1];
178 #elif defined (CLIB_HAVE_VEC128)
179 b->as_u8x16[0] = bt->as_u8x16[0];
180 b->as_u8x16[1] = bt->as_u8x16[1];
181 b->as_u8x16[2] = bt->as_u8x16[2];
182 b->as_u8x16[3] = bt->as_u8x16[3];
184 clib_memcpy_fast (b, bt, 64);
189 vlib_buffer_pool_get_default_for_numa (vlib_main_t * vm, u32 numa_node)
191 ASSERT (numa_node < VLIB_BUFFER_MAX_NUMA_NODES);
192 return vm->buffer_main->default_buffer_pool_index_for_numa[numa_node];
195 /** \brief Translate array of buffer indices into buffer pointers with offset
197 @param vm - (vlib_main_t *) vlib main data structure pointer
198 @param bi - (u32 *) array of buffer indices
199 @param b - (void **) array to store buffer pointers
200 @param count - (uword) number of elements
201 @param offset - (i32) offset applied to each pointer
203 static_always_inline void
204 vlib_get_buffers_with_offset (vlib_main_t * vm, u32 * bi, void **b, int count,
207 uword buffer_mem_start = vm->buffer_main->buffer_mem_start;
208 #ifdef CLIB_HAVE_VEC512
209 u64x8 of8 = u64x8_splat (buffer_mem_start + offset);
210 u64x4 off = u64x8_extract_lo (of8);
211 /* if count is not const, the compiler will not unroll the while loop,
212 so we maintain a two-in-parallel variant */
215 u64x8 b0 = u64x8_from_u32x8 (u32x8_load_unaligned (bi));
216 u64x8 b1 = u64x8_from_u32x8 (u32x8_load_unaligned (bi + 8));
217 u64x8 b2 = u64x8_from_u32x8 (u32x8_load_unaligned (bi + 16));
218 u64x8 b3 = u64x8_from_u32x8 (u32x8_load_unaligned (bi + 24));
219 /* shift and add to get vlib_buffer_t pointer */
220 u64x8_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + of8, b);
221 u64x8_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + of8, b + 8);
222 u64x8_store_unaligned ((b2 << CLIB_LOG2_CACHE_LINE_BYTES) + of8, b + 16);
223 u64x8_store_unaligned ((b3 << CLIB_LOG2_CACHE_LINE_BYTES) + of8, b + 24);
230 u64x8 b0 = u64x8_from_u32x8 (u32x8_load_unaligned (bi));
231 /* shift and add to get vlib_buffer_t pointer */
232 u64x8_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + of8, b);
237 #elif defined CLIB_HAVE_VEC256
238 u64x4 off = u64x4_splat (buffer_mem_start + offset);
239 /* if count is not const, the compiler will not unroll the while loop,
240 so we maintain a two-in-parallel variant */
243 u64x4 b0 = u64x4_from_u32x4 (u32x4_load_unaligned (bi));
244 u64x4 b1 = u64x4_from_u32x4 (u32x4_load_unaligned (bi + 4));
245 u64x4 b2 = u64x4_from_u32x4 (u32x4_load_unaligned (bi + 8));
246 u64x4 b3 = u64x4_from_u32x4 (u32x4_load_unaligned (bi + 12));
247 u64x4 b4 = u64x4_from_u32x4 (u32x4_load_unaligned (bi + 16));
248 u64x4 b5 = u64x4_from_u32x4 (u32x4_load_unaligned (bi + 20));
249 u64x4 b6 = u64x4_from_u32x4 (u32x4_load_unaligned (bi + 24));
250 u64x4 b7 = u64x4_from_u32x4 (u32x4_load_unaligned (bi + 28));
251 /* shift and add to get vlib_buffer_t pointer */
252 u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
253 u64x4_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 4);
254 u64x4_store_unaligned ((b2 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 8);
255 u64x4_store_unaligned ((b3 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 12);
256 u64x4_store_unaligned ((b4 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 16);
257 u64x4_store_unaligned ((b5 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 20);
258 u64x4_store_unaligned ((b6 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 24);
259 u64x4_store_unaligned ((b7 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 28);
267 #ifdef CLIB_HAVE_VEC256
268 u64x4 b0 = u64x4_from_u32x4 (u32x4_load_unaligned (bi));
269 /* shift and add to get vlib_buffer_t pointer */
270 u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
271 #elif defined (CLIB_HAVE_VEC128)
272 u64x2 off = u64x2_splat (buffer_mem_start + offset);
273 u32x4 bi4 = u32x4_load_unaligned (bi);
274 u64x2 b0 = u64x2_from_u32x4 ((u32x4) bi4);
275 #if defined (__aarch64__)
276 u64x2 b1 = u64x2_from_u32x4_high ((u32x4) bi4);
278 bi4 = u32x4_shuffle (bi4, 2, 3, 0, 1);
279 u64x2 b1 = u64x2_from_u32x4 ((u32x4) bi4);
281 u64x2_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
282 u64x2_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 2);
284 b[0] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[0], offset);
285 b[1] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[1], offset);
286 b[2] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[2], offset);
287 b[3] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[3], offset);
295 b[0] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[0], offset);
302 /** \brief Translate array of buffer indices into buffer pointers
304 @param vm - (vlib_main_t *) vlib main data structure pointer
305 @param bi - (u32 *) array of buffer indices
306 @param b - (vlib_buffer_t **) array to store buffer pointers
307 @param count - (uword) number of elements
310 static_always_inline void
311 vlib_get_buffers (vlib_main_t * vm, u32 * bi, vlib_buffer_t ** b, int count)
313 vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
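/* Illustrative sketch, not part of the original header: the usual node
   pattern is to translate the whole frame of buffer indices up front and
   then walk the resulting pointer array.  `from` and `n_left` come from the
   frame being dispatched; `process_one` is a hypothetical per-packet step.

     vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
     vlib_get_buffers (vm, from, bufs, n_left);
     while (n_left > 0)
       {
         process_one (b[0]);
         b += 1;
         n_left -= 1;
       }
*/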
316 /** \brief Translate buffer pointer into buffer index
318 @param vm - (vlib_main_t *) vlib main data structure pointer
319 @param p - (void *) buffer pointer
320 @return - (u32) buffer index
324 vlib_get_buffer_index (vlib_main_t * vm, void *p)
326 vlib_buffer_main_t *bm = vm->buffer_main;
327 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
328 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
329 ASSERT (offset < bm->buffer_mem_size);
330 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
331 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
334 /** \brief Translate array of buffer pointers into buffer indices with offset
336 @param vm - (vlib_main_t *) vlib main data structure pointer
337 @param b - (void **) array of buffer pointers
338 @param bi - (u32 *) array to store buffer indices
339 @param count - (uword) number of elements
340 @param offset - (i32) offset applied to each pointer
342 static_always_inline void
343 vlib_get_buffer_indices_with_offset (vlib_main_t * vm, void **b, u32 * bi,
344 uword count, i32 offset)
346 #ifdef CLIB_HAVE_VEC256
347 u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
348 u64x4 off4 = u64x4_splat (vm->buffer_main->buffer_mem_start - offset);
352 /* load 4 pointers into 256-bit register */
353 u64x4 v0 = u64x4_load_unaligned (b);
354 u64x4 v1 = u64x4_load_unaligned (b + 4);
360 v0 >>= CLIB_LOG2_CACHE_LINE_BYTES;
361 v1 >>= CLIB_LOG2_CACHE_LINE_BYTES;
363 /* permute 256-bit register so lower u32s of each buffer index are
364 * placed into lower 128-bits */
365 v2 = u32x8_permute ((u32x8) v0, mask);
366 v3 = u32x8_permute ((u32x8) v1, mask);
368 /* extract lower 128-bits and save them to the array of buffer indices */
369 u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
370 u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
378 /* equivalent non-vector implementation */
379 bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
380 bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
381 bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
382 bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
389 bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
396 /** \brief Translate array of buffer pointers into buffer indices
398 @param vm - (vlib_main_t *) vlib main data structure pointer
399 @param b - (vlib_buffer_t **) array of buffer pointers
400 @param bi - (u32 *) array to store buffer indices
401 @param count - (uword) number of elements
403 static_always_inline void
404 vlib_get_buffer_indices (vlib_main_t * vm, vlib_buffer_t ** b, u32 * bi,
407 vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
410 /** \brief Get next buffer in buffer linked list, or zero for end of list.
412 @param vm - (vlib_main_t *) vlib main data structure pointer
413 @param b - (void *) buffer pointer
414 @return - (vlib_buffer_t *) next buffer, or NULL
416 always_inline vlib_buffer_t *
417 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
419 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
420 ? vlib_get_buffer (vm, b->next_buffer) : 0);
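/* Illustrative sketch, not part of the original header: walking a chain with
   vlib_get_next_buffer until it returns 0, here summing segment lengths by
   hand (vlib_buffer_length_in_chain below does the same job with a fast
   path).

     uword len = 0;
     for (vlib_buffer_t *cur = b; cur; cur = vlib_get_next_buffer (vm, cur))
       len += cur->current_length;
*/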
423 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
424 vlib_buffer_t * b_first);
426 /** \brief Get length in bytes of the buffer chain
428 @param vm - (vlib_main_t *) vlib main data structure pointer
429 @param b - (void *) buffer pointer
430 @return - (uword) length of buffer chain
433 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
435 uword len = b->current_length;
437 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
440 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
441 return len + b->total_length_not_including_first_buffer;
443 return vlib_buffer_length_in_chain_slow_path (vm, b);
446 /** \brief Get length in bytes of the buffer index buffer chain
448 @param vm - (vlib_main_t *) vlib main data structure pointer
449 @param bi - (u32) buffer index
450 @return - (uword) length of buffer chain
453 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
455 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
456 return vlib_buffer_length_in_chain (vm, b);
459 /** \brief Copy buffer contents to memory
461 @param vm - (vlib_main_t *) vlib main data structure pointer
462 @param buffer_index - (u32) buffer index
463 @param contents - (u8 *) memory, <strong>must be large enough</strong>
464 @return - (uword) length of buffer chain
467 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
469 uword content_len = 0;
475 b = vlib_get_buffer (vm, buffer_index);
476 l = b->current_length;
477 clib_memcpy_fast (contents + content_len, b->data + b->current_data, l);
479 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
481 buffer_index = b->next_buffer;
488 vlib_buffer_get_pa (vlib_main_t * vm, vlib_buffer_t * b)
490 return vlib_physmem_get_pa (vm, b->data);
494 vlib_buffer_get_current_pa (vlib_main_t * vm, vlib_buffer_t * b)
496 return vlib_buffer_get_pa (vm, b) + b->current_data;
499 /** \brief Prefetch buffer metadata by buffer index
500 The first 64 bytes of the buffer contain most of the header information
502 @param vm - (vlib_main_t *) vlib main data structure pointer
503 @param bi - (u32) buffer index
504 @param type - LOAD, STORE. In most cases, STORE is the right answer
506 /* Prefetch buffer header given index. */
507 #define vlib_prefetch_buffer_with_index(vm,bi,type) \
509 vlib_buffer_t * _b = vlib_get_buffer (vm, bi); \
510 vlib_prefetch_buffer_header (_b, type); \
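/* Illustrative sketch, not part of the original header: inside a dispatch
   loop the macro is typically used to prefetch headers a few packets ahead
   of the ones currently being processed.  `from` and `n_left` are assumed
   frame state.

     if (n_left >= 6)
       {
         vlib_prefetch_buffer_with_index (vm, from[4], STORE);
         vlib_prefetch_buffer_with_index (vm, from[5], STORE);
       }
     // ... process from[0] and from[1] ...
*/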
515 /* Index is unknown. */
518 /* Index is known and free/allocated. */
519 VLIB_BUFFER_KNOWN_FREE,
520 VLIB_BUFFER_KNOWN_ALLOCATED,
521 } vlib_buffer_known_state_t;
523 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
525 vlib_buffer_known_state_t
528 always_inline vlib_buffer_known_state_t
529 vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
531 vlib_buffer_main_t *bm = vm->buffer_main;
533 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
534 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
535 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
536 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
539 /* Validates sanity of a single buffer.
540 Returns a formatted vector with an error message, if any. */
541 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
544 u8 *vlib_validate_buffers (vlib_main_t * vm,
546 uword next_buffer_stride,
548 vlib_buffer_known_state_t known_state,
549 uword follow_buffer_next);
551 static_always_inline vlib_buffer_pool_t *
552 vlib_get_buffer_pool (vlib_main_t * vm, u8 buffer_pool_index)
554 vlib_buffer_main_t *bm = vm->buffer_main;
555 return vec_elt_at_index (bm->buffer_pools, buffer_pool_index);
558 static_always_inline __clib_warn_unused_result uword
559 vlib_buffer_pool_get (vlib_main_t * vm, u8 buffer_pool_index, u32 * buffers,
562 vlib_buffer_pool_t *bp = vlib_get_buffer_pool (vm, buffer_pool_index);
565 ASSERT (bp->buffers);
567 clib_spinlock_lock (&bp->lock);
569 if (PREDICT_TRUE (n_buffers < len))
572 vlib_buffer_copy_indices (buffers, bp->buffers + len, n_buffers);
574 clib_spinlock_unlock (&bp->lock);
579 vlib_buffer_copy_indices (buffers, bp->buffers, len);
581 clib_spinlock_unlock (&bp->lock);
587 /** \brief Allocate buffers from specific pool into supplied array
589 @param vm - (vlib_main_t *) vlib main data structure pointer
590 @param buffers - (u32 * ) buffer index array
591 @param n_buffers - (u32) number of buffers requested
592 @return - (u32) number of buffers actually allocated, may be
593 less than the number requested or zero
596 always_inline __clib_warn_unused_result u32
597 vlib_buffer_alloc_from_pool (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
598 u8 buffer_pool_index)
600 vlib_buffer_main_t *bm = vm->buffer_main;
601 vlib_buffer_pool_t *bp;
602 vlib_buffer_pool_thread_t *bpt;
603 u32 *src, *dst, len, n_left;
605 /* If buffer allocation fault injection is configured */
606 if (VLIB_BUFFER_ALLOC_FAULT_INJECTOR > 0)
608 u32 vlib_buffer_alloc_may_fail (vlib_main_t *, u32);
610 /* See how many buffers we're willing to allocate */
611 n_buffers = vlib_buffer_alloc_may_fail (vm, n_buffers);
616 bp = vec_elt_at_index (bm->buffer_pools, buffer_pool_index);
617 bpt = vec_elt_at_index (bp->threads, vm->thread_index);
623 /* per-thread cache contains enough buffers */
624 if (len >= n_buffers)
626 src = bpt->cached_buffers + len - n_buffers;
627 vlib_buffer_copy_indices (dst, src, n_buffers);
628 bpt->n_cached -= n_buffers;
632 /* alloc bigger than cache - take buffers directly from main pool */
633 if (n_buffers >= VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ)
635 n_buffers = vlib_buffer_pool_get (vm, buffer_pool_index, buffers,
640 /* take everything available in the cache */
643 vlib_buffer_copy_indices (dst, bpt->cached_buffers, len);
649 len = round_pow2 (n_left, 32);
650 len = vlib_buffer_pool_get (vm, buffer_pool_index, bpt->cached_buffers,
656 u32 n_copy = clib_min (len, n_left);
657 src = bpt->cached_buffers + len - n_copy;
658 vlib_buffer_copy_indices (dst, src, n_copy);
659 bpt->n_cached -= n_copy;
666 /* Verify that buffers are known free. */
668 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
669 VLIB_BUFFER_KNOWN_FREE);
670 if (PREDICT_FALSE (bm->alloc_callback_fn != 0))
671 bm->alloc_callback_fn (vm, buffer_pool_index, buffers, n_buffers);
675 /** \brief Allocate buffers from specific numa node into supplied array
677 @param vm - (vlib_main_t *) vlib main data structure pointer
678 @param buffers - (u32 * ) buffer index array
679 @param n_buffers - (u32) number of buffers requested
680 @param numa_node - (u32) numa node
681 @return - (u32) number of buffers actually allocated, may be
682 less than the number requested or zero
684 always_inline __clib_warn_unused_result u32
685 vlib_buffer_alloc_on_numa (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
688 u8 index = vlib_buffer_pool_get_default_for_numa (vm, numa_node);
689 return vlib_buffer_alloc_from_pool (vm, buffers, n_buffers, index);
692 /** \brief Allocate buffers into supplied array
694 @param vm - (vlib_main_t *) vlib main data structure pointer
695 @param buffers - (u32 * ) buffer index array
696 @param n_buffers - (u32) number of buffers requested
697 @return - (u32) number of buffers actually allocated, may be
698 less than the number requested or zero
701 always_inline __clib_warn_unused_result u32
702 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
704 return vlib_buffer_alloc_on_numa (vm, buffers, n_buffers, vm->numa_node);
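/* Illustrative sketch, not part of the original header: allocation may
   return fewer buffers than requested, so the result must be checked and a
   partial batch either used or given back.

     u32 bi[32];
     u32 n = vlib_buffer_alloc (vm, bi, 32);
     if (PREDICT_FALSE (n < 32))
       {
         vlib_buffer_free (vm, bi, n);   // release the partial batch
         return 0;                       // hypothetical error path
       }
*/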
707 /** \brief Allocate buffers into ring
709 @param vm - (vlib_main_t *) vlib main data structure pointer
710 @param buffers - (u32 * ) buffer index ring
711 @param start - (u32) first slot in the ring
712 @param ring_size - (u32) ring size
713 @param n_buffers - (u32) number of buffers requested
714 @return - (u32) number of buffers actually allocated, may be
715 less than the number requested or zero
717 always_inline __clib_warn_unused_result u32
718 vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
719 u32 ring_size, u32 n_buffers)
723 ASSERT (n_buffers <= ring_size);
725 if (PREDICT_TRUE (start + n_buffers <= ring_size))
726 return vlib_buffer_alloc (vm, ring + start, n_buffers);
728 n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
730 if (PREDICT_TRUE (n_alloc == ring_size - start))
731 n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
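/* Illustrative sketch, not part of the original header: refilling a device
   rx ring, where the ring wrap is handled by the helper.  `rxq->bufs`,
   `rxq->tail`, `rxq->size` and `n_refill` are hypothetical driver fields.

     u32 n_alloc = vlib_buffer_alloc_to_ring (vm, rxq->bufs, rxq->tail,
                                              rxq->size, n_refill);
     rxq->tail = (rxq->tail + n_alloc) % rxq->size;  // advance by what we got
*/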
736 /** \brief Allocate buffers into ring from specific buffer pool
738 @param vm - (vlib_main_t *) vlib main data structure pointer
739 @param buffers - (u32 * ) buffer index ring
740 @param start - (u32) first slot in the ring
741 @param ring_size - (u32) ring size
742 @param n_buffers - (u32) number of buffers requested
743 @return - (u32) number of buffers actually allocated, may be
744 less than the number requested or zero
746 always_inline __clib_warn_unused_result u32
747 vlib_buffer_alloc_to_ring_from_pool (vlib_main_t * vm, u32 * ring, u32 start,
748 u32 ring_size, u32 n_buffers,
749 u8 buffer_pool_index)
753 ASSERT (n_buffers <= ring_size);
755 if (PREDICT_TRUE (start + n_buffers <= ring_size))
756 return vlib_buffer_alloc_from_pool (vm, ring + start, n_buffers,
759 n_alloc = vlib_buffer_alloc_from_pool (vm, ring + start, ring_size - start,
762 if (PREDICT_TRUE (n_alloc == ring_size - start))
763 n_alloc += vlib_buffer_alloc_from_pool (vm, ring, n_buffers - n_alloc,
769 static_always_inline void
770 vlib_buffer_pool_put (vlib_main_t * vm, u8 buffer_pool_index,
771 u32 * buffers, u32 n_buffers)
773 vlib_buffer_main_t *bm = vm->buffer_main;
774 vlib_buffer_pool_t *bp = vlib_get_buffer_pool (vm, buffer_pool_index);
775 vlib_buffer_pool_thread_t *bpt = vec_elt_at_index (bp->threads,
777 u32 n_cached, n_empty;
780 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
781 VLIB_BUFFER_KNOWN_ALLOCATED);
782 if (PREDICT_FALSE (bm->free_callback_fn != 0))
783 bm->free_callback_fn (vm, buffer_pool_index, buffers, n_buffers);
785 n_cached = bpt->n_cached;
786 n_empty = VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ - n_cached;
787 if (n_buffers <= n_empty)
789 vlib_buffer_copy_indices (bpt->cached_buffers + n_cached,
791 bpt->n_cached = n_cached + n_buffers;
795 vlib_buffer_copy_indices (bpt->cached_buffers + n_cached,
796 buffers + n_buffers - n_empty, n_empty);
797 bpt->n_cached = VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ;
799 clib_spinlock_lock (&bp->lock);
800 vlib_buffer_copy_indices (bp->buffers + bp->n_avail, buffers,
801 n_buffers - n_empty);
802 bp->n_avail += n_buffers - n_empty;
803 clib_spinlock_unlock (&bp->lock);
806 static_always_inline void
807 vlib_buffer_free_inline (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
810 const int queue_size = 128;
811 vlib_buffer_pool_t *bp = 0;
812 u8 buffer_pool_index = ~0;
813 u32 n_queue = 0, queue[queue_size + 4];
814 vlib_buffer_t bt = { };
815 #if defined(CLIB_HAVE_VEC128)
816 vlib_buffer_t bpi_mask = {.buffer_pool_index = ~0 };
817 vlib_buffer_t bpi_vec = {};
818 vlib_buffer_t flags_refs_mask = {
819 .flags = VLIB_BUFFER_NEXT_PRESENT,
824 if (PREDICT_FALSE (n_buffers == 0))
827 vlib_buffer_t *b = vlib_get_buffer (vm, buffers[0]);
828 buffer_pool_index = b->buffer_pool_index;
829 bp = vlib_get_buffer_pool (vm, buffer_pool_index);
830 vlib_buffer_copy_template (&bt, &bp->buffer_template);
831 #if defined(CLIB_HAVE_VEC128)
832 bpi_vec.buffer_pool_index = buffer_pool_index;
838 u32 bi, sum = 0, flags, next;
843 vlib_get_buffers (vm, buffers, b, 4);
847 vlib_get_buffers (vm, buffers + 8, b + 4, 4);
848 vlib_prefetch_buffer_header (b[4], LOAD);
849 vlib_prefetch_buffer_header (b[5], LOAD);
850 vlib_prefetch_buffer_header (b[6], LOAD);
851 vlib_prefetch_buffer_header (b[7], LOAD);
854 #if defined(CLIB_HAVE_VEC128)
855 u8x16 p0, p1, p2, p3, r;
856 p0 = u8x16_load_unaligned (b[0]);
857 p1 = u8x16_load_unaligned (b[1]);
858 p2 = u8x16_load_unaligned (b[2]);
859 p3 = u8x16_load_unaligned (b[3]);
861 r = p0 ^ bpi_vec.as_u8x16[0];
862 r |= p1 ^ bpi_vec.as_u8x16[0];
863 r |= p2 ^ bpi_vec.as_u8x16[0];
864 r |= p3 ^ bpi_vec.as_u8x16[0];
865 r &= bpi_mask.as_u8x16[0];
866 r |= (p0 | p1 | p2 | p3) & flags_refs_mask.as_u8x16[0];
868 sum = !u8x16_is_all_zero (r);
874 sum &= VLIB_BUFFER_NEXT_PRESENT;
875 sum += b[0]->ref_count - 1;
876 sum += b[1]->ref_count - 1;
877 sum += b[2]->ref_count - 1;
878 sum += b[3]->ref_count - 1;
879 sum |= b[0]->buffer_pool_index ^ buffer_pool_index;
880 sum |= b[1]->buffer_pool_index ^ buffer_pool_index;
881 sum |= b[2]->buffer_pool_index ^ buffer_pool_index;
882 sum |= b[3]->buffer_pool_index ^ buffer_pool_index;
888 vlib_buffer_copy_indices (queue + n_queue, buffers, 4);
889 vlib_buffer_copy_template (b[0], &bt);
890 vlib_buffer_copy_template (b[1], &bt);
891 vlib_buffer_copy_template (b[2], &bt);
892 vlib_buffer_copy_template (b[3], &bt);
895 vlib_buffer_validate (vm, b[0]);
896 vlib_buffer_validate (vm, b[1]);
897 vlib_buffer_validate (vm, b[2]);
898 vlib_buffer_validate (vm, b[3]);
900 VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[0]);
901 VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[1]);
902 VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[2]);
903 VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[3]);
905 if (n_queue >= queue_size)
907 vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
918 b[0] = vlib_get_buffer (vm, bi);
920 next = b[0]->next_buffer;
922 if (PREDICT_FALSE (buffer_pool_index != b[0]->buffer_pool_index))
927 vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
931 buffer_pool_index = b[0]->buffer_pool_index;
932 #if defined(CLIB_HAVE_VEC128)
933 bpi_vec.buffer_pool_index = buffer_pool_index;
935 bp = vlib_get_buffer_pool (vm, buffer_pool_index);
936 vlib_buffer_copy_template (&bt, &bp->buffer_template);
939 vlib_buffer_validate (vm, b[0]);
941 VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[0]);
943 if (clib_atomic_sub_fetch (&b[0]->ref_count, 1) == 0)
945 vlib_buffer_copy_template (b[0], &bt);
946 queue[n_queue++] = bi;
949 if (n_queue == queue_size)
951 vlib_buffer_pool_put (vm, buffer_pool_index, queue, queue_size);
955 if (maybe_next && (flags & VLIB_BUFFER_NEXT_PRESENT))
966 vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
970 /** \brief Free buffers
971 Frees the entire buffer chain for each buffer
973 @param vm - (vlib_main_t *) vlib main data structure pointer
974 @param buffers - (u32 * ) buffer index array
975 @param n_buffers - (u32) number of buffers to free
979 vlib_buffer_free (vlib_main_t * vm,
980 /* pointer to first buffer */
982 /* number of buffers to free */
985 vlib_buffer_free_inline (vm, buffers, n_buffers, /* maybe next */ 1);
988 /** \brief Free buffers, does not free the buffer chain for each buffer
990 @param vm - (vlib_main_t *) vlib main data structure pointer
991 @param buffers - (u32 * ) buffer index array
992 @param n_buffers - (u32) number of buffers to free
996 vlib_buffer_free_no_next (vlib_main_t * vm,
997 /* pointer to first buffer */
999 /* number of buffers to free */
1002 vlib_buffer_free_inline (vm, buffers, n_buffers, /* maybe next */ 0);
1005 /** \brief Free one buffer
1006 Shorthand to free a single buffer chain.
1008 @param vm - (vlib_main_t *) vlib main data structure pointer
1009 @param buffer_index - (u32) buffer index to free
1012 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
1014 vlib_buffer_free_inline (vm, &buffer_index, 1, /* maybe next */ 1);
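/* Illustrative sketch, not part of the original header: the three free
   variants differ only in how much of each chain they release.

     vlib_buffer_free (vm, bi_vec, n);          // complete chains
     vlib_buffer_free_no_next (vm, bi_vec, n);  // heads only, tails are kept
     vlib_buffer_free_one (vm, bi);             // one chain, by index
*/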
1017 /** \brief Free buffers from ring
1019 @param vm - (vlib_main_t *) vlib main data structure pointer
1020 @param buffers - (u32 * ) buffer index ring
1021 @param start - (u32) first slot in the ring
1022 @param ring_size - (u32) ring size
1023 @param n_buffers - (u32) number of buffers
1026 vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
1027 u32 ring_size, u32 n_buffers)
1029 ASSERT (n_buffers <= ring_size);
1031 if (PREDICT_TRUE (start + n_buffers <= ring_size))
1033 vlib_buffer_free (vm, ring + start, n_buffers);
1037 vlib_buffer_free (vm, ring + start, ring_size - start);
1038 vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
1042 /** \brief Free buffers from ring without freeing tail buffers
1044 @param vm - (vlib_main_t *) vlib main data structure pointer
1045 @param buffers - (u32 * ) buffer index ring
1046 @param start - (u32) first slot in the ring
1047 @param ring_size - (u32) ring size
1048 @param n_buffers - (u32) number of buffers
1051 vlib_buffer_free_from_ring_no_next (vlib_main_t * vm, u32 * ring, u32 start,
1052 u32 ring_size, u32 n_buffers)
1054 ASSERT (n_buffers <= ring_size);
1056 if (PREDICT_TRUE (start + n_buffers <= ring_size))
1058 vlib_buffer_free_no_next (vm, ring + start, n_buffers);
1062 vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
1063 vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
1067 /* Append given data to end of buffer, possibly allocating new buffers. */
1068 int vlib_buffer_add_data (vlib_main_t * vm, u32 * buffer_index, void *data,
1071 /* Define vlib_buffer and vnet_buffer flags bits preserved for copy/clone */
1072 #define VLIB_BUFFER_COPY_CLONE_FLAGS_MASK \
1073 (VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID | \
1074 VLIB_BUFFER_IS_TRACED | ~VLIB_BUFFER_FLAGS_ALL)
1076 /* duplicate all buffers in chain */
1077 always_inline vlib_buffer_t *
1078 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
1080 vlib_buffer_t *s, *d, *fd;
1081 uword n_alloc, n_buffers = 1;
1082 u32 flag_mask = VLIB_BUFFER_COPY_CLONE_FLAGS_MASK;
1086 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
1089 s = vlib_get_buffer (vm, s->next_buffer);
1091 u32 new_buffers[n_buffers];
1093 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
1095 /* No guarantee that we'll get all the buffers we asked for */
1096 if (PREDICT_FALSE (n_alloc < n_buffers))
1099 vlib_buffer_free (vm, new_buffers, n_alloc);
1105 fd = d = vlib_get_buffer (vm, new_buffers[0]);
1106 d->current_data = s->current_data;
1107 d->current_length = s->current_length;
1108 d->flags = s->flags & flag_mask;
1109 d->trace_handle = s->trace_handle;
1110 d->total_length_not_including_first_buffer =
1111 s->total_length_not_including_first_buffer;
1112 clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
1113 clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
1114 clib_memcpy_fast (vlib_buffer_get_current (d),
1115 vlib_buffer_get_current (s), s->current_length);
1118 for (i = 1; i < n_buffers; i++)
1121 d->next_buffer = new_buffers[i];
1123 s = vlib_get_buffer (vm, s->next_buffer);
1124 d = vlib_get_buffer (vm, new_buffers[i]);
1125 d->current_data = s->current_data;
1126 d->current_length = s->current_length;
1127 clib_memcpy_fast (vlib_buffer_get_current (d),
1128 vlib_buffer_get_current (s), s->current_length);
1129 d->flags = s->flags & flag_mask;
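/* Illustrative sketch, not part of the original header: taking a private
   copy of a (possibly chained) packet before modifying it, e.g. when the
   original must be forwarded unchanged.  The copy fails if the pool cannot
   supply enough buffers.

     vlib_buffer_t *c = vlib_buffer_copy (vm, b);
     if (c == 0)
       return;                                  // hypothetical drop path
     u32 ci = vlib_get_buffer_index (vm, c);
*/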
1135 /* duplicate first buffer in chain */
1136 always_inline vlib_buffer_t *
1137 vlib_buffer_copy_no_chain (vlib_main_t * vm, vlib_buffer_t * b, u32 * di)
1141 if ((vlib_buffer_alloc (vm, di, 1)) != 1)
1144 d = vlib_get_buffer (vm, *di);
1146 d->current_data = b->current_data;
1147 d->current_length = b->current_length;
1148 clib_memcpy_fast (d->opaque, b->opaque, sizeof (b->opaque));
1149 clib_memcpy_fast (d->opaque2, b->opaque2, sizeof (b->opaque2));
1150 clib_memcpy_fast (vlib_buffer_get_current (d),
1151 vlib_buffer_get_current (b), b->current_length);
1156 /** \brief Move packet from current position to offset position in buffer.
1157 Only works for small packets using a single buffer with room to fit the move
1158 @param vm - (vlib_main_t *) vlib main data structure pointer
1159 @param b - (vlib_buffer_t *) pointer to buffer
1160 @param offset - (i16) position to move the packet in buffer
1163 vlib_buffer_move (vlib_main_t * vm, vlib_buffer_t * b, i16 offset)
1165 ASSERT ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
1166 ASSERT (offset + VLIB_BUFFER_PRE_DATA_SIZE >= 0);
1167 ASSERT (offset + b->current_length <
1168 vlib_buffer_get_default_data_size (vm));
1170 u8 *source = vlib_buffer_get_current (b);
1171 b->current_data = offset;
1172 u8 *destination = vlib_buffer_get_current (b);
1173 u16 length = b->current_length;
1175 if (source + length <= destination) /* no overlap */
1176 clib_memcpy_fast (destination, source, length);
1178 memmove (destination, source, length);
1181 /** \brief Create a maximum of 256 clones of buffer and store them
1182 in the supplied array
1184 @param vm - (vlib_main_t *) vlib main data structure pointer
1185 @param src_buffer - (u32) source buffer index
1186 @param buffers - (u32 * ) buffer index array
1187 @param n_buffers - (u16) number of buffer clones requested (<=256)
1188 @param head_end_offset - (u16) offset relative to current position
1189 where packet head ends
1190 @param offset - (i16) copy packet head at current position if 0,
1191 else at offset position to change headroom space as specified
1192 @return - (u16) number of buffers actually cloned, may be
1193 less than the number requested or zero
1196 vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
1197 u16 n_buffers, u16 head_end_offset, i16 offset)
1200 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
1202 ASSERT (s->ref_count == 1);
1204 ASSERT (n_buffers <= 256);
1205 ASSERT (offset + VLIB_BUFFER_PRE_DATA_SIZE >= 0);
1206 ASSERT ((offset + head_end_offset) <
1207 vlib_buffer_get_default_data_size (vm));
1209 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
1211 buffers[0] = src_buffer;
1213 vlib_buffer_move (vm, s, offset);
1215 for (i = 1; i < n_buffers; i++)
1218 d = vlib_buffer_copy (vm, s);
1221 buffers[i] = vlib_get_buffer_index (vm, d);
1227 if (PREDICT_FALSE ((n_buffers == 1) && (offset == 0)))
1229 buffers[0] = src_buffer;
1233 n_buffers = vlib_buffer_alloc_from_pool (vm, buffers, n_buffers,
1234 s->buffer_pool_index);
1236 for (i = 0; i < n_buffers; i++)
1238 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
1240 d->current_data = offset;
1242 d->current_data = s->current_data;
1244 d->current_length = head_end_offset;
1245 ASSERT (d->buffer_pool_index == s->buffer_pool_index);
1247 d->total_length_not_including_first_buffer = s->current_length -
1249 if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
1251 d->total_length_not_including_first_buffer +=
1252 s->total_length_not_including_first_buffer;
1254 d->flags = (s->flags & VLIB_BUFFER_COPY_CLONE_FLAGS_MASK) |
1255 VLIB_BUFFER_NEXT_PRESENT;
1256 d->trace_handle = s->trace_handle;
1257 clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
1258 clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
1259 clib_memcpy_fast (vlib_buffer_get_current (d),
1260 vlib_buffer_get_current (s), head_end_offset);
1261 d->next_buffer = src_buffer;
1263 vlib_buffer_advance (s, head_end_offset);
1264 s->ref_count = n_buffers ? n_buffers : s->ref_count;
1265 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
1267 s = vlib_get_buffer (vm, s->next_buffer);
1268 s->ref_count = n_buffers ? n_buffers : s->ref_count;
1274 /** \brief Create multiple clones of buffer and store them
1275 in the supplied array
1277 @param vm - (vlib_main_t *) vlib main data structure pointer
1278 @param src_buffer - (u32) source buffer index
1279 @param buffers - (u32 * ) buffer index array
1280 @param n_buffers - (u16) number of buffer clones requested
1281 @param head_end_offset - (u16) offset relative to current position
1282 where packet head ends
1283 @param offset - (i16) copy packet head at current position if 0,
1284 else at offset position to change headroom space as specified
1285 @return - (u16) number of buffers actually cloned, may be
1286 less than the number requested or zero
1289 vlib_buffer_clone_at_offset (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
1290 u16 n_buffers, u16 head_end_offset, i16 offset)
1292 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
1295 while (n_buffers > 256)
1297 vlib_buffer_t *copy;
1298 copy = vlib_buffer_copy (vm, s);
1299 n_cloned += vlib_buffer_clone_256 (vm,
1300 vlib_get_buffer_index (vm, copy),
1301 (buffers + n_cloned),
1302 256, head_end_offset, offset);
1305 n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
1307 n_buffers, head_end_offset, offset);
1312 /** \brief Create multiple clones of buffer and store them
1313 in the supplied array
1315 @param vm - (vlib_main_t *) vlib main data structure pointer
1316 @param src_buffer - (u32) source buffer index
1317 @param buffers - (u32 * ) buffer index array
1318 @param n_buffers - (u16) number of buffer clones requested
1319 @param head_end_offset - (u16) offset relative to current position
1320 where packet head ends
1321 @return - (u16) number of buffers actually cloned, may be
1322 less than the number requested or zero
1325 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
1326 u16 n_buffers, u16 head_end_offset)
1328 return vlib_buffer_clone_at_offset (vm, src_buffer, buffers, n_buffers,
1329 head_end_offset, 0);
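/* Illustrative sketch, not part of the original header: replicating one
   packet to several outputs by cloning the shared payload while giving each
   clone a private, writable copy of the first `hdr_sz` bytes so headers can
   be rewritten per destination.  `bi` and `hdr_sz` are caller-supplied.

     u32 clones[8];
     u16 n = vlib_buffer_clone (vm, bi, clones, 8, hdr_sz);
     // n may be less than 8 if the pool is short on buffers
*/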
1332 /** \brief Attach cloned tail to the buffer
1334 @param vm - (vlib_main_t *) vlib main data structure pointer
1335 @param head - (vlib_buffer_t *) head buffer
1336 @param tail - (vlib_buffer_t *) tail buffer to clone and attach to head
1340 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
1341 vlib_buffer_t * tail)
1343 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
1344 ASSERT (head->buffer_pool_index == tail->buffer_pool_index);
1346 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
1347 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1348 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
1349 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
1350 head->next_buffer = vlib_get_buffer_index (vm, tail);
1351 head->total_length_not_including_first_buffer = tail->current_length +
1352 tail->total_length_not_including_first_buffer;
1355 clib_atomic_add_fetch (&tail->ref_count, 1);
1357 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
1359 tail = vlib_get_buffer (vm, tail->next_buffer);
1364 /* Initializes the buffer as an empty packet with no chained buffers. */
1366 vlib_buffer_chain_init (vlib_buffer_t * first)
1368 first->total_length_not_including_first_buffer = 0;
1369 first->current_length = 0;
1370 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1371 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
1374 /* The provided next_bi buffer index is appended to the end of the packet. */
1375 always_inline vlib_buffer_t *
1376 vlib_buffer_chain_buffer (vlib_main_t * vm, vlib_buffer_t * last, u32 next_bi)
1378 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
1379 last->next_buffer = next_bi;
1380 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
1381 next_buffer->current_length = 0;
1382 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1386 /* Increases or decreases the packet length.
1387 * It does not allocate or deallocate new buffers.
1388 * Therefore, the added length must be compatible
1389 * with the last buffer. */
1391 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
1392 vlib_buffer_t * last, i32 len)
1394 last->current_length += len;
1396 first->total_length_not_including_first_buffer += len;
1399 /* Copies data to the end of the packet and increases its length.
1400 * It does not allocate new buffers.
1401 * Returns the number of copied bytes. */
1403 vlib_buffer_chain_append_data (vlib_main_t * vm,
1404 vlib_buffer_t * first,
1405 vlib_buffer_t * last, void *data, u16 data_len)
1407 u32 n_buffer_bytes = vlib_buffer_get_default_data_size (vm);
1408 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
1409 u16 len = clib_min (data_len,
1410 n_buffer_bytes - last->current_length -
1411 last->current_data);
1412 clib_memcpy_fast (vlib_buffer_get_current (last) + last->current_length,
1414 vlib_buffer_chain_increase_length (first, last, len);
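/* Illustrative sketch, not part of the original header: growing a packet by
   appending into the last buffer while it has room, then chaining a freshly
   allocated buffer and continuing there; this is essentially what the
   _with_alloc variant declared below does for you.  `first`, `last`, `data`
   and `len` are assumed caller state.

     u16 copied = vlib_buffer_chain_append_data (vm, first, last, data, len);
     if (copied < len)
       {
         u32 nbi;
         if (vlib_buffer_alloc (vm, &nbi, 1) == 1)
           {
             last = vlib_buffer_chain_buffer (vm, last, nbi);
             vlib_buffer_chain_append_data (vm, first, last,
                                            (u8 *) data + copied,
                                            len - copied);
           }
       }
*/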
1418 /* Copies data to the end of the packet and increases its length.
1419 * Allocates additional buffers from the free list if necessary.
1420 * Returns the number of copied bytes.
1421 * 'last' value is modified whenever new buffers are allocated and
1422 * chained and points to the last buffer in the chain. */
1424 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
1425 vlib_buffer_t * first,
1426 vlib_buffer_t ** last, void *data,
1428 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
1430 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
1431 format_vlib_buffer_contents, format_vlib_buffer_no_chain;
1435 /* Vector of packet data. */
1438 /* Number of buffers to allocate in each call to allocator. */
1439 u32 min_n_buffers_each_alloc;
1442 } vlib_packet_template_t;
1444 void vlib_packet_template_init (vlib_main_t * vm,
1445 vlib_packet_template_t * t,
1447 uword n_packet_data_bytes,
1448 uword min_n_buffers_each_alloc,
1451 void *vlib_packet_template_get_packet (vlib_main_t * vm,
1452 vlib_packet_template_t * t,
1456 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
1458 vec_free (t->packet_data);
1462 vlib_buffer_space_left_at_end (vlib_main_t * vm, vlib_buffer_t * b)
1464 return b->data + vlib_buffer_get_default_data_size (vm) -
1465 ((u8 *) vlib_buffer_get_current (b) + b->current_length);
1468 #define VLIB_BUFFER_LINEARIZE_MAX 64
1471 vlib_buffer_chain_linearize (vlib_main_t * vm, vlib_buffer_t * b)
1473 vlib_buffer_t *dst_b;
1474 u32 n_buffers = 1, to_free = 0;
1475 u16 rem_len, dst_len, data_size, src_len = 0;
1478 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
1481 ASSERT (1 == b->ref_count);
1482 if (PREDICT_FALSE (1 != b->ref_count))
1485 data_size = vlib_buffer_get_default_data_size (vm);
1486 rem_len = vlib_buffer_length_in_chain (vm, b) - b->current_length;
1489 dst = vlib_buffer_get_tail (dst_b);
1490 dst_len = vlib_buffer_space_left_at_end (vm, dst_b);
1492 b->total_length_not_including_first_buffer -= dst_len;
1498 while (0 == src_len)
1500 ASSERT (b->flags & VLIB_BUFFER_NEXT_PRESENT);
1501 if (PREDICT_FALSE (!(b->flags & VLIB_BUFFER_NEXT_PRESENT)))
1502 break; /* malformed chained buffer */
1504 b = vlib_get_buffer (vm, b->next_buffer);
1505 src = vlib_buffer_get_current (b);
1506 src_len = b->current_length;
1511 ASSERT (dst_b->flags & VLIB_BUFFER_NEXT_PRESENT);
1512 if (PREDICT_FALSE (!(dst_b->flags & VLIB_BUFFER_NEXT_PRESENT)))
1513 break; /* malformed chained buffer */
1515 vlib_buffer_t *next_dst_b = vlib_get_buffer (vm, dst_b->next_buffer);
1517 if (PREDICT_TRUE (1 == next_dst_b->ref_count))
1519 /* normal case: buffer is not cloned, just use it */
1524 /* cloned buffer, build a new dest chain from there */
1525 vlib_buffer_t *bufs[VLIB_BUFFER_LINEARIZE_MAX];
1526 u32 bis[VLIB_BUFFER_LINEARIZE_MAX + 1];
1527 const int n = (rem_len + data_size - 1) / data_size;
1531 ASSERT (n <= VLIB_BUFFER_LINEARIZE_MAX);
1532 if (PREDICT_FALSE (n > VLIB_BUFFER_LINEARIZE_MAX))
1535 n_alloc = vlib_buffer_alloc (vm, bis, n);
1536 if (PREDICT_FALSE (n_alloc != n))
1538 vlib_buffer_free (vm, bis, n_alloc);
1542 vlib_get_buffers (vm, bis, bufs, n);
1544 for (i = 0; i < n - 1; i++)
1546 bufs[i]->flags |= VLIB_BUFFER_NEXT_PRESENT;
1547 bufs[i]->next_buffer = bis[i + 1];
1550 to_free = dst_b->next_buffer;
1551 dst_b->next_buffer = bis[0];
1557 dst_b->current_data = clib_min (0, dst_b->current_data);
1558 dst_b->current_length = 0;
1560 dst = dst_b->data + dst_b->current_data;
1561 dst_len = data_size - dst_b->current_data;
1564 copy_len = clib_min (src_len, dst_len);
1566 if (PREDICT_TRUE (src == dst))
1570 else if (src + copy_len > dst && dst + copy_len > src)
1572 /* src and dst overlap */
1573 ASSERT (b == dst_b);
1574 memmove (dst, src, copy_len);
1578 clib_memcpy_fast (dst, src, copy_len);
1581 dst_b->current_length += copy_len;
1585 dst_len -= copy_len;
1586 src_len -= copy_len;
1587 rem_len -= copy_len;
1590 /* in case of a malformed chain buffer, we'll exit early from the loop. */
1591 ASSERT (0 == rem_len);
1592 b->total_length_not_including_first_buffer -= rem_len;
1595 vlib_buffer_free_one (vm, to_free);
1597 if (dst_b->flags & VLIB_BUFFER_NEXT_PRESENT)
1599 /* the resulting chain is smaller than the original, cut it there */
1600 dst_b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1601 vlib_buffer_free_one (vm, dst_b->next_buffer);
1604 /* no longer a chained buffer */
1605 dst_b->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1606 dst_b->total_length_not_including_first_buffer = 0;
1613 #endif /* included_vlib_buffer_funcs_h */
1616 * fd.io coding-style-patch-verification: ON
1619 * eval: (c-set-style "gnu")