2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
46 vlib buffer access methods.
50 /** \brief Translate buffer index into buffer pointer
52 @param vm - (vlib_main_t *) vlib main data structure pointer
53 @param buffer_index - (u32) buffer index
54 @return - (vlib_buffer_t *) buffer pointer
56 always_inline vlib_buffer_t *
57 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
59 vlib_buffer_main_t *bm = &buffer_main;
60 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
61 ASSERT (offset < bm->buffer_mem_size);
63 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
/* NOTE(review): this block is a mangled extraction — original line numbers are
   embedded in the text and several structural lines (while-loop headers,
   braces, count/pointer advances, #else/#endif) are missing.  Restore from
   upstream vlib/buffer_funcs.h before compiling.  Purpose: bulk-translate
   buffer indices to pointers, using AVX2/SSE paths when available and a
   scalar fallback otherwise. */
66 /** \brief Translate array of buffer indices into buffer pointers with offset
68 @param vm - (vlib_main_t *) vlib main data structure pointer
69 @param bi - (u32 *) array of buffer indices
70 @param b - (void **) array to store buffer pointers
71 @param count - (uword) number of elements
72 @param offset - (i32) offset applied to each pointer
74 static_always_inline void
75 vlib_get_buffers_with_offset (vlib_main_t * vm, u32 * bi, void **b, int count,
78 #ifdef CLIB_HAVE_VEC256
79 u64x4 off = u64x4_splat (buffer_main.buffer_mem_start + offset);
80 /* if count is not const, compiler will not unroll while loop
81 so we maintain two-in-parallel variant */
84 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
85 u64x4 b1 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi + 4));
86 /* shift and add to get vlib_buffer_t pointer */
87 u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
88 u64x4_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 4);
96 #ifdef CLIB_HAVE_VEC256
97 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
98 /* shift and add to get vlib_buffer_t pointer */
99 u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
100 #elif defined (CLIB_HAVE_VEC128)
101 u64x2 off = u64x2_splat (buffer_main.buffer_mem_start + offset);
102 u32x4 bi4 = u32x4_load_unaligned (bi);
103 u64x2 b0 = u32x4_extend_to_u64x2 ((u32x4) bi4);
104 #if defined (__aarch64__)
105 u64x2 b1 = u32x4_extend_to_u64x2_high ((u32x4) bi4);
107 bi4 = u32x4_shuffle (bi4, 2, 3, 0, 1);
108 u64x2 b1 = u32x4_extend_to_u64x2 ((u32x4) bi4);
110 u64x2_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
111 u64x2_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 2);
/* Scalar fallback: one index-to-pointer translation per element. */
113 b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
114 b[1] = ((u8 *) vlib_get_buffer (vm, bi[1])) + offset;
115 b[2] = ((u8 *) vlib_get_buffer (vm, bi[2])) + offset;
116 b[3] = ((u8 *) vlib_get_buffer (vm, bi[3])) + offset;
/* Tail loop: remaining elements handled one at a time. */
124 b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
131 /** \brief Translate array of buffer indices into buffer pointers
133 @param vm - (vlib_main_t *) vlib main data structure pointer
134 @param bi - (u32 *) array of buffer indices
135 @param b - (vlib_buffer_t **) array to store buffer pointers
136 @param count - (uword) number of elements
139 static_always_inline void
140 vlib_get_buffers (vlib_main_t * vm, u32 * bi, vlib_buffer_t ** b, int count)
142 vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
145 /** \brief Translate buffer pointer into buffer index
147 @param vm - (vlib_main_t *) vlib main data structure pointer
148 @param p - (void *) buffer pointer
149 @return - (u32) buffer index
153 vlib_get_buffer_index (vlib_main_t * vm, void *p)
155 vlib_buffer_main_t *bm = &buffer_main;
156 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
157 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
158 ASSERT (offset < bm->buffer_mem_size);
159 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
160 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
/* NOTE(review): mangled extraction — embedded line numbers, missing while-loop
   scaffolding, the off4 subtraction step, variable declarations (v2/v3) and
   #else/#endif lines.  Restore from upstream before compiling.  Purpose:
   bulk-translate buffer pointers back to u32 indices (AVX2 path plus scalar
   fallback). */
163 /** \brief Translate array of buffer pointers into buffer indices with offset
165 @param vm - (vlib_main_t *) vlib main data structure pointer
166 @param b - (void **) array of buffer pointers
167 @param bi - (u32 *) array to store buffer indices
168 @param count - (uword) number of elements
169 @param offset - (i32) offset applied to each pointer
171 static_always_inline void
172 vlib_get_buffer_indices_with_offset (vlib_main_t * vm, void **b, u32 * bi,
173 uword count, i32 offset)
175 #ifdef CLIB_HAVE_VEC256
176 u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
177 u64x4 off4 = u64x4_splat (buffer_main.buffer_mem_start - offset);
181 /* load 4 pointers into 256-bit register */
182 u64x4 v0 = u64x4_load_unaligned (b);
183 u64x4 v1 = u64x4_load_unaligned (b + 4);
189 v0 >>= CLIB_LOG2_CACHE_LINE_BYTES;
190 v1 >>= CLIB_LOG2_CACHE_LINE_BYTES;
192 /* permute 256-bit register so lower u32s of each buffer index are
193 * placed into lower 128-bits */
194 v2 = u32x8_permute ((u32x8) v0, mask);
195 v3 = u32x8_permute ((u32x8) v1, mask);
197 /* extract lower 128-bits and save them to the array of buffer indices */
198 u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
199 u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
207 /* equivalent non-vector implementation */
208 bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
209 bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
210 bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
211 bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
/* Tail loop: remaining pointers handled one at a time. */
218 bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
225 /** \brief Translate array of buffer pointers into buffer indices
227 @param vm - (vlib_main_t *) vlib main data structure pointer
228 @param b - (vlib_buffer_t **) array of buffer pointers
229 @param bi - (u32 *) array to store buffer indices
230 @param count - (uword) number of elements
232 static_always_inline void
233 vlib_get_buffer_indices (vlib_main_t * vm, vlib_buffer_t ** b, u32 * bi,
236 vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
239 /** \brief Get next buffer in buffer linklist, or zero for end of list.
241 @param vm - (vlib_main_t *) vlib main data structure pointer
242 @param b - (void *) buffer pointer
243 @return - (vlib_buffer_t *) next buffer, or NULL
245 always_inline vlib_buffer_t *
246 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
248 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
249 ? vlib_get_buffer (vm, b->next_buffer) : 0);
252 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
253 vlib_buffer_t * b_first);
255 /** \brief Get length in bytes of the buffer chain
257 @param vm - (vlib_main_t *) vlib main data structure pointer
258 @param b - (void *) buffer pointer
259 @return - (uword) length of buffer chain
262 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
264 uword len = b->current_length;
266 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
269 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
270 return len + b->total_length_not_including_first_buffer;
272 return vlib_buffer_length_in_chain_slow_path (vm, b);
275 /** \brief Get length in bytes of the buffer index buffer chain
277 @param vm - (vlib_main_t *) vlib main data structure pointer
278 @param bi - (u32) buffer index
279 @return - (uword) length of buffer chain
282 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
284 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
285 return vlib_buffer_length_in_chain (vm, b);
288 /** \brief Copy buffer contents to memory
290 @param vm - (vlib_main_t *) vlib main data structure pointer
291 @param buffer_index - (u32) buffer index
292 @param contents - (u8 *) memory, <strong>must be large enough</strong>
293 @return - (uword) length of buffer chain
296 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
298 uword content_len = 0;
304 b = vlib_get_buffer (vm, buffer_index);
305 l = b->current_length;
306 clib_memcpy (contents + content_len, b->data + b->current_data, l);
308 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
310 buffer_index = b->next_buffer;
316 /* Return physical address of buffer->data start. */
318 vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
320 vlib_buffer_main_t *bm = &buffer_main;
321 vlib_buffer_t *b = vlib_get_buffer (vm, buffer_index);
322 vlib_buffer_pool_t *pool = vec_elt_at_index (bm->buffer_pools,
323 b->buffer_pool_index);
325 return vlib_physmem_virtual_to_physical (vm, pool->physmem_region, b->data);
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)	\
  do {							\
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);	\
    vlib_prefetch_buffer_header (_b, type);		\
  } while (0)
/* NOTE(review): mangled extraction — the do { } while (0) wrapper, the body
   invocation and the closing of hash_foreach_pair are missing, and original
   line numbers are embedded.  Restore from upstream before compiling.
   Purpose: debug iterator over every buffer index the known-buffer hash
   records as ALLOCATED. */
343 /* Iterate over known allocated vlib bufs. You probably do not want
345 @param vm the vlib_main_t
346 @param bi found allocated buffer index
347 @param body operation to perform on buffer index
348 function executes body for each allocated buffer index
350 #define vlib_buffer_foreach_allocated(vm,bi,body) \
352 vlib_main_t * _vmain = (vm); \
353 vlib_buffer_main_t * _bmain = &_vmain->buffer_main; \
354 hash_pair_t * _vbpair; \
355 hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({ \
356 if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) { \
357 (bi) = _vbpair->key; \
/* Buffer-ownership states tracked in buffer_known_hash (debug aid). */
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
374 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
376 vlib_buffer_known_state_t
379 always_inline vlib_buffer_known_state_t
380 vlib_buffer_is_known (u32 buffer_index)
382 vlib_buffer_main_t *bm = &buffer_main;
384 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
385 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
386 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
387 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
391 vlib_buffer_set_known_state (u32 buffer_index,
392 vlib_buffer_known_state_t state)
394 vlib_buffer_main_t *bm = &buffer_main;
396 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
397 hash_set (bm->buffer_known_hash, buffer_index, state);
398 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
401 /* Validates sanity of a single buffer.
402 Returns format'ed vector with error message if any. */
403 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
407 vlib_buffer_round_size (u32 size)
409 return round_pow2 (size, sizeof (vlib_buffer_t));
412 always_inline vlib_buffer_free_list_index_t
413 vlib_buffer_get_free_list_index (vlib_buffer_t * b)
415 if (PREDICT_FALSE (b->flags & VLIB_BUFFER_NON_DEFAULT_FREELIST))
416 return b->free_list_index;
422 vlib_buffer_set_free_list_index (vlib_buffer_t * b,
423 vlib_buffer_free_list_index_t index)
425 if (PREDICT_FALSE (index))
427 b->flags |= VLIB_BUFFER_NON_DEFAULT_FREELIST;
428 b->free_list_index = index;
431 b->flags &= ~VLIB_BUFFER_NON_DEFAULT_FREELIST;
/* NOTE(review): mangled extraction — embedded line numbers; missing the
   return-type line, parameter lines, local declarations (src, len), braces,
   the early `return 0` path and the final `return n_buffers;`.  Restore from
   upstream before compiling.  Purpose: pop up to n_buffers indices off the
   tail of the given free list, refilling it via the registered callback when
   short. */
434 /** \brief Allocate buffers from specific freelist into supplied array
436 @param vm - (vlib_main_t *) vlib main data structure pointer
437 @param buffers - (u32 * ) buffer index array
438 @param n_buffers - (u32) number of buffers requested
439 @return - (u32) number of buffers actually allocated, may be
440 less than the number requested or zero
443 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
446 vlib_buffer_free_list_index_t index)
448 vlib_buffer_main_t *bm = &buffer_main;
449 vlib_buffer_free_list_t *fl;
453 ASSERT (bm->cb.vlib_buffer_fill_free_list_cb);
455 fl = pool_elt_at_index (vm->buffer_free_list_pool, index);
457 len = vec_len (fl->buffers);
459 if (PREDICT_FALSE (len < n_buffers))
461 bm->cb.vlib_buffer_fill_free_list_cb (vm, fl, n_buffers);
462 if (PREDICT_FALSE ((len = vec_len (fl->buffers)) == 0))
465 /* even if fill free list didn't manage to refill free list
466 we should give what we have */
467 n_buffers = clib_min (len, n_buffers);
469 /* following code is intentionally duplicated to allow compiler
470 to optimize fast path when n_buffers is constant value */
471 src = fl->buffers + len - n_buffers;
472 clib_memcpy (buffers, src, n_buffers * sizeof (u32));
473 _vec_len (fl->buffers) -= n_buffers;
475 /* Verify that buffers are known free. */
476 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
477 VLIB_BUFFER_KNOWN_FREE);
482 src = fl->buffers + len - n_buffers;
483 clib_memcpy (buffers, src, n_buffers * sizeof (u32));
484 _vec_len (fl->buffers) -= n_buffers;
486 /* Verify that buffers are known free. */
487 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
488 VLIB_BUFFER_KNOWN_FREE);
493 /** \brief Allocate buffers into supplied array
495 @param vm - (vlib_main_t *) vlib main data structure pointer
496 @param buffers - (u32 * ) buffer index array
497 @param n_buffers - (u32) number of buffers requested
498 @return - (u32) number of buffers actually allocated, may be
499 less than the number requested or zero
502 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
504 return vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
505 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
508 /** \brief Allocate buffers into ring
510 @param vm - (vlib_main_t *) vlib main data structure pointer
511 @param buffers - (u32 * ) buffer index ring
512 @param start - (u32) first slot in the ring
513 @param ring_size - (u32) ring size
514 @param n_buffers - (u32) number of buffers requested
515 @return - (u32) number of buffers actually allocated, may be
516 less than the number requested or zero
519 vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
520 u32 ring_size, u32 n_buffers)
524 ASSERT (n_buffers <= ring_size);
526 if (PREDICT_TRUE (start + n_buffers <= ring_size))
527 return vlib_buffer_alloc (vm, ring + start, n_buffers);
529 n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
531 if (PREDICT_TRUE (n_alloc == ring_size - start))
532 n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
537 /** \brief Free buffers
538 Frees the entire buffer chain for each buffer
540 @param vm - (vlib_main_t *) vlib main data structure pointer
541 @param buffers - (u32 * ) buffer index array
542 @param n_buffers - (u32) number of buffers to free
546 vlib_buffer_free (vlib_main_t * vm,
547 /* pointer to first buffer */
549 /* number of buffers to free */
552 vlib_buffer_main_t *bm = &buffer_main;
554 ASSERT (bm->cb.vlib_buffer_free_cb);
556 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
559 /** \brief Free buffers, does not free the buffer chain for each buffer
561 @param vm - (vlib_main_t *) vlib main data structure pointer
562 @param buffers - (u32 * ) buffer index array
563 @param n_buffers - (u32) number of buffers to free
567 vlib_buffer_free_no_next (vlib_main_t * vm,
568 /* pointer to first buffer */
570 /* number of buffers to free */
573 vlib_buffer_main_t *bm = &buffer_main;
575 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
577 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
580 /** \brief Free one buffer
581 Shorthand to free a single buffer chain.
583 @param vm - (vlib_main_t *) vlib main data structure pointer
584 @param buffer_index - (u32) buffer index to free
587 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
589 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
592 /** \brief Free buffers from ring
594 @param vm - (vlib_main_t *) vlib main data structure pointer
595 @param buffers - (u32 * ) buffer index ring
596 @param start - (u32) first slot in the ring
597 @param ring_size - (u32) ring size
598 @param n_buffers - (u32) number of buffers
601 vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
602 u32 ring_size, u32 n_buffers)
604 ASSERT (n_buffers <= ring_size);
606 if (PREDICT_TRUE (start + n_buffers <= ring_size))
608 vlib_buffer_free (vm, ring + start, n_buffers);
612 vlib_buffer_free (vm, ring + start, ring_size - start);
613 vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
617 /** \brief Free buffers from ring without freeing tail buffers
619 @param vm - (vlib_main_t *) vlib main data structure pointer
620 @param buffers - (u32 * ) buffer index ring
621 @param start - (u32) first slot in the ring
622 @param ring_size - (u32) ring size
623 @param n_buffers - (u32) number of buffers
626 vlib_buffer_free_from_ring_no_next (vlib_main_t * vm, u32 * ring, u32 start,
627 u32 ring_size, u32 n_buffers)
629 ASSERT (n_buffers <= ring_size);
631 if (PREDICT_TRUE (start + n_buffers <= ring_size))
633 vlib_buffer_free_no_next (vm, ring + start, n_buffers);
637 vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
638 vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
642 /* Add/delete buffer free lists. */
643 vlib_buffer_free_list_index_t vlib_buffer_create_free_list (vlib_main_t * vm,
647 vlib_buffer_delete_free_list (vlib_main_t * vm,
648 vlib_buffer_free_list_index_t free_list_index)
650 vlib_buffer_main_t *bm = &buffer_main;
652 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
654 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
657 /* Make sure we have at least given number of unaligned buffers. */
658 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
659 vlib_buffer_free_list_t *
661 uword n_unaligned_buffers);
663 always_inline vlib_buffer_free_list_t *
664 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
665 vlib_buffer_free_list_index_t * index)
667 vlib_buffer_free_list_index_t i;
669 *index = i = vlib_buffer_get_free_list_index (b);
670 return pool_elt_at_index (vm->buffer_free_list_pool, i);
673 always_inline vlib_buffer_free_list_t *
674 vlib_buffer_get_free_list (vlib_main_t * vm,
675 vlib_buffer_free_list_index_t free_list_index)
677 vlib_buffer_free_list_t *f;
679 f = pool_elt_at_index (vm->buffer_free_list_pool, free_list_index);
681 /* Sanity: indices must match. */
682 ASSERT (f->index == free_list_index);
688 vlib_buffer_free_list_buffer_size (vlib_main_t * vm,
689 vlib_buffer_free_list_index_t index)
691 vlib_buffer_free_list_t *f = vlib_buffer_get_free_list (vm, index);
692 return f->n_data_bytes;
695 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
/* NOTE(review): the body of this function is entirely missing from this
   extraction (only the signature survives); restore from upstream before
   compiling.  Purpose: bulk-copy n buffer indices from src to dst. */
697 /* Reasonably fast buffer copy routine. */
699 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
720 /* Append given data to end of buffer, possibly allocating new buffers. */
721 u32 vlib_buffer_add_data (vlib_main_t * vm,
722 vlib_buffer_free_list_index_t free_list_index,
723 u32 buffer_index, void *data, u32 n_data_bytes);
/* NOTE(review): mangled extraction — embedded line numbers; missing the
   opening brace, the `s = vlib_get_buffer (vm, b ...)` setup, the while-loop
   braces/increments, the failure `return 0`, the loop variable declaration
   and the final `return fd;`.  Restore from upstream before compiling.
   Purpose: deep-copy an entire buffer chain (metadata, opaque fields and
   payload) into freshly allocated buffers; returns 0 on allocation failure. */
725 /* duplicate all buffers in chain */
726 always_inline vlib_buffer_t *
727 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
729 vlib_buffer_t *s, *d, *fd;
730 uword n_alloc, n_buffers = 1;
731 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
/* Count the number of segments in the source chain. */
735 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
738 s = vlib_get_buffer (vm, s->next_buffer);
740 u32 new_buffers[n_buffers];
742 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
744 /* No guarantee that we'll get all the buffers we asked for */
745 if (PREDICT_FALSE (n_alloc < n_buffers))
748 vlib_buffer_free (vm, new_buffers, n_alloc);
/* Copy the head segment, including opaque metadata. */
754 fd = d = vlib_get_buffer (vm, new_buffers[0]);
755 d->current_data = s->current_data;
756 d->current_length = s->current_length;
757 d->flags = s->flags & flag_mask;
758 d->total_length_not_including_first_buffer =
759 s->total_length_not_including_first_buffer;
760 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
761 clib_memcpy (d->opaque2, s->opaque2, sizeof (s->opaque2));
762 clib_memcpy (vlib_buffer_get_current (d),
763 vlib_buffer_get_current (s), s->current_length);
/* Copy the remaining segments, re-linking the new chain as we go. */
766 for (i = 1; i < n_buffers; i++)
769 d->next_buffer = new_buffers[i];
771 s = vlib_get_buffer (vm, s->next_buffer);
772 d = vlib_get_buffer (vm, new_buffers[i]);
773 d->current_data = s->current_data;
774 d->current_length = s->current_length;
775 clib_memcpy (vlib_buffer_get_current (d),
776 vlib_buffer_get_current (s), s->current_length);
777 d->flags = s->flags & flag_mask;
/* NOTE(review): mangled extraction — embedded line numbers; missing the
   return-type line, braces, loop-variable declarations, early returns, the
   `head_end_offset` subtraction continuation and the final return.  Restore
   from upstream before compiling.  Purpose: clone a packet up to 256 times
   by copying only the first head_end_offset bytes into new head buffers and
   reference-counting the shared tail (n_add_refs). */
783 /** \brief Create a maximum of 256 clones of buffer and store them
784 in the supplied array
786 @param vm - (vlib_main_t *) vlib main data structure pointer
787 @param src_buffer - (u32) source buffer index
788 @param buffers - (u32 * ) buffer index array
789 @param n_buffers - (u16) number of buffer clones requested (<=256)
790 @param head_end_offset - (u16) offset relative to current position
791 where packet head ends
792 @return - (u16) number of buffers actually cloned, may be
793 less than the number requested or zero
796 vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
797 u16 n_buffers, u16 head_end_offset)
800 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
802 ASSERT (s->n_add_refs == 0);
804 ASSERT (n_buffers <= 256);
/* Tiny packets: cheaper to deep-copy than to share a tail. */
806 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
808 buffers[0] = src_buffer;
809 for (i = 1; i < n_buffers; i++)
812 d = vlib_buffer_copy (vm, s);
815 buffers[i] = vlib_get_buffer_index (vm, d);
821 if (PREDICT_FALSE (n_buffers == 1))
823 buffers[0] = src_buffer;
/* Allocate head buffers from the same free list as the source. */
827 n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
828 vlib_buffer_get_free_list_index
831 for (i = 0; i < n_buffers; i++)
833 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
834 d->current_data = s->current_data;
835 d->current_length = head_end_offset;
836 vlib_buffer_set_free_list_index (d,
837 vlib_buffer_get_free_list_index (s));
839 d->total_length_not_including_first_buffer = s->current_length -
841 if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
843 d->total_length_not_including_first_buffer +=
844 s->total_length_not_including_first_buffer;
846 d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
847 d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
848 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
849 clib_memcpy (d->opaque2, s->opaque2, sizeof (s->opaque2));
850 clib_memcpy (vlib_buffer_get_current (d), vlib_buffer_get_current (s),
/* Each clone's head chains onto the shared source tail. */
852 d->next_buffer = src_buffer;
854 vlib_buffer_advance (s, head_end_offset);
/* Bump the reference count on every shared tail segment. */
855 s->n_add_refs = n_buffers - 1;
856 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
858 s = vlib_get_buffer (vm, s->next_buffer);
859 s->n_add_refs = n_buffers - 1;
/* NOTE(review): mangled extraction — embedded line numbers; missing local
   declarations (n_cloned, copy), braces, the `n_buffers -= 256` step and the
   final return.  Restore from upstream before compiling.  Purpose: clone a
   packet an arbitrary number of times by repeatedly invoking
   vlib_buffer_clone_256 in batches of 256 on fresh copies. */
865 /** \brief Create multiple clones of buffer and store them
866 in the supplied array
868 @param vm - (vlib_main_t *) vlib main data structure pointer
869 @param src_buffer - (u32) source buffer index
870 @param buffers - (u32 * ) buffer index array
871 @param n_buffers - (u16) number of buffer clones requested (<=256)
872 @param head_end_offset - (u16) offset relative to current position
873 where packet head ends
874 @return - (u16) number of buffers actually cloned, may be
875 less than the number requested or zero
878 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
879 u16 n_buffers, u16 head_end_offset)
881 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
/* Full batches of 256 use a private copy as the clone source. */
884 while (n_buffers > 256)
887 copy = vlib_buffer_copy (vm, s);
888 n_cloned += vlib_buffer_clone_256 (vm,
889 vlib_get_buffer_index (vm, copy),
890 (buffers + n_cloned),
891 256, head_end_offset);
/* Final partial batch clones directly from the original buffer. */
894 n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
896 n_buffers, head_end_offset);
901 /** \brief Attach cloned tail to the buffer
903 @param vm - (vlib_main_t *) vlib main data structure pointer
904 @param head - (vlib_buffer_t *) head buffer
905 @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
909 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
910 vlib_buffer_t * tail)
912 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
913 ASSERT (vlib_buffer_get_free_list_index (head) ==
914 vlib_buffer_get_free_list_index (tail));
916 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
917 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
918 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
919 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
920 head->next_buffer = vlib_get_buffer_index (vm, tail);
921 head->total_length_not_including_first_buffer = tail->current_length +
922 tail->total_length_not_including_first_buffer;
925 __sync_add_and_fetch (&tail->n_add_refs, 1);
927 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
929 tail = vlib_get_buffer (vm, tail->next_buffer);
934 /* Initializes the buffer as an empty packet with no chained buffers. */
936 vlib_buffer_chain_init (vlib_buffer_t * first)
938 first->total_length_not_including_first_buffer = 0;
939 first->current_length = 0;
940 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
941 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
944 /* The provided next_bi buffer index is appended to the end of the packet. */
945 always_inline vlib_buffer_t *
946 vlib_buffer_chain_buffer (vlib_main_t * vm,
947 vlib_buffer_t * first,
948 vlib_buffer_t * last, u32 next_bi)
950 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
951 last->next_buffer = next_bi;
952 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
953 next_buffer->current_length = 0;
954 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
958 /* Increases or decreases the packet length.
959 * It does not allocate or deallocate new buffers.
960 * Therefore, the added length must be compatible
961 * with the last buffer. */
963 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
964 vlib_buffer_t * last, i32 len)
966 last->current_length += len;
968 first->total_length_not_including_first_buffer += len;
971 /* Copy data to the end of the packet and increases its length.
972 * It does not allocate new buffers.
973 * Returns the number of copied bytes. */
975 vlib_buffer_chain_append_data (vlib_main_t * vm,
976 vlib_buffer_free_list_index_t free_list_index,
977 vlib_buffer_t * first,
978 vlib_buffer_t * last, void *data, u16 data_len)
981 vlib_buffer_free_list_buffer_size (vm, free_list_index);
982 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
983 u16 len = clib_min (data_len,
984 n_buffer_bytes - last->current_length -
986 clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
988 vlib_buffer_chain_increase_length (first, last, len);
992 /* Copy data to the end of the packet and increases its length.
993 * Allocates additional buffers from the free list if necessary.
994 * Returns the number of copied bytes.
995 * 'last' value is modified whenever new buffers are allocated and
996 * chained and points to the last buffer in the chain. */
998 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
999 vlib_buffer_free_list_index_t
1001 vlib_buffer_t * first,
1002 vlib_buffer_t ** last, void *data,
1004 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
1006 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
1007 format_vlib_buffer_contents;
/* NOTE(review): mangled extraction — the `typedef struct` opener and at
   least one field (the packet-data vector declared under the first comment,
   and possibly more before the closing brace) are missing.  Restore from
   upstream before compiling.  Purpose: template describing pre-built packet
   data plus allocation policy used by the vlib_packet_template_* helpers. */
1011 /* Vector of packet data. */
1014 /* Number of buffers to allocate in each call to allocator. */
1015 u32 min_n_buffers_each_alloc;
1017 /* Buffer free list for this template. */
1018 vlib_buffer_free_list_index_t free_list_index;
1021 } vlib_packet_template_t;
1023 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
1024 vlib_packet_template_t * t);
1026 void vlib_packet_template_init (vlib_main_t * vm,
1027 vlib_packet_template_t * t,
1029 uword n_packet_data_bytes,
1030 uword min_n_buffers_each_alloc,
1033 void *vlib_packet_template_get_packet (vlib_main_t * vm,
1034 vlib_packet_template_t * t,
1038 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
1040 vec_free (t->packet_data);
/* NOTE(review): mangled extraction — embedded line numbers; missing the
   return-type line, the declaration of n (and f), braces, and the trailing
   `return n;` plus clib_fifo_foreach closing.  Restore from upstream before
   compiling.  Purpose: count the bytes still available to unserialize in the
   current stream plus all chained/enqueued rx buffers. */
1044 unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
1046 serialize_stream_t *s = &m->stream;
1047 vlib_serialize_buffer_main_t *sm
1048 = uword_to_pointer (m->stream.data_function_opaque,
1049 vlib_serialize_buffer_main_t *);
1050 vlib_main_t *vm = sm->vlib_main;
1053 n = s->n_buffer_bytes - s->current_buffer_index;
/* Add the remainder of the partially-consumed buffer chain, if any. */
1054 if (sm->last_buffer != ~0)
1056 vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
1057 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
1059 b = vlib_get_buffer (vm, b->next_buffer);
1060 n += b->current_length;
/* Add every buffer chain still queued in the rx fifo. */
1065 clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
1066 n += vlib_buffer_index_length_in_chain (vm, f[0]);
/* NOTE(review): mangled extraction — embedded line numbers; missing the
   return-type line, braces, and the foreach over template fields that uses
   the `_(f)` macro (plus its #undef).  Restore from upstream before
   compiling.  Purpose: cheaply reset a buffer to the free list's template
   by copying the [template_start, template_end) region. */
1073 /* Set a buffer quickly into "uninitialized" state. We want this to
1074 be extremely cheap and arrange for all fields that need to be
1075 initialized to be in the first 128 bits of the buffer. */
1077 vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
1078 vlib_buffer_free_list_t * fl)
1080 vlib_buffer_t *src = &fl->buffer_init_template;
1082 /* Make sure vlib_buffer_t is cacheline aligned and sized */
1083 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
1084 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
1085 CLIB_CACHE_LINE_BYTES);
1086 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
1087 CLIB_CACHE_LINE_BYTES * 2);
1089 /* Make sure buffer template is sane. */
1090 ASSERT (fl->index == vlib_buffer_get_free_list_index (src));
1092 clib_memcpy (STRUCT_MARK_PTR (dst, template_start),
1093 STRUCT_MARK_PTR (src, template_start),
1094 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
1095 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
1097 /* Not in the first 16 octets. */
1098 dst->n_add_refs = src->n_add_refs;
1099 vlib_buffer_set_free_list_index (dst, fl->index);
1101 /* Make sure it really worked. */
1102 #define _(f) ASSERT (dst->f == src->f);
1107 /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
1108 /* total_length_not_including_first_buffer is not in the template anymore
1109 * so it may actually not be zeroed for some buffers. One option is to
1110 * uncomment the line lower (comes at a cost), the other, is to just not
1112 /* dst->total_length_not_including_first_buffer = 0; */
1113 ASSERT (dst->n_add_refs == 0);
/* NOTE(review): mangled extraction — embedded line numbers; missing the
   return-type line, braces, and the `vlib_buffer_t *b;` declaration.
   Restore from upstream before compiling.  Purpose: return a buffer to a
   per-thread free list, optionally re-initializing it from the list's
   template; when the local list grows past 4 frames, spill one frame's
   worth of the oldest entries back to the shared pool under its lock. */
1117 vlib_buffer_add_to_free_list (vlib_main_t * vm,
1118 vlib_buffer_free_list_t * f,
1119 u32 buffer_index, u8 do_init)
1121 vlib_buffer_pool_t *bp = vlib_buffer_pool_get (f->buffer_pool_index);
1123 b = vlib_get_buffer (vm, buffer_index);
1124 if (PREDICT_TRUE (do_init))
1125 vlib_buffer_init_for_free_list (b, f);
1126 vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
1128 if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
1130 clib_spinlock_lock (&bp->lock);
1131 /* keep last stored buffers, as they are more likely hot in the cache */
1132 vec_add_aligned (bp->buffers, f->buffers, VLIB_FRAME_SIZE,
1133 CLIB_CACHE_LINE_BYTES);
1134 vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
1135 f->n_alloc -= VLIB_FRAME_SIZE;
1136 clib_spinlock_unlock (&bp->lock);
1141 extern u32 *vlib_buffer_state_validation_lock;
1142 extern uword *vlib_buffer_state_validation_hash;
1143 extern void *vlib_buffer_state_heap;
/* NOTE(review): mangled extraction — embedded line numbers; missing the
   return-type line, the surrounding debug #if/#endif, local declarations
   (p, oldheap, bi), braces and the spin-wait body.  Restore from upstream
   before compiling.  Purpose: debug check that a buffer is in the expected
   busy/free state, recorded in a dedicated validation hash guarded by a
   test-and-set spinlock on a private heap. */
1147 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
1153 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
/* Spin until we own the validation lock. */
1155 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
1158 p = hash_get (vlib_buffer_state_validation_hash, b);
1160 /* If we don't know about b, declare it to be in the expected state */
1163 hash_set (vlib_buffer_state_validation_hash, b, expected);
1167 if (p[0] != expected)
1169 void cj_stop (void);
1171 vlib_main_t *vm = &vlib_global_main;
1175 bi = vlib_get_buffer_index (vm, b);
1177 clib_mem_set_heap (oldheap);
1178 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
1179 vlib_time_now (vm), bi,
1180 p[0] ? "busy" : "free", expected ? "busy" : "free");
/* Release the lock and restore the caller's heap. */
1184 CLIB_MEMORY_BARRIER ();
1185 *vlib_buffer_state_validation_lock = 0;
1186 clib_mem_set_heap (oldheap);
/* NOTE(review): mangled extraction — embedded line numbers; missing the
   return-type line, the surrounding debug #if/#endif, the oldheap
   declaration, braces and the spin-wait body.  Restore from upstream before
   compiling.  Purpose: debug helper recording a buffer's busy/free state in
   the validation hash under the test-and-set spinlock. */
1191 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
1196 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
/* Spin until we own the validation lock. */
1198 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
1201 hash_set (vlib_buffer_state_validation_hash, b, expected);
/* Release the lock and restore the caller's heap. */
1203 CLIB_MEMORY_BARRIER ();
1204 *vlib_buffer_state_validation_lock = 0;
1205 clib_mem_set_heap (oldheap);
/* NOTE(review): mangled extraction — embedded line numbers; missing the
   return-type line, the do { } of the do/while loop, braces, early return,
   and the else branches.  Restore from upstream before compiling.  Purpose:
   pull bytes forward from later segments until the first buffer holds at
   least VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE bytes, collecting fully
   drained segments into *discard_vector for the caller to free. */
1209 /** minimum data size of first buffer in a buffer chain */
1210 #define VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE (256)
1213 * @brief compress buffer chain in a way where the first buffer is at least
1214 * VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE long
1216 * @param[in] vm - vlib_main
1217 * @param[in,out] first - first buffer in chain
1218 * @param[in,out] discard_vector - vector of buffer indexes which were removed
1222 vlib_buffer_chain_compress (vlib_main_t * vm,
1223 vlib_buffer_t * first, u32 ** discard_vector)
1225 if (first->current_length >= VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE ||
1226 !(first->flags & VLIB_BUFFER_NEXT_PRESENT))
1228 /* this is already big enough or not a chain */
1231 /* probe free list to find allocated buffer size to avoid overfill */
1232 vlib_buffer_free_list_index_t index;
1233 vlib_buffer_free_list_t *free_list =
1234 vlib_buffer_get_buffer_free_list (vm, first, &index);
1236 u32 want_first_size = clib_min (VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE,
1237 free_list->n_data_bytes -
1238 first->current_data);
/* Loop body: move bytes from the second segment into the first. */
1241 vlib_buffer_t *second = vlib_get_buffer (vm, first->next_buffer);
1242 u32 need = want_first_size - first->current_length;
1243 u32 amount_to_copy = clib_min (need, second->current_length);
1244 clib_memcpy (((u8 *) vlib_buffer_get_current (first)) +
1245 first->current_length,
1246 vlib_buffer_get_current (second), amount_to_copy);
1247 first->current_length += amount_to_copy;
1248 vlib_buffer_advance (second, amount_to_copy);
1249 if (first->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
1251 first->total_length_not_including_first_buffer -= amount_to_copy;
/* Drained segment: queue it for discard and unlink it from the chain. */
1253 if (!second->current_length)
1255 vec_add1 (*discard_vector, first->next_buffer);
1256 if (second->flags & VLIB_BUFFER_NEXT_PRESENT)
1258 first->next_buffer = second->next_buffer;
1262 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1264 second->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1267 while ((first->current_length < want_first_size) &&
1268 (first->flags & VLIB_BUFFER_NEXT_PRESENT));
1271 #endif /* included_vlib_buffer_funcs_h */
1274 * fd.io coding-style-patch-verification: ON
1277 * eval: (c-set-style "gnu")