2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
44 #include <vppinfra/fifo.h>
47 vlib buffer access methods.
51 /** \brief Translate buffer index into buffer pointer
53 @param vm - (vlib_main_t *) vlib main data structure pointer
54 @param buffer_index - (u32) buffer index
55 @return - (vlib_buffer_t *) buffer pointer
57 always_inline vlib_buffer_t *
58 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
60 vlib_buffer_main_t *bm = &buffer_main;
61 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
62 ASSERT (offset < bm->buffer_mem_size);
64 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
67 /** \brief Translate array of buffer indices into buffer pointers with offset
69 @param vm - (vlib_main_t *) vlib main data structure pointer
70 @param bi - (u32 *) array of buffer indices
71 @param b - (void **) array to store buffer pointers
72 @param count - (uword) number of elements
73 @param offset - (i32) offset applied to each pointer
/* Translate an array of buffer indices into (byte-offset) buffer pointers.
   Vector (AVX2 / SSE-NEON) fast paths process 8 / 4 indices at a time;
   the scalar tail handles the remainder one at a time.
   NOTE(review): several lines of this function are not visible in this
   view (the `i32 offset)` parameter continuation, loop control,
   count/pointer advances and the closing #endif/brace lines), so the
   structure below is incomplete - confirm against the full file. */
static_always_inline void
vlib_get_buffers_with_offset (vlib_main_t * vm, u32 * bi, void **b, int count,
#ifdef CLIB_HAVE_VEC256
  u64x4 off = u64x4_splat (buffer_main.buffer_mem_start + offset);
  /* if count is not const, compiler will not unroll while loop
     so we maintain two-in-parallel variant */
      u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
      u64x4 b1 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi + 4));
      /* shift and add to get vlib_buffer_t pointer */
      u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
      u64x4_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 4);
#ifdef CLIB_HAVE_VEC256
      u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
      /* shift and add to get vlib_buffer_t pointer */
      u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
#elif defined (CLIB_HAVE_VEC128)
      u64x2 off = u64x2_splat (buffer_main.buffer_mem_start + offset);
      u32x4 bi4 = u32x4_load_unaligned (bi);
      u64x2 b0 = u32x4_extend_to_u64x2 ((u32x4) bi4);
#if defined (__aarch64__)
      /* aarch64 can widen the high lanes directly ... */
      u64x2 b1 = u32x4_extend_to_u64x2_high ((u32x4) bi4);
      /* ... otherwise rotate the lanes and widen the (new) low pair */
      bi4 = u32x4_shuffle (bi4, 2, 3, 0, 1);
      u64x2 b1 = u32x4_extend_to_u64x2 ((u32x4) bi4);
      u64x2_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
      u64x2_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 2);
      /* scalar fallback: 4 at a time */
      b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
      b[1] = ((u8 *) vlib_get_buffer (vm, bi[1])) + offset;
      b[2] = ((u8 *) vlib_get_buffer (vm, bi[2])) + offset;
      b[3] = ((u8 *) vlib_get_buffer (vm, bi[3])) + offset;
  /* scalar tail: one at a time */
  b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
132 /** \brief Translate array of buffer indices into buffer pointers
134 @param vm - (vlib_main_t *) vlib main data structure pointer
135 @param bi - (u32 *) array of buffer indices
136 @param b - (vlib_buffer_t **) array to store buffer pointers
137 @param count - (uword) number of elements
140 static_always_inline void
141 vlib_get_buffers (vlib_main_t * vm, u32 * bi, vlib_buffer_t ** b, int count)
143 vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
146 /** \brief Translate buffer pointer into buffer index
148 @param vm - (vlib_main_t *) vlib main data structure pointer
149 @param p - (void *) buffer pointer
150 @return - (u32) buffer index
154 vlib_get_buffer_index (vlib_main_t * vm, void *p)
156 vlib_buffer_main_t *bm = &buffer_main;
157 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
158 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
159 ASSERT (offset < bm->buffer_mem_size);
160 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
161 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
164 /** \brief Translate array of buffer pointers into buffer indices with offset
166 @param vm - (vlib_main_t *) vlib main data structure pointer
167 @param b - (void **) array of buffer pointers
168 @param bi - (u32 *) array to store buffer indices
169 @param count - (uword) number of elements
170 @param offset - (i32) offset applied to each pointer
/* Translate an array of buffer pointers into buffer indices, subtracting
   `offset` from each pointer first. AVX2 fast path converts 8 pointers
   per iteration; the scalar paths handle 4-at-a-time and the tail.
   NOTE(review): lines are missing from this view (the opening brace,
   the base-subtraction of v0/v1, u32x8 declarations, loop control and
   the closing braces), so the flow below is incomplete. */
static_always_inline void
vlib_get_buffer_indices_with_offset (vlib_main_t * vm, void **b, u32 * bi,
				     uword count, i32 offset)
#ifdef CLIB_HAVE_VEC256
  u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
  u64x4 off4 = u64x4_splat (buffer_main.buffer_mem_start - offset);
      /* load 4 pointers into 256-bit register */
      u64x4 v0 = u64x4_load_unaligned (b);
      u64x4 v1 = u64x4_load_unaligned (b + 4);
      v0 >>= CLIB_LOG2_CACHE_LINE_BYTES;
      v1 >>= CLIB_LOG2_CACHE_LINE_BYTES;
      /* permute 256-bit register so lower u32s of each buffer index are
       * placed into lower 128-bits */
      v2 = u32x8_permute ((u32x8) v0, mask);
      v3 = u32x8_permute ((u32x8) v1, mask);
      /* extract lower 128-bits and save them to the array of buffer indices */
      u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
      u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
      /* equivalent non-vector implementation */
      bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
      bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
      bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
      bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
  /* scalar tail: one at a time */
  bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
226 /** \brief Translate array of buffer pointers into buffer indices
228 @param vm - (vlib_main_t *) vlib main data structure pointer
229 @param b - (vlib_buffer_t **) array of buffer pointers
230 @param bi - (u32 *) array to store buffer indices
231 @param count - (uword) number of elements
233 static_always_inline void
234 vlib_get_buffer_indices (vlib_main_t * vm, vlib_buffer_t ** b, u32 * bi,
237 vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
240 /** \brief Get next buffer in buffer linklist, or zero for end of list.
242 @param vm - (vlib_main_t *) vlib main data structure pointer
243 @param b - (void *) buffer pointer
244 @return - (vlib_buffer_t *) next buffer, or NULL
246 always_inline vlib_buffer_t *
247 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
249 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
250 ? vlib_get_buffer (vm, b->next_buffer) : 0);
253 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
254 vlib_buffer_t * b_first);
256 /** \brief Get length in bytes of the buffer chain
258 @param vm - (vlib_main_t *) vlib main data structure pointer
259 @param b - (void *) buffer pointer
260 @return - (uword) length of buffer chain
263 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
265 uword len = b->current_length;
267 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
270 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
271 return len + b->total_length_not_including_first_buffer;
273 return vlib_buffer_length_in_chain_slow_path (vm, b);
276 /** \brief Get length in bytes of the buffer index buffer chain
278 @param vm - (vlib_main_t *) vlib main data structure pointer
279 @param bi - (u32) buffer index
280 @return - (uword) length of buffer chain
283 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
285 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
286 return vlib_buffer_length_in_chain (vm, b);
289 /** \brief Copy buffer contents to memory
291 @param vm - (vlib_main_t *) vlib main data structure pointer
292 @param buffer_index - (u32) buffer index
293 @param contents - (u8 *) memory, <strong>must be large enough</strong>
294 @return - (uword) length of buffer chain
/* Copy the entire buffer chain's payload into `contents`, which the
   caller guarantees is large enough; returns total bytes copied.
   NOTE(review): this view is truncated - the return-type line, local
   declarations, the enclosing while loop, the content_len accumulation
   and the final return are not visible below. */
vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
  uword content_len = 0;
      b = vlib_get_buffer (vm, buffer_index);
      l = b->current_length;
      /* append this segment's payload after what was copied so far */
      clib_memcpy (contents + content_len, b->data + b->current_data, l);
      if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
      buffer_index = b->next_buffer;
318 vlib_buffer_get_pa (vlib_main_t * vm, vlib_buffer_t * b)
320 vlib_buffer_main_t *bm = &buffer_main;
321 vlib_buffer_pool_t *pool = vec_elt_at_index (bm->buffer_pools,
322 b->buffer_pool_index);
323 return vlib_physmem_virtual_to_physical (vm, pool->physmem_region, b->data);
327 vlib_buffer_get_current_pa (vlib_main_t * vm, vlib_buffer_t * b)
329 return vlib_buffer_get_pa (vm, b) + b->current_data;
332 /** \brief Prefetch buffer metadata by buffer index
333 The first 64 bytes of buffer contains most header information
335 @param vm - (vlib_main_t *) vlib main data structure pointer
336 @param bi - (u32) buffer index
337 @param type - LOAD, STORE. In most cases, STORE is the right answer
/* Prefetch buffer header given index.
   NOTE(review): the do { ... } while (0) wrapper lines of this macro are
   not visible in this view; only the body statements appear below. */
#define vlib_prefetch_buffer_with_index(vm,bi,type) \
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi); \
    vlib_prefetch_buffer_header (_b, type); \
347 /* Iterate over known allocated vlib bufs. You probably do not want
349 @param vm the vlib_main_t
350 @param bi found allocated buffer index
351 @param body operation to perform on buffer index
352 function executes body for each allocated buffer index
/* Iterate over all buffer indices recorded as KNOWN_ALLOCATED in the
   buffer-known hash, executing `body` for each. Debug/diagnostic use.
   NOTE(review): the closing lines of this macro (end of if, end of
   hash_foreach_pair, end of do/while) are not visible in this view. */
#define vlib_buffer_foreach_allocated(vm,bi,body)                 \
  vlib_main_t * _vmain = (vm);                                    \
  vlib_buffer_main_t * _bmain = &_vmain->buffer_main;             \
  hash_pair_t * _vbpair;                                          \
  hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({        \
    if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) {       \
      (bi) = _vbpair->key;                                        \
  /* Index is unknown. */
  /* NOTE(review): the `typedef enum {` opener and the VLIB_BUFFER_UNKNOWN
     enumerator are not visible in this view of the file. */
  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
378 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
380 vlib_buffer_known_state_t
383 always_inline vlib_buffer_known_state_t
384 vlib_buffer_is_known (u32 buffer_index)
386 vlib_buffer_main_t *bm = &buffer_main;
388 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
389 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
390 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
391 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
395 vlib_buffer_set_known_state (u32 buffer_index,
396 vlib_buffer_known_state_t state)
398 vlib_buffer_main_t *bm = &buffer_main;
400 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
401 hash_set (bm->buffer_known_hash, buffer_index, state);
402 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
405 /* Validates sanity of a single buffer.
406 Returns format'ed vector with error message if any. */
407 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
411 vlib_buffer_round_size (u32 size)
413 return round_pow2 (size, sizeof (vlib_buffer_t));
416 always_inline vlib_buffer_free_list_index_t
417 vlib_buffer_get_free_list_index (vlib_buffer_t * b)
419 if (PREDICT_FALSE (b->flags & VLIB_BUFFER_NON_DEFAULT_FREELIST))
420 return b->free_list_index;
426 vlib_buffer_set_free_list_index (vlib_buffer_t * b,
427 vlib_buffer_free_list_index_t index)
429 if (PREDICT_FALSE (index))
431 b->flags |= VLIB_BUFFER_NON_DEFAULT_FREELIST;
432 b->free_list_index = index;
435 b->flags &= ~VLIB_BUFFER_NON_DEFAULT_FREELIST;
438 /** \brief Allocate buffers from specific freelist into supplied array
440 @param vm - (vlib_main_t *) vlib main data structure pointer
441 @param buffers - (u32 * ) buffer index array
442 @param n_buffers - (u32) number of buffers requested
443 @return - (u32) number of buffers actually allocated, may be
444 less than the number requested or zero
/* Allocate up to n_buffers indices from free list `index` into `buffers`,
   refilling the free list via the registered callback when short; returns
   the number actually allocated (possibly fewer than requested, or 0).
   NOTE(review): this view is truncated - the return-type line, parameter
   lines for buffers/n_buffers, the early `return 0`, the constant-size
   fast-path branching and the final return are not visible below. */
vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
				  vlib_buffer_free_list_index_t index)
  vlib_buffer_main_t *bm = &buffer_main;
  vlib_buffer_free_list_t *fl;
  ASSERT (bm->cb.vlib_buffer_fill_free_list_cb);
  fl = pool_elt_at_index (vm->buffer_free_list_pool, index);
  len = vec_len (fl->buffers);
  /* not enough cached buffers: ask the callback to refill */
  if (PREDICT_FALSE (len < n_buffers))
      bm->cb.vlib_buffer_fill_free_list_cb (vm, fl, n_buffers);
      if (PREDICT_FALSE ((len = vec_len (fl->buffers)) == 0))
      /* even if fill free list didn't manage to refill free list
         we should give what we have */
      n_buffers = clib_min (len, n_buffers);
  /* following code is intentionally duplicated to allow compiler
     to optimize fast path when n_buffers is constant value */
  src = fl->buffers + len - n_buffers;
  clib_memcpy (buffers, src, n_buffers * sizeof (u32));
  _vec_len (fl->buffers) -= n_buffers;
  /* Verify that buffers are known free. */
  vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
				   VLIB_BUFFER_KNOWN_FREE);
  src = fl->buffers + len - n_buffers;
  clib_memcpy (buffers, src, n_buffers * sizeof (u32));
  _vec_len (fl->buffers) -= n_buffers;
  /* Verify that buffers are known free. */
  vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
				   VLIB_BUFFER_KNOWN_FREE);
497 /** \brief Allocate buffers into supplied array
499 @param vm - (vlib_main_t *) vlib main data structure pointer
500 @param buffers - (u32 * ) buffer index array
501 @param n_buffers - (u32) number of buffers requested
502 @return - (u32) number of buffers actually allocated, may be
503 less than the number requested or zero
506 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
508 return vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
509 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
512 /** \brief Allocate buffers into ring
514 @param vm - (vlib_main_t *) vlib main data structure pointer
515 @param buffers - (u32 * ) buffer index ring
516 @param start - (u32) first slot in the ring
517 @param ring_size - (u32) ring size
518 @param n_buffers - (u32) number of buffers requested
519 @return - (u32) number of buffers actually allocated, may be
520 less than the number requested or zero
523 vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
524 u32 ring_size, u32 n_buffers)
528 ASSERT (n_buffers <= ring_size);
530 if (PREDICT_TRUE (start + n_buffers <= ring_size))
531 return vlib_buffer_alloc (vm, ring + start, n_buffers);
533 n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
535 if (PREDICT_TRUE (n_alloc == ring_size - start))
536 n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
541 /** \brief Free buffers
542 Frees the entire buffer chain for each buffer
544 @param vm - (vlib_main_t *) vlib main data structure pointer
545 @param buffers - (u32 * ) buffer index array
546 @param n_buffers - (u32) number of buffers to free
550 vlib_buffer_free (vlib_main_t * vm,
551 /* pointer to first buffer */
553 /* number of buffers to free */
556 vlib_buffer_main_t *bm = &buffer_main;
558 ASSERT (bm->cb.vlib_buffer_free_cb);
560 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
563 /** \brief Free buffers, does not free the buffer chain for each buffer
565 @param vm - (vlib_main_t *) vlib main data structure pointer
566 @param buffers - (u32 * ) buffer index array
567 @param n_buffers - (u32) number of buffers to free
571 vlib_buffer_free_no_next (vlib_main_t * vm,
572 /* pointer to first buffer */
574 /* number of buffers to free */
577 vlib_buffer_main_t *bm = &buffer_main;
579 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
581 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
584 /** \brief Free one buffer
585 Shorthand to free a single buffer chain.
587 @param vm - (vlib_main_t *) vlib main data structure pointer
588 @param buffer_index - (u32) buffer index to free
591 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
593 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
596 /** \brief Free buffers from ring
598 @param vm - (vlib_main_t *) vlib main data structure pointer
599 @param buffers - (u32 * ) buffer index ring
600 @param start - (u32) first slot in the ring
601 @param ring_size - (u32) ring size
602 @param n_buffers - (u32) number of buffers
605 vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
606 u32 ring_size, u32 n_buffers)
608 ASSERT (n_buffers <= ring_size);
610 if (PREDICT_TRUE (start + n_buffers <= ring_size))
612 vlib_buffer_free (vm, ring + start, n_buffers);
616 vlib_buffer_free (vm, ring + start, ring_size - start);
617 vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
621 /** \brief Free buffers from ring without freeing tail buffers
623 @param vm - (vlib_main_t *) vlib main data structure pointer
624 @param buffers - (u32 * ) buffer index ring
625 @param start - (u32) first slot in the ring
626 @param ring_size - (u32) ring size
627 @param n_buffers - (u32) number of buffers
630 vlib_buffer_free_from_ring_no_next (vlib_main_t * vm, u32 * ring, u32 start,
631 u32 ring_size, u32 n_buffers)
633 ASSERT (n_buffers <= ring_size);
635 if (PREDICT_TRUE (start + n_buffers <= ring_size))
637 vlib_buffer_free_no_next (vm, ring + start, n_buffers);
641 vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
642 vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
646 /* Add/delete buffer free lists. */
647 vlib_buffer_free_list_index_t vlib_buffer_create_free_list (vlib_main_t * vm,
651 vlib_buffer_delete_free_list (vlib_main_t * vm,
652 vlib_buffer_free_list_index_t free_list_index)
654 vlib_buffer_main_t *bm = &buffer_main;
656 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
658 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
661 /* Make sure we have at least given number of unaligned buffers. */
662 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
663 vlib_buffer_free_list_t *
665 uword n_unaligned_buffers);
667 always_inline vlib_buffer_free_list_t *
668 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
669 vlib_buffer_free_list_index_t * index)
671 vlib_buffer_free_list_index_t i;
673 *index = i = vlib_buffer_get_free_list_index (b);
674 return pool_elt_at_index (vm->buffer_free_list_pool, i);
677 always_inline vlib_buffer_free_list_t *
678 vlib_buffer_get_free_list (vlib_main_t * vm,
679 vlib_buffer_free_list_index_t free_list_index)
681 vlib_buffer_free_list_t *f;
683 f = pool_elt_at_index (vm->buffer_free_list_pool, free_list_index);
685 /* Sanity: indices must match. */
686 ASSERT (f->index == free_list_index);
692 vlib_buffer_free_list_buffer_size (vlib_main_t * vm,
693 vlib_buffer_free_list_index_t index)
695 vlib_buffer_free_list_t *f = vlib_buffer_get_free_list (vm, index);
696 return f->n_data_bytes;
699 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
701 /* Reasonably fast buffer copy routine. */
/* Reasonably fast buffer-index copy routine.
   NOTE(review): the return-type line and the entire body of this function
   are not visible in this view of the file; only the signature remains. */
vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
724 /* Append given data to end of buffer, possibly allocating new buffers. */
725 u32 vlib_buffer_add_data (vlib_main_t * vm,
726 vlib_buffer_free_list_index_t free_list_index,
727 u32 buffer_index, void *data, u32 n_data_bytes);
729 /* duplicate all buffers in chain */
/* Duplicate all buffers in a chain: count segments, allocate the same
   number of new buffers, then copy metadata (flags masked to
   NEXT_PRESENT | TOTAL_LENGTH_VALID), opaque fields and payload segment
   by segment. Returns the head of the new chain, or 0 on allocation
   failure (partial allocation is freed).
   NOTE(review): this view is truncated - `s = b;`, loop braces, the
   n_buffers increment, the failure `return 0`, the `i` declaration and
   the final `return fd;` are not visible below. */
always_inline vlib_buffer_t *
vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
  vlib_buffer_t *s, *d, *fd;
  uword n_alloc, n_buffers = 1;
  u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
  /* count segments in the source chain */
  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
      s = vlib_get_buffer (vm, s->next_buffer);
  u32 new_buffers[n_buffers];
  n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
  /* No guarantee that we'll get all the buffers we asked for */
  if (PREDICT_FALSE (n_alloc < n_buffers))
      vlib_buffer_free (vm, new_buffers, n_alloc);
  /* copy the head segment */
  fd = d = vlib_get_buffer (vm, new_buffers[0]);
  d->current_data = s->current_data;
  d->current_length = s->current_length;
  d->flags = s->flags & flag_mask;
  d->total_length_not_including_first_buffer =
    s->total_length_not_including_first_buffer;
  clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
  clib_memcpy (d->opaque2, s->opaque2, sizeof (s->opaque2));
  clib_memcpy (vlib_buffer_get_current (d),
	       vlib_buffer_get_current (s), s->current_length);
  /* copy the remaining segments, linking as we go */
  for (i = 1; i < n_buffers; i++)
      d->next_buffer = new_buffers[i];
      s = vlib_get_buffer (vm, s->next_buffer);
      d = vlib_get_buffer (vm, new_buffers[i]);
      d->current_data = s->current_data;
      d->current_length = s->current_length;
      clib_memcpy (vlib_buffer_get_current (d),
		   vlib_buffer_get_current (s), s->current_length);
      d->flags = s->flags & flag_mask;
787 /** \brief Create a maximum of 256 clones of buffer and store them
788 in the supplied array
790 @param vm - (vlib_main_t *) vlib main data structure pointer
791 @param src_buffer - (u32) source buffer index
792 @param buffers - (u32 * ) buffer index array
793 @param n_buffers - (u16) number of buffer clones requested (<=256)
794 @param head_end_offset - (u16) offset relative to current position
795 where packet head ends
796 @return - (u16) number of buffers actually cloned, may be
797 less than the number requested or zero
/* Create up to 256 clones of src_buffer. Small packets (head fits within
   head_end_offset + 2 cache lines) are deep-copied instead of cloned.
   Otherwise each clone gets its own head buffer (first head_end_offset
   bytes copied) whose next_buffer points back at the shared source, and
   the source chain's reference counts are bumped to n_buffers - 1.
   NOTE(review): this view is truncated - the return-type line, local
   declarations, several braces/returns, the `- head_end_offset` and
   `head_end_offset` continuation operands, and the final return are not
   visible below. */
vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
		       u16 n_buffers, u16 head_end_offset)
  vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
  /* source must not already be shared */
  ASSERT (s->n_add_refs == 0);
  ASSERT (n_buffers <= 256);
  /* too small to be worth cloning: deep-copy instead */
  if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
      buffers[0] = src_buffer;
      for (i = 1; i < n_buffers; i++)
	  d = vlib_buffer_copy (vm, s);
	  buffers[i] = vlib_get_buffer_index (vm, d);
  if (PREDICT_FALSE (n_buffers == 1))
      buffers[0] = src_buffer;
  /* allocate one head buffer per clone from the source's free list */
  n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
						vlib_buffer_get_free_list_index
  for (i = 0; i < n_buffers; i++)
      vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
      d->current_data = s->current_data;
      d->current_length = head_end_offset;
      vlib_buffer_set_free_list_index (d,
				       vlib_buffer_get_free_list_index (s));
      d->total_length_not_including_first_buffer = s->current_length -
      if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
	  d->total_length_not_including_first_buffer +=
	    s->total_length_not_including_first_buffer;
      /* clone head chains onto the shared source buffer */
      d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
      d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
      clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
      clib_memcpy (d->opaque2, s->opaque2, sizeof (s->opaque2));
      clib_memcpy (vlib_buffer_get_current (d), vlib_buffer_get_current (s),
      d->next_buffer = src_buffer;
  /* source now starts where the cloned heads end */
  vlib_buffer_advance (s, head_end_offset);
  s->n_add_refs = n_buffers - 1;
  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
      s = vlib_get_buffer (vm, s->next_buffer);
      s->n_add_refs = n_buffers - 1;
869 /** \brief Create multiple clones of buffer and store them
870 in the supplied array
872 @param vm - (vlib_main_t *) vlib main data structure pointer
873 @param src_buffer - (u32) source buffer index
874 @param buffers - (u32 * ) buffer index array
875 @param n_buffers - (u16) number of buffer clones requested (<=256)
876 @param head_end_offset - (u16) offset relative to current position
877 where packet head ends
878 @return - (u16) number of buffers actually cloned, may be
879 less than the number requested or zero
/* Create an arbitrary number of clones by batching vlib_buffer_clone_256:
   each batch beyond the first clones a fresh deep copy of the source so
   no single buffer exceeds the 256-reference limit.
   NOTE(review): this view is truncated - the return-type line, the
   n_cloned/copy declarations, loop braces, the n_buffers decrement, the
   final batch's `buffers + n_cloned` argument and the return are not
   visible below. */
vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
		   u16 n_buffers, u16 head_end_offset)
  vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
  /* full batches of 256 clones from fresh copies of the source */
  while (n_buffers > 256)
      copy = vlib_buffer_copy (vm, s);
      n_cloned += vlib_buffer_clone_256 (vm,
					 vlib_get_buffer_index (vm, copy),
					 (buffers + n_cloned),
					 256, head_end_offset);
  /* final (<= 256) batch clones the original source buffer */
  n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
				     n_buffers, head_end_offset);
905 /** \brief Attach cloned tail to the buffer
907 @param vm - (vlib_main_t *) vlib main data structure pointer
908 @param head - (vlib_buffer_t *) head buffer
 * @param tail - (vlib_buffer_t *) tail buffer to clone and attach to head
/* Attach `tail` to `head` as a cloned continuation: link the chain,
   propagate the total-length flag/value from tail, and bump reference
   counts on every buffer of the tail chain.
   NOTE(review): this view is truncated - the return-type line, braces
   and the loop that walks and ref-counts subsequent tail segments are
   not fully visible below. */
vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
			  vlib_buffer_t * tail)
  /* head must not already have a continuation */
  ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
  ASSERT (vlib_buffer_get_free_list_index (head) ==
	  vlib_buffer_get_free_list_index (tail));
  head->flags |= VLIB_BUFFER_NEXT_PRESENT;
  head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
  head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
  /* head's total is valid only if tail's was */
  head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
  head->next_buffer = vlib_get_buffer_index (vm, tail);
  head->total_length_not_including_first_buffer = tail->current_length +
    tail->total_length_not_including_first_buffer;
  /* every segment of the tail chain gains one reference */
  clib_atomic_add_fetch (&tail->n_add_refs, 1);
  if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
      tail = vlib_get_buffer (vm, tail->next_buffer);
938 /* Initializes the buffer as an empty packet with no chained buffers. */
940 vlib_buffer_chain_init (vlib_buffer_t * first)
942 first->total_length_not_including_first_buffer = 0;
943 first->current_length = 0;
944 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
945 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
948 /* The provided next_bi buffer index is appended to the end of the packet. */
949 always_inline vlib_buffer_t *
950 vlib_buffer_chain_buffer (vlib_main_t * vm,
951 vlib_buffer_t * first,
952 vlib_buffer_t * last, u32 next_bi)
954 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
955 last->next_buffer = next_bi;
956 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
957 next_buffer->current_length = 0;
958 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
962 /* Increases or decreases the packet length.
963 * It does not allocate or deallocate new buffers.
964 * Therefore, the added length must be compatible
965 * with the last buffer. */
967 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
968 vlib_buffer_t * last, i32 len)
970 last->current_length += len;
972 first->total_length_not_including_first_buffer += len;
975 /* Copy data to the end of the packet and increases its length.
976 * It does not allocate new buffers.
977 * Returns the number of copied bytes. */
/* Copy up to data_len bytes into the tail of `last` (bounded by the free
   list's buffer size) and grow the chain length accordingly; returns the
   number of bytes actually copied. Does not allocate new buffers.
   NOTE(review): this view is truncated - the return-type line, the
   n_buffer_bytes assignment target, a `last->current_data` operand, the
   copy-length argument and the `return len;` are not visible below. */
vlib_buffer_chain_append_data (vlib_main_t * vm,
			       vlib_buffer_free_list_index_t free_list_index,
			       vlib_buffer_t * first,
			       vlib_buffer_t * last, void *data, u16 data_len)
    vlib_buffer_free_list_buffer_size (vm, free_list_index);
  ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
  /* clamp to the space remaining in the last buffer */
  u16 len = clib_min (data_len,
		      n_buffer_bytes - last->current_length -
  clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
  vlib_buffer_chain_increase_length (first, last, len);
996 /* Copy data to the end of the packet and increases its length.
997 * Allocates additional buffers from the free list if necessary.
998 * Returns the number of copied bytes.
999 * 'last' value is modified whenever new buffers are allocated and
1000 * chained and points to the last buffer in the chain. */
1002 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
1003 vlib_buffer_free_list_index_t
1005 vlib_buffer_t * first,
1006 vlib_buffer_t ** last, void *data,
1008 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
1010 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
1011 format_vlib_buffer_contents;
  /* Vector of packet data. */
  /* NOTE(review): the `typedef struct {` opener, the packet_data member
     and the name/min_n_buffers fields are not all visible in this view. */
  /* Number of buffers to allocate in each call to allocator. */
  u32 min_n_buffers_each_alloc;
  /* Buffer free list for this template. */
  vlib_buffer_free_list_index_t free_list_index;
} vlib_packet_template_t;
1027 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
1028 vlib_packet_template_t * t);
1030 void vlib_packet_template_init (vlib_main_t * vm,
1031 vlib_packet_template_t * t,
1033 uword n_packet_data_bytes,
1034 uword min_n_buffers_each_alloc,
1037 void *vlib_packet_template_get_packet (vlib_main_t * vm,
1038 vlib_packet_template_t * t,
1042 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
1044 vec_free (t->packet_data);
/* Count the bytes available to unserialize: remainder of the current
   stream buffer, plus the rest of the last buffer's chain, plus every
   chain queued in the rx buffer fifo.
   NOTE(review): this view is truncated - the return-type line, the `n`
   declaration, loop/brace closers and the final return are not visible
   below. */
unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
  serialize_stream_t *s = &m->stream;
  vlib_serialize_buffer_main_t *sm
    = uword_to_pointer (m->stream.data_function_opaque,
			vlib_serialize_buffer_main_t *);
  vlib_main_t *vm = sm->vlib_main;
  /* unread remainder of the current buffer */
  n = s->n_buffer_bytes - s->current_buffer_index;
  if (sm->last_buffer != ~0)
    {
      vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
      /* add the rest of the last buffer's chain */
      while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
	  b = vlib_get_buffer (vm, b->next_buffer);
	  n += b->current_length;
  /* add every queued chain still waiting in the rx fifo */
  clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
    n += vlib_buffer_index_length_in_chain (vm, f[0]);
1077 /* Set a buffer quickly into "uninitialized" state. We want this to
1078 be extremely cheap and arrange for all fields that need to be
1079 initialized to be in the first 128 bits of the buffer. */
/* Set a buffer quickly into "uninitialized" state by copying the free
   list's init template over the buffer's template region, then fixing up
   the fields that live outside that region.
   NOTE(review): this view is truncated - the return-type line, braces,
   and the foreach over template fields that follows the `#define _(f)`
   assertion macro are not fully visible below. */
vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
				vlib_buffer_free_list_t * fl)
  vlib_buffer_t *src = &fl->buffer_init_template;
  /* Make sure vlib_buffer_t is cacheline aligned and sized */
  ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
  ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
	  CLIB_CACHE_LINE_BYTES);
  ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
	  CLIB_CACHE_LINE_BYTES * 2);
  /* Make sure buffer template is sane. */
  ASSERT (fl->index == vlib_buffer_get_free_list_index (src));
  /* bulk-copy the template span of the buffer header */
  clib_memcpy (STRUCT_MARK_PTR (dst, template_start),
	       STRUCT_MARK_PTR (src, template_start),
	       STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
	       STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
  /* Not in the first 16 octets. */
  dst->n_add_refs = src->n_add_refs;
  vlib_buffer_set_free_list_index (dst, fl->index);
  /* Make sure it really worked. */
#define _(f) ASSERT (dst->f == src->f);
  /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
  /* total_length_not_including_first_buffer is not in the template anymore
   * so it may actually not be zeroed for some buffers. One option is to
   * uncomment the line lower (comes at a cost), the other, is to just not
   */
  /* dst->total_length_not_including_first_buffer = 0; */
  ASSERT (dst->n_add_refs == 0);
/* Return a buffer to free list `f`, optionally re-initializing it from
   the free list's template. When the local free list grows past
   4 * VLIB_FRAME_SIZE entries, spill the oldest VLIB_FRAME_SIZE entries
   to the (spinlock-protected) global buffer pool.
   NOTE(review): this view is truncated - the return-type line, the
   vlib_buffer_t *b declaration, an n_alloc increment and closing braces
   are not visible below. */
vlib_buffer_add_to_free_list (vlib_main_t * vm,
			      vlib_buffer_free_list_t * f,
			      u32 buffer_index, u8 do_init)
  vlib_buffer_pool_t *bp = vlib_buffer_pool_get (f->buffer_pool_index);
  b = vlib_get_buffer (vm, buffer_index);
  if (PREDICT_TRUE (do_init))
    vlib_buffer_init_for_free_list (b, f);
  vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
  /* local cache grew too large: spill a frame's worth to the pool */
  if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
      clib_spinlock_lock (&bp->lock);
      /* keep last stored buffers, as they are more likely hot in the cache */
      vec_add_aligned (bp->buffers, f->buffers, VLIB_FRAME_SIZE,
		       CLIB_CACHE_LINE_BYTES);
      vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
      f->n_alloc -= VLIB_FRAME_SIZE;
      clib_spinlock_unlock (&bp->lock);
1145 extern u32 *vlib_buffer_state_validation_lock;
1146 extern uword *vlib_buffer_state_validation_hash;
1147 extern void *vlib_buffer_state_heap;
/* Debug aid: assert that buffer b is in the `expected` busy/free state
   according to the state-validation hash; records the state if b was
   previously unknown. Uses a dedicated heap plus a test-and-set lock.
   NOTE(review): this view is truncated - the return-type line, local
   declarations (oldheap, p, bi, vm placement), several braces and the
   final os_panic()/return lines are not visible below. */
vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
  /* switch to the validation heap for hash operations */
  oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
  /* spin on the validation lock */
  while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))
  p = hash_get (vlib_buffer_state_validation_hash, b);
  /* If we don't know about b, declare it to be in the expected state */
      hash_set (vlib_buffer_state_validation_hash, b, expected);
  if (p[0] != expected)
      void cj_stop (void);
      vlib_main_t *vm = &vlib_global_main;
      bi = vlib_get_buffer_index (vm, b);
      clib_mem_set_heap (oldheap);
      clib_warning ("%.6f buffer %llx (%d): %s, not %s",
		    vlib_time_now (vm), bi,
		    p[0] ? "busy" : "free", expected ? "busy" : "free");
  CLIB_MEMORY_BARRIER ();
  *vlib_buffer_state_validation_lock = 0;
  clib_mem_set_heap (oldheap);
/* Debug aid: unconditionally record buffer b's busy/free state in the
   state-validation hash, under the validation heap and lock.
   NOTE(review): the return-type line, the oldheap declaration and the
   enclosing braces are not visible in this view. */
vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
  /* switch to the validation heap for hash operations */
  oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
  /* spin on the validation lock */
  while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))
  hash_set (vlib_buffer_state_validation_hash, b, expected);
  CLIB_MEMORY_BARRIER ();
  *vlib_buffer_state_validation_lock = 0;
  clib_mem_set_heap (oldheap);
1213 /** minimum data size of first buffer in a buffer chain */
1214 #define VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE (256)
1217 * @brief compress buffer chain in a way where the first buffer is at least
1218 * VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE long
1220 * @param[in] vm - vlib_main
1221 * @param[in,out] first - first buffer in chain
1222 * @param[in,out] discard_vector - vector of buffer indexes which were removed
/* Pull data forward from subsequent segments until the first buffer holds
   at least VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE bytes (bounded by the
   free list's buffer size). Fully-drained segments are unlinked and their
   indices appended to *discard_vector for the caller to free.
   NOTE(review): this view is truncated - the return-type line, the early
   return, the `do {` opener, else branches and several closing braces
   are not visible below. */
vlib_buffer_chain_compress (vlib_main_t * vm,
			    vlib_buffer_t * first, u32 ** discard_vector)
  if (first->current_length >= VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE ||
      !(first->flags & VLIB_BUFFER_NEXT_PRESENT))
      /* this is already big enough or not a chain */
  /* probe free list to find allocated buffer size to avoid overfill */
  vlib_buffer_free_list_index_t index;
  vlib_buffer_free_list_t *free_list =
    vlib_buffer_get_buffer_free_list (vm, first, &index);
  u32 want_first_size = clib_min (VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE,
				  free_list->n_data_bytes -
				  first->current_data);
      vlib_buffer_t *second = vlib_get_buffer (vm, first->next_buffer);
      u32 need = want_first_size - first->current_length;
      u32 amount_to_copy = clib_min (need, second->current_length);
      /* move bytes from the second segment into the first */
      clib_memcpy (((u8 *) vlib_buffer_get_current (first)) +
		   first->current_length,
		   vlib_buffer_get_current (second), amount_to_copy);
      first->current_length += amount_to_copy;
      vlib_buffer_advance (second, amount_to_copy);
      if (first->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
	  first->total_length_not_including_first_buffer -= amount_to_copy;
      /* second segment fully drained: unlink and mark for discard */
      if (!second->current_length)
	  vec_add1 (*discard_vector, first->next_buffer);
	  if (second->flags & VLIB_BUFFER_NEXT_PRESENT)
	      first->next_buffer = second->next_buffer;
	      first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
	  second->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
  while ((first->current_length < want_first_size) &&
	 (first->flags & VLIB_BUFFER_NEXT_PRESENT));
1275 #endif /* included_vlib_buffer_funcs_h */
1278 * fd.io coding-style-patch-verification: ON
1281 * eval: (c-set-style "gnu")