2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
44 #include <vppinfra/fifo.h>
47 vlib buffer access methods.
51 /** \brief Translate buffer index into buffer pointer
53 @param vm - (vlib_main_t *) vlib main data structure pointer
54 @param buffer_index - (u32) buffer index
55 @return - (vlib_buffer_t *) buffer pointer
57 always_inline vlib_buffer_t *
58 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
60 vlib_buffer_main_t *bm = &buffer_main;
61 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
62 ASSERT (offset < bm->buffer_mem_size);
64 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
67 /** \brief Translate array of buffer indices into buffer pointers with offset
69 @param vm - (vlib_main_t *) vlib main data structure pointer
70 @param bi - (u32 *) array of buffer indices
71 @param b - (void **) array to store buffer pointers
72 @param count - (uword) number of elements
73 @param offset - (i32) offset applied to each pointer
75 static_always_inline void
76 vlib_get_buffers_with_offset (vlib_main_t * vm, u32 * bi, void **b, int count,
79 #ifdef CLIB_HAVE_VEC256
80 u64x4 off = u64x4_splat (buffer_main.buffer_mem_start + offset);
81 /* if count is not const, compiler will not unroll while loop
82 se we maintain two-in-parallel variant */
85 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
86 u64x4 b1 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi + 4));
87 /* shift and add to get vlib_buffer_t pointer */
88 u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
89 u64x4_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 4);
97 #ifdef CLIB_HAVE_VEC256
98 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
99 /* shift and add to get vlib_buffer_t pointer */
100 u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
101 #elif defined (CLIB_HAVE_VEC128)
102 u64x2 off = u64x2_splat (buffer_main.buffer_mem_start + offset);
103 u32x4 bi4 = u32x4_load_unaligned (bi);
104 u64x2 b0 = u32x4_extend_to_u64x2 ((u32x4) bi4);
105 #if defined (__aarch64__)
106 u64x2 b1 = u32x4_extend_to_u64x2_high ((u32x4) bi4);
108 bi4 = u32x4_shuffle (bi4, 2, 3, 0, 1);
109 u64x2 b1 = u32x4_extend_to_u64x2 ((u32x4) bi4);
111 u64x2_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
112 u64x2_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 2);
114 b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
115 b[1] = ((u8 *) vlib_get_buffer (vm, bi[1])) + offset;
116 b[2] = ((u8 *) vlib_get_buffer (vm, bi[2])) + offset;
117 b[3] = ((u8 *) vlib_get_buffer (vm, bi[3])) + offset;
125 b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
132 /** \brief Translate array of buffer indices into buffer pointers
134 @param vm - (vlib_main_t *) vlib main data structure pointer
135 @param bi - (u32 *) array of buffer indices
136 @param b - (vlib_buffer_t **) array to store buffer pointers
137 @param count - (uword) number of elements
140 static_always_inline void
141 vlib_get_buffers (vlib_main_t * vm, u32 * bi, vlib_buffer_t ** b, int count)
143 vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
146 /** \brief Translate buffer pointer into buffer index
148 @param vm - (vlib_main_t *) vlib main data structure pointer
149 @param p - (void *) buffer pointer
150 @return - (u32) buffer index
154 vlib_get_buffer_index (vlib_main_t * vm, void *p)
156 vlib_buffer_main_t *bm = &buffer_main;
157 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
158 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
159 ASSERT (offset < bm->buffer_mem_size);
160 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
161 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
164 /** \brief Translate array of buffer pointers into buffer indices with offset
166 @param vm - (vlib_main_t *) vlib main data structure pointer
167 @param b - (void **) array of buffer pointers
168 @param bi - (u32 *) array to store buffer indices
169 @param count - (uword) number of elements
170 @param offset - (i32) offset applied to each pointer
172 static_always_inline void
173 vlib_get_buffer_indices_with_offset (vlib_main_t * vm, void **b, u32 * bi,
174 uword count, i32 offset)
176 #ifdef CLIB_HAVE_VEC256
177 u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
178 u64x4 off4 = u64x4_splat (buffer_main.buffer_mem_start - offset);
182 /* load 4 pointers into 256-bit register */
183 u64x4 v0 = u64x4_load_unaligned (b);
184 u64x4 v1 = u64x4_load_unaligned (b + 4);
190 v0 >>= CLIB_LOG2_CACHE_LINE_BYTES;
191 v1 >>= CLIB_LOG2_CACHE_LINE_BYTES;
193 /* permute 256-bit register so lower u32s of each buffer index are
194 * placed into lower 128-bits */
195 v2 = u32x8_permute ((u32x8) v0, mask);
196 v3 = u32x8_permute ((u32x8) v1, mask);
198 /* extract lower 128-bits and save them to the array of buffer indices */
199 u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
200 u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
208 /* equivalent non-nector implementation */
209 bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
210 bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
211 bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
212 bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
219 bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
226 /** \brief Translate array of buffer pointers into buffer indices
228 @param vm - (vlib_main_t *) vlib main data structure pointer
229 @param b - (vlib_buffer_t **) array of buffer pointers
230 @param bi - (u32 *) array to store buffer indices
231 @param count - (uword) number of elements
233 static_always_inline void
234 vlib_get_buffer_indices (vlib_main_t * vm, vlib_buffer_t ** b, u32 * bi,
237 vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
240 /** \brief Get next buffer in buffer linklist, or zero for end of list.
242 @param vm - (vlib_main_t *) vlib main data structure pointer
243 @param b - (void *) buffer pointer
244 @return - (vlib_buffer_t *) next buffer, or NULL
246 always_inline vlib_buffer_t *
247 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
249 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
250 ? vlib_get_buffer (vm, b->next_buffer) : 0);
253 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
254 vlib_buffer_t * b_first);
256 /** \brief Get length in bytes of the buffer chain
258 @param vm - (vlib_main_t *) vlib main data structure pointer
259 @param b - (void *) buffer pointer
260 @return - (uword) length of buffer chain
263 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
265 uword len = b->current_length;
267 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
270 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
271 return len + b->total_length_not_including_first_buffer;
273 return vlib_buffer_length_in_chain_slow_path (vm, b);
276 /** \brief Get length in bytes of the buffer index buffer chain
278 @param vm - (vlib_main_t *) vlib main data structure pointer
279 @param bi - (u32) buffer index
280 @return - (uword) length of buffer chain
283 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
285 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
286 return vlib_buffer_length_in_chain (vm, b);
289 /** \brief Copy buffer contents to memory
291 @param vm - (vlib_main_t *) vlib main data structure pointer
292 @param buffer_index - (u32) buffer index
293 @param contents - (u8 *) memory, <strong>must be large enough</strong>
294 @return - (uword) length of buffer chain
297 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
299 uword content_len = 0;
305 b = vlib_get_buffer (vm, buffer_index);
306 l = b->current_length;
307 clib_memcpy_fast (contents + content_len, b->data + b->current_data, l);
309 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
311 buffer_index = b->next_buffer;
318 vlib_buffer_get_pa (vlib_main_t * vm, vlib_buffer_t * b)
320 return vlib_physmem_get_pa (vm, b->data);
324 vlib_buffer_get_current_pa (vlib_main_t * vm, vlib_buffer_t * b)
326 return vlib_buffer_get_pa (vm, b) + b->current_data;
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)	\
  do {							\
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);	\
    vlib_prefetch_buffer_header (_b, type);		\
  } while (0)
/* Iterate over known allocated vlib bufs. You probably do not want
 * to do this!
 @param vm the vlib_main_t
 @param bi found allocated buffer index
 @param body operation to perform on buffer index
 function executes body for each allocated buffer index
 */
#define vlib_buffer_foreach_allocated(vm,bi,body)                \
do {                                                             \
  vlib_main_t * _vmain = (vm);                                   \
  vlib_buffer_main_t * _bmain = &_vmain->buffer_main;            \
  hash_pair_t * _vbpair;                                         \
  hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({       \
    if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) {      \
      (bi) = _vbpair->key;                                       \
      body;                                                      \
    }                                                            \
  }));                                                           \
} while (0)
/* Tracked allocation state of a buffer index (debug bookkeeping). */
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
375 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
377 vlib_buffer_known_state_t
380 always_inline vlib_buffer_known_state_t
381 vlib_buffer_is_known (u32 buffer_index)
383 vlib_buffer_main_t *bm = &buffer_main;
385 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
386 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
387 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
388 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
392 vlib_buffer_set_known_state (u32 buffer_index,
393 vlib_buffer_known_state_t state)
395 vlib_buffer_main_t *bm = &buffer_main;
397 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
398 hash_set (bm->buffer_known_hash, buffer_index, state);
399 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
402 /* Validates sanity of a single buffer.
403 Returns format'ed vector with error message if any. */
404 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
408 vlib_buffer_round_size (u32 size)
410 return round_pow2 (size, sizeof (vlib_buffer_t));
413 always_inline vlib_buffer_free_list_index_t
414 vlib_buffer_get_free_list_index (vlib_buffer_t * b)
416 if (PREDICT_FALSE (b->flags & VLIB_BUFFER_NON_DEFAULT_FREELIST))
417 return b->free_list_index;
423 vlib_buffer_set_free_list_index (vlib_buffer_t * b,
424 vlib_buffer_free_list_index_t index)
426 if (PREDICT_FALSE (index))
428 b->flags |= VLIB_BUFFER_NON_DEFAULT_FREELIST;
429 b->free_list_index = index;
432 b->flags &= ~VLIB_BUFFER_NON_DEFAULT_FREELIST;
435 /** \brief Allocate buffers from specific freelist into supplied array
437 @param vm - (vlib_main_t *) vlib main data structure pointer
438 @param buffers - (u32 * ) buffer index array
439 @param n_buffers - (u32) number of buffers requested
440 @return - (u32) number of buffers actually allocated, may be
441 less than the number requested or zero
444 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
447 vlib_buffer_free_list_index_t index)
449 vlib_buffer_main_t *bm = &buffer_main;
450 vlib_buffer_free_list_t *fl;
454 ASSERT (bm->cb.vlib_buffer_fill_free_list_cb);
456 fl = pool_elt_at_index (vm->buffer_free_list_pool, index);
458 len = vec_len (fl->buffers);
460 if (PREDICT_FALSE (len < n_buffers))
462 bm->cb.vlib_buffer_fill_free_list_cb (vm, fl, n_buffers);
463 if (PREDICT_FALSE ((len = vec_len (fl->buffers)) == 0))
466 /* even if fill free list didn't manage to refill free list
467 we should give what we have */
468 n_buffers = clib_min (len, n_buffers);
470 /* following code is intentionaly duplicated to allow compiler
471 to optimize fast path when n_buffers is constant value */
472 src = fl->buffers + len - n_buffers;
473 clib_memcpy_fast (buffers, src, n_buffers * sizeof (u32));
474 _vec_len (fl->buffers) -= n_buffers;
476 /* Verify that buffers are known free. */
477 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
478 VLIB_BUFFER_KNOWN_FREE);
483 src = fl->buffers + len - n_buffers;
484 clib_memcpy_fast (buffers, src, n_buffers * sizeof (u32));
485 _vec_len (fl->buffers) -= n_buffers;
487 /* Verify that buffers are known free. */
488 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
489 VLIB_BUFFER_KNOWN_FREE);
494 /** \brief Allocate buffers into supplied array
496 @param vm - (vlib_main_t *) vlib main data structure pointer
497 @param buffers - (u32 * ) buffer index array
498 @param n_buffers - (u32) number of buffers requested
499 @return - (u32) number of buffers actually allocated, may be
500 less than the number requested or zero
503 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
505 return vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
506 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
509 /** \brief Allocate buffers into ring
511 @param vm - (vlib_main_t *) vlib main data structure pointer
512 @param buffers - (u32 * ) buffer index ring
513 @param start - (u32) first slot in the ring
514 @param ring_size - (u32) ring size
515 @param n_buffers - (u32) number of buffers requested
516 @return - (u32) number of buffers actually allocated, may be
517 less than the number requested or zero
520 vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
521 u32 ring_size, u32 n_buffers)
525 ASSERT (n_buffers <= ring_size);
527 if (PREDICT_TRUE (start + n_buffers <= ring_size))
528 return vlib_buffer_alloc (vm, ring + start, n_buffers);
530 n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
532 if (PREDICT_TRUE (n_alloc == ring_size - start))
533 n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
538 /** \brief Free buffers
539 Frees the entire buffer chain for each buffer
541 @param vm - (vlib_main_t *) vlib main data structure pointer
542 @param buffers - (u32 * ) buffer index array
543 @param n_buffers - (u32) number of buffers to free
547 vlib_buffer_free (vlib_main_t * vm,
548 /* pointer to first buffer */
550 /* number of buffers to free */
553 vlib_buffer_main_t *bm = &buffer_main;
555 ASSERT (bm->cb.vlib_buffer_free_cb);
557 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
560 /** \brief Free buffers, does not free the buffer chain for each buffer
562 @param vm - (vlib_main_t *) vlib main data structure pointer
563 @param buffers - (u32 * ) buffer index array
564 @param n_buffers - (u32) number of buffers to free
568 vlib_buffer_free_no_next (vlib_main_t * vm,
569 /* pointer to first buffer */
571 /* number of buffers to free */
574 vlib_buffer_main_t *bm = &buffer_main;
576 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
578 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
581 /** \brief Free one buffer
582 Shorthand to free a single buffer chain.
584 @param vm - (vlib_main_t *) vlib main data structure pointer
585 @param buffer_index - (u32) buffer index to free
588 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
590 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
593 /** \brief Free buffers from ring
595 @param vm - (vlib_main_t *) vlib main data structure pointer
596 @param buffers - (u32 * ) buffer index ring
597 @param start - (u32) first slot in the ring
598 @param ring_size - (u32) ring size
599 @param n_buffers - (u32) number of buffers
602 vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
603 u32 ring_size, u32 n_buffers)
605 ASSERT (n_buffers <= ring_size);
607 if (PREDICT_TRUE (start + n_buffers <= ring_size))
609 vlib_buffer_free (vm, ring + start, n_buffers);
613 vlib_buffer_free (vm, ring + start, ring_size - start);
614 vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
618 /** \brief Free buffers from ring without freeing tail buffers
620 @param vm - (vlib_main_t *) vlib main data structure pointer
621 @param buffers - (u32 * ) buffer index ring
622 @param start - (u32) first slot in the ring
623 @param ring_size - (u32) ring size
624 @param n_buffers - (u32) number of buffers
627 vlib_buffer_free_from_ring_no_next (vlib_main_t * vm, u32 * ring, u32 start,
628 u32 ring_size, u32 n_buffers)
630 ASSERT (n_buffers <= ring_size);
632 if (PREDICT_TRUE (start + n_buffers <= ring_size))
634 vlib_buffer_free_no_next (vm, ring + start, n_buffers);
638 vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
639 vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
643 /* Add/delete buffer free lists. */
644 vlib_buffer_free_list_index_t vlib_buffer_create_free_list (vlib_main_t * vm,
648 vlib_buffer_delete_free_list (vlib_main_t * vm,
649 vlib_buffer_free_list_index_t free_list_index)
651 vlib_buffer_main_t *bm = &buffer_main;
653 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
655 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
658 /* Make sure we have at least given number of unaligned buffers. */
659 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
660 vlib_buffer_free_list_t *
662 uword n_unaligned_buffers);
664 always_inline vlib_buffer_free_list_t *
665 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
666 vlib_buffer_free_list_index_t * index)
668 vlib_buffer_free_list_index_t i;
670 *index = i = vlib_buffer_get_free_list_index (b);
671 return pool_elt_at_index (vm->buffer_free_list_pool, i);
674 always_inline vlib_buffer_free_list_t *
675 vlib_buffer_get_free_list (vlib_main_t * vm,
676 vlib_buffer_free_list_index_t free_list_index)
678 vlib_buffer_free_list_t *f;
680 f = pool_elt_at_index (vm->buffer_free_list_pool, free_list_index);
682 /* Sanity: indices must match. */
683 ASSERT (f->index == free_list_index);
689 vlib_buffer_free_list_buffer_size (vlib_main_t * vm,
690 vlib_buffer_free_list_index_t index)
692 vlib_buffer_free_list_t *f = vlib_buffer_get_free_list (vm, index);
693 return f->n_data_bytes;
696 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
698 /* Reasonably fast buffer copy routine. */
700 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
721 /* Append given data to end of buffer, possibly allocating new buffers. */
722 u32 vlib_buffer_add_data (vlib_main_t * vm,
723 vlib_buffer_free_list_index_t free_list_index,
724 u32 buffer_index, void *data, u32 n_data_bytes);
726 /* duplicate all buffers in chain */
727 always_inline vlib_buffer_t *
728 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
730 vlib_buffer_t *s, *d, *fd;
731 uword n_alloc, n_buffers = 1;
732 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
736 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
739 s = vlib_get_buffer (vm, s->next_buffer);
741 u32 new_buffers[n_buffers];
743 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
745 /* No guarantee that we'll get all the buffers we asked for */
746 if (PREDICT_FALSE (n_alloc < n_buffers))
749 vlib_buffer_free (vm, new_buffers, n_alloc);
755 fd = d = vlib_get_buffer (vm, new_buffers[0]);
756 d->current_data = s->current_data;
757 d->current_length = s->current_length;
758 d->flags = s->flags & flag_mask;
759 d->total_length_not_including_first_buffer =
760 s->total_length_not_including_first_buffer;
761 clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
762 clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
763 clib_memcpy_fast (vlib_buffer_get_current (d),
764 vlib_buffer_get_current (s), s->current_length);
767 for (i = 1; i < n_buffers; i++)
770 d->next_buffer = new_buffers[i];
772 s = vlib_get_buffer (vm, s->next_buffer);
773 d = vlib_get_buffer (vm, new_buffers[i]);
774 d->current_data = s->current_data;
775 d->current_length = s->current_length;
776 clib_memcpy_fast (vlib_buffer_get_current (d),
777 vlib_buffer_get_current (s), s->current_length);
778 d->flags = s->flags & flag_mask;
784 /** \brief Create a maximum of 256 clones of buffer and store them
785 in the supplied array
787 @param vm - (vlib_main_t *) vlib main data structure pointer
788 @param src_buffer - (u32) source buffer index
789 @param buffers - (u32 * ) buffer index array
790 @param n_buffers - (u16) number of buffer clones requested (<=256)
791 @param head_end_offset - (u16) offset relative to current position
792 where packet head ends
793 @return - (u16) number of buffers actually cloned, may be
794 less than the number requested or zero
797 vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
798 u16 n_buffers, u16 head_end_offset)
801 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
803 ASSERT (s->n_add_refs == 0);
805 ASSERT (n_buffers <= 256);
807 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
809 buffers[0] = src_buffer;
810 for (i = 1; i < n_buffers; i++)
813 d = vlib_buffer_copy (vm, s);
816 buffers[i] = vlib_get_buffer_index (vm, d);
822 if (PREDICT_FALSE (n_buffers == 1))
824 buffers[0] = src_buffer;
828 n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
829 vlib_buffer_get_free_list_index
832 for (i = 0; i < n_buffers; i++)
834 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
835 d->current_data = s->current_data;
836 d->current_length = head_end_offset;
837 vlib_buffer_set_free_list_index (d,
838 vlib_buffer_get_free_list_index (s));
840 d->total_length_not_including_first_buffer = s->current_length -
842 if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
844 d->total_length_not_including_first_buffer +=
845 s->total_length_not_including_first_buffer;
847 d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
848 d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
849 clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
850 clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
851 clib_memcpy_fast (vlib_buffer_get_current (d),
852 vlib_buffer_get_current (s), head_end_offset);
853 d->next_buffer = src_buffer;
855 vlib_buffer_advance (s, head_end_offset);
856 s->n_add_refs = n_buffers - 1;
857 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
859 s = vlib_get_buffer (vm, s->next_buffer);
860 s->n_add_refs = n_buffers - 1;
866 /** \brief Create multiple clones of buffer and store them
867 in the supplied array
869 @param vm - (vlib_main_t *) vlib main data structure pointer
870 @param src_buffer - (u32) source buffer index
871 @param buffers - (u32 * ) buffer index array
872 @param n_buffers - (u16) number of buffer clones requested (<=256)
873 @param head_end_offset - (u16) offset relative to current position
874 where packet head ends
875 @return - (u16) number of buffers actually cloned, may be
876 less than the number requested or zero
879 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
880 u16 n_buffers, u16 head_end_offset)
882 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
885 while (n_buffers > 256)
888 copy = vlib_buffer_copy (vm, s);
889 n_cloned += vlib_buffer_clone_256 (vm,
890 vlib_get_buffer_index (vm, copy),
891 (buffers + n_cloned),
892 256, head_end_offset);
895 n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
897 n_buffers, head_end_offset);
902 /** \brief Attach cloned tail to the buffer
904 @param vm - (vlib_main_t *) vlib main data structure pointer
905 @param head - (vlib_buffer_t *) head buffer
906 @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
910 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
911 vlib_buffer_t * tail)
913 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
914 ASSERT (vlib_buffer_get_free_list_index (head) ==
915 vlib_buffer_get_free_list_index (tail));
917 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
918 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
919 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
920 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
921 head->next_buffer = vlib_get_buffer_index (vm, tail);
922 head->total_length_not_including_first_buffer = tail->current_length +
923 tail->total_length_not_including_first_buffer;
926 clib_atomic_add_fetch (&tail->n_add_refs, 1);
928 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
930 tail = vlib_get_buffer (vm, tail->next_buffer);
935 /* Initializes the buffer as an empty packet with no chained buffers. */
937 vlib_buffer_chain_init (vlib_buffer_t * first)
939 first->total_length_not_including_first_buffer = 0;
940 first->current_length = 0;
941 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
942 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
945 /* The provided next_bi buffer index is appended to the end of the packet. */
946 always_inline vlib_buffer_t *
947 vlib_buffer_chain_buffer (vlib_main_t * vm, vlib_buffer_t * last, u32 next_bi)
949 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
950 last->next_buffer = next_bi;
951 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
952 next_buffer->current_length = 0;
953 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
957 /* Increases or decreases the packet length.
958 * It does not allocate or deallocate new buffers.
959 * Therefore, the added length must be compatible
960 * with the last buffer. */
962 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
963 vlib_buffer_t * last, i32 len)
965 last->current_length += len;
967 first->total_length_not_including_first_buffer += len;
970 /* Copy data to the end of the packet and increases its length.
971 * It does not allocate new buffers.
972 * Returns the number of copied bytes. */
974 vlib_buffer_chain_append_data (vlib_main_t * vm,
975 vlib_buffer_free_list_index_t free_list_index,
976 vlib_buffer_t * first,
977 vlib_buffer_t * last, void *data, u16 data_len)
980 vlib_buffer_free_list_buffer_size (vm, free_list_index);
981 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
982 u16 len = clib_min (data_len,
983 n_buffer_bytes - last->current_length -
985 clib_memcpy_fast (vlib_buffer_get_current (last) + last->current_length,
987 vlib_buffer_chain_increase_length (first, last, len);
991 /* Copy data to the end of the packet and increases its length.
992 * Allocates additional buffers from the free list if necessary.
993 * Returns the number of copied bytes.
994 * 'last' value is modified whenever new buffers are allocated and
995 * chained and points to the last buffer in the chain. */
997 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
998 vlib_buffer_free_list_index_t
1000 vlib_buffer_t * first,
1001 vlib_buffer_t ** last, void *data,
1003 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
1005 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
1006 format_vlib_buffer_contents;
1010 /* Vector of packet data. */
1013 /* Number of buffers to allocate in each call to allocator. */
1014 u32 min_n_buffers_each_alloc;
1016 /* Buffer free list for this template. */
1017 vlib_buffer_free_list_index_t free_list_index;
1020 } vlib_packet_template_t;
1022 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
1023 vlib_packet_template_t * t);
1025 void vlib_packet_template_init (vlib_main_t * vm,
1026 vlib_packet_template_t * t,
1028 uword n_packet_data_bytes,
1029 uword min_n_buffers_each_alloc,
1032 void *vlib_packet_template_get_packet (vlib_main_t * vm,
1033 vlib_packet_template_t * t,
1037 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
1039 vec_free (t->packet_data);
1043 unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
1045 serialize_stream_t *s = &m->stream;
1046 vlib_serialize_buffer_main_t *sm
1047 = uword_to_pointer (m->stream.data_function_opaque,
1048 vlib_serialize_buffer_main_t *);
1049 vlib_main_t *vm = sm->vlib_main;
1052 n = s->n_buffer_bytes - s->current_buffer_index;
1053 if (sm->last_buffer != ~0)
1055 vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
1056 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
1058 b = vlib_get_buffer (vm, b->next_buffer);
1059 n += b->current_length;
1064 clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
1065 n += vlib_buffer_index_length_in_chain (vm, f[0]);
1072 /* Set a buffer quickly into "uninitialized" state. We want this to
1073 be extremely cheap and arrange for all fields that need to be
1074 initialized to be in the first 128 bits of the buffer. */
1076 vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
1077 vlib_buffer_free_list_t * fl)
1079 vlib_buffer_t *src = &fl->buffer_init_template;
1081 /* Make sure vlib_buffer_t is cacheline aligned and sized */
1082 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
1083 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
1084 CLIB_CACHE_LINE_BYTES);
1085 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
1086 CLIB_CACHE_LINE_BYTES * 2);
1088 /* Make sure buffer template is sane. */
1089 ASSERT (fl->index == vlib_buffer_get_free_list_index (src));
1091 clib_memcpy_fast (STRUCT_MARK_PTR (dst, template_start),
1092 STRUCT_MARK_PTR (src, template_start),
1093 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
1094 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
1096 /* Not in the first 16 octets. */
1097 dst->n_add_refs = src->n_add_refs;
1098 vlib_buffer_set_free_list_index (dst, fl->index);
1100 /* Make sure it really worked. */
1101 #define _(f) ASSERT (dst->f == src->f);
1106 /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
1107 /* total_length_not_including_first_buffer is not in the template anymore
1108 * so it may actually not zeroed for some buffers. One option is to
1109 * uncomment the line lower (comes at a cost), the other, is to just not
1111 /* dst->total_length_not_including_first_buffer = 0; */
1112 ASSERT (dst->n_add_refs == 0);
1116 vlib_buffer_add_to_free_list (vlib_main_t * vm,
1117 vlib_buffer_free_list_t * f,
1118 u32 buffer_index, u8 do_init)
1120 vlib_buffer_pool_t *bp = vlib_buffer_pool_get (f->buffer_pool_index);
1122 b = vlib_get_buffer (vm, buffer_index);
1123 if (PREDICT_TRUE (do_init))
1124 vlib_buffer_init_for_free_list (b, f);
1125 vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
1127 if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
1129 clib_spinlock_lock (&bp->lock);
1130 /* keep last stored buffers, as they are more likely hot in the cache */
1131 vec_add_aligned (bp->buffers, f->buffers, VLIB_FRAME_SIZE,
1132 CLIB_CACHE_LINE_BYTES);
1133 vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
1134 f->n_alloc -= VLIB_FRAME_SIZE;
1135 clib_spinlock_unlock (&bp->lock);
1140 extern u32 *vlib_buffer_state_validation_lock;
1141 extern uword *vlib_buffer_state_validation_hash;
1142 extern void *vlib_buffer_state_heap;
1146 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
1152 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
1154 while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))
1157 p = hash_get (vlib_buffer_state_validation_hash, b);
1159 /* If we don't know about b, declare it to be in the expected state */
1162 hash_set (vlib_buffer_state_validation_hash, b, expected);
1166 if (p[0] != expected)
1168 void cj_stop (void);
1170 vlib_main_t *vm = &vlib_global_main;
1174 bi = vlib_get_buffer_index (vm, b);
1176 clib_mem_set_heap (oldheap);
1177 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
1178 vlib_time_now (vm), bi,
1179 p[0] ? "busy" : "free", expected ? "busy" : "free");
1183 CLIB_MEMORY_BARRIER ();
1184 *vlib_buffer_state_validation_lock = 0;
1185 clib_mem_set_heap (oldheap);
1190 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
1195 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
1197 while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))
1200 hash_set (vlib_buffer_state_validation_hash, b, expected);
1202 CLIB_MEMORY_BARRIER ();
1203 *vlib_buffer_state_validation_lock = 0;
1204 clib_mem_set_heap (oldheap);
1208 /** minimum data size of first buffer in a buffer chain */
1209 #define VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE (256)
1212 * @brief compress buffer chain in a way where the first buffer is at least
1213 * VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE long
1215 * @param[in] vm - vlib_main
1216 * @param[in,out] first - first buffer in chain
1217 * @param[in,out] discard_vector - vector of buffer indexes which were removed
1221 vlib_buffer_chain_compress (vlib_main_t * vm,
1222 vlib_buffer_t * first, u32 ** discard_vector)
1224 if (first->current_length >= VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE ||
1225 !(first->flags & VLIB_BUFFER_NEXT_PRESENT))
1227 /* this is already big enough or not a chain */
1230 /* probe free list to find allocated buffer size to avoid overfill */
1231 vlib_buffer_free_list_index_t index;
1232 vlib_buffer_free_list_t *free_list =
1233 vlib_buffer_get_buffer_free_list (vm, first, &index);
1235 u32 want_first_size = clib_min (VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE,
1236 free_list->n_data_bytes -
1237 first->current_data);
1240 vlib_buffer_t *second = vlib_get_buffer (vm, first->next_buffer);
1241 u32 need = want_first_size - first->current_length;
1242 u32 amount_to_copy = clib_min (need, second->current_length);
1243 clib_memcpy_fast (((u8 *) vlib_buffer_get_current (first)) +
1244 first->current_length,
1245 vlib_buffer_get_current (second), amount_to_copy);
1246 first->current_length += amount_to_copy;
1247 vlib_buffer_advance (second, amount_to_copy);
1248 if (first->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
1250 first->total_length_not_including_first_buffer -= amount_to_copy;
1252 if (!second->current_length)
1254 vec_add1 (*discard_vector, first->next_buffer);
1255 if (second->flags & VLIB_BUFFER_NEXT_PRESENT)
1257 first->next_buffer = second->next_buffer;
1261 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1263 second->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1266 while ((first->current_length < want_first_size) &&
1267 (first->flags & VLIB_BUFFER_NEXT_PRESENT));
1271 * @brief linearize buffer chain - the first buffer is filled, if needed,
1272 * buffers are allocated and filled, returns free space in last buffer or
1273 * negative on failure
1275 * @param[in] vm - vlib_main
1276 * @param[in,out] first - first buffer in chain
1279 vlib_buffer_chain_linearize (vlib_main_t * vm, vlib_buffer_t * first)
1281 vlib_buffer_t *b = first;
1282 vlib_buffer_free_list_t *fl =
1283 vlib_buffer_get_free_list (vm, vlib_buffer_get_free_list_index (b));
1284 u32 buf_len = fl->n_data_bytes;
1285 // free buffer chain starting from the second buffer
1286 int free_count = (b->flags & VLIB_BUFFER_NEXT_PRESENT) != 0;
1287 u32 chain_to_free = b->next_buffer;
1289 u32 len = vlib_buffer_length_in_chain (vm, b);
1290 u32 free_len = buf_len - b->current_data - b->current_length;
1291 int alloc_len = clib_max (len - free_len, 0); //use the free len in the first buffer
1292 int n_buffers = (alloc_len + buf_len - 1) / buf_len;
1293 u32 new_buffers[n_buffers];
1295 u32 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
1296 if (n_alloc != n_buffers)
1298 vlib_buffer_free_no_next (vm, new_buffers, n_alloc);
1302 vlib_buffer_t *s = b;
1303 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
1305 s = vlib_get_buffer (vm, s->next_buffer);
1306 int d_free_len = buf_len - b->current_data - b->current_length;
1307 ASSERT (d_free_len >= 0);
1308 // chain buf and split write
1309 u32 copy_len = clib_min (d_free_len, s->current_length);
1310 u8 *d = vlib_buffer_put_uninit (b, copy_len);
1311 clib_memcpy (d, vlib_buffer_get_current (s), copy_len);
1312 int rest = s->current_length - copy_len;
1316 ASSERT (vlib_buffer_get_tail (b) == b->data + buf_len);
1317 ASSERT (n_buffers > 0);
1318 b = vlib_buffer_chain_buffer (vm, b, new_buffers[--n_buffers]);
1319 //make full use of the new buffers
1320 b->current_data = 0;
1321 d = vlib_buffer_put_uninit (b, rest);
1322 clib_memcpy (d, vlib_buffer_get_current (s) + copy_len, rest);
1325 vlib_buffer_free (vm, &chain_to_free, free_count);
1326 b->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1327 if (b == first) /* no buffers addeed */
1328 b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1329 ASSERT (len == vlib_buffer_length_in_chain (vm, first));
1330 ASSERT (n_buffers == 0);
1331 return buf_len - b->current_data - b->current_length;
1334 #endif /* included_vlib_buffer_funcs_h */
1337 * fd.io coding-style-patch-verification: ON
1340 * eval: (c-set-style "gnu")