/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * buffer_funcs.h: VLIB buffer related functions/inlines
 *
 * Copyright (c) 2008 Eliot Dresselhaus
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
/** \file
    vlib buffer access methods.
*/
50 /** \brief Translate buffer index into buffer pointer
52 @param vm - (vlib_main_t *) vlib main data structure pointer
53 @param buffer_index - (u32) buffer index
54 @return - (vlib_buffer_t *) buffer pointer
56 always_inline vlib_buffer_t *
57 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
59 vlib_buffer_main_t *bm = &buffer_main;
60 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
61 ASSERT (offset < bm->buffer_mem_size);
63 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
66 /** \brief Translate array of buffer indices into buffer pointers with offset
68 @param vm - (vlib_main_t *) vlib main data structure pointer
69 @param bi - (u32 *) array of buffer indices
70 @param b - (void **) array to store buffer pointers
71 @param count - (uword) number of elements
72 @param offset - (i32) offset applied to each pointer
74 static_always_inline void
75 vlib_get_buffers_with_offset (vlib_main_t * vm, u32 * bi, void **b, int count,
78 #ifdef CLIB_HAVE_VEC256
79 u64x4 off = u64x4_splat (buffer_main.buffer_mem_start + offset);
80 /* if count is not const, compiler will not unroll while loop
81 se we maintain two-in-parallel variant */
84 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
85 u64x4 b1 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi + 4));
86 /* shift and add to get vlib_buffer_t pointer */
87 u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
88 u64x4_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 4);
96 #ifdef CLIB_HAVE_VEC256
97 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
98 /* shift and add to get vlib_buffer_t pointer */
99 u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
100 #elif defined (CLIB_HAVE_VEC128) && defined (__x86_64__)
101 u64x2 off = u64x2_splat (buffer_main.buffer_mem_start + offset);
102 u32x4 bi4 = u32x4_load_unaligned (bi);
103 u64x2 b0 = u32x4_extend_to_u64x2 ((u32x4) bi4);
104 bi4 = u32x4_shuffle (bi4, 2, 3, 0, 1);
105 u64x2 b1 = u32x4_extend_to_u64x2 ((u32x4) bi4);
106 u64x2_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
107 u64x2_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 2);
109 b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
110 b[1] = ((u8 *) vlib_get_buffer (vm, bi[1])) + offset;
111 b[2] = ((u8 *) vlib_get_buffer (vm, bi[2])) + offset;
112 b[3] = ((u8 *) vlib_get_buffer (vm, bi[3])) + offset;
120 b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
127 /** \brief Translate array of buffer indices into buffer pointers
129 @param vm - (vlib_main_t *) vlib main data structure pointer
130 @param bi - (u32 *) array of buffer indices
131 @param b - (vlib_buffer_t **) array to store buffer pointers
132 @param count - (uword) number of elements
135 static_always_inline void
136 vlib_get_buffers (vlib_main_t * vm, u32 * bi, vlib_buffer_t ** b, int count)
138 vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
141 /** \brief Translate buffer pointer into buffer index
143 @param vm - (vlib_main_t *) vlib main data structure pointer
144 @param p - (void *) buffer pointer
145 @return - (u32) buffer index
149 vlib_get_buffer_index (vlib_main_t * vm, void *p)
151 vlib_buffer_main_t *bm = &buffer_main;
152 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
153 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
154 ASSERT (offset < bm->buffer_mem_size);
155 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
156 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
159 /** \brief Translate array of buffer pointers into buffer indices with offset
161 @param vm - (vlib_main_t *) vlib main data structure pointer
162 @param b - (void **) array of buffer pointers
163 @param bi - (u32 *) array to store buffer indices
164 @param count - (uword) number of elements
165 @param offset - (i32) offset applied to each pointer
167 static_always_inline void
168 vlib_get_buffer_indices_with_offset (vlib_main_t * vm, void **b, u32 * bi,
169 uword count, i32 offset)
171 #ifdef CLIB_HAVE_VEC256
172 u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
173 u64x4 off4 = u64x4_splat (buffer_main.buffer_mem_start - offset);
177 /* load 4 pointers into 256-bit register */
178 u64x4 v0 = u64x4_load_unaligned (b);
179 u64x4 v1 = u64x4_load_unaligned (b + 4);
185 v0 >>= CLIB_LOG2_CACHE_LINE_BYTES;
186 v1 >>= CLIB_LOG2_CACHE_LINE_BYTES;
188 /* permute 256-bit register so lower u32s of each buffer index are
189 * placed into lower 128-bits */
190 v2 = u32x8_permute ((u32x8) v0, mask);
191 v3 = u32x8_permute ((u32x8) v1, mask);
193 /* extract lower 128-bits and save them to the array of buffer indices */
194 u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
195 u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
203 /* equivalent non-nector implementation */
204 bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
205 bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
206 bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
207 bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
214 bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
221 /** \brief Translate array of buffer pointers into buffer indices
223 @param vm - (vlib_main_t *) vlib main data structure pointer
224 @param b - (vlib_buffer_t **) array of buffer pointers
225 @param bi - (u32 *) array to store buffer indices
226 @param count - (uword) number of elements
228 static_always_inline void
229 vlib_get_buffer_indices (vlib_main_t * vm, vlib_buffer_t ** b, u32 * bi,
232 vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
235 /** \brief Get next buffer in buffer linklist, or zero for end of list.
237 @param vm - (vlib_main_t *) vlib main data structure pointer
238 @param b - (void *) buffer pointer
239 @return - (vlib_buffer_t *) next buffer, or NULL
241 always_inline vlib_buffer_t *
242 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
244 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
245 ? vlib_get_buffer (vm, b->next_buffer) : 0);
248 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
249 vlib_buffer_t * b_first);
251 /** \brief Get length in bytes of the buffer chain
253 @param vm - (vlib_main_t *) vlib main data structure pointer
254 @param b - (void *) buffer pointer
255 @return - (uword) length of buffer chain
258 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
260 uword len = b->current_length;
262 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
265 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
266 return len + b->total_length_not_including_first_buffer;
268 return vlib_buffer_length_in_chain_slow_path (vm, b);
271 /** \brief Get length in bytes of the buffer index buffer chain
273 @param vm - (vlib_main_t *) vlib main data structure pointer
274 @param bi - (u32) buffer index
275 @return - (uword) length of buffer chain
278 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
280 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
281 return vlib_buffer_length_in_chain (vm, b);
284 /** \brief Copy buffer contents to memory
286 @param vm - (vlib_main_t *) vlib main data structure pointer
287 @param buffer_index - (u32) buffer index
288 @param contents - (u8 *) memory, <strong>must be large enough</strong>
289 @return - (uword) length of buffer chain
292 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
294 uword content_len = 0;
300 b = vlib_get_buffer (vm, buffer_index);
301 l = b->current_length;
302 clib_memcpy (contents + content_len, b->data + b->current_data, l);
304 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
306 buffer_index = b->next_buffer;
312 /* Return physical address of buffer->data start. */
314 vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
316 vlib_buffer_main_t *bm = &buffer_main;
317 vlib_buffer_t *b = vlib_get_buffer (vm, buffer_index);
318 vlib_buffer_pool_t *pool = vec_elt_at_index (bm->buffer_pools,
319 b->buffer_pool_index);
321 return vlib_physmem_virtual_to_physical (vm, pool->physmem_region, b->data);
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)	\
  do {							\
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);	\
    vlib_prefetch_buffer_header (_b, type);		\
  } while (0)
/* Iterate over known allocated vlib bufs. You probably do not want
 * to do this!
 @param vm the vlib_main_t
 @param bi found allocated buffer index
 @param body operation to perform on buffer index
 function executes body for each allocated buffer index
*/
#define vlib_buffer_foreach_allocated(vm,bi,body)                \
do {                                                             \
  vlib_main_t * _vmain = (vm);                                   \
  vlib_buffer_main_t * _bmain = &_vmain->buffer_main;            \
  hash_pair_t * _vbpair;                                         \
  hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({       \
    if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) {      \
      (bi) = _vbpair->key;                                       \
      body;                                                      \
    }                                                            \
  }));                                                           \
} while (0)
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
370 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
372 vlib_buffer_known_state_t
375 always_inline vlib_buffer_known_state_t
376 vlib_buffer_is_known (u32 buffer_index)
378 vlib_buffer_main_t *bm = &buffer_main;
380 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
381 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
382 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
383 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
387 vlib_buffer_set_known_state (u32 buffer_index,
388 vlib_buffer_known_state_t state)
390 vlib_buffer_main_t *bm = &buffer_main;
392 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
393 hash_set (bm->buffer_known_hash, buffer_index, state);
394 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
397 /* Validates sanity of a single buffer.
398 Returns format'ed vector with error message if any. */
399 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
403 vlib_buffer_round_size (u32 size)
405 return round_pow2 (size, sizeof (vlib_buffer_t));
408 always_inline vlib_buffer_free_list_index_t
409 vlib_buffer_get_free_list_index (vlib_buffer_t * b)
411 if (PREDICT_FALSE (b->flags & VLIB_BUFFER_NON_DEFAULT_FREELIST))
412 return b->free_list_index;
418 vlib_buffer_set_free_list_index (vlib_buffer_t * b,
419 vlib_buffer_free_list_index_t index)
421 if (PREDICT_FALSE (index))
423 b->flags |= VLIB_BUFFER_NON_DEFAULT_FREELIST;
424 b->free_list_index = index;
427 b->flags &= ~VLIB_BUFFER_NON_DEFAULT_FREELIST;
430 /** \brief Allocate buffers from specific freelist into supplied array
432 @param vm - (vlib_main_t *) vlib main data structure pointer
433 @param buffers - (u32 * ) buffer index array
434 @param n_buffers - (u32) number of buffers requested
435 @return - (u32) number of buffers actually allocated, may be
436 less than the number requested or zero
439 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
442 vlib_buffer_free_list_index_t index)
444 vlib_buffer_main_t *bm = &buffer_main;
445 vlib_buffer_free_list_t *fl;
449 ASSERT (bm->cb.vlib_buffer_fill_free_list_cb);
451 fl = pool_elt_at_index (vm->buffer_free_list_pool, index);
453 len = vec_len (fl->buffers);
455 if (PREDICT_FALSE (len < n_buffers))
457 bm->cb.vlib_buffer_fill_free_list_cb (vm, fl, n_buffers);
458 if (PREDICT_FALSE ((len = vec_len (fl->buffers)) == 0))
461 /* even if fill free list didn't manage to refill free list
462 we should give what we have */
463 n_buffers = clib_min (len, n_buffers);
465 /* following code is intentionaly duplicated to allow compiler
466 to optimize fast path when n_buffers is constant value */
467 src = fl->buffers + len - n_buffers;
468 clib_memcpy (buffers, src, n_buffers * sizeof (u32));
469 _vec_len (fl->buffers) -= n_buffers;
471 /* Verify that buffers are known free. */
472 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
473 VLIB_BUFFER_KNOWN_FREE);
478 src = fl->buffers + len - n_buffers;
479 clib_memcpy (buffers, src, n_buffers * sizeof (u32));
480 _vec_len (fl->buffers) -= n_buffers;
482 /* Verify that buffers are known free. */
483 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
484 VLIB_BUFFER_KNOWN_FREE);
489 /** \brief Allocate buffers into supplied array
491 @param vm - (vlib_main_t *) vlib main data structure pointer
492 @param buffers - (u32 * ) buffer index array
493 @param n_buffers - (u32) number of buffers requested
494 @return - (u32) number of buffers actually allocated, may be
495 less than the number requested or zero
498 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
500 return vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
501 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
504 /** \brief Allocate buffers into ring
506 @param vm - (vlib_main_t *) vlib main data structure pointer
507 @param buffers - (u32 * ) buffer index ring
508 @param start - (u32) first slot in the ring
509 @param ring_size - (u32) ring size
510 @param n_buffers - (u32) number of buffers requested
511 @return - (u32) number of buffers actually allocated, may be
512 less than the number requested or zero
515 vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
516 u32 ring_size, u32 n_buffers)
520 ASSERT (n_buffers <= ring_size);
522 if (PREDICT_TRUE (start + n_buffers <= ring_size))
523 return vlib_buffer_alloc (vm, ring + start, n_buffers);
525 n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
527 if (PREDICT_TRUE (n_alloc == ring_size - start))
528 n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
533 /** \brief Free buffers
534 Frees the entire buffer chain for each buffer
536 @param vm - (vlib_main_t *) vlib main data structure pointer
537 @param buffers - (u32 * ) buffer index array
538 @param n_buffers - (u32) number of buffers to free
542 vlib_buffer_free (vlib_main_t * vm,
543 /* pointer to first buffer */
545 /* number of buffers to free */
548 vlib_buffer_main_t *bm = &buffer_main;
550 ASSERT (bm->cb.vlib_buffer_free_cb);
552 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
555 /** \brief Free buffers, does not free the buffer chain for each buffer
557 @param vm - (vlib_main_t *) vlib main data structure pointer
558 @param buffers - (u32 * ) buffer index array
559 @param n_buffers - (u32) number of buffers to free
563 vlib_buffer_free_no_next (vlib_main_t * vm,
564 /* pointer to first buffer */
566 /* number of buffers to free */
569 vlib_buffer_main_t *bm = &buffer_main;
571 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
573 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
576 /** \brief Free one buffer
577 Shorthand to free a single buffer chain.
579 @param vm - (vlib_main_t *) vlib main data structure pointer
580 @param buffer_index - (u32) buffer index to free
583 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
585 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
588 /** \brief Free buffers from ring
590 @param vm - (vlib_main_t *) vlib main data structure pointer
591 @param buffers - (u32 * ) buffer index ring
592 @param start - (u32) first slot in the ring
593 @param ring_size - (u32) ring size
594 @param n_buffers - (u32) number of buffers
597 vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
598 u32 ring_size, u32 n_buffers)
600 ASSERT (n_buffers <= ring_size);
602 if (PREDICT_TRUE (start + n_buffers <= ring_size))
604 vlib_buffer_free (vm, ring + start, n_buffers);
608 vlib_buffer_free (vm, ring + start, ring_size - start);
609 vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
613 /** \brief Free buffers from ring without freeing tail buffers
615 @param vm - (vlib_main_t *) vlib main data structure pointer
616 @param buffers - (u32 * ) buffer index ring
617 @param start - (u32) first slot in the ring
618 @param ring_size - (u32) ring size
619 @param n_buffers - (u32) number of buffers
622 vlib_buffer_free_from_ring_no_next (vlib_main_t * vm, u32 * ring, u32 start,
623 u32 ring_size, u32 n_buffers)
625 ASSERT (n_buffers <= ring_size);
627 if (PREDICT_TRUE (start + n_buffers <= ring_size))
629 vlib_buffer_free_no_next (vm, ring + start, n_buffers);
633 vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
634 vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
638 /* Add/delete buffer free lists. */
639 vlib_buffer_free_list_index_t vlib_buffer_create_free_list (vlib_main_t * vm,
643 vlib_buffer_delete_free_list (vlib_main_t * vm,
644 vlib_buffer_free_list_index_t free_list_index)
646 vlib_buffer_main_t *bm = &buffer_main;
648 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
650 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
653 /* Make sure we have at least given number of unaligned buffers. */
654 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
655 vlib_buffer_free_list_t *
657 uword n_unaligned_buffers);
659 always_inline vlib_buffer_free_list_t *
660 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
661 vlib_buffer_free_list_index_t * index)
663 vlib_buffer_free_list_index_t i;
665 *index = i = vlib_buffer_get_free_list_index (b);
666 return pool_elt_at_index (vm->buffer_free_list_pool, i);
669 always_inline vlib_buffer_free_list_t *
670 vlib_buffer_get_free_list (vlib_main_t * vm,
671 vlib_buffer_free_list_index_t free_list_index)
673 vlib_buffer_free_list_t *f;
675 f = pool_elt_at_index (vm->buffer_free_list_pool, free_list_index);
677 /* Sanity: indices must match. */
678 ASSERT (f->index == free_list_index);
684 vlib_buffer_free_list_buffer_size (vlib_main_t * vm,
685 vlib_buffer_free_list_index_t index)
687 vlib_buffer_free_list_t *f = vlib_buffer_get_free_list (vm, index);
688 return f->n_data_bytes;
691 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
693 /* Reasonably fast buffer copy routine. */
695 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
716 /* Append given data to end of buffer, possibly allocating new buffers. */
717 u32 vlib_buffer_add_data (vlib_main_t * vm,
718 vlib_buffer_free_list_index_t free_list_index,
719 u32 buffer_index, void *data, u32 n_data_bytes);
721 /* duplicate all buffers in chain */
722 always_inline vlib_buffer_t *
723 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
725 vlib_buffer_t *s, *d, *fd;
726 uword n_alloc, n_buffers = 1;
727 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
731 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
734 s = vlib_get_buffer (vm, s->next_buffer);
736 u32 new_buffers[n_buffers];
738 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
740 /* No guarantee that we'll get all the buffers we asked for */
741 if (PREDICT_FALSE (n_alloc < n_buffers))
744 vlib_buffer_free (vm, new_buffers, n_alloc);
750 fd = d = vlib_get_buffer (vm, new_buffers[0]);
751 d->current_data = s->current_data;
752 d->current_length = s->current_length;
753 d->flags = s->flags & flag_mask;
754 d->total_length_not_including_first_buffer =
755 s->total_length_not_including_first_buffer;
756 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
757 clib_memcpy (d->opaque2, s->opaque2, sizeof (s->opaque2));
758 clib_memcpy (vlib_buffer_get_current (d),
759 vlib_buffer_get_current (s), s->current_length);
762 for (i = 1; i < n_buffers; i++)
765 d->next_buffer = new_buffers[i];
767 s = vlib_get_buffer (vm, s->next_buffer);
768 d = vlib_get_buffer (vm, new_buffers[i]);
769 d->current_data = s->current_data;
770 d->current_length = s->current_length;
771 clib_memcpy (vlib_buffer_get_current (d),
772 vlib_buffer_get_current (s), s->current_length);
773 d->flags = s->flags & flag_mask;
779 /** \brief Create a maximum of 256 clones of buffer and store them
780 in the supplied array
782 @param vm - (vlib_main_t *) vlib main data structure pointer
783 @param src_buffer - (u32) source buffer index
784 @param buffers - (u32 * ) buffer index array
785 @param n_buffers - (u16) number of buffer clones requested (<=256)
786 @param head_end_offset - (u16) offset relative to current position
787 where packet head ends
788 @return - (u16) number of buffers actually cloned, may be
789 less than the number requested or zero
792 vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
793 u16 n_buffers, u16 head_end_offset)
796 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
798 ASSERT (s->n_add_refs == 0);
800 ASSERT (n_buffers <= 256);
802 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
804 buffers[0] = src_buffer;
805 for (i = 1; i < n_buffers; i++)
808 d = vlib_buffer_copy (vm, s);
811 buffers[i] = vlib_get_buffer_index (vm, d);
817 if (PREDICT_FALSE (n_buffers == 1))
819 buffers[0] = src_buffer;
823 n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
824 vlib_buffer_get_free_list_index
827 for (i = 0; i < n_buffers; i++)
829 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
830 d->current_data = s->current_data;
831 d->current_length = head_end_offset;
832 vlib_buffer_set_free_list_index (d,
833 vlib_buffer_get_free_list_index (s));
835 d->total_length_not_including_first_buffer = s->current_length -
837 if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
839 d->total_length_not_including_first_buffer +=
840 s->total_length_not_including_first_buffer;
842 d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
843 d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
844 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
845 clib_memcpy (d->opaque2, s->opaque2, sizeof (s->opaque2));
846 clib_memcpy (vlib_buffer_get_current (d), vlib_buffer_get_current (s),
848 d->next_buffer = src_buffer;
850 vlib_buffer_advance (s, head_end_offset);
851 s->n_add_refs = n_buffers - 1;
852 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
854 s = vlib_get_buffer (vm, s->next_buffer);
855 s->n_add_refs = n_buffers - 1;
861 /** \brief Create multiple clones of buffer and store them
862 in the supplied array
864 @param vm - (vlib_main_t *) vlib main data structure pointer
865 @param src_buffer - (u32) source buffer index
866 @param buffers - (u32 * ) buffer index array
867 @param n_buffers - (u16) number of buffer clones requested (<=256)
868 @param head_end_offset - (u16) offset relative to current position
869 where packet head ends
870 @return - (u16) number of buffers actually cloned, may be
871 less than the number requested or zero
874 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
875 u16 n_buffers, u16 head_end_offset)
877 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
880 while (n_buffers > 256)
883 copy = vlib_buffer_copy (vm, s);
884 n_cloned += vlib_buffer_clone_256 (vm,
885 vlib_get_buffer_index (vm, copy),
886 (buffers + n_cloned),
887 256, head_end_offset);
890 n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
892 n_buffers, head_end_offset);
897 /** \brief Attach cloned tail to the buffer
899 @param vm - (vlib_main_t *) vlib main data structure pointer
900 @param head - (vlib_buffer_t *) head buffer
901 @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
905 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
906 vlib_buffer_t * tail)
908 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
909 ASSERT (vlib_buffer_get_free_list_index (head) ==
910 vlib_buffer_get_free_list_index (tail));
912 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
913 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
914 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
915 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
916 head->next_buffer = vlib_get_buffer_index (vm, tail);
917 head->total_length_not_including_first_buffer = tail->current_length +
918 tail->total_length_not_including_first_buffer;
921 __sync_add_and_fetch (&tail->n_add_refs, 1);
923 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
925 tail = vlib_get_buffer (vm, tail->next_buffer);
930 /* Initializes the buffer as an empty packet with no chained buffers. */
932 vlib_buffer_chain_init (vlib_buffer_t * first)
934 first->total_length_not_including_first_buffer = 0;
935 first->current_length = 0;
936 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
937 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
940 /* The provided next_bi buffer index is appended to the end of the packet. */
941 always_inline vlib_buffer_t *
942 vlib_buffer_chain_buffer (vlib_main_t * vm,
943 vlib_buffer_t * first,
944 vlib_buffer_t * last, u32 next_bi)
946 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
947 last->next_buffer = next_bi;
948 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
949 next_buffer->current_length = 0;
950 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
954 /* Increases or decreases the packet length.
955 * It does not allocate or deallocate new buffers.
956 * Therefore, the added length must be compatible
957 * with the last buffer. */
959 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
960 vlib_buffer_t * last, i32 len)
962 last->current_length += len;
964 first->total_length_not_including_first_buffer += len;
967 /* Copy data to the end of the packet and increases its length.
968 * It does not allocate new buffers.
969 * Returns the number of copied bytes. */
971 vlib_buffer_chain_append_data (vlib_main_t * vm,
972 vlib_buffer_free_list_index_t free_list_index,
973 vlib_buffer_t * first,
974 vlib_buffer_t * last, void *data, u16 data_len)
977 vlib_buffer_free_list_buffer_size (vm, free_list_index);
978 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
979 u16 len = clib_min (data_len,
980 n_buffer_bytes - last->current_length -
982 clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
984 vlib_buffer_chain_increase_length (first, last, len);
988 /* Copy data to the end of the packet and increases its length.
989 * Allocates additional buffers from the free list if necessary.
990 * Returns the number of copied bytes.
991 * 'last' value is modified whenever new buffers are allocated and
992 * chained and points to the last buffer in the chain. */
994 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
995 vlib_buffer_free_list_index_t
997 vlib_buffer_t * first,
998 vlib_buffer_t ** last, void *data,
1000 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
1002 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
1003 format_vlib_buffer_contents;
1007 /* Vector of packet data. */
1010 /* Number of buffers to allocate in each call to allocator. */
1011 u32 min_n_buffers_each_alloc;
1013 /* Buffer free list for this template. */
1014 vlib_buffer_free_list_index_t free_list_index;
1017 } vlib_packet_template_t;
1019 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
1020 vlib_packet_template_t * t);
1022 void vlib_packet_template_init (vlib_main_t * vm,
1023 vlib_packet_template_t * t,
1025 uword n_packet_data_bytes,
1026 uword min_n_buffers_each_alloc,
1029 void *vlib_packet_template_get_packet (vlib_main_t * vm,
1030 vlib_packet_template_t * t,
1034 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
1036 vec_free (t->packet_data);
1040 unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
1042 serialize_stream_t *s = &m->stream;
1043 vlib_serialize_buffer_main_t *sm
1044 = uword_to_pointer (m->stream.data_function_opaque,
1045 vlib_serialize_buffer_main_t *);
1046 vlib_main_t *vm = sm->vlib_main;
1049 n = s->n_buffer_bytes - s->current_buffer_index;
1050 if (sm->last_buffer != ~0)
1052 vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
1053 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
1055 b = vlib_get_buffer (vm, b->next_buffer);
1056 n += b->current_length;
1061 clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
1062 n += vlib_buffer_index_length_in_chain (vm, f[0]);
1069 /* Set a buffer quickly into "uninitialized" state. We want this to
1070 be extremely cheap and arrange for all fields that need to be
1071 initialized to be in the first 128 bits of the buffer. */
1073 vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
1074 vlib_buffer_free_list_t * fl)
1076 vlib_buffer_t *src = &fl->buffer_init_template;
1078 /* Make sure vlib_buffer_t is cacheline aligned and sized */
1079 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
1080 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
1081 CLIB_CACHE_LINE_BYTES);
1082 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
1083 CLIB_CACHE_LINE_BYTES * 2);
1085 /* Make sure buffer template is sane. */
1086 ASSERT (fl->index == vlib_buffer_get_free_list_index (src));
1088 clib_memcpy (STRUCT_MARK_PTR (dst, template_start),
1089 STRUCT_MARK_PTR (src, template_start),
1090 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
1091 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
1093 /* Not in the first 16 octets. */
1094 dst->n_add_refs = src->n_add_refs;
1095 vlib_buffer_set_free_list_index (dst, fl->index);
1097 /* Make sure it really worked. */
1098 #define _(f) ASSERT (dst->f == src->f);
1103 /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
1104 /* total_length_not_including_first_buffer is not in the template anymore
1105 * so it may actually not zeroed for some buffers. One option is to
1106 * uncomment the line lower (comes at a cost), the other, is to just not
1108 /* dst->total_length_not_including_first_buffer = 0; */
1109 ASSERT (dst->n_add_refs == 0);
1113 vlib_buffer_add_to_free_list (vlib_main_t * vm,
1114 vlib_buffer_free_list_t * f,
1115 u32 buffer_index, u8 do_init)
1117 vlib_buffer_pool_t *bp = vlib_buffer_pool_get (f->buffer_pool_index);
1119 b = vlib_get_buffer (vm, buffer_index);
1120 if (PREDICT_TRUE (do_init))
1121 vlib_buffer_init_for_free_list (b, f);
1122 vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
1124 if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
1126 clib_spinlock_lock (&bp->lock);
1127 /* keep last stored buffers, as they are more likely hot in the cache */
1128 vec_add_aligned (bp->buffers, f->buffers, VLIB_FRAME_SIZE,
1129 CLIB_CACHE_LINE_BYTES);
1130 vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
1131 f->n_alloc -= VLIB_FRAME_SIZE;
1132 clib_spinlock_unlock (&bp->lock);
1137 extern u32 *vlib_buffer_state_validation_lock;
1138 extern uword *vlib_buffer_state_validation_hash;
1139 extern void *vlib_buffer_state_heap;
1143 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
1149 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
1151 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
1154 p = hash_get (vlib_buffer_state_validation_hash, b);
1156 /* If we don't know about b, declare it to be in the expected state */
1159 hash_set (vlib_buffer_state_validation_hash, b, expected);
1163 if (p[0] != expected)
1165 void cj_stop (void);
1167 vlib_main_t *vm = &vlib_global_main;
1171 bi = vlib_get_buffer_index (vm, b);
1173 clib_mem_set_heap (oldheap);
1174 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
1175 vlib_time_now (vm), bi,
1176 p[0] ? "busy" : "free", expected ? "busy" : "free");
1180 CLIB_MEMORY_BARRIER ();
1181 *vlib_buffer_state_validation_lock = 0;
1182 clib_mem_set_heap (oldheap);
1187 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
1192 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
1194 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
1197 hash_set (vlib_buffer_state_validation_hash, b, expected);
1199 CLIB_MEMORY_BARRIER ();
1200 *vlib_buffer_state_validation_lock = 0;
1201 clib_mem_set_heap (oldheap);
1205 /** minimum data size of first buffer in a buffer chain */
1206 #define VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE (256)
1209 * @brief compress buffer chain in a way where the first buffer is at least
1210 * VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE long
1212 * @param[in] vm - vlib_main
1213 * @param[in,out] first - first buffer in chain
1214 * @param[in,out] discard_vector - vector of buffer indexes which were removed
1218 vlib_buffer_chain_compress (vlib_main_t * vm,
1219 vlib_buffer_t * first, u32 ** discard_vector)
1221 if (first->current_length >= VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE ||
1222 !(first->flags & VLIB_BUFFER_NEXT_PRESENT))
1224 /* this is already big enough or not a chain */
1227 /* probe free list to find allocated buffer size to avoid overfill */
1228 vlib_buffer_free_list_index_t index;
1229 vlib_buffer_free_list_t *free_list =
1230 vlib_buffer_get_buffer_free_list (vm, first, &index);
1232 u32 want_first_size = clib_min (VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE,
1233 free_list->n_data_bytes -
1234 first->current_data);
1237 vlib_buffer_t *second = vlib_get_buffer (vm, first->next_buffer);
1238 u32 need = want_first_size - first->current_length;
1239 u32 amount_to_copy = clib_min (need, second->current_length);
1240 clib_memcpy (((u8 *) vlib_buffer_get_current (first)) +
1241 first->current_length,
1242 vlib_buffer_get_current (second), amount_to_copy);
1243 first->current_length += amount_to_copy;
1244 vlib_buffer_advance (second, amount_to_copy);
1245 if (first->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
1247 first->total_length_not_including_first_buffer -= amount_to_copy;
1249 if (!second->current_length)
1251 vec_add1 (*discard_vector, first->next_buffer);
1252 if (second->flags & VLIB_BUFFER_NEXT_PRESENT)
1254 first->next_buffer = second->next_buffer;
1258 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1260 second->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1263 while ((first->current_length < want_first_size) &&
1264 (first->flags & VLIB_BUFFER_NEXT_PRESENT));
1267 #endif /* included_vlib_buffer_funcs_h */
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */