2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
46 vlib buffer access methods.
50 /** \brief Translate buffer index into buffer pointer
52 @param vm - (vlib_main_t *) vlib main data structure pointer
53 @param buffer_index - (u32) buffer index
54 @return - (vlib_buffer_t *) buffer pointer
56 always_inline vlib_buffer_t *
57 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
59 vlib_buffer_main_t *bm = &buffer_main;
60 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
61 ASSERT (offset < bm->buffer_mem_size);
63 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
/** \brief Translate array of buffer indices into buffer pointers with offset

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32 *) array of buffer indices
    @param b - (void **) array to store buffer pointers
    @param count - (uword) number of elements
    @param offset - (i32) offset applied to each pointer
*/
static_always_inline void
vlib_get_buffers_with_offset (vlib_main_t * vm, u32 * bi, void **b, int count,
			      i32 offset)
{
#ifdef CLIB_HAVE_VEC256
  /* arena base and caller offset folded into a single vector add */
  u64x4 off = u64x4_splat (buffer_main.buffer_mem_start + offset);
  /* if count is not const, compiler will not unroll while loop
     so we maintain two-in-parallel variant */
  while (count >= 8)
    {
      u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
      u64x4 b1 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi + 4));
      /* shift and add to get vlib_buffer_t pointer */
      u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
      u64x4_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 4);
      b += 8;
      bi += 8;
      count -= 8;
    }
#endif
  while (count >= 4)
    {
#ifdef CLIB_HAVE_VEC256
      u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
      /* shift and add to get vlib_buffer_t pointer */
      u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
#else
      /* scalar 4-wide fallback */
      b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
      b[1] = ((u8 *) vlib_get_buffer (vm, bi[1])) + offset;
      b[2] = ((u8 *) vlib_get_buffer (vm, bi[2])) + offset;
      b[3] = ((u8 *) vlib_get_buffer (vm, bi[3])) + offset;
#endif
      b += 4;
      bi += 4;
      count -= 4;
    }
  while (count)
    {
      /* scalar tail, one element at a time */
      b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
      b += 1;
      bi += 1;
      count -= 1;
    }
}
119 /** \brief Translate array of buffer indices into buffer pointers
121 @param vm - (vlib_main_t *) vlib main data structure pointer
122 @param bi - (u32 *) array of buffer indices
123 @param b - (vlib_buffer_t **) array to store buffer pointers
124 @param count - (uword) number of elements
127 static_always_inline void
128 vlib_get_buffers (vlib_main_t * vm, u32 * bi, vlib_buffer_t ** b, int count)
130 vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
133 /** \brief Translate buffer pointer into buffer index
135 @param vm - (vlib_main_t *) vlib main data structure pointer
136 @param p - (void *) buffer pointer
137 @return - (u32) buffer index
141 vlib_get_buffer_index (vlib_main_t * vm, void *p)
143 vlib_buffer_main_t *bm = &buffer_main;
144 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
145 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
146 ASSERT (offset < bm->buffer_mem_size);
147 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
148 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
/** \brief Translate array of buffer pointers into buffer indices with offset

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param b - (void **) array of buffer pointers
    @param bi - (u32 *) array to store buffer indices
    @param count - (uword) number of elements
    @param offset - (i32) offset applied to each pointer
*/
static_always_inline void
vlib_get_buffer_indices_with_offset (vlib_main_t * vm, void **b, u32 * bi,
				     uword count, i32 offset)
{
#ifdef CLIB_HAVE_VEC256
  u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
  /* subtracting (base - offset) inverts the add done on the get side */
  u64x4 off4 = u64x4_splat (buffer_main.buffer_mem_start - offset);

  while (count >= 8)
    {
      /* load 4 pointers into 256-bit register */
      u64x4 v0 = u64x4_load_unaligned (b);
      u64x4 v1 = u64x4_load_unaligned (b + 4);
      u32x8 v2, v3;

      v0 -= off4;
      v1 -= off4;

      v0 >>= CLIB_LOG2_CACHE_LINE_BYTES;
      v1 >>= CLIB_LOG2_CACHE_LINE_BYTES;

      /* permute 256-bit register so lower u32s of each buffer index are
       * placed into lower 128-bits */
      v2 = u32x8_permute ((u32x8) v0, mask);
      v3 = u32x8_permute ((u32x8) v1, mask);

      /* extract lower 128-bits and save them to the array of buffer indices */
      u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
      u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
      bi += 8;
      b += 8;
      count -= 8;
    }
#endif
  while (count >= 4)
    {
      /* equivalent non-vector implementation */
      bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
      bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
      bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
      bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
      bi += 4;
      b += 4;
      count -= 4;
    }
  while (count)
    {
      /* scalar tail */
      bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
      bi += 1;
      b += 1;
      count -= 1;
    }
}
213 /** \brief Translate array of buffer pointers into buffer indices
215 @param vm - (vlib_main_t *) vlib main data structure pointer
216 @param b - (vlib_buffer_t **) array of buffer pointers
217 @param bi - (u32 *) array to store buffer indices
218 @param count - (uword) number of elements
220 static_always_inline void
221 vlib_get_buffer_indices (vlib_main_t * vm, vlib_buffer_t ** b, u32 * bi,
224 vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
227 /** \brief Get next buffer in buffer linklist, or zero for end of list.
229 @param vm - (vlib_main_t *) vlib main data structure pointer
230 @param b - (void *) buffer pointer
231 @return - (vlib_buffer_t *) next buffer, or NULL
233 always_inline vlib_buffer_t *
234 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
236 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
237 ? vlib_get_buffer (vm, b->next_buffer) : 0);
240 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
241 vlib_buffer_t * b_first);
/** \brief Get length in bytes of the buffer chain

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param b - (void *) buffer pointer
    @return - (uword) length of buffer chain
*/
always_inline uword
vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
{
  uword len = b->current_length;

  /* single-buffer packet: current_length is the whole story */
  if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
    return len;

  /* chained, but total length is cached in the first buffer */
  if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
    return len + b->total_length_not_including_first_buffer;

  /* chained with no valid cached total: walk the chain */
  return vlib_buffer_length_in_chain_slow_path (vm, b);
}
263 /** \brief Get length in bytes of the buffer index buffer chain
265 @param vm - (vlib_main_t *) vlib main data structure pointer
266 @param bi - (u32) buffer index
267 @return - (uword) length of buffer chain
270 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
272 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
273 return vlib_buffer_length_in_chain (vm, b);
/** \brief Copy buffer contents to memory

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffer_index - (u32) buffer index
    @param contents - (u8 *) memory, <strong>must be large enough</strong>
    @return - (uword) length of buffer chain
*/
always_inline uword
vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
{
  uword content_len = 0;
  uword l;
  vlib_buffer_t *b;

  /* walk the chain, appending each segment's payload to contents */
  while (1)
    {
      b = vlib_get_buffer (vm, buffer_index);
      l = b->current_length;
      clib_memcpy (contents + content_len, b->data + b->current_data, l);
      content_len += l;
      if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
	break;
      buffer_index = b->next_buffer;
    }

  return content_len;
}
/* Return physical address of buffer->data start. */
always_inline u64
vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
{
  vlib_buffer_main_t *bm = &buffer_main;
  vlib_buffer_t *b = vlib_get_buffer (vm, buffer_index);
  /* the physmem region is a property of the pool this buffer came from */
  vlib_buffer_pool_t *pool = vec_elt_at_index (bm->buffer_pools,
					       b->buffer_pool_index);

  return vlib_physmem_virtual_to_physical (vm, pool->physmem_region, b->data);
}
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)	\
  do {							\
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);	\
    vlib_prefetch_buffer_header (_b, type);		\
  } while (0)
/* Iterate over known allocated vlib bufs. You probably do not want
 * to do this!
 @param vm the vlib_main_t
 @param bi found allocated buffer index
 @param body operation to perform on buffer index
 function executes body for each allocated buffer index
*/
#define vlib_buffer_foreach_allocated(vm,bi,body)		\
do {								\
  vlib_main_t * _vmain = (vm);					\
  vlib_buffer_main_t * _bmain = &_vmain->buffer_main;		\
  hash_pair_t * _vbpair;					\
  /* scan the known-state hash for ALLOCATED entries */		\
  hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({	\
    if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) {	\
      (bi) = _vbpair->key;					\
      body;							\
    }								\
  }));								\
} while (0)
/* Debug bookkeeping: tracked allocation state of a buffer index. */
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
362 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
364 vlib_buffer_known_state_t
367 always_inline vlib_buffer_known_state_t
368 vlib_buffer_is_known (u32 buffer_index)
370 vlib_buffer_main_t *bm = &buffer_main;
372 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
373 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
374 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
375 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
/* Record the tracked allocation state of a buffer index (debug
   bookkeeping; paired with vlib_buffer_is_known). */
always_inline void
vlib_buffer_set_known_state (u32 buffer_index,
			     vlib_buffer_known_state_t state)
{
  vlib_buffer_main_t *bm = &buffer_main;

  clib_spinlock_lock (&bm->buffer_known_hash_lockp);
  hash_set (bm->buffer_known_hash, buffer_index, state);
  clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
}
389 /* Validates sanity of a single buffer.
390 Returns format'ed vector with error message if any. */
391 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
/* Round size up to the next multiple of sizeof (vlib_buffer_t). */
always_inline u32
vlib_buffer_round_size (u32 size)
{
  return round_pow2 (size, sizeof (vlib_buffer_t));
}
/* Return the free list index a buffer belongs to.  Only buffers on a
   non-default free list carry an explicit index field; the default
   list is encoded by the absence of the flag. */
always_inline vlib_buffer_free_list_index_t
vlib_buffer_get_free_list_index (vlib_buffer_t * b)
{
  if (PREDICT_FALSE (b->flags & VLIB_BUFFER_NON_DEFAULT_FREELIST))
    return b->free_list_index;

  /* NOTE(review): default branch reconstructed from elided lines —
     presumed to return the default free list index (0); verify. */
  return 0;
}
/* Stamp a buffer with the free list it belongs to.  A zero index (the
   default list) is represented solely by clearing the flag bit. */
always_inline void
vlib_buffer_set_free_list_index (vlib_buffer_t * b,
				 vlib_buffer_free_list_index_t index)
{
  if (PREDICT_FALSE (index))
    {
      b->flags |= VLIB_BUFFER_NON_DEFAULT_FREELIST;
      b->free_list_index = index;
    }
  else
    b->flags &= ~VLIB_BUFFER_NON_DEFAULT_FREELIST;
}
/** \brief Allocate buffers from specific freelist into supplied array

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u32) number of buffers requested
    @return - (u32) number of buffers actually allocated, may be
    less than the number requested or zero
*/
always_inline u32
vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
				  u32 * buffers,
				  u32 n_buffers,
				  vlib_buffer_free_list_index_t index)
{
  vlib_buffer_main_t *bm = &buffer_main;
  vlib_buffer_free_list_t *fl;
  u32 *src;
  uword len;

  ASSERT (bm->cb.vlib_buffer_fill_free_list_cb);

  fl = pool_elt_at_index (vm->buffer_free_list_pool, index);

  len = vec_len (fl->buffers);

  if (PREDICT_FALSE (len < n_buffers))
    {
      /* not enough cached: ask the backend to refill */
      bm->cb.vlib_buffer_fill_free_list_cb (vm, fl, n_buffers);
      if (PREDICT_FALSE ((len = vec_len (fl->buffers)) == 0))
	return 0;

      /* even if fill free list didn't manage to refill free list
         we should give what we have */
      n_buffers = clib_min (len, n_buffers);

      /* following code is intentionaly duplicated to allow compiler
         to optimize fast path when n_buffers is constant value */
      src = fl->buffers + len - n_buffers;
      clib_memcpy (buffers, src, n_buffers * sizeof (u32));
      _vec_len (fl->buffers) -= n_buffers;

      /* Verify that buffers are known free. */
      vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
				       VLIB_BUFFER_KNOWN_FREE);

      return n_buffers;
    }

  /* fast path: pop n_buffers off the tail of the cached vector */
  src = fl->buffers + len - n_buffers;
  clib_memcpy (buffers, src, n_buffers * sizeof (u32));
  _vec_len (fl->buffers) -= n_buffers;

  /* Verify that buffers are known free. */
  vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
				   VLIB_BUFFER_KNOWN_FREE);

  return n_buffers;
}
481 /** \brief Allocate buffers into supplied array
483 @param vm - (vlib_main_t *) vlib main data structure pointer
484 @param buffers - (u32 * ) buffer index array
485 @param n_buffers - (u32) number of buffers requested
486 @return - (u32) number of buffers actually allocated, may be
487 less than the number requested or zero
490 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
492 return vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
493 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
/** \brief Allocate buffers into ring

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffers - (u32 * ) buffer index ring
    @param start - (u32) first slot in the ring
    @param ring_size - (u32) ring size
    @param n_buffers - (u32) number of buffers requested
    @return - (u32) number of buffers actually allocated, may be
    less than the number requested or zero
*/
always_inline u32
vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
			   u32 ring_size, u32 n_buffers)
{
  u32 n_alloc;

  ASSERT (n_buffers <= ring_size);

  /* contiguous case: request fits without wrapping */
  if (PREDICT_TRUE (start + n_buffers <= ring_size))
    return vlib_buffer_alloc (vm, ring + start, n_buffers);

  /* wrap case: fill the tail of the ring first */
  n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);

  /* only continue at the head if the tail was fully satisfied */
  if (PREDICT_TRUE (n_alloc == ring_size - start))
    n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);

  return n_alloc;
}
/** \brief Free buffers
    Frees the entire buffer chain for each buffer

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u32) number of buffers to free
*/
always_inline void
vlib_buffer_free (vlib_main_t * vm,
		  /* pointer to first buffer */
		  u32 * buffers,
		  /* number of buffers to free */
		  u32 n_buffers)
{
  vlib_buffer_main_t *bm = &buffer_main;

  /* actual free is delegated to the registered backend callback */
  ASSERT (bm->cb.vlib_buffer_free_cb);

  return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
}
/** \brief Free buffers, does not free the buffer chain for each buffer

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u32) number of buffers to free
*/
always_inline void
vlib_buffer_free_no_next (vlib_main_t * vm,
			  /* pointer to first buffer */
			  u32 * buffers,
			  /* number of buffers to free */
			  u32 n_buffers)
{
  vlib_buffer_main_t *bm = &buffer_main;

  /* backend variant that ignores next_buffer chaining */
  ASSERT (bm->cb.vlib_buffer_free_no_next_cb);

  return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
}
568 /** \brief Free one buffer
569 Shorthand to free a single buffer chain.
571 @param vm - (vlib_main_t *) vlib main data structure pointer
572 @param buffer_index - (u32) buffer index to free
575 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
577 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
/** \brief Free buffers from ring

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffers - (u32 * ) buffer index ring
    @param start - (u32) first slot in the ring
    @param ring_size - (u32) ring size
    @param n_buffers - (u32) number of buffers
*/
always_inline void
vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
			    u32 ring_size, u32 n_buffers)
{
  ASSERT (n_buffers <= ring_size);

  if (PREDICT_TRUE (start + n_buffers <= ring_size))
    {
      /* contiguous span, single free */
      vlib_buffer_free (vm, ring + start, n_buffers);
    }
  else
    {
      /* wraps: free tail of ring, then the remainder at the head */
      vlib_buffer_free (vm, ring + start, ring_size - start);
      vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
    }
}
/** \brief Free buffers from ring without freeing tail buffers

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffers - (u32 * ) buffer index ring
    @param start - (u32) first slot in the ring
    @param ring_size - (u32) ring size
    @param n_buffers - (u32) number of buffers
*/
always_inline void
vlib_buffer_free_from_ring_no_next (vlib_main_t * vm, u32 * ring, u32 start,
				    u32 ring_size, u32 n_buffers)
{
  ASSERT (n_buffers <= ring_size);

  if (PREDICT_TRUE (start + n_buffers <= ring_size))
    {
      /* contiguous span, single free */
      vlib_buffer_free_no_next (vm, ring + start, n_buffers);
    }
  else
    {
      /* wraps: free tail of ring, then the remainder at the head */
      vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
      vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
    }
}
630 /* Add/delete buffer free lists. */
631 vlib_buffer_free_list_index_t vlib_buffer_create_free_list (vlib_main_t * vm,
/* Delete a buffer free list via the registered backend callback. */
static inline void
vlib_buffer_delete_free_list (vlib_main_t * vm,
			      vlib_buffer_free_list_index_t free_list_index)
{
  vlib_buffer_main_t *bm = &buffer_main;

  ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);

  bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
}
645 /* Make sure we have at least given number of unaligned buffers. */
646 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
647 vlib_buffer_free_list_t *
649 uword n_unaligned_buffers);
/* Return the free list a buffer belongs to, also storing its index
   through *index. */
always_inline vlib_buffer_free_list_t *
vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
				  vlib_buffer_free_list_index_t * index)
{
  vlib_buffer_free_list_index_t i;

  *index = i = vlib_buffer_get_free_list_index (b);
  return pool_elt_at_index (vm->buffer_free_list_pool, i);
}
/* Look up a free list by index, asserting internal consistency. */
always_inline vlib_buffer_free_list_t *
vlib_buffer_get_free_list (vlib_main_t * vm,
			   vlib_buffer_free_list_index_t free_list_index)
{
  vlib_buffer_free_list_t *f;

  f = pool_elt_at_index (vm->buffer_free_list_pool, free_list_index);

  /* Sanity: indices must match. */
  ASSERT (f->index == free_list_index);

  return f;
}
/* Number of data bytes per buffer for the given free list. */
always_inline uword
vlib_buffer_free_list_buffer_size (vlib_main_t * vm,
				   vlib_buffer_free_list_index_t index)
{
  vlib_buffer_free_list_t *f = vlib_buffer_get_free_list (vm, index);
  return f->n_data_bytes;
}
683 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
685 /* Reasonably fast buffer copy routine. */
687 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
708 /* Append given data to end of buffer, possibly allocating new buffers. */
709 u32 vlib_buffer_add_data (vlib_main_t * vm,
710 vlib_buffer_free_list_index_t free_list_index,
711 u32 buffer_index, void *data, u32 n_data_bytes);
/* duplicate all buffers in chain */
always_inline vlib_buffer_t *
vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
{
  vlib_buffer_t *s, *d, *fd;
  uword n_alloc, n_buffers = 1;
  /* only chain-related metadata flags are carried over to the copy */
  u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
  int i;

  /* count segments in the source chain */
  s = b;
  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      n_buffers++;
      s = vlib_get_buffer (vm, s->next_buffer);
    }
  u32 new_buffers[n_buffers];

  n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);

  /* No guarantee that we'll get all the buffers we asked for */
  if (PREDICT_FALSE (n_alloc < n_buffers))
    {
      if (n_alloc > 0)
	vlib_buffer_free (vm, new_buffers, n_alloc);
      return 0;
    }

  /* 1st segment: copy metadata, opaque areas and payload */
  s = b;
  fd = d = vlib_get_buffer (vm, new_buffers[0]);
  d->current_data = s->current_data;
  d->current_length = s->current_length;
  d->flags = s->flags & flag_mask;
  d->total_length_not_including_first_buffer =
    s->total_length_not_including_first_buffer;
  clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
  clib_memcpy (d->opaque2, s->opaque2, sizeof (s->opaque2));
  clib_memcpy (vlib_buffer_get_current (d),
	       vlib_buffer_get_current (s), s->current_length);

  /* next segments: payload and per-segment metadata only */
  for (i = 1; i < n_buffers; i++)
    {
      d->next_buffer = new_buffers[i];
      s = vlib_get_buffer (vm, s->next_buffer);
      d = vlib_get_buffer (vm, new_buffers[i]);
      d->current_data = s->current_data;
      d->current_length = s->current_length;
      clib_memcpy (vlib_buffer_get_current (d),
		   vlib_buffer_get_current (s), s->current_length);
      d->flags = s->flags & flag_mask;
    }

  return fd;
}
/** \brief Create a maximum of 256 clones of buffer and store them
    in the supplied array

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param src_buffer - (u32) source buffer index
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u16) number of buffer clones requested (<=256)
    @param head_end_offset - (u16) offset relative to current position
    where packet head ends
    @return - (u16) number of buffers actually cloned, may be
    less than the number requested or zero
*/
always_inline u16
vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
		       u16 n_buffers, u16 head_end_offset)
{
  u16 i;
  vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);

  /* source must not itself already be a clone target */
  ASSERT (s->n_add_refs == 0);
  ASSERT (n_buffers);
  ASSERT (n_buffers <= 256);

  if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
    {
      /* packet too small to be worth sharing: deep-copy instead */
      buffers[0] = src_buffer;
      for (i = 1; i < n_buffers; i++)
	{
	  vlib_buffer_t *d;
	  d = vlib_buffer_copy (vm, s);
	  if (d == 0)
	    return i;		/* partial result: copies made so far */
	  buffers[i] = vlib_get_buffer_index (vm, d);
	}
      return n_buffers;
    }

  if (PREDICT_FALSE (n_buffers == 1))
    {
      /* single clone: the source itself suffices */
      buffers[0] = src_buffer;
      return 1;
    }

  /* allocate one fresh head buffer per clone from the source's list */
  n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
						vlib_buffer_get_free_list_index
						(s));

  for (i = 0; i < n_buffers; i++)
    {
      /* each clone gets a private head holding the first
         head_end_offset bytes, then chains to the shared source */
      vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
      d->current_data = s->current_data;
      d->current_length = head_end_offset;
      vlib_buffer_set_free_list_index (d,
				       vlib_buffer_get_free_list_index (s));
      d->total_length_not_including_first_buffer = s->current_length -
	head_end_offset;
      if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
	{
	  d->total_length_not_including_first_buffer +=
	    s->total_length_not_including_first_buffer;
	}
      d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
      d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
      clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
      clib_memcpy (d->opaque2, s->opaque2, sizeof (s->opaque2));
      clib_memcpy (vlib_buffer_get_current (d), vlib_buffer_get_current (s),
		   head_end_offset);
      d->next_buffer = src_buffer;
    }
  /* source now starts after the copied head, shared by all clones */
  vlib_buffer_advance (s, head_end_offset);
  s->n_add_refs = n_buffers - 1;
  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      s = vlib_get_buffer (vm, s->next_buffer);
      s->n_add_refs = n_buffers - 1;
    }

  return n_buffers;
}
/** \brief Create multiple clones of buffer and store them
    in the supplied array

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param src_buffer - (u32) source buffer index
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u16) number of buffer clones requested (<=256)
    @param head_end_offset - (u16) offset relative to current position
    where packet head ends
    @return - (u16) number of buffers actually cloned, may be
    less than the number requested or zero
*/
always_inline u16
vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
		   u16 n_buffers, u16 head_end_offset)
{
  vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
  u16 n_cloned = 0;

  /* clone_256 handles at most 256; work on a fresh deep copy per
     256-batch so each batch has its own reference-counted source */
  while (n_buffers > 256)
    {
      vlib_buffer_t *copy;
      copy = vlib_buffer_copy (vm, s);
      n_cloned += vlib_buffer_clone_256 (vm,
					 vlib_get_buffer_index (vm, copy),
					 (buffers + n_cloned),
					 256, head_end_offset);
      n_buffers -= 256;
    }
  /* final (<=256) batch clones the original source buffer */
  n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
				     buffers + n_cloned,
				     n_buffers, head_end_offset);

  return n_cloned;
}
/** \brief Attach cloned tail to the buffer

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param head - (vlib_buffer_t *) head buffer
    @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
*/
always_inline void
vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
			  vlib_buffer_t * tail)
{
  ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
  ASSERT (vlib_buffer_get_free_list_index (head) ==
	  vlib_buffer_get_free_list_index (tail));

  head->flags |= VLIB_BUFFER_NEXT_PRESENT;
  head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
  head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
  /* total length is valid on head only if tail's was valid */
  head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
  head->next_buffer = vlib_get_buffer_index (vm, tail);
  head->total_length_not_including_first_buffer = tail->current_length +
    tail->total_length_not_including_first_buffer;

  /* bump the reference count on every segment of the shared tail */
next_segment:
  __sync_add_and_fetch (&tail->n_add_refs, 1);

  if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      tail = vlib_get_buffer (vm, tail->next_buffer);
      goto next_segment;
    }
}
/* Initializes the buffer as an empty packet with no chained buffers. */
always_inline void
vlib_buffer_chain_init (vlib_buffer_t * first)
{
  first->total_length_not_including_first_buffer = 0;
  first->current_length = 0;
  first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
  /* an empty single-buffer chain trivially has a valid total length */
  first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
}
/* The provided next_bi buffer index is appended to the end of the packet.
   Returns the newly attached (now last) buffer. */
always_inline vlib_buffer_t *
vlib_buffer_chain_buffer (vlib_main_t * vm,
			  vlib_buffer_t * first,
			  vlib_buffer_t * last, u32 next_bi)
{
  vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
  last->next_buffer = next_bi;
  last->flags |= VLIB_BUFFER_NEXT_PRESENT;
  next_buffer->current_length = 0;
  /* new tail must not carry a stale chain link */
  next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
  return next_buffer;
}
/* Increases or decreases the packet length.
 * It does not allocate or deallocate new buffers.
 * Therefore, the added length must be compatible
 * with the last buffer. */
always_inline void
vlib_buffer_chain_increase_length (vlib_buffer_t * first,
				   vlib_buffer_t * last, i32 len)
{
  last->current_length += len;
  /* when first == last, current_length above already accounts for it */
  if (first != last)
    first->total_length_not_including_first_buffer += len;
}
/* Copy data to the end of the packet and increases its length.
 * It does not allocate new buffers.
 * Returns the number of copied bytes. */
always_inline u16
vlib_buffer_chain_append_data (vlib_main_t * vm,
			       vlib_buffer_free_list_index_t free_list_index,
			       vlib_buffer_t * first,
			       vlib_buffer_t * last, void *data, u16 data_len)
{
  u32 n_buffer_bytes =
    vlib_buffer_free_list_buffer_size (vm, free_list_index);
  ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
  /* clamp to the space remaining in the last buffer */
  u16 len = clib_min (data_len,
		      n_buffer_bytes - last->current_length -
		      last->current_data);
  clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
	       len);
  vlib_buffer_chain_increase_length (first, last, len);
  return len;
}
980 /* Copy data to the end of the packet and increases its length.
981 * Allocates additional buffers from the free list if necessary.
982 * Returns the number of copied bytes.
983 * 'last' value is modified whenever new buffers are allocated and
984 * chained and points to the last buffer in the chain. */
986 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
987 vlib_buffer_free_list_index_t
989 vlib_buffer_t * first,
990 vlib_buffer_t ** last, void *data,
992 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
994 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
995 format_vlib_buffer_contents;
999 /* Vector of packet data. */
1002 /* Number of buffers to allocate in each call to allocator. */
1003 u32 min_n_buffers_each_alloc;
1005 /* Buffer free list for this template. */
1006 vlib_buffer_free_list_index_t free_list_index;
1009 } vlib_packet_template_t;
1011 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
1012 vlib_packet_template_t * t);
1014 void vlib_packet_template_init (vlib_main_t * vm,
1015 vlib_packet_template_t * t,
1017 uword n_packet_data_bytes,
1018 uword min_n_buffers_each_alloc,
1021 void *vlib_packet_template_get_packet (vlib_main_t * vm,
1022 vlib_packet_template_t * t,
/* Release the template's packet data vector. */
always_inline void
vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
{
  vec_free (t->packet_data);
}
/* Total number of bytes available to unserialize: what remains in the
   current stream buffer, plus the rest of the last buffer's chain,
   plus every queued rx buffer chain. */
always_inline u32
unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
{
  serialize_stream_t *s = &m->stream;
  vlib_serialize_buffer_main_t *sm
    = uword_to_pointer (m->stream.data_function_opaque,
			vlib_serialize_buffer_main_t *);
  vlib_main_t *vm = sm->vlib_main;
  u32 n, *f;

  /* bytes not yet consumed from the current stream buffer */
  n = s->n_buffer_bytes - s->current_buffer_index;
  if (sm->last_buffer != ~0)
    {
      vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
      while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
	{
	  b = vlib_get_buffer (vm, b->next_buffer);
	  n += b->current_length;
	}
    }

  /* plus all buffer chains waiting in the rx fifo */
  clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
    n += vlib_buffer_index_length_in_chain (vm, f[0]);
  }));

  return n;
}
/* Set a buffer quickly into "uninitialized" state. We want this to
   be extremely cheap and arrange for all fields that need to be
   initialized to be in the first 128 bits of the buffer. */
always_inline void
vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
				vlib_buffer_free_list_t * fl)
{
  vlib_buffer_t *src = &fl->buffer_init_template;

  /* Make sure vlib_buffer_t is cacheline aligned and sized */
  ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
  ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
	  CLIB_CACHE_LINE_BYTES);
  ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
	  CLIB_CACHE_LINE_BYTES * 2);

  /* Make sure buffer template is sane. */
  ASSERT (fl->index == vlib_buffer_get_free_list_index (src));

  /* bulk-copy the template region of the metadata in one shot */
  clib_memcpy (STRUCT_MARK_PTR (dst, template_start),
	       STRUCT_MARK_PTR (src, template_start),
	       STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
	       STRUCT_OFFSET_OF (vlib_buffer_t, template_start));

  /* Not in the first 16 octets. */
  dst->n_add_refs = src->n_add_refs;
  vlib_buffer_set_free_list_index (dst, fl->index);

  /* Make sure it really worked. */
  /* NOTE(review): the field list checked here was elided in extraction;
     reconstructed from upstream — verify against original. */
#define _(f) ASSERT (dst->f == src->f);
  _(current_data);
  _(current_length);
  _(flags);
#undef _
  /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
  /* total_length_not_including_first_buffer is not in the template anymore
   * so it may actually not zeroed for some buffers. One option is to
   * uncomment the line lower (comes at a cost), the other, is to just not
   * care */
  /* dst->total_length_not_including_first_buffer = 0; */
  ASSERT (dst->n_add_refs == 0);
}
/* Return a buffer to free list f, optionally reinitializing it from the
   list's template.  When the per-thread cache grows past 4 frames, spill
   one frame of the oldest entries back to the shared pool. */
always_inline void
vlib_buffer_add_to_free_list (vlib_main_t * vm,
			      vlib_buffer_free_list_t * f,
			      u32 buffer_index, u8 do_init)
{
  vlib_buffer_pool_t *bp = vlib_buffer_pool_get (f->buffer_pool_index);
  vlib_buffer_t *b;
  b = vlib_get_buffer (vm, buffer_index);
  if (PREDICT_TRUE (do_init))
    vlib_buffer_init_for_free_list (b, f);
  vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);

  if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
    {
      clib_spinlock_lock (&bp->lock);
      /* keep last stored buffers, as they are more likely hot in the cache */
      vec_add_aligned (bp->buffers, f->buffers, VLIB_FRAME_SIZE,
		       CLIB_CACHE_LINE_BYTES);
      vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
      f->n_alloc -= VLIB_FRAME_SIZE;
      clib_spinlock_unlock (&bp->lock);
    }
}
1129 extern u32 *vlib_buffer_state_validation_lock;
1130 extern uword *vlib_buffer_state_validation_hash;
1131 extern void *vlib_buffer_state_heap;
1135 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
1141 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
1143 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
1146 p = hash_get (vlib_buffer_state_validation_hash, b);
1148 /* If we don't know about b, declare it to be in the expected state */
1151 hash_set (vlib_buffer_state_validation_hash, b, expected);
1155 if (p[0] != expected)
1157 void cj_stop (void);
1159 vlib_main_t *vm = &vlib_global_main;
1163 bi = vlib_get_buffer_index (vm, b);
1165 clib_mem_set_heap (oldheap);
1166 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
1167 vlib_time_now (vm), bi,
1168 p[0] ? "busy" : "free", expected ? "busy" : "free");
1172 CLIB_MEMORY_BARRIER ();
1173 *vlib_buffer_state_validation_lock = 0;
1174 clib_mem_set_heap (oldheap);
/* Unconditionally record buffer b's busy/free state in the validation
   hash (companion to vlib_validate_buffer_in_use). */
static inline void
vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
{
  void *oldheap;

  /* validation state lives on its own heap */
  oldheap = clib_mem_set_heap (vlib_buffer_state_heap);

  /* spin until we own the validation lock */
  while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
    ;

  hash_set (vlib_buffer_state_validation_hash, b, expected);

  CLIB_MEMORY_BARRIER ();
  *vlib_buffer_state_validation_lock = 0;
  clib_mem_set_heap (oldheap);
}
1197 /** minimum data size of first buffer in a buffer chain */
1198 #define VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE (256)
/**
 * @brief compress buffer chain in a way where the first buffer is at least
 * VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE long
 *
 * @param[in] vm - vlib_main
 * @param[in,out] first - first buffer in chain
 * @param[in,out] discard_vector - vector of buffer indexes which were removed
 * from the chain in the process
 */
always_inline void
vlib_buffer_chain_compress (vlib_main_t * vm,
			    vlib_buffer_t * first, u32 ** discard_vector)
{
  if (first->current_length >= VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE ||
      !(first->flags & VLIB_BUFFER_NEXT_PRESENT))
    {
      /* this is already big enough or not a chain */
      return;
    }
  /* probe free list to find allocated buffer size to avoid overfill */
  vlib_buffer_free_list_index_t index;
  vlib_buffer_free_list_t *free_list =
    vlib_buffer_get_buffer_free_list (vm, first, &index);

  /* never copy more than the first buffer can physically hold */
  u32 want_first_size = clib_min (VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE,
				  free_list->n_data_bytes -
				  first->current_data);
  do
    {
      /* pull bytes forward from the second buffer into the first */
      vlib_buffer_t *second = vlib_get_buffer (vm, first->next_buffer);
      u32 need = want_first_size - first->current_length;
      u32 amount_to_copy = clib_min (need, second->current_length);
      clib_memcpy (((u8 *) vlib_buffer_get_current (first)) +
		   first->current_length,
		   vlib_buffer_get_current (second), amount_to_copy);
      first->current_length += amount_to_copy;
      vlib_buffer_advance (second, amount_to_copy);
      if (first->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
	{
	  /* bytes moved into the first buffer no longer count here */
	  first->total_length_not_including_first_buffer -= amount_to_copy;
	}
      if (!second->current_length)
	{
	  /* second buffer fully drained: unlink and queue for discard */
	  vec_add1 (*discard_vector, first->next_buffer);
	  if (second->flags & VLIB_BUFFER_NEXT_PRESENT)
	    {
	      first->next_buffer = second->next_buffer;
	    }
	  else
	    {
	      first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
	    }
	  second->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
	}
    }
  while ((first->current_length < want_first_size) &&
	 (first->flags & VLIB_BUFFER_NEXT_PRESENT));
}
1259 #endif /* included_vlib_buffer_funcs_h */
1262 * fd.io coding-style-patch-verification: ON
1265 * eval: (c-set-style "gnu")