2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
44 #include <vppinfra/fifo.h>
47 vlib buffer access methods.
51 /** \brief Translate buffer index into buffer pointer
53 @param vm - (vlib_main_t *) vlib main data structure pointer
54 @param buffer_index - (u32) buffer index
55 @return - (vlib_buffer_t *) buffer pointer
57 always_inline vlib_buffer_t *
58 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
60 vlib_buffer_main_t *bm = vm->buffer_main;
61 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
62 ASSERT (offset < bm->buffer_mem_size);
64 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
67 static_always_inline void
68 vlib_buffer_copy_indices (u32 * dst, u32 * src, u32 n_indices)
70 clib_memcpy_fast (dst, src, n_indices * sizeof (u32));
73 static_always_inline void
74 vlib_buffer_copy_template (vlib_buffer_t * b, vlib_buffer_t * bt)
76 clib_memcpy_fast (b, bt, STRUCT_OFFSET_OF (vlib_buffer_t, template_end));
79 /** \brief Translate array of buffer indices into buffer pointers with offset
81 @param vm - (vlib_main_t *) vlib main data structure pointer
82 @param bi - (u32 *) array of buffer indices
83 @param b - (void **) array to store buffer pointers
84 @param count - (uword) number of elements
85 @param offset - (i32) offset applied to each pointer
87 static_always_inline void
88 vlib_get_buffers_with_offset (vlib_main_t * vm, u32 * bi, void **b, int count,
91 uword buffer_mem_start = vm->buffer_main->buffer_mem_start;
92 #ifdef CLIB_HAVE_VEC256
93 u64x4 off = u64x4_splat (buffer_mem_start + offset);
94 /* if count is not const, compiler will not unroll while loop
95 se we maintain two-in-parallel variant */
98 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
99 u64x4 b1 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi + 4));
100 /* shift and add to get vlib_buffer_t pointer */
101 u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
102 u64x4_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 4);
110 #ifdef CLIB_HAVE_VEC256
111 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
112 /* shift and add to get vlib_buffer_t pointer */
113 u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
114 #elif defined (CLIB_HAVE_VEC128)
115 u64x2 off = u64x2_splat (buffer_mem_start + offset);
116 u32x4 bi4 = u32x4_load_unaligned (bi);
117 u64x2 b0 = u32x4_extend_to_u64x2 ((u32x4) bi4);
118 #if defined (__aarch64__)
119 u64x2 b1 = u32x4_extend_to_u64x2_high ((u32x4) bi4);
121 bi4 = u32x4_shuffle (bi4, 2, 3, 0, 1);
122 u64x2 b1 = u32x4_extend_to_u64x2 ((u32x4) bi4);
124 u64x2_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
125 u64x2_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 2);
127 b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
128 b[1] = ((u8 *) vlib_get_buffer (vm, bi[1])) + offset;
129 b[2] = ((u8 *) vlib_get_buffer (vm, bi[2])) + offset;
130 b[3] = ((u8 *) vlib_get_buffer (vm, bi[3])) + offset;
138 b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
145 /** \brief Translate array of buffer indices into buffer pointers
147 @param vm - (vlib_main_t *) vlib main data structure pointer
148 @param bi - (u32 *) array of buffer indices
149 @param b - (vlib_buffer_t **) array to store buffer pointers
150 @param count - (uword) number of elements
153 static_always_inline void
154 vlib_get_buffers (vlib_main_t * vm, u32 * bi, vlib_buffer_t ** b, int count)
156 vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
159 /** \brief Translate buffer pointer into buffer index
161 @param vm - (vlib_main_t *) vlib main data structure pointer
162 @param p - (void *) buffer pointer
163 @return - (u32) buffer index
167 vlib_get_buffer_index (vlib_main_t * vm, void *p)
169 vlib_buffer_main_t *bm = vm->buffer_main;
170 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
171 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
172 ASSERT (offset < bm->buffer_mem_size);
173 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
174 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
177 /** \brief Translate array of buffer pointers into buffer indices with offset
179 @param vm - (vlib_main_t *) vlib main data structure pointer
180 @param b - (void **) array of buffer pointers
181 @param bi - (u32 *) array to store buffer indices
182 @param count - (uword) number of elements
183 @param offset - (i32) offset applied to each pointer
185 static_always_inline void
186 vlib_get_buffer_indices_with_offset (vlib_main_t * vm, void **b, u32 * bi,
187 uword count, i32 offset)
189 #ifdef CLIB_HAVE_VEC256
190 u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
191 u64x4 off4 = u64x4_splat (vm->buffer_main->buffer_mem_start - offset);
195 /* load 4 pointers into 256-bit register */
196 u64x4 v0 = u64x4_load_unaligned (b);
197 u64x4 v1 = u64x4_load_unaligned (b + 4);
203 v0 >>= CLIB_LOG2_CACHE_LINE_BYTES;
204 v1 >>= CLIB_LOG2_CACHE_LINE_BYTES;
206 /* permute 256-bit register so lower u32s of each buffer index are
207 * placed into lower 128-bits */
208 v2 = u32x8_permute ((u32x8) v0, mask);
209 v3 = u32x8_permute ((u32x8) v1, mask);
211 /* extract lower 128-bits and save them to the array of buffer indices */
212 u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
213 u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
221 /* equivalent non-nector implementation */
222 bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
223 bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
224 bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
225 bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
232 bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
239 /** \brief Translate array of buffer pointers into buffer indices
241 @param vm - (vlib_main_t *) vlib main data structure pointer
242 @param b - (vlib_buffer_t **) array of buffer pointers
243 @param bi - (u32 *) array to store buffer indices
244 @param count - (uword) number of elements
246 static_always_inline void
247 vlib_get_buffer_indices (vlib_main_t * vm, vlib_buffer_t ** b, u32 * bi,
250 vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
253 /** \brief Get next buffer in buffer linklist, or zero for end of list.
255 @param vm - (vlib_main_t *) vlib main data structure pointer
256 @param b - (void *) buffer pointer
257 @return - (vlib_buffer_t *) next buffer, or NULL
259 always_inline vlib_buffer_t *
260 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
262 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
263 ? vlib_get_buffer (vm, b->next_buffer) : 0);
266 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
267 vlib_buffer_t * b_first);
269 /** \brief Get length in bytes of the buffer chain
271 @param vm - (vlib_main_t *) vlib main data structure pointer
272 @param b - (void *) buffer pointer
273 @return - (uword) length of buffer chain
276 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
278 uword len = b->current_length;
280 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
283 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
284 return len + b->total_length_not_including_first_buffer;
286 return vlib_buffer_length_in_chain_slow_path (vm, b);
289 /** \brief Get length in bytes of the buffer index buffer chain
291 @param vm - (vlib_main_t *) vlib main data structure pointer
292 @param bi - (u32) buffer index
293 @return - (uword) length of buffer chain
296 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
298 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
299 return vlib_buffer_length_in_chain (vm, b);
302 /** \brief Copy buffer contents to memory
304 @param vm - (vlib_main_t *) vlib main data structure pointer
305 @param buffer_index - (u32) buffer index
306 @param contents - (u8 *) memory, <strong>must be large enough</strong>
307 @return - (uword) length of buffer chain
310 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
312 uword content_len = 0;
318 b = vlib_get_buffer (vm, buffer_index);
319 l = b->current_length;
320 clib_memcpy_fast (contents + content_len, b->data + b->current_data, l);
322 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
324 buffer_index = b->next_buffer;
331 vlib_buffer_get_pa (vlib_main_t * vm, vlib_buffer_t * b)
333 return vlib_physmem_get_pa (vm, b->data);
337 vlib_buffer_get_current_pa (vlib_main_t * vm, vlib_buffer_t * b)
339 return vlib_buffer_get_pa (vm, b) + b->current_data;
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)	\
  do {							\
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);	\
    vlib_prefetch_buffer_header (_b, type);		\
  } while (0)
/* Tracked allocation state of a buffer index (debug bookkeeping). */
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
366 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
368 vlib_buffer_known_state_t
371 always_inline vlib_buffer_known_state_t
372 vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
374 vlib_buffer_main_t *bm = vm->buffer_main;
376 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
377 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
378 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
379 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
383 vlib_buffer_set_known_state (vlib_main_t * vm, u32 buffer_index,
384 vlib_buffer_known_state_t state)
386 vlib_buffer_main_t *bm = vm->buffer_main;
388 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
389 hash_set (bm->buffer_known_hash, buffer_index, state);
390 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
393 /* Validates sanity of a single buffer.
394 Returns format'ed vector with error message if any. */
395 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
399 vlib_buffer_round_size (u32 size)
401 return round_pow2 (size, sizeof (vlib_buffer_t));
404 /** \brief Allocate buffers from specific freelist into supplied array
406 @param vm - (vlib_main_t *) vlib main data structure pointer
407 @param buffers - (u32 * ) buffer index array
408 @param n_buffers - (u32) number of buffers requested
409 @return - (u32) number of buffers actually allocated, may be
410 less than the number requested or zero
413 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
416 vlib_buffer_free_list_index_t index)
418 vlib_buffer_free_list_t *fl;
419 vlib_buffer_main_t *bm = vm->buffer_main;
423 ASSERT (bm->cb.vlib_buffer_fill_free_list_cb);
425 fl = pool_elt_at_index (vm->buffer_free_list_pool, index);
427 len = vec_len (fl->buffers);
429 if (PREDICT_FALSE (len < n_buffers))
431 bm->cb.vlib_buffer_fill_free_list_cb (vm, fl, n_buffers);
432 if (PREDICT_FALSE ((len = vec_len (fl->buffers)) == 0))
435 /* even if fill free list didn't manage to refill free list
436 we should give what we have */
437 n_buffers = clib_min (len, n_buffers);
439 /* following code is intentionaly duplicated to allow compiler
440 to optimize fast path when n_buffers is constant value */
441 src = fl->buffers + len - n_buffers;
442 vlib_buffer_copy_indices (buffers, src, n_buffers);
443 _vec_len (fl->buffers) -= n_buffers;
445 /* Verify that buffers are known free. */
446 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
447 VLIB_BUFFER_KNOWN_FREE);
452 src = fl->buffers + len - n_buffers;
453 vlib_buffer_copy_indices (buffers, src, n_buffers);
454 _vec_len (fl->buffers) -= n_buffers;
456 /* Verify that buffers are known free. */
457 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
458 VLIB_BUFFER_KNOWN_FREE);
463 /** \brief Allocate buffers into supplied array
465 @param vm - (vlib_main_t *) vlib main data structure pointer
466 @param buffers - (u32 * ) buffer index array
467 @param n_buffers - (u32) number of buffers requested
468 @return - (u32) number of buffers actually allocated, may be
469 less than the number requested or zero
472 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
474 return vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
475 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
478 /** \brief Allocate buffers into ring
480 @param vm - (vlib_main_t *) vlib main data structure pointer
481 @param buffers - (u32 * ) buffer index ring
482 @param start - (u32) first slot in the ring
483 @param ring_size - (u32) ring size
484 @param n_buffers - (u32) number of buffers requested
485 @return - (u32) number of buffers actually allocated, may be
486 less than the number requested or zero
489 vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
490 u32 ring_size, u32 n_buffers)
494 ASSERT (n_buffers <= ring_size);
496 if (PREDICT_TRUE (start + n_buffers <= ring_size))
497 return vlib_buffer_alloc (vm, ring + start, n_buffers);
499 n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
501 if (PREDICT_TRUE (n_alloc == ring_size - start))
502 n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
507 /** \brief Free buffers
508 Frees the entire buffer chain for each buffer
510 @param vm - (vlib_main_t *) vlib main data structure pointer
511 @param buffers - (u32 * ) buffer index array
512 @param n_buffers - (u32) number of buffers to free
516 vlib_buffer_free (vlib_main_t * vm,
517 /* pointer to first buffer */
519 /* number of buffers to free */
522 vlib_buffer_main_t *bm = vm->buffer_main;
524 ASSERT (bm->cb.vlib_buffer_free_cb);
526 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
529 /** \brief Free buffers, does not free the buffer chain for each buffer
531 @param vm - (vlib_main_t *) vlib main data structure pointer
532 @param buffers - (u32 * ) buffer index array
533 @param n_buffers - (u32) number of buffers to free
537 vlib_buffer_free_no_next (vlib_main_t * vm,
538 /* pointer to first buffer */
540 /* number of buffers to free */
543 vlib_buffer_main_t *bm = vm->buffer_main;
545 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
547 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
550 /** \brief Free one buffer
551 Shorthand to free a single buffer chain.
553 @param vm - (vlib_main_t *) vlib main data structure pointer
554 @param buffer_index - (u32) buffer index to free
557 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
559 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
562 /** \brief Free buffers from ring
564 @param vm - (vlib_main_t *) vlib main data structure pointer
565 @param buffers - (u32 * ) buffer index ring
566 @param start - (u32) first slot in the ring
567 @param ring_size - (u32) ring size
568 @param n_buffers - (u32) number of buffers
571 vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
572 u32 ring_size, u32 n_buffers)
574 ASSERT (n_buffers <= ring_size);
576 if (PREDICT_TRUE (start + n_buffers <= ring_size))
578 vlib_buffer_free (vm, ring + start, n_buffers);
582 vlib_buffer_free (vm, ring + start, ring_size - start);
583 vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
587 /** \brief Free buffers from ring without freeing tail buffers
589 @param vm - (vlib_main_t *) vlib main data structure pointer
590 @param buffers - (u32 * ) buffer index ring
591 @param start - (u32) first slot in the ring
592 @param ring_size - (u32) ring size
593 @param n_buffers - (u32) number of buffers
596 vlib_buffer_free_from_ring_no_next (vlib_main_t * vm, u32 * ring, u32 start,
597 u32 ring_size, u32 n_buffers)
599 ASSERT (n_buffers <= ring_size);
601 if (PREDICT_TRUE (start + n_buffers <= ring_size))
603 vlib_buffer_free_no_next (vm, ring + start, n_buffers);
607 vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
608 vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
612 /* Add/delete buffer free lists. */
613 vlib_buffer_free_list_index_t vlib_buffer_create_free_list (vlib_main_t * vm,
617 vlib_buffer_delete_free_list (vlib_main_t * vm,
618 vlib_buffer_free_list_index_t free_list_index)
620 vlib_buffer_main_t *bm = vm->buffer_main;
622 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
624 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
627 /* Make sure we have at least given number of unaligned buffers. */
628 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
629 vlib_buffer_free_list_t *
631 uword n_unaligned_buffers);
633 always_inline vlib_buffer_free_list_t *
634 vlib_buffer_get_free_list (vlib_main_t * vm,
635 vlib_buffer_free_list_index_t free_list_index)
637 vlib_buffer_free_list_t *f;
639 f = pool_elt_at_index (vm->buffer_free_list_pool, free_list_index);
641 /* Sanity: indices must match. */
642 ASSERT (f->index == free_list_index);
648 vlib_buffer_free_list_buffer_size (vlib_main_t * vm,
649 vlib_buffer_free_list_index_t index)
651 vlib_buffer_free_list_t *f = vlib_buffer_get_free_list (vm, index);
652 return f->n_data_bytes;
655 /* Append given data to end of buffer, possibly allocating new buffers. */
656 int vlib_buffer_add_data (vlib_main_t * vm, u32 * buffer_index, void *data,
659 /* duplicate all buffers in chain */
660 always_inline vlib_buffer_t *
661 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
663 vlib_buffer_t *s, *d, *fd;
664 uword n_alloc, n_buffers = 1;
665 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
669 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
672 s = vlib_get_buffer (vm, s->next_buffer);
674 u32 new_buffers[n_buffers];
676 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
678 /* No guarantee that we'll get all the buffers we asked for */
679 if (PREDICT_FALSE (n_alloc < n_buffers))
682 vlib_buffer_free (vm, new_buffers, n_alloc);
688 fd = d = vlib_get_buffer (vm, new_buffers[0]);
689 d->current_data = s->current_data;
690 d->current_length = s->current_length;
691 d->flags = s->flags & flag_mask;
692 d->total_length_not_including_first_buffer =
693 s->total_length_not_including_first_buffer;
694 clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
695 clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
696 clib_memcpy_fast (vlib_buffer_get_current (d),
697 vlib_buffer_get_current (s), s->current_length);
700 for (i = 1; i < n_buffers; i++)
703 d->next_buffer = new_buffers[i];
705 s = vlib_get_buffer (vm, s->next_buffer);
706 d = vlib_get_buffer (vm, new_buffers[i]);
707 d->current_data = s->current_data;
708 d->current_length = s->current_length;
709 clib_memcpy_fast (vlib_buffer_get_current (d),
710 vlib_buffer_get_current (s), s->current_length);
711 d->flags = s->flags & flag_mask;
717 /** \brief Create a maximum of 256 clones of buffer and store them
718 in the supplied array
720 @param vm - (vlib_main_t *) vlib main data structure pointer
721 @param src_buffer - (u32) source buffer index
722 @param buffers - (u32 * ) buffer index array
723 @param n_buffers - (u16) number of buffer clones requested (<=256)
724 @param head_end_offset - (u16) offset relative to current position
725 where packet head ends
726 @return - (u16) number of buffers actually cloned, may be
727 less than the number requested or zero
730 vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
731 u16 n_buffers, u16 head_end_offset)
734 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
736 ASSERT (s->n_add_refs == 0);
738 ASSERT (n_buffers <= 256);
740 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
742 buffers[0] = src_buffer;
743 for (i = 1; i < n_buffers; i++)
746 d = vlib_buffer_copy (vm, s);
749 buffers[i] = vlib_get_buffer_index (vm, d);
755 if (PREDICT_FALSE (n_buffers == 1))
757 buffers[0] = src_buffer;
761 n_buffers = vlib_buffer_alloc (vm, buffers, n_buffers);
763 for (i = 0; i < n_buffers; i++)
765 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
766 d->current_data = s->current_data;
767 d->current_length = head_end_offset;
768 d->total_length_not_including_first_buffer = s->current_length -
770 if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
772 d->total_length_not_including_first_buffer +=
773 s->total_length_not_including_first_buffer;
775 d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
776 d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
777 clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
778 clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
779 clib_memcpy_fast (vlib_buffer_get_current (d),
780 vlib_buffer_get_current (s), head_end_offset);
781 d->next_buffer = src_buffer;
783 vlib_buffer_advance (s, head_end_offset);
784 s->n_add_refs = n_buffers - 1;
785 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
787 s = vlib_get_buffer (vm, s->next_buffer);
788 s->n_add_refs = n_buffers - 1;
794 /** \brief Create multiple clones of buffer and store them
795 in the supplied array
797 @param vm - (vlib_main_t *) vlib main data structure pointer
798 @param src_buffer - (u32) source buffer index
799 @param buffers - (u32 * ) buffer index array
800 @param n_buffers - (u16) number of buffer clones requested (<=256)
801 @param head_end_offset - (u16) offset relative to current position
802 where packet head ends
803 @return - (u16) number of buffers actually cloned, may be
804 less than the number requested or zero
807 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
808 u16 n_buffers, u16 head_end_offset)
810 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
813 while (n_buffers > 256)
816 copy = vlib_buffer_copy (vm, s);
817 n_cloned += vlib_buffer_clone_256 (vm,
818 vlib_get_buffer_index (vm, copy),
819 (buffers + n_cloned),
820 256, head_end_offset);
823 n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
825 n_buffers, head_end_offset);
830 /** \brief Attach cloned tail to the buffer
832 @param vm - (vlib_main_t *) vlib main data structure pointer
833 @param head - (vlib_buffer_t *) head buffer
834 @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
838 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
839 vlib_buffer_t * tail)
841 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
843 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
844 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
845 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
846 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
847 head->next_buffer = vlib_get_buffer_index (vm, tail);
848 head->total_length_not_including_first_buffer = tail->current_length +
849 tail->total_length_not_including_first_buffer;
852 clib_atomic_add_fetch (&tail->n_add_refs, 1);
854 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
856 tail = vlib_get_buffer (vm, tail->next_buffer);
861 /* Initializes the buffer as an empty packet with no chained buffers. */
863 vlib_buffer_chain_init (vlib_buffer_t * first)
865 first->total_length_not_including_first_buffer = 0;
866 first->current_length = 0;
867 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
868 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
871 /* The provided next_bi buffer index is appended to the end of the packet. */
872 always_inline vlib_buffer_t *
873 vlib_buffer_chain_buffer (vlib_main_t * vm, vlib_buffer_t * last, u32 next_bi)
875 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
876 last->next_buffer = next_bi;
877 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
878 next_buffer->current_length = 0;
879 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
883 /* Increases or decreases the packet length.
884 * It does not allocate or deallocate new buffers.
885 * Therefore, the added length must be compatible
886 * with the last buffer. */
888 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
889 vlib_buffer_t * last, i32 len)
891 last->current_length += len;
893 first->total_length_not_including_first_buffer += len;
896 /* Copy data to the end of the packet and increases its length.
897 * It does not allocate new buffers.
898 * Returns the number of copied bytes. */
900 vlib_buffer_chain_append_data (vlib_main_t * vm,
901 vlib_buffer_free_list_index_t free_list_index,
902 vlib_buffer_t * first,
903 vlib_buffer_t * last, void *data, u16 data_len)
906 vlib_buffer_free_list_buffer_size (vm, free_list_index);
907 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
908 u16 len = clib_min (data_len,
909 n_buffer_bytes - last->current_length -
911 clib_memcpy_fast (vlib_buffer_get_current (last) + last->current_length,
913 vlib_buffer_chain_increase_length (first, last, len);
917 /* Copy data to the end of the packet and increases its length.
918 * Allocates additional buffers from the free list if necessary.
919 * Returns the number of copied bytes.
920 * 'last' value is modified whenever new buffers are allocated and
921 * chained and points to the last buffer in the chain. */
923 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
924 vlib_buffer_free_list_index_t
926 vlib_buffer_t * first,
927 vlib_buffer_t ** last, void *data,
929 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
931 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
932 format_vlib_buffer_contents;
936 /* Vector of packet data. */
939 /* Number of buffers to allocate in each call to allocator. */
940 u32 min_n_buffers_each_alloc;
942 /* Buffer free list for this template. */
943 vlib_buffer_free_list_index_t free_list_index;
948 } vlib_packet_template_t;
950 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
951 vlib_packet_template_t * t);
953 void vlib_packet_template_init (vlib_main_t * vm,
954 vlib_packet_template_t * t,
956 uword n_packet_data_bytes,
957 uword min_n_buffers_each_alloc,
960 void *vlib_packet_template_get_packet (vlib_main_t * vm,
961 vlib_packet_template_t * t,
965 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
967 vec_free (t->packet_data);
970 /* Set a buffer quickly into "uninitialized" state. We want this to
971 be extremely cheap and arrange for all fields that need to be
972 initialized to be in the first 128 bits of the buffer. */
974 vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
975 vlib_buffer_free_list_t * fl)
977 vlib_buffer_t *src = &fl->buffer_init_template;
979 /* Make sure vlib_buffer_t is cacheline aligned and sized */
980 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
981 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
982 CLIB_CACHE_LINE_BYTES);
983 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
984 CLIB_CACHE_LINE_BYTES * 2);
986 /* Make sure buffer template is sane. */
987 vlib_buffer_copy_template (dst, src);
989 /* Not in the first 16 octets. */
990 dst->n_add_refs = src->n_add_refs;
992 /* Make sure it really worked. */
993 #define _(f) ASSERT (dst->f == src->f);
998 /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
999 /* total_length_not_including_first_buffer is not in the template anymore
1000 * so it may actually not zeroed for some buffers. One option is to
1001 * uncomment the line lower (comes at a cost), the other, is to just not
1003 /* dst->total_length_not_including_first_buffer = 0; */
1004 ASSERT (dst->n_add_refs == 0);
1007 static_always_inline vlib_buffer_pool_t *
1008 vlib_buffer_pool_get (vlib_main_t * vm, u8 buffer_pool_index)
1010 vlib_buffer_main_t *bm = vm->buffer_main;
1011 return vec_elt_at_index (bm->buffer_pools, buffer_pool_index);
1015 vlib_buffer_add_to_free_list (vlib_main_t * vm,
1016 vlib_buffer_free_list_t * f,
1017 u32 buffer_index, u8 do_init)
1019 vlib_buffer_pool_t *bp = vlib_buffer_pool_get (vm, f->buffer_pool_index);
1021 b = vlib_get_buffer (vm, buffer_index);
1022 if (PREDICT_TRUE (do_init))
1023 vlib_buffer_init_for_free_list (b, f);
1024 vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
1026 if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
1028 clib_spinlock_lock (&bp->lock);
1029 /* keep last stored buffers, as they are more likely hot in the cache */
1030 vec_add_aligned (bp->buffers, f->buffers, VLIB_FRAME_SIZE,
1031 CLIB_CACHE_LINE_BYTES);
1032 vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
1033 f->n_alloc -= VLIB_FRAME_SIZE;
1034 clib_spinlock_unlock (&bp->lock);
1039 extern u32 *vlib_buffer_state_validation_lock;
1040 extern uword *vlib_buffer_state_validation_hash;
1041 extern void *vlib_buffer_state_heap;
1045 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
1051 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
1053 while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))
1056 p = hash_get (vlib_buffer_state_validation_hash, b);
1058 /* If we don't know about b, declare it to be in the expected state */
1061 hash_set (vlib_buffer_state_validation_hash, b, expected);
1065 if (p[0] != expected)
1067 void cj_stop (void);
1069 vlib_main_t *vm = &vlib_global_main;
1073 bi = vlib_get_buffer_index (vm, b);
1075 clib_mem_set_heap (oldheap);
1076 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
1077 vlib_time_now (vm), bi,
1078 p[0] ? "busy" : "free", expected ? "busy" : "free");
1082 CLIB_MEMORY_BARRIER ();
1083 *vlib_buffer_state_validation_lock = 0;
1084 clib_mem_set_heap (oldheap);
1089 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
1094 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
1096 while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))
1099 hash_set (vlib_buffer_state_validation_hash, b, expected);
1101 CLIB_MEMORY_BARRIER ();
1102 *vlib_buffer_state_validation_lock = 0;
1103 clib_mem_set_heap (oldheap);
1107 /** minimum data size of first buffer in a buffer chain */
1108 #define VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE (256)
1111 * @brief compress buffer chain in a way where the first buffer is at least
1112 * VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE long
1114 * @param[in] vm - vlib_main
1115 * @param[in,out] first - first buffer in chain
1116 * @param[in,out] discard_vector - vector of buffer indexes which were removed
1120 vlib_buffer_chain_compress (vlib_main_t * vm,
1121 vlib_buffer_t * first, u32 ** discard_vector)
1123 if (first->current_length >= VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE ||
1124 !(first->flags & VLIB_BUFFER_NEXT_PRESENT))
1126 /* this is already big enough or not a chain */
1129 /* probe free list to find allocated buffer size to avoid overfill */
1130 vlib_buffer_free_list_t *free_list;
1132 free_list = pool_elt_at_index (vm->buffer_free_list_pool,
1133 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
1135 u32 want_first_size = clib_min (VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE,
1136 free_list->n_data_bytes -
1137 first->current_data);
1140 vlib_buffer_t *second = vlib_get_buffer (vm, first->next_buffer);
1141 u32 need = want_first_size - first->current_length;
1142 u32 amount_to_copy = clib_min (need, second->current_length);
1143 clib_memcpy_fast (((u8 *) vlib_buffer_get_current (first)) +
1144 first->current_length,
1145 vlib_buffer_get_current (second), amount_to_copy);
1146 first->current_length += amount_to_copy;
1147 second->current_data += amount_to_copy;
1148 second->current_length -= amount_to_copy;
1149 if (first->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
1151 first->total_length_not_including_first_buffer -= amount_to_copy;
1153 if (!second->current_length)
1155 vec_add1 (*discard_vector, first->next_buffer);
1156 if (second->flags & VLIB_BUFFER_NEXT_PRESENT)
1158 first->next_buffer = second->next_buffer;
1162 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1164 second->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1167 while ((first->current_length < want_first_size) &&
1168 (first->flags & VLIB_BUFFER_NEXT_PRESENT));
1172 * @brief linearize buffer chain - the first buffer is filled, if needed,
1173 * buffers are allocated and filled, returns free space in last buffer or
1174 * negative on failure
1176 * @param[in] vm - vlib_main
1177 * @param[in,out] first - first buffer in chain
1180 vlib_buffer_chain_linearize (vlib_main_t * vm, vlib_buffer_t * first)
1182 vlib_buffer_t *b = first;
1183 vlib_buffer_free_list_t *fl =
1184 vlib_buffer_get_free_list (vm, VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
1185 u32 buf_len = fl->n_data_bytes;
1186 // free buffer chain starting from the second buffer
1187 int free_count = (b->flags & VLIB_BUFFER_NEXT_PRESENT) != 0;
1188 u32 chain_to_free = b->next_buffer;
1190 u32 len = vlib_buffer_length_in_chain (vm, b);
1191 u32 free_len = buf_len - b->current_data - b->current_length;
1192 int alloc_len = clib_max (len - free_len, 0); //use the free len in the first buffer
1193 int n_buffers = (alloc_len + buf_len - 1) / buf_len;
1194 u32 new_buffers[n_buffers];
1196 u32 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
1197 if (n_alloc != n_buffers)
1199 vlib_buffer_free_no_next (vm, new_buffers, n_alloc);
1203 vlib_buffer_t *s = b;
1204 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
1206 s = vlib_get_buffer (vm, s->next_buffer);
1207 int d_free_len = buf_len - b->current_data - b->current_length;
1208 ASSERT (d_free_len >= 0);
1209 // chain buf and split write
1210 u32 copy_len = clib_min (d_free_len, s->current_length);
1211 u8 *d = vlib_buffer_put_uninit (b, copy_len);
1212 clib_memcpy (d, vlib_buffer_get_current (s), copy_len);
1213 int rest = s->current_length - copy_len;
1217 ASSERT (vlib_buffer_get_tail (b) == b->data + buf_len);
1218 ASSERT (n_buffers > 0);
1219 b = vlib_buffer_chain_buffer (vm, b, new_buffers[--n_buffers]);
1220 //make full use of the new buffers
1221 b->current_data = 0;
1222 d = vlib_buffer_put_uninit (b, rest);
1223 clib_memcpy (d, vlib_buffer_get_current (s) + copy_len, rest);
1226 vlib_buffer_free (vm, &chain_to_free, free_count);
1227 b->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1228 if (b == first) /* no buffers addeed */
1229 b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1230 ASSERT (len == vlib_buffer_length_in_chain (vm, first));
1231 ASSERT (n_buffers == 0);
1232 return buf_len - b->current_data - b->current_length;
1235 #endif /* included_vlib_buffer_funcs_h */
1238 * fd.io coding-style-patch-verification: ON
1241 * eval: (c-set-style "gnu")