2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
44 #include <vppinfra/fifo.h>
47 vlib buffer access methods.
51 /** \brief Translate buffer index into buffer pointer
53 @param vm - (vlib_main_t *) vlib main data structure pointer
54 @param buffer_index - (u32) buffer index
55 @return - (vlib_buffer_t *) buffer pointer
57 always_inline vlib_buffer_t *
58 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
60 vlib_buffer_main_t *bm = &buffer_main;
61 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
62 ASSERT (offset < bm->buffer_mem_size);
64 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
67 static_always_inline void
68 vlib_buffer_copy_indices (u32 * dst, u32 * src, u32 n_indices)
70 clib_memcpy_fast (dst, src, n_indices * sizeof (u32));
73 static_always_inline void
74 vlib_buffer_copy_template (vlib_buffer_t * b, vlib_buffer_t * bt)
76 clib_memcpy_fast (b, bt, STRUCT_OFFSET_OF (vlib_buffer_t, template_end));
79 /** \brief Translate array of buffer indices into buffer pointers with offset
81 @param vm - (vlib_main_t *) vlib main data structure pointer
82 @param bi - (u32 *) array of buffer indices
83 @param b - (void **) array to store buffer pointers
84 @param count - (uword) number of elements
85 @param offset - (i32) offset applied to each pointer
87 static_always_inline void
88 vlib_get_buffers_with_offset (vlib_main_t * vm, u32 * bi, void **b, int count,
91 #ifdef CLIB_HAVE_VEC256
92 u64x4 off = u64x4_splat (buffer_main.buffer_mem_start + offset);
93 /* if count is not const, compiler will not unroll while loop
94 se we maintain two-in-parallel variant */
97 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
98 u64x4 b1 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi + 4));
99 /* shift and add to get vlib_buffer_t pointer */
100 u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
101 u64x4_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 4);
109 #ifdef CLIB_HAVE_VEC256
110 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
111 /* shift and add to get vlib_buffer_t pointer */
112 u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
113 #elif defined (CLIB_HAVE_VEC128)
114 u64x2 off = u64x2_splat (buffer_main.buffer_mem_start + offset);
115 u32x4 bi4 = u32x4_load_unaligned (bi);
116 u64x2 b0 = u32x4_extend_to_u64x2 ((u32x4) bi4);
117 #if defined (__aarch64__)
118 u64x2 b1 = u32x4_extend_to_u64x2_high ((u32x4) bi4);
120 bi4 = u32x4_shuffle (bi4, 2, 3, 0, 1);
121 u64x2 b1 = u32x4_extend_to_u64x2 ((u32x4) bi4);
123 u64x2_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
124 u64x2_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 2);
126 b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
127 b[1] = ((u8 *) vlib_get_buffer (vm, bi[1])) + offset;
128 b[2] = ((u8 *) vlib_get_buffer (vm, bi[2])) + offset;
129 b[3] = ((u8 *) vlib_get_buffer (vm, bi[3])) + offset;
137 b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
144 /** \brief Translate array of buffer indices into buffer pointers
146 @param vm - (vlib_main_t *) vlib main data structure pointer
147 @param bi - (u32 *) array of buffer indices
148 @param b - (vlib_buffer_t **) array to store buffer pointers
149 @param count - (uword) number of elements
152 static_always_inline void
153 vlib_get_buffers (vlib_main_t * vm, u32 * bi, vlib_buffer_t ** b, int count)
155 vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
158 /** \brief Translate buffer pointer into buffer index
160 @param vm - (vlib_main_t *) vlib main data structure pointer
161 @param p - (void *) buffer pointer
162 @return - (u32) buffer index
166 vlib_get_buffer_index (vlib_main_t * vm, void *p)
168 vlib_buffer_main_t *bm = &buffer_main;
169 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
170 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
171 ASSERT (offset < bm->buffer_mem_size);
172 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
173 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
176 /** \brief Translate array of buffer pointers into buffer indices with offset
178 @param vm - (vlib_main_t *) vlib main data structure pointer
179 @param b - (void **) array of buffer pointers
180 @param bi - (u32 *) array to store buffer indices
181 @param count - (uword) number of elements
182 @param offset - (i32) offset applied to each pointer
184 static_always_inline void
185 vlib_get_buffer_indices_with_offset (vlib_main_t * vm, void **b, u32 * bi,
186 uword count, i32 offset)
188 #ifdef CLIB_HAVE_VEC256
189 u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
190 u64x4 off4 = u64x4_splat (buffer_main.buffer_mem_start - offset);
194 /* load 4 pointers into 256-bit register */
195 u64x4 v0 = u64x4_load_unaligned (b);
196 u64x4 v1 = u64x4_load_unaligned (b + 4);
202 v0 >>= CLIB_LOG2_CACHE_LINE_BYTES;
203 v1 >>= CLIB_LOG2_CACHE_LINE_BYTES;
205 /* permute 256-bit register so lower u32s of each buffer index are
206 * placed into lower 128-bits */
207 v2 = u32x8_permute ((u32x8) v0, mask);
208 v3 = u32x8_permute ((u32x8) v1, mask);
210 /* extract lower 128-bits and save them to the array of buffer indices */
211 u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
212 u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
220 /* equivalent non-nector implementation */
221 bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
222 bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
223 bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
224 bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
231 bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
238 /** \brief Translate array of buffer pointers into buffer indices
240 @param vm - (vlib_main_t *) vlib main data structure pointer
241 @param b - (vlib_buffer_t **) array of buffer pointers
242 @param bi - (u32 *) array to store buffer indices
243 @param count - (uword) number of elements
245 static_always_inline void
246 vlib_get_buffer_indices (vlib_main_t * vm, vlib_buffer_t ** b, u32 * bi,
249 vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
252 /** \brief Get next buffer in buffer linklist, or zero for end of list.
254 @param vm - (vlib_main_t *) vlib main data structure pointer
255 @param b - (void *) buffer pointer
256 @return - (vlib_buffer_t *) next buffer, or NULL
258 always_inline vlib_buffer_t *
259 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
261 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
262 ? vlib_get_buffer (vm, b->next_buffer) : 0);
265 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
266 vlib_buffer_t * b_first);
268 /** \brief Get length in bytes of the buffer chain
270 @param vm - (vlib_main_t *) vlib main data structure pointer
271 @param b - (void *) buffer pointer
272 @return - (uword) length of buffer chain
275 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
277 uword len = b->current_length;
279 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
282 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
283 return len + b->total_length_not_including_first_buffer;
285 return vlib_buffer_length_in_chain_slow_path (vm, b);
288 /** \brief Get length in bytes of the buffer index buffer chain
290 @param vm - (vlib_main_t *) vlib main data structure pointer
291 @param bi - (u32) buffer index
292 @return - (uword) length of buffer chain
295 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
297 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
298 return vlib_buffer_length_in_chain (vm, b);
301 /** \brief Copy buffer contents to memory
303 @param vm - (vlib_main_t *) vlib main data structure pointer
304 @param buffer_index - (u32) buffer index
305 @param contents - (u8 *) memory, <strong>must be large enough</strong>
306 @return - (uword) length of buffer chain
309 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
311 uword content_len = 0;
317 b = vlib_get_buffer (vm, buffer_index);
318 l = b->current_length;
319 clib_memcpy_fast (contents + content_len, b->data + b->current_data, l);
321 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
323 buffer_index = b->next_buffer;
330 vlib_buffer_get_pa (vlib_main_t * vm, vlib_buffer_t * b)
332 return vlib_physmem_get_pa (vm, b->data);
336 vlib_buffer_get_current_pa (vlib_main_t * vm, vlib_buffer_t * b)
338 return vlib_buffer_get_pa (vm, b) + b->current_data;
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)	\
  do {							\
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);	\
    vlib_prefetch_buffer_header (_b, type);		\
  } while (0)
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
365 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
367 vlib_buffer_known_state_t
370 always_inline vlib_buffer_known_state_t
371 vlib_buffer_is_known (u32 buffer_index)
373 vlib_buffer_main_t *bm = &buffer_main;
375 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
376 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
377 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
378 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
382 vlib_buffer_set_known_state (u32 buffer_index,
383 vlib_buffer_known_state_t state)
385 vlib_buffer_main_t *bm = &buffer_main;
387 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
388 hash_set (bm->buffer_known_hash, buffer_index, state);
389 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
392 /* Validates sanity of a single buffer.
393 Returns format'ed vector with error message if any. */
394 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
398 vlib_buffer_round_size (u32 size)
400 return round_pow2 (size, sizeof (vlib_buffer_t));
403 always_inline vlib_buffer_free_list_index_t
404 vlib_buffer_get_free_list_index (vlib_buffer_t * b)
406 if (PREDICT_FALSE (b->flags & VLIB_BUFFER_NON_DEFAULT_FREELIST))
407 return b->free_list_index;
413 vlib_buffer_set_free_list_index (vlib_buffer_t * b,
414 vlib_buffer_free_list_index_t index)
416 if (PREDICT_FALSE (index))
418 b->flags |= VLIB_BUFFER_NON_DEFAULT_FREELIST;
419 b->free_list_index = index;
422 b->flags &= ~VLIB_BUFFER_NON_DEFAULT_FREELIST;
425 /** \brief Allocate buffers from specific freelist into supplied array
427 @param vm - (vlib_main_t *) vlib main data structure pointer
428 @param buffers - (u32 * ) buffer index array
429 @param n_buffers - (u32) number of buffers requested
430 @return - (u32) number of buffers actually allocated, may be
431 less than the number requested or zero
434 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
437 vlib_buffer_free_list_index_t index)
439 vlib_buffer_main_t *bm = &buffer_main;
440 vlib_buffer_free_list_t *fl;
444 ASSERT (bm->cb.vlib_buffer_fill_free_list_cb);
446 fl = pool_elt_at_index (vm->buffer_free_list_pool, index);
448 len = vec_len (fl->buffers);
450 if (PREDICT_FALSE (len < n_buffers))
452 bm->cb.vlib_buffer_fill_free_list_cb (vm, fl, n_buffers);
453 if (PREDICT_FALSE ((len = vec_len (fl->buffers)) == 0))
456 /* even if fill free list didn't manage to refill free list
457 we should give what we have */
458 n_buffers = clib_min (len, n_buffers);
460 /* following code is intentionaly duplicated to allow compiler
461 to optimize fast path when n_buffers is constant value */
462 src = fl->buffers + len - n_buffers;
463 vlib_buffer_copy_indices (buffers, src, n_buffers);
464 _vec_len (fl->buffers) -= n_buffers;
466 /* Verify that buffers are known free. */
467 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
468 VLIB_BUFFER_KNOWN_FREE);
473 src = fl->buffers + len - n_buffers;
474 vlib_buffer_copy_indices (buffers, src, n_buffers);
475 _vec_len (fl->buffers) -= n_buffers;
477 /* Verify that buffers are known free. */
478 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
479 VLIB_BUFFER_KNOWN_FREE);
484 /** \brief Allocate buffers into supplied array
486 @param vm - (vlib_main_t *) vlib main data structure pointer
487 @param buffers - (u32 * ) buffer index array
488 @param n_buffers - (u32) number of buffers requested
489 @return - (u32) number of buffers actually allocated, may be
490 less than the number requested or zero
493 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
495 return vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
496 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
499 /** \brief Allocate buffers into ring
501 @param vm - (vlib_main_t *) vlib main data structure pointer
502 @param buffers - (u32 * ) buffer index ring
503 @param start - (u32) first slot in the ring
504 @param ring_size - (u32) ring size
505 @param n_buffers - (u32) number of buffers requested
506 @return - (u32) number of buffers actually allocated, may be
507 less than the number requested or zero
510 vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
511 u32 ring_size, u32 n_buffers)
515 ASSERT (n_buffers <= ring_size);
517 if (PREDICT_TRUE (start + n_buffers <= ring_size))
518 return vlib_buffer_alloc (vm, ring + start, n_buffers);
520 n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
522 if (PREDICT_TRUE (n_alloc == ring_size - start))
523 n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
528 /** \brief Free buffers
529 Frees the entire buffer chain for each buffer
531 @param vm - (vlib_main_t *) vlib main data structure pointer
532 @param buffers - (u32 * ) buffer index array
533 @param n_buffers - (u32) number of buffers to free
537 vlib_buffer_free (vlib_main_t * vm,
538 /* pointer to first buffer */
540 /* number of buffers to free */
543 vlib_buffer_main_t *bm = &buffer_main;
545 ASSERT (bm->cb.vlib_buffer_free_cb);
547 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
550 /** \brief Free buffers, does not free the buffer chain for each buffer
552 @param vm - (vlib_main_t *) vlib main data structure pointer
553 @param buffers - (u32 * ) buffer index array
554 @param n_buffers - (u32) number of buffers to free
558 vlib_buffer_free_no_next (vlib_main_t * vm,
559 /* pointer to first buffer */
561 /* number of buffers to free */
564 vlib_buffer_main_t *bm = &buffer_main;
566 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
568 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
571 /** \brief Free one buffer
572 Shorthand to free a single buffer chain.
574 @param vm - (vlib_main_t *) vlib main data structure pointer
575 @param buffer_index - (u32) buffer index to free
578 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
580 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
583 /** \brief Free buffers from ring
585 @param vm - (vlib_main_t *) vlib main data structure pointer
586 @param buffers - (u32 * ) buffer index ring
587 @param start - (u32) first slot in the ring
588 @param ring_size - (u32) ring size
589 @param n_buffers - (u32) number of buffers
592 vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
593 u32 ring_size, u32 n_buffers)
595 ASSERT (n_buffers <= ring_size);
597 if (PREDICT_TRUE (start + n_buffers <= ring_size))
599 vlib_buffer_free (vm, ring + start, n_buffers);
603 vlib_buffer_free (vm, ring + start, ring_size - start);
604 vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
608 /** \brief Free buffers from ring without freeing tail buffers
610 @param vm - (vlib_main_t *) vlib main data structure pointer
611 @param buffers - (u32 * ) buffer index ring
612 @param start - (u32) first slot in the ring
613 @param ring_size - (u32) ring size
614 @param n_buffers - (u32) number of buffers
617 vlib_buffer_free_from_ring_no_next (vlib_main_t * vm, u32 * ring, u32 start,
618 u32 ring_size, u32 n_buffers)
620 ASSERT (n_buffers <= ring_size);
622 if (PREDICT_TRUE (start + n_buffers <= ring_size))
624 vlib_buffer_free_no_next (vm, ring + start, n_buffers);
628 vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
629 vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
633 /* Add/delete buffer free lists. */
634 vlib_buffer_free_list_index_t vlib_buffer_create_free_list (vlib_main_t * vm,
638 vlib_buffer_delete_free_list (vlib_main_t * vm,
639 vlib_buffer_free_list_index_t free_list_index)
641 vlib_buffer_main_t *bm = &buffer_main;
643 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
645 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
648 /* Make sure we have at least given number of unaligned buffers. */
649 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
650 vlib_buffer_free_list_t *
652 uword n_unaligned_buffers);
654 always_inline vlib_buffer_free_list_t *
655 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
656 vlib_buffer_free_list_index_t * index)
658 vlib_buffer_free_list_index_t i;
660 *index = i = vlib_buffer_get_free_list_index (b);
661 return pool_elt_at_index (vm->buffer_free_list_pool, i);
664 always_inline vlib_buffer_free_list_t *
665 vlib_buffer_get_free_list (vlib_main_t * vm,
666 vlib_buffer_free_list_index_t free_list_index)
668 vlib_buffer_free_list_t *f;
670 f = pool_elt_at_index (vm->buffer_free_list_pool, free_list_index);
672 /* Sanity: indices must match. */
673 ASSERT (f->index == free_list_index);
679 vlib_buffer_free_list_buffer_size (vlib_main_t * vm,
680 vlib_buffer_free_list_index_t index)
682 vlib_buffer_free_list_t *f = vlib_buffer_get_free_list (vm, index);
683 return f->n_data_bytes;
686 /* Append given data to end of buffer, possibly allocating new buffers. */
687 int vlib_buffer_add_data (vlib_main_t * vm, u32 * buffer_index, void *data,
690 /* duplicate all buffers in chain */
691 always_inline vlib_buffer_t *
692 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
694 vlib_buffer_t *s, *d, *fd;
695 uword n_alloc, n_buffers = 1;
696 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
700 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
703 s = vlib_get_buffer (vm, s->next_buffer);
705 u32 new_buffers[n_buffers];
707 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
709 /* No guarantee that we'll get all the buffers we asked for */
710 if (PREDICT_FALSE (n_alloc < n_buffers))
713 vlib_buffer_free (vm, new_buffers, n_alloc);
719 fd = d = vlib_get_buffer (vm, new_buffers[0]);
720 d->current_data = s->current_data;
721 d->current_length = s->current_length;
722 d->flags = s->flags & flag_mask;
723 d->total_length_not_including_first_buffer =
724 s->total_length_not_including_first_buffer;
725 clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
726 clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
727 clib_memcpy_fast (vlib_buffer_get_current (d),
728 vlib_buffer_get_current (s), s->current_length);
731 for (i = 1; i < n_buffers; i++)
734 d->next_buffer = new_buffers[i];
736 s = vlib_get_buffer (vm, s->next_buffer);
737 d = vlib_get_buffer (vm, new_buffers[i]);
738 d->current_data = s->current_data;
739 d->current_length = s->current_length;
740 clib_memcpy_fast (vlib_buffer_get_current (d),
741 vlib_buffer_get_current (s), s->current_length);
742 d->flags = s->flags & flag_mask;
748 /** \brief Create a maximum of 256 clones of buffer and store them
749 in the supplied array
751 @param vm - (vlib_main_t *) vlib main data structure pointer
752 @param src_buffer - (u32) source buffer index
753 @param buffers - (u32 * ) buffer index array
754 @param n_buffers - (u16) number of buffer clones requested (<=256)
755 @param head_end_offset - (u16) offset relative to current position
756 where packet head ends
757 @return - (u16) number of buffers actually cloned, may be
758 less than the number requested or zero
761 vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
762 u16 n_buffers, u16 head_end_offset)
765 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
767 ASSERT (s->n_add_refs == 0);
769 ASSERT (n_buffers <= 256);
771 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
773 buffers[0] = src_buffer;
774 for (i = 1; i < n_buffers; i++)
777 d = vlib_buffer_copy (vm, s);
780 buffers[i] = vlib_get_buffer_index (vm, d);
786 if (PREDICT_FALSE (n_buffers == 1))
788 buffers[0] = src_buffer;
792 n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
793 vlib_buffer_get_free_list_index
796 for (i = 0; i < n_buffers; i++)
798 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
799 d->current_data = s->current_data;
800 d->current_length = head_end_offset;
801 vlib_buffer_set_free_list_index (d,
802 vlib_buffer_get_free_list_index (s));
804 d->total_length_not_including_first_buffer = s->current_length -
806 if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
808 d->total_length_not_including_first_buffer +=
809 s->total_length_not_including_first_buffer;
811 d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
812 d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
813 clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
814 clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
815 clib_memcpy_fast (vlib_buffer_get_current (d),
816 vlib_buffer_get_current (s), head_end_offset);
817 d->next_buffer = src_buffer;
819 vlib_buffer_advance (s, head_end_offset);
820 s->n_add_refs = n_buffers - 1;
821 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
823 s = vlib_get_buffer (vm, s->next_buffer);
824 s->n_add_refs = n_buffers - 1;
830 /** \brief Create multiple clones of buffer and store them
831 in the supplied array
833 @param vm - (vlib_main_t *) vlib main data structure pointer
834 @param src_buffer - (u32) source buffer index
835 @param buffers - (u32 * ) buffer index array
836 @param n_buffers - (u16) number of buffer clones requested (<=256)
837 @param head_end_offset - (u16) offset relative to current position
838 where packet head ends
839 @return - (u16) number of buffers actually cloned, may be
840 less than the number requested or zero
843 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
844 u16 n_buffers, u16 head_end_offset)
846 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
849 while (n_buffers > 256)
852 copy = vlib_buffer_copy (vm, s);
853 n_cloned += vlib_buffer_clone_256 (vm,
854 vlib_get_buffer_index (vm, copy),
855 (buffers + n_cloned),
856 256, head_end_offset);
859 n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
861 n_buffers, head_end_offset);
866 /** \brief Attach cloned tail to the buffer
868 @param vm - (vlib_main_t *) vlib main data structure pointer
869 @param head - (vlib_buffer_t *) head buffer
870 @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
874 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
875 vlib_buffer_t * tail)
877 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
878 ASSERT (vlib_buffer_get_free_list_index (head) ==
879 vlib_buffer_get_free_list_index (tail));
881 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
882 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
883 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
884 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
885 head->next_buffer = vlib_get_buffer_index (vm, tail);
886 head->total_length_not_including_first_buffer = tail->current_length +
887 tail->total_length_not_including_first_buffer;
890 clib_atomic_add_fetch (&tail->n_add_refs, 1);
892 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
894 tail = vlib_get_buffer (vm, tail->next_buffer);
899 /* Initializes the buffer as an empty packet with no chained buffers. */
901 vlib_buffer_chain_init (vlib_buffer_t * first)
903 first->total_length_not_including_first_buffer = 0;
904 first->current_length = 0;
905 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
906 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
909 /* The provided next_bi buffer index is appended to the end of the packet. */
910 always_inline vlib_buffer_t *
911 vlib_buffer_chain_buffer (vlib_main_t * vm, vlib_buffer_t * last, u32 next_bi)
913 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
914 last->next_buffer = next_bi;
915 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
916 next_buffer->current_length = 0;
917 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
921 /* Increases or decreases the packet length.
922 * It does not allocate or deallocate new buffers.
923 * Therefore, the added length must be compatible
924 * with the last buffer. */
926 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
927 vlib_buffer_t * last, i32 len)
929 last->current_length += len;
931 first->total_length_not_including_first_buffer += len;
934 /* Copy data to the end of the packet and increases its length.
935 * It does not allocate new buffers.
936 * Returns the number of copied bytes. */
938 vlib_buffer_chain_append_data (vlib_main_t * vm,
939 vlib_buffer_free_list_index_t free_list_index,
940 vlib_buffer_t * first,
941 vlib_buffer_t * last, void *data, u16 data_len)
944 vlib_buffer_free_list_buffer_size (vm, free_list_index);
945 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
946 u16 len = clib_min (data_len,
947 n_buffer_bytes - last->current_length -
949 clib_memcpy_fast (vlib_buffer_get_current (last) + last->current_length,
951 vlib_buffer_chain_increase_length (first, last, len);
955 /* Copy data to the end of the packet and increases its length.
956 * Allocates additional buffers from the free list if necessary.
957 * Returns the number of copied bytes.
958 * 'last' value is modified whenever new buffers are allocated and
959 * chained and points to the last buffer in the chain. */
961 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
962 vlib_buffer_free_list_index_t
964 vlib_buffer_t * first,
965 vlib_buffer_t ** last, void *data,
967 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
969 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
970 format_vlib_buffer_contents;
974 /* Vector of packet data. */
977 /* Number of buffers to allocate in each call to allocator. */
978 u32 min_n_buffers_each_alloc;
980 /* Buffer free list for this template. */
981 vlib_buffer_free_list_index_t free_list_index;
986 } vlib_packet_template_t;
988 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
989 vlib_packet_template_t * t);
991 void vlib_packet_template_init (vlib_main_t * vm,
992 vlib_packet_template_t * t,
994 uword n_packet_data_bytes,
995 uword min_n_buffers_each_alloc,
998 void *vlib_packet_template_get_packet (vlib_main_t * vm,
999 vlib_packet_template_t * t,
1003 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
1005 vec_free (t->packet_data);
1008 /* Set a buffer quickly into "uninitialized" state. We want this to
1009 be extremely cheap and arrange for all fields that need to be
1010 initialized to be in the first 128 bits of the buffer. */
1012 vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
1013 vlib_buffer_free_list_t * fl)
1015 vlib_buffer_t *src = &fl->buffer_init_template;
1017 /* Make sure vlib_buffer_t is cacheline aligned and sized */
1018 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
1019 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
1020 CLIB_CACHE_LINE_BYTES);
1021 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
1022 CLIB_CACHE_LINE_BYTES * 2);
1024 /* Make sure buffer template is sane. */
1025 vlib_buffer_copy_template (dst, src);
1027 /* Not in the first 16 octets. */
1028 dst->n_add_refs = src->n_add_refs;
1029 vlib_buffer_set_free_list_index (dst, fl->index);
1031 /* Make sure it really worked. */
1032 #define _(f) ASSERT (dst->f == src->f);
1037 /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
1038 /* total_length_not_including_first_buffer is not in the template anymore
1039 * so it may actually not zeroed for some buffers. One option is to
1040 * uncomment the line lower (comes at a cost), the other, is to just not
1042 /* dst->total_length_not_including_first_buffer = 0; */
1043 ASSERT (dst->n_add_refs == 0);
1047 vlib_buffer_add_to_free_list (vlib_main_t * vm,
1048 vlib_buffer_free_list_t * f,
1049 u32 buffer_index, u8 do_init)
1051 vlib_buffer_pool_t *bp = vlib_buffer_pool_get (f->buffer_pool_index);
1053 b = vlib_get_buffer (vm, buffer_index);
1054 if (PREDICT_TRUE (do_init))
1055 vlib_buffer_init_for_free_list (b, f);
1056 vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
1058 if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
1060 clib_spinlock_lock (&bp->lock);
1061 /* keep last stored buffers, as they are more likely hot in the cache */
1062 vec_add_aligned (bp->buffers, f->buffers, VLIB_FRAME_SIZE,
1063 CLIB_CACHE_LINE_BYTES);
1064 vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
1065 f->n_alloc -= VLIB_FRAME_SIZE;
1066 clib_spinlock_unlock (&bp->lock);
1071 extern u32 *vlib_buffer_state_validation_lock;
1072 extern uword *vlib_buffer_state_validation_hash;
1073 extern void *vlib_buffer_state_heap;
1077 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
1083 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
1085 while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))
1088 p = hash_get (vlib_buffer_state_validation_hash, b);
1090 /* If we don't know about b, declare it to be in the expected state */
1093 hash_set (vlib_buffer_state_validation_hash, b, expected);
1097 if (p[0] != expected)
1099 void cj_stop (void);
1101 vlib_main_t *vm = &vlib_global_main;
1105 bi = vlib_get_buffer_index (vm, b);
1107 clib_mem_set_heap (oldheap);
1108 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
1109 vlib_time_now (vm), bi,
1110 p[0] ? "busy" : "free", expected ? "busy" : "free");
1114 CLIB_MEMORY_BARRIER ();
1115 *vlib_buffer_state_validation_lock = 0;
1116 clib_mem_set_heap (oldheap);
/** Debug-build helper: unconditionally record buffer @a b as being in the
 *  busy/free state @a expected in the global validation hash.
 *  NOTE(review): extracted chunk — local declarations and braces are not
 *  visible here; comments only were added. */
1121 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
  /* Hash lives on the dedicated validation heap */
1126   oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
  /* Spin until we own the validation lock */
1128   while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))
1131   hash_set (vlib_buffer_state_validation_hash, b, expected);
  /* Publish the update, release the lock, restore the caller's heap */
1133   CLIB_MEMORY_BARRIER ();
1134   *vlib_buffer_state_validation_lock = 0;
1135   clib_mem_set_heap (oldheap);
1139 /** minimum data size of first buffer in a buffer chain */
1140 #define VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE (256)
1143  * @brief compress buffer chain in a way where the first buffer is at least
1144  * VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE long
1146  * @param[in] vm - vlib_main
1147  * @param[in,out] first - first buffer in chain
1148  * @param[in,out] discard_vector - vector of indices of buffers emptied and
 *                 unlinked from the chain; caller is responsible for freeing them
 *
 * NOTE(review): extracted chunk — the return-type line, braces, the `do` of
 * the trailing do/while, and the early-return body are not visible here;
 * comments only were added.
1152 vlib_buffer_chain_compress (vlib_main_t * vm,
1153 			    vlib_buffer_t * first, u32 ** discard_vector)
  /* Nothing to do when the first buffer already holds enough data or has
     no successor */
1155   if (first->current_length >= VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE ||
1156       !(first->flags & VLIB_BUFFER_NEXT_PRESENT))
1158       /* this is already big enough or not a chain */
1161   /* probe free list to find allocated buffer size to avoid overfill */
1162   vlib_buffer_free_list_index_t index;
1163   vlib_buffer_free_list_t *free_list =
1164     vlib_buffer_get_buffer_free_list (vm, first, &index);
  /* Target size is capped by the space actually left in the first buffer */
1166   u32 want_first_size = clib_min (VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE,
1167 				  free_list->n_data_bytes -
1168 				  first->current_data);
  /* Loop body (do/while): pull bytes forward from the second buffer into
     the first until the target size is reached or the chain ends */
1171       vlib_buffer_t *second = vlib_get_buffer (vm, first->next_buffer);
1172       u32 need = want_first_size - first->current_length;
1173       u32 amount_to_copy = clib_min (need, second->current_length);
1174       clib_memcpy_fast (((u8 *) vlib_buffer_get_current (first)) +
1175 			first->current_length,
1176 			vlib_buffer_get_current (second), amount_to_copy);
1177       first->current_length += amount_to_copy;
1178       second->current_data += amount_to_copy;
1179       second->current_length -= amount_to_copy;
  /* Keep the cached chain total consistent with the bytes moved forward */
1180       if (first->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
1182 	  first->total_length_not_including_first_buffer -= amount_to_copy;
  /* Second buffer fully drained: unlink it and queue it for discard */
1184       if (!second->current_length)
1186 	  vec_add1 (*discard_vector, first->next_buffer);
1187 	  if (second->flags & VLIB_BUFFER_NEXT_PRESENT)
1189 	    first->next_buffer = second->next_buffer;
1193 	    first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1195 	  second->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1198   while ((first->current_length < want_first_size) &&
1199 	 (first->flags & VLIB_BUFFER_NEXT_PRESENT));
1203  * @brief linearize buffer chain - the first buffer is filled, if needed,
1204  * buffers are allocated and filled, returns free space in last buffer or
1205  * negative on failure
1207  * @param[in] vm - vlib_main
1208  * @param[in,out] first - first buffer in chain
 * @return free bytes remaining in the last buffer of the rebuilt chain,
 *         or negative on allocation failure
 *
 * NOTE(review): extracted chunk — the return-type line, braces and the
 * failure-return statement are not visible here; comments only were added.
1211 vlib_buffer_chain_linearize (vlib_main_t * vm, vlib_buffer_t * first)
1213   vlib_buffer_t *b = first;
1214   vlib_buffer_free_list_t *fl =
1215     vlib_buffer_get_free_list (vm, vlib_buffer_get_free_list_index (b));
1216   u32 buf_len = fl->n_data_bytes;
1217   // free buffer chain starting from the second buffer
1218   int free_count = (b->flags & VLIB_BUFFER_NEXT_PRESENT) != 0;
1219   u32 chain_to_free = b->next_buffer;
  // pre-compute how many full replacement buffers the chain's payload needs
1221   u32 len = vlib_buffer_length_in_chain (vm, b);
1222   u32 free_len = buf_len - b->current_data - b->current_length;
1223   int alloc_len = clib_max (len - free_len, 0);	//use the free len in the first buffer
1224   int n_buffers = (alloc_len + buf_len - 1) / buf_len;
1225   u32 new_buffers[n_buffers];
  // allocate replacement buffers up front; bail out if the pool is short
1227   u32 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
1228   if (n_alloc != n_buffers)
1230       vlib_buffer_free_no_next (vm, new_buffers, n_alloc);
  // walk the old chain, packing each source buffer's payload into b
1234   vlib_buffer_t *s = b;
1235   while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
1237       s = vlib_get_buffer (vm, s->next_buffer);
1238       int d_free_len = buf_len - b->current_data - b->current_length;
1239       ASSERT (d_free_len >= 0);
1240       // chain buf and split write
1241       u32 copy_len = clib_min (d_free_len, s->current_length);
1242       u8 *d = vlib_buffer_put_uninit (b, copy_len);
1243       clib_memcpy (d, vlib_buffer_get_current (s), copy_len);
1244       int rest = s->current_length - copy_len;
  // destination full: chain in a fresh buffer and write the remainder there
1248 	  ASSERT (vlib_buffer_get_tail (b) == b->data + buf_len);
1249 	  ASSERT (n_buffers > 0);
1250 	  b = vlib_buffer_chain_buffer (vm, b, new_buffers[--n_buffers]);
1251 	  //make full use of the new buffers
1252 	  b->current_data = 0;
1253 	  d = vlib_buffer_put_uninit (b, rest);
1254 	  clib_memcpy (d, vlib_buffer_get_current (s) + copy_len, rest);
  // release the old tail (if any) and fix up flags on the new tail
1257   vlib_buffer_free (vm, &chain_to_free, free_count);
1258   b->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1259   if (b == first)		/* no buffers added */
1260     b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
  // sanity: total payload unchanged, every allocated buffer was consumed
1261   ASSERT (len == vlib_buffer_length_in_chain (vm, first));
1262   ASSERT (n_buffers == 0);
1263   return buf_len - b->current_data - b->current_length;
1266 #endif /* included_vlib_buffer_funcs_h */
1269 * fd.io coding-style-patch-verification: ON
1272 * eval: (c-set-style "gnu")