2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
44 #include <vppinfra/fifo.h>
47 vlib buffer access methods.
51 /** \brief Translate buffer index into buffer pointer
53 @param vm - (vlib_main_t *) vlib main data structure pointer
54 @param buffer_index - (u32) buffer index
55 @return - (vlib_buffer_t *) buffer pointer
57 always_inline vlib_buffer_t *
58 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
60 vlib_buffer_main_t *bm = &buffer_main;
61 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
62 ASSERT (offset < bm->buffer_mem_size);
64 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
67 static_always_inline void
68 vlib_buffer_copy_indices (u32 * dst, u32 * src, u32 n_indices)
70 clib_memcpy_fast (dst, src, n_indices * sizeof (u32));
73 static_always_inline void
74 vlib_buffer_copy_template (vlib_buffer_t * b, vlib_buffer_t * bt)
76 clib_memcpy_fast (b, bt, STRUCT_OFFSET_OF (vlib_buffer_t, template_end));
/** \brief Translate array of buffer indices into buffer pointers with offset

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32 *) array of buffer indices
    @param b - (void **) array to store buffer pointers
    @param count - (uword) number of elements
    @param offset - (i32) offset applied to each pointer
*/
static_always_inline void
vlib_get_buffers_with_offset (vlib_main_t * vm, u32 * bi, void **b, int count,
#ifdef CLIB_HAVE_VEC256
  u64x4 off = u64x4_splat (buffer_main.buffer_mem_start + offset);
  /* if count is not const, compiler will not unroll while loop
     so we maintain two-in-parallel variant */
      /* 8-wide round: widen two groups of four u32 indices to u64 */
      u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
      u64x4 b1 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi + 4));
      /* shift and add to get vlib_buffer_t pointer */
      u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
      u64x4_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 4);
#ifdef CLIB_HAVE_VEC256
  u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
  /* shift and add to get vlib_buffer_t pointer */
  u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
#elif defined (CLIB_HAVE_VEC128)
  u64x2 off = u64x2_splat (buffer_main.buffer_mem_start + offset);
  u32x4 bi4 = u32x4_load_unaligned (bi);
  u64x2 b0 = u32x4_extend_to_u64x2 ((u32x4) bi4);
#if defined (__aarch64__)
  u64x2 b1 = u32x4_extend_to_u64x2_high ((u32x4) bi4);
  /* no high-half widen on this target: rotate, then widen the low half */
  bi4 = u32x4_shuffle (bi4, 2, 3, 0, 1);
  u64x2 b1 = u32x4_extend_to_u64x2 ((u32x4) bi4);
  u64x2_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
  u64x2_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 2);
  /* scalar fallback: translate four indices per round */
  b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
  b[1] = ((u8 *) vlib_get_buffer (vm, bi[1])) + offset;
  b[2] = ((u8 *) vlib_get_buffer (vm, bi[2])) + offset;
  b[3] = ((u8 *) vlib_get_buffer (vm, bi[3])) + offset;
  /* tail: one element per round for count < 4 */
  b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
144 /** \brief Translate array of buffer indices into buffer pointers
146 @param vm - (vlib_main_t *) vlib main data structure pointer
147 @param bi - (u32 *) array of buffer indices
148 @param b - (vlib_buffer_t **) array to store buffer pointers
149 @param count - (uword) number of elements
152 static_always_inline void
153 vlib_get_buffers (vlib_main_t * vm, u32 * bi, vlib_buffer_t ** b, int count)
155 vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
158 /** \brief Translate buffer pointer into buffer index
160 @param vm - (vlib_main_t *) vlib main data structure pointer
161 @param p - (void *) buffer pointer
162 @return - (u32) buffer index
166 vlib_get_buffer_index (vlib_main_t * vm, void *p)
168 vlib_buffer_main_t *bm = &buffer_main;
169 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
170 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
171 ASSERT (offset < bm->buffer_mem_size);
172 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
173 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
/** \brief Translate array of buffer pointers into buffer indices with offset

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param b - (void **) array of buffer pointers
    @param bi - (u32 *) array to store buffer indices
    @param count - (uword) number of elements
    @param offset - (i32) offset applied to each pointer
*/
static_always_inline void
vlib_get_buffer_indices_with_offset (vlib_main_t * vm, void **b, u32 * bi,
				     uword count, i32 offset)
#ifdef CLIB_HAVE_VEC256
  u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
  u64x4 off4 = u64x4_splat (buffer_main.buffer_mem_start - offset);
      /* load 4 pointers into 256-bit register */
      u64x4 v0 = u64x4_load_unaligned (b);
      u64x4 v1 = u64x4_load_unaligned (b + 4);
      /* pointer -> index: strip arena base, then divide by slot size */
      v0 >>= CLIB_LOG2_CACHE_LINE_BYTES;
      v1 >>= CLIB_LOG2_CACHE_LINE_BYTES;
      /* permute 256-bit register so lower u32s of each buffer index are
       * placed into lower 128-bits */
      v2 = u32x8_permute ((u32x8) v0, mask);
      v3 = u32x8_permute ((u32x8) v1, mask);
      /* extract lower 128-bits and save them to the array of buffer indices */
      u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
      u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
  /* equivalent non-vector implementation */
  bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
  bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
  bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
  bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
  /* tail: one pointer per round for count < 4 */
  bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
238 /** \brief Translate array of buffer pointers into buffer indices
240 @param vm - (vlib_main_t *) vlib main data structure pointer
241 @param b - (vlib_buffer_t **) array of buffer pointers
242 @param bi - (u32 *) array to store buffer indices
243 @param count - (uword) number of elements
245 static_always_inline void
246 vlib_get_buffer_indices (vlib_main_t * vm, vlib_buffer_t ** b, u32 * bi,
249 vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
252 /** \brief Get next buffer in buffer linklist, or zero for end of list.
254 @param vm - (vlib_main_t *) vlib main data structure pointer
255 @param b - (void *) buffer pointer
256 @return - (vlib_buffer_t *) next buffer, or NULL
258 always_inline vlib_buffer_t *
259 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
261 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
262 ? vlib_get_buffer (vm, b->next_buffer) : 0);
265 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
266 vlib_buffer_t * b_first);
268 /** \brief Get length in bytes of the buffer chain
270 @param vm - (vlib_main_t *) vlib main data structure pointer
271 @param b - (void *) buffer pointer
272 @return - (uword) length of buffer chain
275 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
277 uword len = b->current_length;
279 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
282 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
283 return len + b->total_length_not_including_first_buffer;
285 return vlib_buffer_length_in_chain_slow_path (vm, b);
288 /** \brief Get length in bytes of the buffer index buffer chain
290 @param vm - (vlib_main_t *) vlib main data structure pointer
291 @param bi - (u32) buffer index
292 @return - (uword) length of buffer chain
295 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
297 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
298 return vlib_buffer_length_in_chain (vm, b);
/** \brief Copy buffer contents to memory

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffer_index - (u32) buffer index
    @param contents - (u8 *) memory, <strong>must be large enough</strong>
    @return - (uword) length of buffer chain
*/
vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
  uword content_len = 0;
  /* walk the chain, appending each segment's payload to contents */
  b = vlib_get_buffer (vm, buffer_index);
  l = b->current_length;
  clib_memcpy_fast (contents + content_len, b->data + b->current_data, l);
  /* stop after the last segment of the chain */
  if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
  buffer_index = b->next_buffer;
330 vlib_buffer_get_pa (vlib_main_t * vm, vlib_buffer_t * b)
332 return vlib_physmem_get_pa (vm, b->data);
336 vlib_buffer_get_current_pa (vlib_main_t * vm, vlib_buffer_t * b)
338 return vlib_buffer_get_pa (vm, b) + b->current_data;
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type) \
  vlib_buffer_t * _b = vlib_get_buffer (vm, bi); \
  vlib_prefetch_buffer_header (_b, type); \
/* Bookkeeping state tracked per buffer index for allocation validation. */
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,
  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
365 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
367 vlib_buffer_known_state_t
370 always_inline vlib_buffer_known_state_t
371 vlib_buffer_is_known (u32 buffer_index)
373 vlib_buffer_main_t *bm = &buffer_main;
375 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
376 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
377 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
378 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
382 vlib_buffer_set_known_state (u32 buffer_index,
383 vlib_buffer_known_state_t state)
385 vlib_buffer_main_t *bm = &buffer_main;
387 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
388 hash_set (bm->buffer_known_hash, buffer_index, state);
389 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
392 /* Validates sanity of a single buffer.
393 Returns format'ed vector with error message if any. */
394 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
398 vlib_buffer_round_size (u32 size)
400 return round_pow2 (size, sizeof (vlib_buffer_t));
403 always_inline vlib_buffer_free_list_index_t
404 vlib_buffer_get_free_list_index (vlib_buffer_t * b)
406 if (PREDICT_FALSE (b->flags & VLIB_BUFFER_NON_DEFAULT_FREELIST))
407 return b->free_list_index;
413 vlib_buffer_set_free_list_index (vlib_buffer_t * b,
414 vlib_buffer_free_list_index_t index)
416 if (PREDICT_FALSE (index))
418 b->flags |= VLIB_BUFFER_NON_DEFAULT_FREELIST;
419 b->free_list_index = index;
422 b->flags &= ~VLIB_BUFFER_NON_DEFAULT_FREELIST;
/** \brief Allocate buffers from specific freelist into supplied array

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u32) number of buffers requested
    @return - (u32) number of buffers actually allocated, may be
    less than the number requested or zero
*/
vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
				  vlib_buffer_free_list_index_t index)
  vlib_buffer_main_t *bm = &buffer_main;
  vlib_buffer_free_list_t *fl;
  /* refill is delegated to the registered buffer-manager callback */
  ASSERT (bm->cb.vlib_buffer_fill_free_list_cb);
  fl = pool_elt_at_index (vm->buffer_free_list_pool, index);
  len = vec_len (fl->buffers);
  /* not enough cached indices: ask the callback to top up the list */
  if (PREDICT_FALSE (len < n_buffers))
    bm->cb.vlib_buffer_fill_free_list_cb (vm, fl, n_buffers);
    if (PREDICT_FALSE ((len = vec_len (fl->buffers)) == 0))
    /* even if fill free list didn't manage to refill free list
       we should give what we have */
    n_buffers = clib_min (len, n_buffers);
    /* following code is intentionally duplicated to allow compiler
       to optimize fast path when n_buffers is constant value */
    src = fl->buffers + len - n_buffers;
    vlib_buffer_copy_indices (buffers, src, n_buffers);
    _vec_len (fl->buffers) -= n_buffers;
    /* Verify that buffers are known free. */
    vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
				     VLIB_BUFFER_KNOWN_FREE);
  src = fl->buffers + len - n_buffers;
  vlib_buffer_copy_indices (buffers, src, n_buffers);
  _vec_len (fl->buffers) -= n_buffers;
  /* Verify that buffers are known free. */
  vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
				   VLIB_BUFFER_KNOWN_FREE);
484 /** \brief Allocate buffers into supplied array
486 @param vm - (vlib_main_t *) vlib main data structure pointer
487 @param buffers - (u32 * ) buffer index array
488 @param n_buffers - (u32) number of buffers requested
489 @return - (u32) number of buffers actually allocated, may be
490 less than the number requested or zero
493 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
495 return vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
496 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
499 /** \brief Allocate buffers into ring
501 @param vm - (vlib_main_t *) vlib main data structure pointer
502 @param buffers - (u32 * ) buffer index ring
503 @param start - (u32) first slot in the ring
504 @param ring_size - (u32) ring size
505 @param n_buffers - (u32) number of buffers requested
506 @return - (u32) number of buffers actually allocated, may be
507 less than the number requested or zero
510 vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
511 u32 ring_size, u32 n_buffers)
515 ASSERT (n_buffers <= ring_size);
517 if (PREDICT_TRUE (start + n_buffers <= ring_size))
518 return vlib_buffer_alloc (vm, ring + start, n_buffers);
520 n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
522 if (PREDICT_TRUE (n_alloc == ring_size - start))
523 n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
528 /** \brief Free buffers
529 Frees the entire buffer chain for each buffer
531 @param vm - (vlib_main_t *) vlib main data structure pointer
532 @param buffers - (u32 * ) buffer index array
533 @param n_buffers - (u32) number of buffers to free
537 vlib_buffer_free (vlib_main_t * vm,
538 /* pointer to first buffer */
540 /* number of buffers to free */
543 vlib_buffer_main_t *bm = &buffer_main;
545 ASSERT (bm->cb.vlib_buffer_free_cb);
547 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
550 /** \brief Free buffers, does not free the buffer chain for each buffer
552 @param vm - (vlib_main_t *) vlib main data structure pointer
553 @param buffers - (u32 * ) buffer index array
554 @param n_buffers - (u32) number of buffers to free
558 vlib_buffer_free_no_next (vlib_main_t * vm,
559 /* pointer to first buffer */
561 /* number of buffers to free */
564 vlib_buffer_main_t *bm = &buffer_main;
566 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
568 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
571 /** \brief Free one buffer
572 Shorthand to free a single buffer chain.
574 @param vm - (vlib_main_t *) vlib main data structure pointer
575 @param buffer_index - (u32) buffer index to free
578 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
580 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
583 /** \brief Free buffers from ring
585 @param vm - (vlib_main_t *) vlib main data structure pointer
586 @param buffers - (u32 * ) buffer index ring
587 @param start - (u32) first slot in the ring
588 @param ring_size - (u32) ring size
589 @param n_buffers - (u32) number of buffers
592 vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
593 u32 ring_size, u32 n_buffers)
595 ASSERT (n_buffers <= ring_size);
597 if (PREDICT_TRUE (start + n_buffers <= ring_size))
599 vlib_buffer_free (vm, ring + start, n_buffers);
603 vlib_buffer_free (vm, ring + start, ring_size - start);
604 vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
608 /** \brief Free buffers from ring without freeing tail buffers
610 @param vm - (vlib_main_t *) vlib main data structure pointer
611 @param buffers - (u32 * ) buffer index ring
612 @param start - (u32) first slot in the ring
613 @param ring_size - (u32) ring size
614 @param n_buffers - (u32) number of buffers
617 vlib_buffer_free_from_ring_no_next (vlib_main_t * vm, u32 * ring, u32 start,
618 u32 ring_size, u32 n_buffers)
620 ASSERT (n_buffers <= ring_size);
622 if (PREDICT_TRUE (start + n_buffers <= ring_size))
624 vlib_buffer_free_no_next (vm, ring + start, n_buffers);
628 vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
629 vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
633 /* Add/delete buffer free lists. */
634 vlib_buffer_free_list_index_t vlib_buffer_create_free_list (vlib_main_t * vm,
638 vlib_buffer_delete_free_list (vlib_main_t * vm,
639 vlib_buffer_free_list_index_t free_list_index)
641 vlib_buffer_main_t *bm = &buffer_main;
643 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
645 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
648 /* Make sure we have at least given number of unaligned buffers. */
649 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
650 vlib_buffer_free_list_t *
652 uword n_unaligned_buffers);
654 always_inline vlib_buffer_free_list_t *
655 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
656 vlib_buffer_free_list_index_t * index)
658 vlib_buffer_free_list_index_t i;
660 *index = i = vlib_buffer_get_free_list_index (b);
661 return pool_elt_at_index (vm->buffer_free_list_pool, i);
664 always_inline vlib_buffer_free_list_t *
665 vlib_buffer_get_free_list (vlib_main_t * vm,
666 vlib_buffer_free_list_index_t free_list_index)
668 vlib_buffer_free_list_t *f;
670 f = pool_elt_at_index (vm->buffer_free_list_pool, free_list_index);
672 /* Sanity: indices must match. */
673 ASSERT (f->index == free_list_index);
679 vlib_buffer_free_list_buffer_size (vlib_main_t * vm,
680 vlib_buffer_free_list_index_t index)
682 vlib_buffer_free_list_t *f = vlib_buffer_get_free_list (vm, index);
683 return f->n_data_bytes;
686 /* Append given data to end of buffer, possibly allocating new buffers. */
687 int vlib_buffer_add_data (vlib_main_t * vm,
688 vlib_buffer_free_list_index_t free_list_index,
689 u32 * buffer_index, void *data, u32 n_data_bytes);
/* duplicate all buffers in chain */
always_inline vlib_buffer_t *
vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
  vlib_buffer_t *s, *d, *fd;
  uword n_alloc, n_buffers = 1;
  /* only chain-related flags are carried over to the copy */
  u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
  /* count segments in the source chain */
  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
      s = vlib_get_buffer (vm, s->next_buffer);
  u32 new_buffers[n_buffers];
  n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
  /* No guarantee that we'll get all the buffers we asked for */
  if (PREDICT_FALSE (n_alloc < n_buffers))
      /* partial allocation: release what we did get and fail the copy */
      vlib_buffer_free (vm, new_buffers, n_alloc);
  /* head of the copy mirrors the head of the source */
  fd = d = vlib_get_buffer (vm, new_buffers[0]);
  d->current_data = s->current_data;
  d->current_length = s->current_length;
  d->flags = s->flags & flag_mask;
  d->total_length_not_including_first_buffer =
    s->total_length_not_including_first_buffer;
  clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
  clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
  clib_memcpy_fast (vlib_buffer_get_current (d),
		    vlib_buffer_get_current (s), s->current_length);
  /* copy remaining segments, linking each into the new chain */
  for (i = 1; i < n_buffers; i++)
      d->next_buffer = new_buffers[i];
      s = vlib_get_buffer (vm, s->next_buffer);
      d = vlib_get_buffer (vm, new_buffers[i]);
      d->current_data = s->current_data;
      d->current_length = s->current_length;
      clib_memcpy_fast (vlib_buffer_get_current (d),
			vlib_buffer_get_current (s), s->current_length);
      d->flags = s->flags & flag_mask;
/** \brief Create a maximum of 256 clones of buffer and store them
    in the supplied array

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param src_buffer - (u32) source buffer index
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u16) number of buffer clones requested (<=256)
    @param head_end_offset - (u16) offset relative to current position
    where packet head ends
    @return - (u16) number of buffers actually cloned, may be
    less than the number requested or zero
*/
vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
		       u16 n_buffers, u16 head_end_offset)
  vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
  /* cloning requires an unshared source buffer */
  ASSERT (s->n_add_refs == 0);
  ASSERT (n_buffers <= 256);
  /* head too small to be worth sharing: make full deep copies instead */
  if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
      buffers[0] = src_buffer;
      for (i = 1; i < n_buffers; i++)
	  d = vlib_buffer_copy (vm, s);
	  buffers[i] = vlib_get_buffer_index (vm, d);
  /* single clone: just hand back the source itself */
  if (PREDICT_FALSE (n_buffers == 1))
      buffers[0] = src_buffer;
  n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
						vlib_buffer_get_free_list_index
  /* each clone gets a private copy of the head and shares the tail */
  for (i = 0; i < n_buffers; i++)
      vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
      d->current_data = s->current_data;
      d->current_length = head_end_offset;
      vlib_buffer_set_free_list_index (d,
				       vlib_buffer_get_free_list_index (s));
      d->total_length_not_including_first_buffer = s->current_length -
      if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
	  d->total_length_not_including_first_buffer +=
	    s->total_length_not_including_first_buffer;
      d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
      d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
      clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
      clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
      clib_memcpy_fast (vlib_buffer_get_current (d),
			vlib_buffer_get_current (s), head_end_offset);
      /* shared tail starts at the original source buffer */
      d->next_buffer = src_buffer;
  vlib_buffer_advance (s, head_end_offset);
  /* every segment of the shared tail carries the clone refcount */
  s->n_add_refs = n_buffers - 1;
  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
      s = vlib_get_buffer (vm, s->next_buffer);
      s->n_add_refs = n_buffers - 1;
/** \brief Create multiple clones of buffer and store them
    in the supplied array

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param src_buffer - (u32) source buffer index
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u16) number of buffer clones requested (<=256)
    @param head_end_offset - (u16) offset relative to current position
    where packet head ends
    @return - (u16) number of buffers actually cloned, may be
    less than the number requested or zero
*/
vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
		   u16 n_buffers, u16 head_end_offset)
  vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
  /* requests above 256 are served in batches, each batch cloning a
     fresh copy of the source so no refcount exceeds the 256 limit */
  while (n_buffers > 256)
      copy = vlib_buffer_copy (vm, s);
      n_cloned += vlib_buffer_clone_256 (vm,
					 vlib_get_buffer_index (vm, copy),
					 (buffers + n_cloned),
					 256, head_end_offset);
  /* final (or only) batch clones the original source buffer */
  n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
				     n_buffers, head_end_offset);
/** \brief Attach cloned tail to the buffer

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param head - (vlib_buffer_t *) head buffer
    @param tail - (vlib_buffer_t *) tail buffer to clone and attach to head
*/
vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
			  vlib_buffer_t * tail)
  /* head must not already have a chain, and both buffers must come
     from the same free list */
  ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
  ASSERT (vlib_buffer_get_free_list_index (head) ==
	  vlib_buffer_get_free_list_index (tail));

  head->flags |= VLIB_BUFFER_NEXT_PRESENT;
  head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
  head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
  /* head's total-length validity is inherited from the tail */
  head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
  head->next_buffer = vlib_get_buffer_index (vm, tail);
  head->total_length_not_including_first_buffer = tail->current_length +
    tail->total_length_not_including_first_buffer;

  /* every segment of the attached tail gains one reference */
  clib_atomic_add_fetch (&tail->n_add_refs, 1);

  if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
      tail = vlib_get_buffer (vm, tail->next_buffer);
900 /* Initializes the buffer as an empty packet with no chained buffers. */
902 vlib_buffer_chain_init (vlib_buffer_t * first)
904 first->total_length_not_including_first_buffer = 0;
905 first->current_length = 0;
906 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
907 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
910 /* The provided next_bi buffer index is appended to the end of the packet. */
911 always_inline vlib_buffer_t *
912 vlib_buffer_chain_buffer (vlib_main_t * vm, vlib_buffer_t * last, u32 next_bi)
914 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
915 last->next_buffer = next_bi;
916 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
917 next_buffer->current_length = 0;
918 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
922 /* Increases or decreases the packet length.
923 * It does not allocate or deallocate new buffers.
924 * Therefore, the added length must be compatible
925 * with the last buffer. */
927 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
928 vlib_buffer_t * last, i32 len)
930 last->current_length += len;
932 first->total_length_not_including_first_buffer += len;
/* Copy data to the end of the packet and increases its length.
 * It does not allocate new buffers.
 * Returns the number of copied bytes. */
vlib_buffer_chain_append_data (vlib_main_t * vm,
			       vlib_buffer_free_list_index_t free_list_index,
			       vlib_buffer_t * first,
			       vlib_buffer_t * last, void *data, u16 data_len)
  vlib_buffer_free_list_buffer_size (vm, free_list_index);
  ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
  /* copy no more than fits in the remaining space of 'last' */
  u16 len = clib_min (data_len,
		      n_buffer_bytes - last->current_length -
  clib_memcpy_fast (vlib_buffer_get_current (last) + last->current_length,
  vlib_buffer_chain_increase_length (first, last, len);
956 /* Copy data to the end of the packet and increases its length.
957 * Allocates additional buffers from the free list if necessary.
958 * Returns the number of copied bytes.
959 * 'last' value is modified whenever new buffers are allocated and
960 * chained and points to the last buffer in the chain. */
962 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
963 vlib_buffer_free_list_index_t
965 vlib_buffer_t * first,
966 vlib_buffer_t ** last, void *data,
968 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
970 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
971 format_vlib_buffer_contents;
/* Template describing a pre-built packet that can be stamped out
   repeatedly (see vlib_packet_template_init / _get_packet below). */
/* Vector of packet data. */
/* Number of buffers to allocate in each call to allocator. */
u32 min_n_buffers_each_alloc;
/* Buffer free list for this template. */
vlib_buffer_free_list_index_t free_list_index;
} vlib_packet_template_t;
989 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
990 vlib_packet_template_t * t);
992 void vlib_packet_template_init (vlib_main_t * vm,
993 vlib_packet_template_t * t,
995 uword n_packet_data_bytes,
996 uword min_n_buffers_each_alloc,
999 void *vlib_packet_template_get_packet (vlib_main_t * vm,
1000 vlib_packet_template_t * t,
1004 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
1006 vec_free (t->packet_data);
/* Set a buffer quickly into "uninitialized" state. We want this to
   be extremely cheap and arrange for all fields that need to be
   initialized to be in the first 128 bits of the buffer. */
vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
				vlib_buffer_free_list_t * fl)
  vlib_buffer_t *src = &fl->buffer_init_template;

  /* Make sure vlib_buffer_t is cacheline aligned and sized */
  ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
  ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
	  CLIB_CACHE_LINE_BYTES);
  ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
	  CLIB_CACHE_LINE_BYTES * 2);

  /* Make sure buffer template is sane. */
  vlib_buffer_copy_template (dst, src);

  /* Not in the first 16 octets. */
  dst->n_add_refs = src->n_add_refs;
  vlib_buffer_set_free_list_index (dst, fl->index);

  /* Make sure it really worked. */
#define _(f) ASSERT (dst->f == src->f);
  /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
  /* total_length_not_including_first_buffer is not in the template anymore
   * so it may actually not zeroed for some buffers. One option is to
   * uncomment the line lower (comes at a cost), the other, is to just not
   */
  /* dst->total_length_not_including_first_buffer = 0; */
  ASSERT (dst->n_add_refs == 0);
/* Return one buffer to free list f, optionally re-initializing it from
   the list's template; overflow beyond 4 frames spills to the shared
   per-pool vector under the pool spinlock. */
vlib_buffer_add_to_free_list (vlib_main_t * vm,
			      vlib_buffer_free_list_t * f,
			      u32 buffer_index, u8 do_init)
  vlib_buffer_pool_t *bp = vlib_buffer_pool_get (f->buffer_pool_index);
  b = vlib_get_buffer (vm, buffer_index);
  if (PREDICT_TRUE (do_init))
    vlib_buffer_init_for_free_list (b, f);
  vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);

  /* local cache grew too large: hand a frame's worth back to the pool */
  if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
      clib_spinlock_lock (&bp->lock);
      /* keep last stored buffers, as they are more likely hot in the cache */
      vec_add_aligned (bp->buffers, f->buffers, VLIB_FRAME_SIZE,
		       CLIB_CACHE_LINE_BYTES);
      vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
      f->n_alloc -= VLIB_FRAME_SIZE;
      clib_spinlock_unlock (&bp->lock);
1072 extern u32 *vlib_buffer_state_validation_lock;
1073 extern uword *vlib_buffer_state_validation_hash;
1074 extern void *vlib_buffer_state_heap;
/* Debug aid: check that buffer b is in the 'expected' busy/free state
   per the global validation hash, warning when it is not. */
vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
  /* validation state lives on its own heap */
  oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
  /* spin until the validation lock is ours */
  while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))
  p = hash_get (vlib_buffer_state_validation_hash, b);
  /* If we don't know about b, declare it to be in the expected state */
  hash_set (vlib_buffer_state_validation_hash, b, expected);
  if (p[0] != expected)
      void cj_stop (void);
      vlib_main_t *vm = &vlib_global_main;
      bi = vlib_get_buffer_index (vm, b);
      clib_mem_set_heap (oldheap);
      clib_warning ("%.6f buffer %llx (%d): %s, not %s",
		    vlib_time_now (vm), bi,
		    p[0] ? "busy" : "free", expected ? "busy" : "free");
  /* publish the update, drop the lock, restore the caller's heap */
  CLIB_MEMORY_BARRIER ();
  *vlib_buffer_state_validation_lock = 0;
  clib_mem_set_heap (oldheap);
/* Debug aid: unconditionally record buffer b's busy/free state in the
   global validation hash. */
vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
  /* validation state lives on its own heap */
  oldheap = clib_mem_set_heap (vlib_buffer_state_heap);

  /* spin until the validation lock is ours */
  while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))

  hash_set (vlib_buffer_state_validation_hash, b, expected);

  /* publish the update, drop the lock, restore the caller's heap */
  CLIB_MEMORY_BARRIER ();
  *vlib_buffer_state_validation_lock = 0;
  clib_mem_set_heap (oldheap);
1140 /** minimum data size of first buffer in a buffer chain */
1141 #define VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE (256)
1144 * @brief compress buffer chain in a way where the first buffer is at least
1145 * VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE long
1147 * @param[in] vm - vlib_main
1148 * @param[in,out] first - first buffer in chain
1149 * @param[in,out] discard_vector - vector of buffer indexes which were removed
1153 vlib_buffer_chain_compress (vlib_main_t * vm,
1154 vlib_buffer_t * first, u32 ** discard_vector)
1156 if (first->current_length >= VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE ||
1157 !(first->flags & VLIB_BUFFER_NEXT_PRESENT))
1159 /* this is already big enough or not a chain */
1162 /* probe free list to find allocated buffer size to avoid overfill */
1163 vlib_buffer_free_list_index_t index;
1164 vlib_buffer_free_list_t *free_list =
1165 vlib_buffer_get_buffer_free_list (vm, first, &index);
1167 u32 want_first_size = clib_min (VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE,
1168 free_list->n_data_bytes -
1169 first->current_data);
1172 vlib_buffer_t *second = vlib_get_buffer (vm, first->next_buffer);
1173 u32 need = want_first_size - first->current_length;
1174 u32 amount_to_copy = clib_min (need, second->current_length);
1175 clib_memcpy_fast (((u8 *) vlib_buffer_get_current (first)) +
1176 first->current_length,
1177 vlib_buffer_get_current (second), amount_to_copy);
1178 first->current_length += amount_to_copy;
1179 second->current_data += amount_to_copy;
1180 second->current_length -= amount_to_copy;
1181 if (first->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
1183 first->total_length_not_including_first_buffer -= amount_to_copy;
1185 if (!second->current_length)
1187 vec_add1 (*discard_vector, first->next_buffer);
1188 if (second->flags & VLIB_BUFFER_NEXT_PRESENT)
1190 first->next_buffer = second->next_buffer;
1194 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1196 second->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1199 while ((first->current_length < want_first_size) &&
1200 (first->flags & VLIB_BUFFER_NEXT_PRESENT));
1204 * @brief linearize buffer chain - the first buffer is filled, if needed,
1205 * buffers are allocated and filled, returns free space in last buffer or
1206 * negative on failure
1208 * @param[in] vm - vlib_main
1209 * @param[in,out] first - first buffer in chain
1212 vlib_buffer_chain_linearize (vlib_main_t * vm, vlib_buffer_t * first)
1214 vlib_buffer_t *b = first;
1215 vlib_buffer_free_list_t *fl =
1216 vlib_buffer_get_free_list (vm, vlib_buffer_get_free_list_index (b));
1217 u32 buf_len = fl->n_data_bytes;
1218 // free buffer chain starting from the second buffer
1219 int free_count = (b->flags & VLIB_BUFFER_NEXT_PRESENT) != 0;
1220 u32 chain_to_free = b->next_buffer;
1222 u32 len = vlib_buffer_length_in_chain (vm, b);
1223 u32 free_len = buf_len - b->current_data - b->current_length;
1224 int alloc_len = clib_max (len - free_len, 0); //use the free len in the first buffer
1225 int n_buffers = (alloc_len + buf_len - 1) / buf_len;
1226 u32 new_buffers[n_buffers];
1228 u32 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
1229 if (n_alloc != n_buffers)
1231 vlib_buffer_free_no_next (vm, new_buffers, n_alloc);
1235 vlib_buffer_t *s = b;
1236 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
1238 s = vlib_get_buffer (vm, s->next_buffer);
1239 int d_free_len = buf_len - b->current_data - b->current_length;
1240 ASSERT (d_free_len >= 0);
1241 // chain buf and split write
1242 u32 copy_len = clib_min (d_free_len, s->current_length);
1243 u8 *d = vlib_buffer_put_uninit (b, copy_len);
1244 clib_memcpy (d, vlib_buffer_get_current (s), copy_len);
1245 int rest = s->current_length - copy_len;
1249 ASSERT (vlib_buffer_get_tail (b) == b->data + buf_len);
1250 ASSERT (n_buffers > 0);
1251 b = vlib_buffer_chain_buffer (vm, b, new_buffers[--n_buffers]);
1252 //make full use of the new buffers
1253 b->current_data = 0;
1254 d = vlib_buffer_put_uninit (b, rest);
1255 clib_memcpy (d, vlib_buffer_get_current (s) + copy_len, rest);
1258 vlib_buffer_free (vm, &chain_to_free, free_count);
1259 b->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1260 if (b == first) /* no buffers addeed */
1261 b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1262 ASSERT (len == vlib_buffer_length_in_chain (vm, first));
1263 ASSERT (n_buffers == 0);
1264 return buf_len - b->current_data - b->current_length;
1267 #endif /* included_vlib_buffer_funcs_h */
1270 * fd.io coding-style-patch-verification: ON
1273 * eval: (c-set-style "gnu")