2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
44 #include <vppinfra/fifo.h>
47 vlib buffer access methods.
/** \brief Translate buffer index into buffer pointer

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffer_index - (u32) buffer index
    @return - (vlib_buffer_t *) buffer pointer
*/
always_inline vlib_buffer_t *
vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
vlib_buffer_main_t *bm = &buffer_main;
/* A buffer index addresses buffer memory in cache-line-sized units:
   pointer = buffer_mem_start + (index << CLIB_LOG2_CACHE_LINE_BYTES). */
uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
ASSERT (offset < bm->buffer_mem_size);
return uword_to_pointer (bm->buffer_mem_start + offset, void *);
/** \brief Translate array of buffer indices into buffer pointers with offset

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32 *) array of buffer indices
    @param b - (void **) array to store buffer pointers
    @param count - (uword) number of elements
    @param offset - (i32) offset applied to each pointer
*/
static_always_inline void
vlib_get_buffers_with_offset (vlib_main_t * vm, u32 * bi, void **b, int count,
#ifdef CLIB_HAVE_VEC256
/* splat base address (+ caller offset) once so it can be added to four
   shifted indices per vector op */
u64x4 off = u64x4_splat (buffer_main.buffer_mem_start + offset);
/* if count is not const, compiler will not unroll while loop
   so we maintain two-in-parallel variant */
/* NOTE(review): the 8-at-a-time loop header appears elided in this
   extraction — the loads below process bi[0..7] per iteration. */
u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
u64x4 b1 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi + 4));
/* shift and add to get vlib_buffer_t pointer */
u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
u64x4_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 4);
/* 4-at-a-time path, per-ISA variants */
#ifdef CLIB_HAVE_VEC256
u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
/* shift and add to get vlib_buffer_t pointer */
u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
#elif defined (CLIB_HAVE_VEC128)
u64x2 off = u64x2_splat (buffer_main.buffer_mem_start + offset);
u32x4 bi4 = u32x4_load_unaligned (bi);
/* widen low two indices to 64-bit lanes */
u64x2 b0 = u32x4_extend_to_u64x2 ((u32x4) bi4);
#if defined (__aarch64__)
u64x2 b1 = u32x4_extend_to_u64x2_high ((u32x4) bi4);
/* no high-half widen on this path: rotate the vector, widen again */
bi4 = u32x4_shuffle (bi4, 2, 3, 0, 1);
u64x2 b1 = u32x4_extend_to_u64x2 ((u32x4) bi4);
u64x2_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
u64x2_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 2);
/* scalar fallback: translate four indices via vlib_get_buffer */
b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
b[1] = ((u8 *) vlib_get_buffer (vm, bi[1])) + offset;
b[2] = ((u8 *) vlib_get_buffer (vm, bi[2])) + offset;
b[3] = ((u8 *) vlib_get_buffer (vm, bi[3])) + offset;
/* remaining singles (tail of the count loop) */
b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
/** \brief Translate array of buffer indices into buffer pointers

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32 *) array of buffer indices
    @param b - (vlib_buffer_t **) array to store buffer pointers
    @param count - (uword) number of elements
*/
static_always_inline void
vlib_get_buffers (vlib_main_t * vm, u32 * bi, vlib_buffer_t ** b, int count)
/* zero offset yields pointers to the buffer headers themselves */
vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
/** \brief Translate buffer pointer into buffer index

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param p - (void *) buffer pointer
    @return - (u32) buffer index
*/
vlib_get_buffer_index (vlib_main_t * vm, void *p)
vlib_buffer_main_t *bm = &buffer_main;
uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
ASSERT (offset < bm->buffer_mem_size);
/* buffer pointers are cache-line aligned within buffer memory; the index
   is the cache-line count from the start of that region (inverse of
   vlib_get_buffer) */
ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
/** \brief Translate array of buffer pointers into buffer indices with offset

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param b - (void **) array of buffer pointers
    @param bi - (u32 *) array to store buffer indices
    @param count - (uword) number of elements
    @param offset - (i32) offset applied to each pointer
*/
static_always_inline void
vlib_get_buffer_indices_with_offset (vlib_main_t * vm, void **b, u32 * bi,
uword count, i32 offset)
#ifdef CLIB_HAVE_VEC256
/* permute mask gathering the low u32 of each u64 lane into the
   lower 128 bits */
u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
/* folding -offset into the splat avoids a per-pointer subtraction */
u64x4 off4 = u64x4_splat (buffer_main.buffer_mem_start - offset);
/* load 4 pointers into 256-bit register */
u64x4 v0 = u64x4_load_unaligned (b);
u64x4 v1 = u64x4_load_unaligned (b + 4);
/* NOTE(review): the base-address subtraction lines appear elided in this
   extraction between the loads and the shifts below. */
v0 >>= CLIB_LOG2_CACHE_LINE_BYTES;
v1 >>= CLIB_LOG2_CACHE_LINE_BYTES;
/* permute 256-bit register so lower u32s of each buffer index are
 * placed into lower 128-bits */
v2 = u32x8_permute ((u32x8) v0, mask);
v3 = u32x8_permute ((u32x8) v1, mask);
/* extract lower 128-bits and save them to the array of buffer indices */
u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
/* equivalent non-vector implementation */
bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
/* remaining singles */
bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
/** \brief Translate array of buffer pointers into buffer indices

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param b - (vlib_buffer_t **) array of buffer pointers
    @param bi - (u32 *) array to store buffer indices
    @param count - (uword) number of elements
*/
static_always_inline void
vlib_get_buffer_indices (vlib_main_t * vm, vlib_buffer_t ** b, u32 * bi,
/* zero offset: indices of the buffer headers themselves */
vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
/** \brief Get next buffer in buffer linklist, or zero for end of list.

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param b - (void *) buffer pointer
    @return - (vlib_buffer_t *) next buffer, or NULL
*/
always_inline vlib_buffer_t *
vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
/* next_buffer is only meaningful when VLIB_BUFFER_NEXT_PRESENT is set */
return (b->flags & VLIB_BUFFER_NEXT_PRESENT
? vlib_get_buffer (vm, b->next_buffer) : 0);
/* Slow path for vlib_buffer_length_in_chain: walks the whole chain
   summing current_length of each buffer. */
uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
vlib_buffer_t * b_first);
/** \brief Get length in bytes of the buffer chain

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param b - (void *) buffer pointer
    @return - (uword) length of buffer chain
*/
vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
uword len = b->current_length;
/* fast path: single-buffer packet */
if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
/* cached total is valid: avoid walking the chain */
if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
return len + b->total_length_not_including_first_buffer;
return vlib_buffer_length_in_chain_slow_path (vm, b);
/** \brief Get length in bytes of the buffer index buffer chain

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @return - (uword) length of buffer chain
*/
vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
/* index variant: resolve the pointer, then reuse the pointer variant */
vlib_buffer_t *b = vlib_get_buffer (vm, bi);
return vlib_buffer_length_in_chain (vm, b);
/** \brief Copy buffer contents to memory

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffer_index - (u32) buffer index
    @param contents - (u8 *) memory, <strong>must be large enough</strong>
    @return - (uword) length of buffer chain
*/
vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
uword content_len = 0;
/* walk the chain, appending each buffer's payload to contents */
b = vlib_get_buffer (vm, buffer_index);
l = b->current_length;
clib_memcpy_fast (contents + content_len, b->data + b->current_data, l);
/* stop at the last buffer of the chain */
if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
buffer_index = b->next_buffer;
/* Physical address of the buffer's data area. */
vlib_buffer_get_pa (vlib_main_t * vm, vlib_buffer_t * b)
return vlib_physmem_get_pa (vm, b->data);
/* Physical address of the current (possibly advanced) data position. */
vlib_buffer_get_current_pa (vlib_main_t * vm, vlib_buffer_t * b)
return vlib_buffer_get_pa (vm, b) + b->current_data;
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type) \
vlib_buffer_t * _b = vlib_get_buffer (vm, bi); \
vlib_prefetch_buffer_header (_b, type); \
/* Debug bookkeeping: allocation state of a buffer index as recorded in
   buffer_known_hash (see vlib_buffer_is_known / set_known_state). */
/* Index is unknown. */
/* Index is known and free/allocated. */
VLIB_BUFFER_KNOWN_FREE,
VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
/* Assert that each of n buffers is in the expected known state
   (debug builds). */
void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
vlib_buffer_known_state_t
/* Look up the recorded free/allocated state of a buffer index.
   Takes the known-hash spinlock, so callable from any thread. */
always_inline vlib_buffer_known_state_t
vlib_buffer_is_known (u32 buffer_index)
vlib_buffer_main_t *bm = &buffer_main;
clib_spinlock_lock (&bm->buffer_known_hash_lockp);
uword *p = hash_get (bm->buffer_known_hash, buffer_index);
clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
/* absent from the hash means the state was never recorded */
return p ? p[0] : VLIB_BUFFER_UNKNOWN;
/* Record the free/allocated state of a buffer index in the known-hash,
   under its spinlock. */
vlib_buffer_set_known_state (u32 buffer_index,
vlib_buffer_known_state_t state)
vlib_buffer_main_t *bm = &buffer_main;
clib_spinlock_lock (&bm->buffer_known_hash_lockp);
hash_set (bm->buffer_known_hash, buffer_index, state);
clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
/* Validates sanity of a single buffer.
   Returns format'ed vector with error message if any. */
u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
/* Round a requested size up to a multiple of sizeof (vlib_buffer_t). */
vlib_buffer_round_size (u32 size)
return round_pow2 (size, sizeof (vlib_buffer_t));
/* Free-list index of a buffer. free_list_index is only meaningful when
   the NON_DEFAULT_FREELIST flag is set; otherwise the buffer implicitly
   belongs to the default list. */
always_inline vlib_buffer_free_list_index_t
vlib_buffer_get_free_list_index (vlib_buffer_t * b)
if (PREDICT_FALSE (b->flags & VLIB_BUFFER_NON_DEFAULT_FREELIST))
return b->free_list_index;
/* Set a buffer's free-list index. A non-zero index is stored and flagged
   NON_DEFAULT_FREELIST; index zero (the default list) just clears
   the flag. */
vlib_buffer_set_free_list_index (vlib_buffer_t * b,
vlib_buffer_free_list_index_t index)
if (PREDICT_FALSE (index))
b->flags |= VLIB_BUFFER_NON_DEFAULT_FREELIST;
b->free_list_index = index;
b->flags &= ~VLIB_BUFFER_NON_DEFAULT_FREELIST;
/** \brief Allocate buffers from specific freelist into supplied array

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u32) number of buffers requested
    @return - (u32) number of buffers actually allocated, may be
    less than the number requested or zero
*/
vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
vlib_buffer_free_list_index_t index)
vlib_buffer_main_t *bm = &buffer_main;
vlib_buffer_free_list_t *fl;
ASSERT (bm->cb.vlib_buffer_fill_free_list_cb);
fl = pool_elt_at_index (vm->buffer_free_list_pool, index);
len = vec_len (fl->buffers);
/* refill via the registered callback when the list cannot satisfy
   the request */
if (PREDICT_FALSE (len < n_buffers))
bm->cb.vlib_buffer_fill_free_list_cb (vm, fl, n_buffers);
if (PREDICT_FALSE ((len = vec_len (fl->buffers)) == 0))
/* even if fill free list didn't manage to refill free list
   we should give what we have */
n_buffers = clib_min (len, n_buffers);
/* following code is intentionally duplicated to allow compiler
   to optimize fast path when n_buffers is constant value */
/* take indices from the tail of the free-list vector */
src = fl->buffers + len - n_buffers;
clib_memcpy_fast (buffers, src, n_buffers * sizeof (u32));
_vec_len (fl->buffers) -= n_buffers;
/* Verify that buffers are known free. */
vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
VLIB_BUFFER_KNOWN_FREE);
src = fl->buffers + len - n_buffers;
clib_memcpy_fast (buffers, src, n_buffers * sizeof (u32));
_vec_len (fl->buffers) -= n_buffers;
/* Verify that buffers are known free. */
vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
VLIB_BUFFER_KNOWN_FREE);
/** \brief Allocate buffers into supplied array

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u32) number of buffers requested
    @return - (u32) number of buffers actually allocated, may be
    less than the number requested or zero
*/
vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
/* convenience wrapper over the default free list */
return vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
/** \brief Allocate buffers into ring

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffers - (u32 * ) buffer index ring
    @param start - (u32) first slot in the ring
    @param ring_size - (u32) ring size
    @param n_buffers - (u32) number of buffers requested
    @return - (u32) number of buffers actually allocated, may be
    less than the number requested or zero
*/
vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
u32 ring_size, u32 n_buffers)
ASSERT (n_buffers <= ring_size);
/* contiguous case: request fits without wrapping */
if (PREDICT_TRUE (start + n_buffers <= ring_size))
return vlib_buffer_alloc (vm, ring + start, n_buffers);
/* wrapped case: fill to end of ring, then continue from slot 0 */
n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
/* only continue if the first segment was fully satisfied */
if (PREDICT_TRUE (n_alloc == ring_size - start))
n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
/** \brief Free buffers
    Frees the entire buffer chain for each buffer

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u32) number of buffers to free
*/
vlib_buffer_free (vlib_main_t * vm,
/* pointer to first buffer */
/* number of buffers to free */
vlib_buffer_main_t *bm = &buffer_main;
ASSERT (bm->cb.vlib_buffer_free_cb);
/* work is delegated to the registered buffer-manager callback */
return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
/** \brief Free buffers, does not free the buffer chain for each buffer

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u32) number of buffers to free
*/
vlib_buffer_free_no_next (vlib_main_t * vm,
/* pointer to first buffer */
/* number of buffers to free */
vlib_buffer_main_t *bm = &buffer_main;
ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
/* like vlib_buffer_free, but the callback ignores next_buffer chains */
return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
/** \brief Free one buffer
    Shorthand to free a single buffer chain.

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffer_index - (u32) buffer index to free
*/
vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
/** \brief Free buffers from ring

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffers - (u32 * ) buffer index ring
    @param start - (u32) first slot in the ring
    @param ring_size - (u32) ring size
    @param n_buffers - (u32) number of buffers
*/
vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
u32 ring_size, u32 n_buffers)
ASSERT (n_buffers <= ring_size);
/* contiguous range: single free call */
if (PREDICT_TRUE (start + n_buffers <= ring_size))
vlib_buffer_free (vm, ring + start, n_buffers);
/* wrapped range: free the tail segment, then the head segment */
vlib_buffer_free (vm, ring + start, ring_size - start);
vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
/** \brief Free buffers from ring without freeing tail buffers

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffers - (u32 * ) buffer index ring
    @param start - (u32) first slot in the ring
    @param ring_size - (u32) ring size
    @param n_buffers - (u32) number of buffers
*/
vlib_buffer_free_from_ring_no_next (vlib_main_t * vm, u32 * ring, u32 start,
u32 ring_size, u32 n_buffers)
ASSERT (n_buffers <= ring_size);
/* contiguous range: single free call */
if (PREDICT_TRUE (start + n_buffers <= ring_size))
vlib_buffer_free_no_next (vm, ring + start, n_buffers);
/* wrapped range: free the tail segment, then the head segment */
vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
/* Add/delete buffer free lists. */
vlib_buffer_free_list_index_t vlib_buffer_create_free_list (vlib_main_t * vm,
/* Delete a free list via the registered buffer-manager callback. */
vlib_buffer_delete_free_list (vlib_main_t * vm,
vlib_buffer_free_list_index_t free_list_index)
vlib_buffer_main_t *bm = &buffer_main;
ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
/* Make sure we have at least given number of unaligned buffers. */
void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
vlib_buffer_free_list_t *
uword n_unaligned_buffers);
/* Return the free list a buffer belongs to, also writing the list index
   through *index. */
always_inline vlib_buffer_free_list_t *
vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
vlib_buffer_free_list_index_t * index)
vlib_buffer_free_list_index_t i;
*index = i = vlib_buffer_get_free_list_index (b);
return pool_elt_at_index (vm->buffer_free_list_pool, i);
/* Look up a free list by index, sanity-checking that the pool element
   agrees with the requested index. */
always_inline vlib_buffer_free_list_t *
vlib_buffer_get_free_list (vlib_main_t * vm,
vlib_buffer_free_list_index_t free_list_index)
vlib_buffer_free_list_t *f;
f = pool_elt_at_index (vm->buffer_free_list_pool, free_list_index);
/* Sanity: indices must match. */
ASSERT (f->index == free_list_index);
/* Data bytes available in each buffer of the given free list. */
vlib_buffer_free_list_buffer_size (vlib_main_t * vm,
vlib_buffer_free_list_index_t index)
vlib_buffer_free_list_t *f = vlib_buffer_get_free_list (vm, index);
return f->n_data_bytes;
/* Append given data to end of buffer, possibly allocating new buffers. */
int vlib_buffer_add_data (vlib_main_t * vm,
vlib_buffer_free_list_index_t free_list_index,
u32 * buffer_index, void *data, u32 n_data_bytes);
/* duplicate all buffers in chain */
always_inline vlib_buffer_t *
vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
vlib_buffer_t *s, *d, *fd;
uword n_alloc, n_buffers = 1;
/* only chain-related flags are propagated to the copy */
u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
/* count buffers in the source chain */
while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
s = vlib_get_buffer (vm, s->next_buffer);
u32 new_buffers[n_buffers];
n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
/* No guarantee that we'll get all the buffers we asked for */
if (PREDICT_FALSE (n_alloc < n_buffers))
/* partial allocation: give back what we got and fail */
vlib_buffer_free (vm, new_buffers, n_alloc);
/* first segment: copy metadata, opaque fields and payload */
fd = d = vlib_get_buffer (vm, new_buffers[0]);
d->current_data = s->current_data;
d->current_length = s->current_length;
d->flags = s->flags & flag_mask;
d->total_length_not_including_first_buffer =
s->total_length_not_including_first_buffer;
clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
clib_memcpy_fast (vlib_buffer_get_current (d),
vlib_buffer_get_current (s), s->current_length);
/* remaining segments: link and copy each in turn */
for (i = 1; i < n_buffers; i++)
d->next_buffer = new_buffers[i];
s = vlib_get_buffer (vm, s->next_buffer);
d = vlib_get_buffer (vm, new_buffers[i]);
d->current_data = s->current_data;
d->current_length = s->current_length;
clib_memcpy_fast (vlib_buffer_get_current (d),
vlib_buffer_get_current (s), s->current_length);
d->flags = s->flags & flag_mask;
/** \brief Create a maximum of 256 clones of buffer and store them
    in the supplied array

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param src_buffer - (u32) source buffer index
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u16) number of buffer clones requested (<=256)
    @param head_end_offset - (u16) offset relative to current position
    where packet head ends
    @return - (u16) number of buffers actually cloned, may be
    less than the number requested or zero
*/
vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
u16 n_buffers, u16 head_end_offset)
vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
/* source must not already be reference-shared */
ASSERT (s->n_add_refs == 0);
ASSERT (n_buffers <= 256);
/* head too small to be worth sharing: fall back to full copies */
if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
buffers[0] = src_buffer;
for (i = 1; i < n_buffers; i++)
d = vlib_buffer_copy (vm, s);
buffers[i] = vlib_get_buffer_index (vm, d);
/* single "clone": hand back the source itself */
if (PREDICT_FALSE (n_buffers == 1))
buffers[0] = src_buffer;
/* allocate private head buffers from the same free list as the source */
n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
vlib_buffer_get_free_list_index
/* build each clone: private head chained to the shared tail */
for (i = 0; i < n_buffers; i++)
vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
d->current_data = s->current_data;
d->current_length = head_end_offset;
vlib_buffer_set_free_list_index (d,
vlib_buffer_get_free_list_index (s));
d->total_length_not_including_first_buffer = s->current_length -
if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
d->total_length_not_including_first_buffer +=
s->total_length_not_including_first_buffer;
/* clone always chains to the shared source as its next buffer */
d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
clib_memcpy_fast (vlib_buffer_get_current (d),
vlib_buffer_get_current (s), head_end_offset);
d->next_buffer = src_buffer;
/* source now begins where the shared tail starts; bump the reference
   count on every buffer of the shared chain */
vlib_buffer_advance (s, head_end_offset);
s->n_add_refs = n_buffers - 1;
while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
s = vlib_get_buffer (vm, s->next_buffer);
s->n_add_refs = n_buffers - 1;
/** \brief Create multiple clones of buffer and store them
    in the supplied array

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param src_buffer - (u32) source buffer index
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u16) number of buffer clones requested (<=256)
    @param head_end_offset - (u16) offset relative to current position
    where packet head ends
    @return - (u16) number of buffers actually cloned, may be
    less than the number requested or zero
*/
vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
u16 n_buffers, u16 head_end_offset)
vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
/* vlib_buffer_clone_256 handles at most 256 clones per source; for
   bigger requests, clone each 256-batch from a fresh copy */
while (n_buffers > 256)
copy = vlib_buffer_copy (vm, s);
n_cloned += vlib_buffer_clone_256 (vm,
vlib_get_buffer_index (vm, copy),
(buffers + n_cloned),
256, head_end_offset);
/* final (<=256) batch clones the original source buffer */
n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
n_buffers, head_end_offset);
/** \brief Attach cloned tail to the buffer

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param head - (vlib_buffer_t *) head buffer
    @param tail - (vlib_buffer_t *) tail buffer to clone and attach to head
*/
vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
vlib_buffer_t * tail)
/* head must currently be chain-less, and both buffers must come from
   the same free list */
ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
ASSERT (vlib_buffer_get_free_list_index (head) ==
vlib_buffer_get_free_list_index (tail));
head->flags |= VLIB_BUFFER_NEXT_PRESENT;
head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
/* head's total-length validity mirrors the tail's */
head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
head->next_buffer = vlib_get_buffer_index (vm, tail);
head->total_length_not_including_first_buffer = tail->current_length +
tail->total_length_not_including_first_buffer;
/* bump the reference count on every buffer of the shared tail chain */
clib_atomic_add_fetch (&tail->n_add_refs, 1);
if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
tail = vlib_get_buffer (vm, tail->next_buffer);
/* Initializes the buffer as an empty packet with no chained buffers. */
vlib_buffer_chain_init (vlib_buffer_t * first)
first->total_length_not_including_first_buffer = 0;
first->current_length = 0;
first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
/* zero-length single buffer: cached total is trivially valid */
first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
/* The provided next_bi buffer index is appended to the end of the packet. */
always_inline vlib_buffer_t *
vlib_buffer_chain_buffer (vlib_main_t * vm, vlib_buffer_t * last, u32 next_bi)
vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
last->next_buffer = next_bi;
last->flags |= VLIB_BUFFER_NEXT_PRESENT;
/* reset the appended buffer so it becomes the new empty tail */
next_buffer->current_length = 0;
next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
/* Increases or decreases the packet length.
 * It does not allocate or deallocate new buffers.
 * Therefore, the added length must be compatible
 * with the last buffer. */
vlib_buffer_chain_increase_length (vlib_buffer_t * first,
vlib_buffer_t * last, i32 len)
last->current_length += len;
/* keep the cached chain total in sync when last is not the head */
first->total_length_not_including_first_buffer += len;
/* Copy data to the end of the packet and increases its length.
 * It does not allocate new buffers.
 * Returns the number of copied bytes. */
vlib_buffer_chain_append_data (vlib_main_t * vm,
vlib_buffer_free_list_index_t free_list_index,
vlib_buffer_t * first,
vlib_buffer_t * last, void *data, u16 data_len)
vlib_buffer_free_list_buffer_size (vm, free_list_index);
ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
/* copy only as much as still fits in the last buffer */
u16 len = clib_min (data_len,
n_buffer_bytes - last->current_length -
clib_memcpy_fast (vlib_buffer_get_current (last) + last->current_length,
vlib_buffer_chain_increase_length (first, last, len);
/* Copy data to the end of the packet and increases its length.
 * Allocates additional buffers from the free list if necessary.
 * Returns the number of copied bytes.
 * 'last' value is modified whenever new buffers are allocated and
 * chained and points to the last buffer in the chain. */
vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
vlib_buffer_free_list_index_t
vlib_buffer_t * first,
vlib_buffer_t ** last, void *data,
/* Debug helper: verify internal consistency of a buffer chain. */
void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
/* format functions for buffer metadata and contents */
format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
format_vlib_buffer_contents;
/* Template for pre-built packets (see vlib_packet_template_init /
   vlib_packet_template_get_packet). */
/* Vector of packet data. */
/* Number of buffers to allocate in each call to allocator. */
u32 min_n_buffers_each_alloc;
/* Buffer free list for this template. */
vlib_buffer_free_list_index_t free_list_index;
} vlib_packet_template_t;
/* Slow-path helper used by vlib_packet_template_get_packet. */
void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
vlib_packet_template_t * t);
/* Initialize a packet template from the given packet data. */
void vlib_packet_template_init (vlib_main_t * vm,
vlib_packet_template_t * t,
uword n_packet_data_bytes,
uword min_n_buffers_each_alloc,
/* Get a buffer pre-filled with the template's packet data. */
void *vlib_packet_template_get_packet (vlib_main_t * vm,
vlib_packet_template_t * t,
/* Release the template's packet-data vector. */
vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
vec_free (t->packet_data);
/* Set a buffer quickly into "uninitialized" state. We want this to
   be extremely cheap and arrange for all fields that need to be
   initialized to be in the first 128 bits of the buffer. */
vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
vlib_buffer_free_list_t * fl)
vlib_buffer_t *src = &fl->buffer_init_template;
/* Make sure vlib_buffer_t is cacheline aligned and sized */
ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
CLIB_CACHE_LINE_BYTES);
ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
CLIB_CACHE_LINE_BYTES * 2);
/* Make sure buffer template is sane. */
ASSERT (fl->index == vlib_buffer_get_free_list_index (src));
/* bulk-copy the template region of the buffer header */
clib_memcpy_fast (STRUCT_MARK_PTR (dst, template_start),
STRUCT_MARK_PTR (src, template_start),
STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
/* Not in the first 16 octets. */
dst->n_add_refs = src->n_add_refs;
vlib_buffer_set_free_list_index (dst, fl->index);
/* Make sure it really worked. */
#define _(f) ASSERT (dst->f == src->f);
/* ASSERT (dst->total_length_not_including_first_buffer == 0); */
/* total_length_not_including_first_buffer is not in the template anymore
 * so it may actually not be zeroed for some buffers. One option is to
 * uncomment the line below (comes at a cost), the other, is to just not
 * rely on it. */
/* dst->total_length_not_including_first_buffer = 0; */
ASSERT (dst->n_add_refs == 0);
/* Return a buffer to free list f, optionally reinitializing it from the
   list's template; spills excess cached buffers to the shared pool. */
vlib_buffer_add_to_free_list (vlib_main_t * vm,
vlib_buffer_free_list_t * f,
u32 buffer_index, u8 do_init)
vlib_buffer_pool_t *bp = vlib_buffer_pool_get (f->buffer_pool_index);
b = vlib_get_buffer (vm, buffer_index);
if (PREDICT_TRUE (do_init))
vlib_buffer_init_for_free_list (b, f);
vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
/* cap the per-list cache: return one frame's worth (the oldest entries)
   to the shared buffer pool under its lock */
if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
clib_spinlock_lock (&bp->lock);
/* keep last stored buffers, as they are more likely hot in the cache */
vec_add_aligned (bp->buffers, f->buffers, VLIB_FRAME_SIZE,
CLIB_CACHE_LINE_BYTES);
vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
f->n_alloc -= VLIB_FRAME_SIZE;
clib_spinlock_unlock (&bp->lock);
/* Shared state for the buffer busy/free validation debug facility. */
extern u32 *vlib_buffer_state_validation_lock;
extern uword *vlib_buffer_state_validation_hash;
extern void *vlib_buffer_state_heap;
/* Check, under the validation spinlock, that buffer b is in the expected
   busy/free state; records it if unknown, warns on mismatch. */
vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
/* spin until we own the validation lock */
while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))
p = hash_get (vlib_buffer_state_validation_hash, b);
/* If we don't know about b, declare it to be in the expected state */
hash_set (vlib_buffer_state_validation_hash, b, expected);
if (p[0] != expected)
void cj_stop (void);
vlib_main_t *vm = &vlib_global_main;
bi = vlib_get_buffer_index (vm, b);
clib_mem_set_heap (oldheap);
/* NOTE(review): the format string has five conversions but fewer
   arguments are visible here — lines appear elided; confirm against
   the upstream source. */
clib_warning ("%.6f buffer %llx (%d): %s, not %s",
vlib_time_now (vm), bi,
p[0] ? "busy" : "free", expected ? "busy" : "free");
CLIB_MEMORY_BARRIER ();
*vlib_buffer_state_validation_lock = 0;
clib_mem_set_heap (oldheap);
/* Unconditionally record buffer b's expected busy/free state in the
   validation hash, under the validation spinlock. */
vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
/* spin until we own the validation lock */
while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))
hash_set (vlib_buffer_state_validation_hash, b, expected);
CLIB_MEMORY_BARRIER ();
*vlib_buffer_state_validation_lock = 0;
clib_mem_set_heap (oldheap);
/** minimum data size (in bytes) of the first buffer in a buffer chain */
#define VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE (256)
 * @brief compress a buffer chain so that the first buffer holds at least
 * VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE bytes of data (or the whole chain)
 *
 * @param[in] vm - vlib_main
 * @param[in,out] first - first buffer in chain
 * @param[in,out] discard_vector - vector to which the indices of buffers
 * emptied and unlinked from the chain are appended
1146 vlib_buffer_chain_compress (vlib_main_t * vm,
1147 vlib_buffer_t * first, u32 ** discard_vector)
1149 if (first->current_length >= VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE ||
1150 !(first->flags & VLIB_BUFFER_NEXT_PRESENT))
1152 /* this is already big enough or not a chain */
1155 /* probe free list to find allocated buffer size to avoid overfill */
1156 vlib_buffer_free_list_index_t index;
1157 vlib_buffer_free_list_t *free_list =
1158 vlib_buffer_get_buffer_free_list (vm, first, &index);
1160 u32 want_first_size = clib_min (VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE,
1161 free_list->n_data_bytes -
1162 first->current_data);
1165 vlib_buffer_t *second = vlib_get_buffer (vm, first->next_buffer);
1166 u32 need = want_first_size - first->current_length;
1167 u32 amount_to_copy = clib_min (need, second->current_length);
1168 clib_memcpy_fast (((u8 *) vlib_buffer_get_current (first)) +
1169 first->current_length,
1170 vlib_buffer_get_current (second), amount_to_copy);
1171 first->current_length += amount_to_copy;
1172 second->current_data += amount_to_copy;
1173 second->current_length -= amount_to_copy;
1174 if (first->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
1176 first->total_length_not_including_first_buffer -= amount_to_copy;
1178 if (!second->current_length)
1180 vec_add1 (*discard_vector, first->next_buffer);
1181 if (second->flags & VLIB_BUFFER_NEXT_PRESENT)
1183 first->next_buffer = second->next_buffer;
1187 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1189 second->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1192 while ((first->current_length < want_first_size) &&
1193 (first->flags & VLIB_BUFFER_NEXT_PRESENT));
 * @brief linearize a buffer chain - the first buffer is filled and, if
 * needed, further buffers are allocated and filled
 *
 * @param[in] vm - vlib_main
 * @param[in,out] first - first buffer in chain
 * @return free space (bytes) remaining in the last buffer, or a negative
 * value if buffer allocation fails
1205 vlib_buffer_chain_linearize (vlib_main_t * vm, vlib_buffer_t * first)
1207 vlib_buffer_t *b = first;
1208 vlib_buffer_free_list_t *fl =
1209 vlib_buffer_get_free_list (vm, vlib_buffer_get_free_list_index (b));
1210 u32 buf_len = fl->n_data_bytes;
1211 // free buffer chain starting from the second buffer
1212 int free_count = (b->flags & VLIB_BUFFER_NEXT_PRESENT) != 0;
1213 u32 chain_to_free = b->next_buffer;
1215 u32 len = vlib_buffer_length_in_chain (vm, b);
1216 u32 free_len = buf_len - b->current_data - b->current_length;
1217 int alloc_len = clib_max (len - free_len, 0); //use the free len in the first buffer
1218 int n_buffers = (alloc_len + buf_len - 1) / buf_len;
1219 u32 new_buffers[n_buffers];
1221 u32 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
1222 if (n_alloc != n_buffers)
1224 vlib_buffer_free_no_next (vm, new_buffers, n_alloc);
1228 vlib_buffer_t *s = b;
1229 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
1231 s = vlib_get_buffer (vm, s->next_buffer);
1232 int d_free_len = buf_len - b->current_data - b->current_length;
1233 ASSERT (d_free_len >= 0);
1234 // chain buf and split write
1235 u32 copy_len = clib_min (d_free_len, s->current_length);
1236 u8 *d = vlib_buffer_put_uninit (b, copy_len);
1237 clib_memcpy (d, vlib_buffer_get_current (s), copy_len);
1238 int rest = s->current_length - copy_len;
1242 ASSERT (vlib_buffer_get_tail (b) == b->data + buf_len);
1243 ASSERT (n_buffers > 0);
1244 b = vlib_buffer_chain_buffer (vm, b, new_buffers[--n_buffers]);
1245 //make full use of the new buffers
1246 b->current_data = 0;
1247 d = vlib_buffer_put_uninit (b, rest);
1248 clib_memcpy (d, vlib_buffer_get_current (s) + copy_len, rest);
1251 vlib_buffer_free (vm, &chain_to_free, free_count);
1252 b->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1253 if (b == first) /* no buffers addeed */
1254 b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1255 ASSERT (len == vlib_buffer_length_in_chain (vm, first));
1256 ASSERT (n_buffers == 0);
1257 return buf_len - b->current_data - b->current_length;
1260 #endif /* included_vlib_buffer_funcs_h */
1263 * fd.io coding-style-patch-verification: ON
1266 * eval: (c-set-style "gnu")