2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
44 #include <vppinfra/fifo.h>
47 vlib buffer access methods.
51 /** \brief Translate buffer index into buffer pointer
53 @param vm - (vlib_main_t *) vlib main data structure pointer
54 @param buffer_index - (u32) buffer index
55 @return - (vlib_buffer_t *) buffer pointer
57 always_inline vlib_buffer_t *
58 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
60 vlib_buffer_main_t *bm = &buffer_main;
/* a buffer index is a cache-line-sized slot number within the global
   buffer memory region, so byte offset = index << log2(cache line) */
61 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
62 ASSERT (offset < bm->buffer_mem_size);
64 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
67 static_always_inline void
68 vlib_buffer_copy_indices (u32 * dst, u32 * src, u32 n_indices)
/* Bulk-copy n_indices buffer indices from src to dst. */
70 clib_memcpy_fast (dst, src, n_indices * sizeof (u32));
73 static_always_inline void
74 vlib_buffer_copy_template (vlib_buffer_t * b, vlib_buffer_t * bt)
/* Stamp buffer metadata template 'bt' into 'b'; only the leading
   [0, template_end) region of vlib_buffer_t is copied. */
76 clib_memcpy_fast (b, bt, STRUCT_OFFSET_OF (vlib_buffer_t, template_end));
79 /** \brief Translate array of buffer indices into buffer pointers with offset
81 @param vm - (vlib_main_t *) vlib main data structure pointer
82 @param bi - (u32 *) array of buffer indices
83 @param b - (void **) array to store buffer pointers
84 @param count - (uword) number of elements
85 @param offset - (i32) offset applied to each pointer
87 static_always_inline void
88 vlib_get_buffers_with_offset (vlib_main_t * vm, u32 * bi, void **b, int count,
91 #ifdef CLIB_HAVE_VEC256
92 u64x4 off = u64x4_splat (buffer_main.buffer_mem_start + offset);
93 /* if count is not const, compiler will not unroll while loop
94 so we maintain two-in-parallel variant */
/* 8-wide path: widen two groups of four u32 indices to u64 lanes */
97 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
98 u64x4 b1 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi + 4));
99 /* shift and add to get vlib_buffer_t pointer */
100 u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
101 u64x4_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 4);
/* 4-wide path: same index-to-pointer translation, one group of four */
109 #ifdef CLIB_HAVE_VEC256
110 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
111 /* shift and add to get vlib_buffer_t pointer */
112 u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
113 #elif defined (CLIB_HAVE_VEC128)
114 u64x2 off = u64x2_splat (buffer_main.buffer_mem_start + offset);
115 u32x4 bi4 = u32x4_load_unaligned (bi);
116 u64x2 b0 = u32x4_extend_to_u64x2 ((u32x4) bi4);
117 #if defined (__aarch64__)
118 u64x2 b1 = u32x4_extend_to_u64x2_high ((u32x4) bi4);
/* non-aarch64: rotate the high pair into the low lanes, then widen */
120 bi4 = u32x4_shuffle (bi4, 2, 3, 0, 1);
121 u64x2 b1 = u32x4_extend_to_u64x2 ((u32x4) bi4);
123 u64x2_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
124 u64x2_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 2);
/* scalar fallback: translate four indices at a time */
126 b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
127 b[1] = ((u8 *) vlib_get_buffer (vm, bi[1])) + offset;
128 b[2] = ((u8 *) vlib_get_buffer (vm, bi[2])) + offset;
129 b[3] = ((u8 *) vlib_get_buffer (vm, bi[3])) + offset;
/* remaining elements, one at a time */
137 b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
144 /** \brief Translate array of buffer indices into buffer pointers
146 @param vm - (vlib_main_t *) vlib main data structure pointer
147 @param bi - (u32 *) array of buffer indices
148 @param b - (vlib_buffer_t **) array to store buffer pointers
149 @param count - (uword) number of elements
152 static_always_inline void
153 vlib_get_buffers (vlib_main_t * vm, u32 * bi, vlib_buffer_t ** b, int count)
/* Zero-offset convenience wrapper around vlib_get_buffers_with_offset. */
155 vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
158 /** \brief Translate buffer pointer into buffer index
160 @param vm - (vlib_main_t *) vlib main data structure pointer
161 @param p - (void *) buffer pointer
162 @return - (u32) buffer index
166 vlib_get_buffer_index (vlib_main_t * vm, void *p)
168 vlib_buffer_main_t *bm = &buffer_main;
169 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
/* p must lie inside the buffer memory region and be cache-line
   aligned, otherwise it cannot point at a valid vlib_buffer_t */
170 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
171 ASSERT (offset < bm->buffer_mem_size);
172 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
173 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
176 /** \brief Translate array of buffer pointers into buffer indices with offset
178 @param vm - (vlib_main_t *) vlib main data structure pointer
179 @param b - (void **) array of buffer pointers
180 @param bi - (u32 *) array to store buffer indices
181 @param count - (uword) number of elements
182 @param offset - (i32) offset applied to each pointer
184 static_always_inline void
185 vlib_get_buffer_indices_with_offset (vlib_main_t * vm, void **b, u32 * bi,
186 uword count, i32 offset)
188 #ifdef CLIB_HAVE_VEC256
189 u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
/* subtracting offset here lets the loop below turn each (pointer +
   offset) back into a buffer-memory offset with one vector subtract */
190 u64x4 off4 = u64x4_splat (buffer_main.buffer_mem_start - offset);
194 /* load 4 pointers into 256-bit register */
195 u64x4 v0 = u64x4_load_unaligned (b);
196 u64x4 v1 = u64x4_load_unaligned (b + 4);
202 v0 >>= CLIB_LOG2_CACHE_LINE_BYTES;
203 v1 >>= CLIB_LOG2_CACHE_LINE_BYTES;
205 /* permute 256-bit register so lower u32s of each buffer index are
206 * placed into lower 128-bits */
207 v2 = u32x8_permute ((u32x8) v0, mask);
208 v3 = u32x8_permute ((u32x8) v1, mask);
210 /* extract lower 128-bits and save them to the array of buffer indices */
211 u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
212 u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
220 /* equivalent non-vector implementation */
221 bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
222 bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
223 bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
224 bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
/* remaining elements, one at a time */
231 bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
238 /** \brief Translate array of buffer pointers into buffer indices
240 @param vm - (vlib_main_t *) vlib main data structure pointer
241 @param b - (vlib_buffer_t **) array of buffer pointers
242 @param bi - (u32 *) array to store buffer indices
243 @param count - (uword) number of elements
245 static_always_inline void
246 vlib_get_buffer_indices (vlib_main_t * vm, vlib_buffer_t ** b, u32 * bi,
/* Zero-offset convenience wrapper around
   vlib_get_buffer_indices_with_offset. */
249 vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
252 /** \brief Get next buffer in buffer linklist, or zero for end of list.
254 @param vm - (vlib_main_t *) vlib main data structure pointer
255 @param b - (void *) buffer pointer
256 @return - (vlib_buffer_t *) next buffer, or NULL
258 always_inline vlib_buffer_t *
259 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
/* next_buffer is only meaningful when VLIB_BUFFER_NEXT_PRESENT is set */
261 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
262 ? vlib_get_buffer (vm, b->next_buffer) : 0);
265 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
266 vlib_buffer_t * b_first);
268 /** \brief Get length in bytes of the buffer chain
270 @param vm - (vlib_main_t *) vlib main data structure pointer
271 @param b - (void *) buffer pointer
272 @return - (uword) length of buffer chain
275 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
277 uword len = b->current_length;
/* single buffer: its own length is the chain length */
279 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
/* chained with a valid cached total: avoid walking the chain */
282 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
283 return len + b->total_length_not_including_first_buffer;
/* otherwise the slow path walks the whole chain */
285 return vlib_buffer_length_in_chain_slow_path (vm, b);
288 /** \brief Get length in bytes of the buffer index buffer chain
290 @param vm - (vlib_main_t *) vlib main data structure pointer
291 @param bi - (u32) buffer index
292 @return - (uword) length of buffer chain
295 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
/* Index-based convenience wrapper over vlib_buffer_length_in_chain. */
297 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
298 return vlib_buffer_length_in_chain (vm, b);
301 /** \brief Copy buffer contents to memory
303 @param vm - (vlib_main_t *) vlib main data structure pointer
304 @param buffer_index - (u32) buffer index
305 @param contents - (u8 *) memory, <strong>must be large enough</strong>
306 @return - (uword) length of buffer chain
309 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
311 uword content_len = 0;
/* walk the chain, appending each buffer's payload to 'contents';
   the caller must size 'contents' for the whole chain */
317 b = vlib_get_buffer (vm, buffer_index);
318 l = b->current_length;
319 clib_memcpy_fast (contents + content_len, b->data + b->current_data, l);
/* stop at the last buffer of the chain */
321 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
323 buffer_index = b->next_buffer;
330 vlib_buffer_get_pa (vlib_main_t * vm, vlib_buffer_t * b)
/* Physical address of the buffer's data area. */
332 return vlib_physmem_get_pa (vm, b->data);
336 vlib_buffer_get_current_pa (vlib_main_t * vm, vlib_buffer_t * b)
/* Physical address of the current payload position (data + current_data). */
338 return vlib_buffer_get_pa (vm, b) + b->current_data;
341 /** \brief Prefetch buffer metadata by buffer index
342 The first 64 bytes of buffer contains most header information
344 @param vm - (vlib_main_t *) vlib main data structure pointer
345 @param bi - (u32) buffer index
346 @param type - LOAD, STORE. In most cases, STORE is the right answer
348 /* Prefetch buffer header given index. */
/* NOTE: 'type' must be the bare LOAD or STORE token, forwarded to
   vlib_prefetch_buffer_header. */
349 #define vlib_prefetch_buffer_with_index(vm,bi,type) \
351 vlib_buffer_t * _b = vlib_get_buffer (vm, bi); \
352 vlib_prefetch_buffer_header (_b, type); \
357 /* Index is unknown. */
/* States tracked per buffer index in the known-state hash; used by the
   buffer allocation/free validation machinery (debug aid). */
360 /* Index is known and free/allocated. */
361 VLIB_BUFFER_KNOWN_FREE,
362 VLIB_BUFFER_KNOWN_ALLOCATED,
363 } vlib_buffer_known_state_t;
365 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
367 vlib_buffer_known_state_t
/* Return the recorded known state of a buffer index, or UNKNOWN when the
   index has never been recorded. */
370 always_inline vlib_buffer_known_state_t
371 vlib_buffer_is_known (u32 buffer_index)
373 vlib_buffer_main_t *bm = &buffer_main;
/* hash lookups are protected by the known-state spinlock */
375 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
376 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
377 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
378 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
/* Record the known state (free/allocated) of a buffer index, under the
   known-state spinlock. */
382 vlib_buffer_set_known_state (u32 buffer_index,
383 vlib_buffer_known_state_t state)
385 vlib_buffer_main_t *bm = &buffer_main;
387 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
388 hash_set (bm->buffer_known_hash, buffer_index, state);
389 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
392 /* Validates sanity of a single buffer.
393 Returns format'ed vector with error message if any. */
394 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
398 vlib_buffer_round_size (u32 size)
/* Round 'size' up to a multiple of sizeof (vlib_buffer_t). */
400 return round_pow2 (size, sizeof (vlib_buffer_t));
403 /** \brief Allocate buffers from specific freelist into supplied array
405 @param vm - (vlib_main_t *) vlib main data structure pointer
406 @param buffers - (u32 * ) buffer index array
407 @param n_buffers - (u32) number of buffers requested
408 @return - (u32) number of buffers actually allocated, may be
409 less than the number requested or zero
412 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
415 vlib_buffer_free_list_index_t index)
417 vlib_buffer_main_t *bm = &buffer_main;
418 vlib_buffer_free_list_t *fl;
422 ASSERT (bm->cb.vlib_buffer_fill_free_list_cb);
424 fl = pool_elt_at_index (vm->buffer_free_list_pool, index);
426 len = vec_len (fl->buffers);
/* not enough cached indices: ask the registered callback to refill */
428 if (PREDICT_FALSE (len < n_buffers))
430 bm->cb.vlib_buffer_fill_free_list_cb (vm, fl, n_buffers);
431 if (PREDICT_FALSE ((len = vec_len (fl->buffers)) == 0))
434 /* even if fill free list didn't manage to refill free list
435 we should give what we have */
436 n_buffers = clib_min (len, n_buffers);
438 /* following code is intentionally duplicated to allow compiler
439 to optimize fast path when n_buffers is constant value */
/* take indices from the tail of the free-list vector */
440 src = fl->buffers + len - n_buffers;
441 vlib_buffer_copy_indices (buffers, src, n_buffers);
442 _vec_len (fl->buffers) -= n_buffers;
444 /* Verify that buffers are known free. */
445 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
446 VLIB_BUFFER_KNOWN_FREE);
451 src = fl->buffers + len - n_buffers;
452 vlib_buffer_copy_indices (buffers, src, n_buffers);
453 _vec_len (fl->buffers) -= n_buffers;
455 /* Verify that buffers are known free. */
456 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
457 VLIB_BUFFER_KNOWN_FREE);
462 /** \brief Allocate buffers into supplied array
464 @param vm - (vlib_main_t *) vlib main data structure pointer
465 @param buffers - (u32 * ) buffer index array
466 @param n_buffers - (u32) number of buffers requested
467 @return - (u32) number of buffers actually allocated, may be
468 less than the number requested or zero
471 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
/* Allocate from the default free list. */
473 return vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
474 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
477 /** \brief Allocate buffers into ring
479 @param vm - (vlib_main_t *) vlib main data structure pointer
480 @param buffers - (u32 * ) buffer index ring
481 @param start - (u32) first slot in the ring
482 @param ring_size - (u32) ring size
483 @param n_buffers - (u32) number of buffers requested
484 @return - (u32) number of buffers actually allocated, may be
485 less than the number requested or zero
488 vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
489 u32 ring_size, u32 n_buffers)
493 ASSERT (n_buffers <= ring_size);
/* fast path: request does not wrap around the ring */
495 if (PREDICT_TRUE (start + n_buffers <= ring_size))
496 return vlib_buffer_alloc (vm, ring + start, n_buffers);
/* wrap: fill the tail segment first; only continue with the head
   segment if the tail allocation was fully satisfied */
498 n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
500 if (PREDICT_TRUE (n_alloc == ring_size - start))
501 n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
506 /** \brief Free buffers
507 Frees the entire buffer chain for each buffer
509 @param vm - (vlib_main_t *) vlib main data structure pointer
510 @param buffers - (u32 * ) buffer index array
511 @param n_buffers - (u32) number of buffers to free
515 vlib_buffer_free (vlib_main_t * vm,
516 /* pointer to first buffer */
518 /* number of buffers to free */
521 vlib_buffer_main_t *bm = &buffer_main;
/* freeing is delegated to the registered buffer-manager callback */
523 ASSERT (bm->cb.vlib_buffer_free_cb);
525 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
528 /** \brief Free buffers, does not free the buffer chain for each buffer
530 @param vm - (vlib_main_t *) vlib main data structure pointer
531 @param buffers - (u32 * ) buffer index array
532 @param n_buffers - (u32) number of buffers to free
536 vlib_buffer_free_no_next (vlib_main_t * vm,
537 /* pointer to first buffer */
539 /* number of buffers to free */
542 vlib_buffer_main_t *bm = &buffer_main;
/* freeing is delegated to the registered buffer-manager callback */
544 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
546 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
549 /** \brief Free one buffer
550 Shorthand to free a single buffer chain.
552 @param vm - (vlib_main_t *) vlib main data structure pointer
553 @param buffer_index - (u32) buffer index to free
556 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
/* frees the whole chain headed by buffer_index */
558 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
561 /** \brief Free buffers from ring
563 @param vm - (vlib_main_t *) vlib main data structure pointer
564 @param buffers - (u32 * ) buffer index ring
565 @param start - (u32) first slot in the ring
566 @param ring_size - (u32) ring size
567 @param n_buffers - (u32) number of buffers
570 vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
571 u32 ring_size, u32 n_buffers)
573 ASSERT (n_buffers <= ring_size);
/* contiguous range: single free call suffices */
575 if (PREDICT_TRUE (start + n_buffers <= ring_size))
577 vlib_buffer_free (vm, ring + start, n_buffers);
/* wrapped range: free tail segment, then the head segment */
581 vlib_buffer_free (vm, ring + start, ring_size - start);
582 vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
586 /** \brief Free buffers from ring without freeing tail buffers
588 @param vm - (vlib_main_t *) vlib main data structure pointer
589 @param buffers - (u32 * ) buffer index ring
590 @param start - (u32) first slot in the ring
591 @param ring_size - (u32) ring size
592 @param n_buffers - (u32) number of buffers
595 vlib_buffer_free_from_ring_no_next (vlib_main_t * vm, u32 * ring, u32 start,
596 u32 ring_size, u32 n_buffers)
598 ASSERT (n_buffers <= ring_size);
/* contiguous range: single free call suffices */
600 if (PREDICT_TRUE (start + n_buffers <= ring_size))
602 vlib_buffer_free_no_next (vm, ring + start, n_buffers);
/* wrapped range: free tail segment, then the head segment */
606 vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
607 vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
611 /* Add/delete buffer free lists. */
612 vlib_buffer_free_list_index_t vlib_buffer_create_free_list (vlib_main_t * vm,
616 vlib_buffer_delete_free_list (vlib_main_t * vm,
617 vlib_buffer_free_list_index_t free_list_index)
619 vlib_buffer_main_t *bm = &buffer_main;
/* deletion is delegated to the registered buffer-manager callback */
621 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
623 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
626 /* Make sure we have at least given number of unaligned buffers. */
627 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
628 vlib_buffer_free_list_t *
630 uword n_unaligned_buffers);
/* Look up a free list by index in the per-main free-list pool. */
632 always_inline vlib_buffer_free_list_t *
633 vlib_buffer_get_free_list (vlib_main_t * vm,
634 vlib_buffer_free_list_index_t free_list_index)
636 vlib_buffer_free_list_t *f;
638 f = pool_elt_at_index (vm->buffer_free_list_pool, free_list_index);
640 /* Sanity: indices must match. */
641 ASSERT (f->index == free_list_index);
647 vlib_buffer_free_list_buffer_size (vlib_main_t * vm,
648 vlib_buffer_free_list_index_t index)
/* Data bytes available in each buffer of the given free list. */
650 vlib_buffer_free_list_t *f = vlib_buffer_get_free_list (vm, index);
651 return f->n_data_bytes;
654 /* Append given data to end of buffer, possibly allocating new buffers. */
655 int vlib_buffer_add_data (vlib_main_t * vm, u32 * buffer_index, void *data,
658 /* duplicate all buffers in chain */
659 always_inline vlib_buffer_t *
660 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
662 vlib_buffer_t *s, *d, *fd;
663 uword n_alloc, n_buffers = 1;
664 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
/* count the buffers in the source chain */
668 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
671 s = vlib_get_buffer (vm, s->next_buffer);
673 u32 new_buffers[n_buffers];
675 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
677 /* No guarantee that we'll get all the buffers we asked for */
678 if (PREDICT_FALSE (n_alloc < n_buffers))
/* partial allocation: release what we got and fail */
681 vlib_buffer_free (vm, new_buffers, n_alloc);
/* head buffer: copy metadata (masked flags), opaque fields and payload */
687 fd = d = vlib_get_buffer (vm, new_buffers[0]);
688 d->current_data = s->current_data;
689 d->current_length = s->current_length;
690 d->flags = s->flags & flag_mask;
691 d->total_length_not_including_first_buffer =
692 s->total_length_not_including_first_buffer;
693 clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
694 clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
695 clib_memcpy_fast (vlib_buffer_get_current (d),
696 vlib_buffer_get_current (s), s->current_length);
/* copy the rest of the chain, linking new buffers as we go */
699 for (i = 1; i < n_buffers; i++)
702 d->next_buffer = new_buffers[i];
704 s = vlib_get_buffer (vm, s->next_buffer);
705 d = vlib_get_buffer (vm, new_buffers[i]);
706 d->current_data = s->current_data;
707 d->current_length = s->current_length;
708 clib_memcpy_fast (vlib_buffer_get_current (d),
709 vlib_buffer_get_current (s), s->current_length);
710 d->flags = s->flags & flag_mask;
716 /** \brief Create a maximum of 256 clones of buffer and store them
717 in the supplied array
719 @param vm - (vlib_main_t *) vlib main data structure pointer
720 @param src_buffer - (u32) source buffer index
721 @param buffers - (u32 * ) buffer index array
722 @param n_buffers - (u16) number of buffer clones requested (<=256)
723 @param head_end_offset - (u16) offset relative to current position
724 where packet head ends
725 @return - (u16) number of buffers actually cloned, may be
726 less than the number requested or zero
729 vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
730 u16 n_buffers, u16 head_end_offset)
733 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
/* source must not already carry extra references */
735 ASSERT (s->n_add_refs == 0);
737 ASSERT (n_buffers <= 256);
/* head too small to be worth sharing: fall back to full copies */
739 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
741 buffers[0] = src_buffer;
742 for (i = 1; i < n_buffers; i++)
745 d = vlib_buffer_copy (vm, s);
748 buffers[i] = vlib_get_buffer_index (vm, d);
/* a single clone requested: just hand back the source buffer */
754 if (PREDICT_FALSE (n_buffers == 1))
756 buffers[0] = src_buffer;
/* allocate head buffers; each receives a private copy of the packet
   head and then chains to the shared source buffer for the tail */
760 n_buffers = vlib_buffer_alloc (vm, buffers, n_buffers);
762 for (i = 0; i < n_buffers; i++)
764 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
765 d->current_data = s->current_data;
766 d->current_length = head_end_offset;
767 d->total_length_not_including_first_buffer = s->current_length -
769 if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
771 d->total_length_not_including_first_buffer +=
772 s->total_length_not_including_first_buffer;
774 d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
775 d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
776 clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
777 clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
778 clib_memcpy_fast (vlib_buffer_get_current (d),
779 vlib_buffer_get_current (s), head_end_offset);
780 d->next_buffer = src_buffer;
/* the shared tail (source buffer and its chain) is now referenced by
   every clone beyond the first */
782 vlib_buffer_advance (s, head_end_offset);
783 s->n_add_refs = n_buffers - 1;
784 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
786 s = vlib_get_buffer (vm, s->next_buffer);
787 s->n_add_refs = n_buffers - 1;
793 /** \brief Create multiple clones of buffer and store them
794 in the supplied array
796 @param vm - (vlib_main_t *) vlib main data structure pointer
797 @param src_buffer - (u32) source buffer index
798 @param buffers - (u32 * ) buffer index array
799 @param n_buffers - (u16) number of buffer clones requested
800 @param head_end_offset - (u16) offset relative to current position
801 where packet head ends
802 @return - (u16) number of buffers actually cloned, may be
803 less than the number requested or zero
806 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
807 u16 n_buffers, u16 head_end_offset)
809 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
/* vlib_buffer_clone_256 handles at most 256 clones; for larger
   requests, clone 256 at a time from a fresh copy of the source chain */
812 while (n_buffers > 256)
815 copy = vlib_buffer_copy (vm, s);
816 n_cloned += vlib_buffer_clone_256 (vm,
817 vlib_get_buffer_index (vm, copy),
818 (buffers + n_cloned),
819 256, head_end_offset);
/* remaining (<= 256) clones come from the original source buffer */
822 n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
824 n_buffers, head_end_offset);
829 /** \brief Attach cloned tail to the buffer
831 @param vm - (vlib_main_t *) vlib main data structure pointer
832 @param head - (vlib_buffer_t *) head buffer
833 @param tail - (vlib_buffer_t *) tail buffer to clone and attach to head
837 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
838 vlib_buffer_t * tail)
/* head must not already have a chained buffer */
840 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
842 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
843 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
844 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
/* head inherits cached-total validity from the tail */
845 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
846 head->next_buffer = vlib_get_buffer_index (vm, tail);
847 head->total_length_not_including_first_buffer = tail->current_length +
848 tail->total_length_not_including_first_buffer;
/* bump the reference count on every buffer of the shared tail chain */
851 clib_atomic_add_fetch (&tail->n_add_refs, 1);
853 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
855 tail = vlib_get_buffer (vm, tail->next_buffer);
860 /* Initializes the buffer as an empty packet with no chained buffers. */
862 vlib_buffer_chain_init (vlib_buffer_t * first)
864 first->total_length_not_including_first_buffer = 0;
865 first->current_length = 0;
866 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
/* an empty chain's cached total (0) is trivially valid */
867 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
870 /* The provided next_bi buffer index is appended to the end of the packet. */
871 always_inline vlib_buffer_t *
872 vlib_buffer_chain_buffer (vlib_main_t * vm, vlib_buffer_t * last, u32 next_bi)
874 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
875 last->next_buffer = next_bi;
876 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
/* the appended buffer becomes the new, empty tail of the chain */
877 next_buffer->current_length = 0;
878 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
882 /* Increases or decreases the packet length.
883 * It does not allocate or deallocate new buffers.
884 * Therefore, the added length must be compatible
885 * with the last buffer. */
887 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
888 vlib_buffer_t * last, i32 len)
890 last->current_length += len;
/* keep the cached chain total on 'first' consistent as well */
892 first->total_length_not_including_first_buffer += len;
895 /* Copy data to the end of the packet and increases its length.
896 * It does not allocate new buffers.
897 * Returns the number of copied bytes. */
899 vlib_buffer_chain_append_data (vlib_main_t * vm,
900 vlib_buffer_free_list_index_t free_list_index,
901 vlib_buffer_t * first,
902 vlib_buffer_t * last, void *data, u16 data_len)
905 vlib_buffer_free_list_buffer_size (vm, free_list_index);
906 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
/* copy no more than the space remaining in the last buffer */
907 u16 len = clib_min (data_len,
908 n_buffer_bytes - last->current_length -
910 clib_memcpy_fast (vlib_buffer_get_current (last) + last->current_length,
912 vlib_buffer_chain_increase_length (first, last, len);
916 /* Copy data to the end of the packet and increases its length.
917 * Allocates additional buffers from the free list if necessary.
918 * Returns the number of copied bytes.
919 * 'last' value is modified whenever new buffers are allocated and
920 * chained and points to the last buffer in the chain. */
922 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
923 vlib_buffer_free_list_index_t
925 vlib_buffer_t * first,
926 vlib_buffer_t ** last, void *data,
928 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
930 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
931 format_vlib_buffer_contents;
/* Template used to stamp out pre-formatted packets. */
935 /* Vector of packet data. */
938 /* Number of buffers to allocate in each call to allocator. */
939 u32 min_n_buffers_each_alloc;
941 /* Buffer free list for this template. */
942 vlib_buffer_free_list_index_t free_list_index;
947 } vlib_packet_template_t;
949 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
950 vlib_packet_template_t * t);
952 void vlib_packet_template_init (vlib_main_t * vm,
953 vlib_packet_template_t * t,
955 uword n_packet_data_bytes,
956 uword min_n_buffers_each_alloc,
959 void *vlib_packet_template_get_packet (vlib_main_t * vm,
960 vlib_packet_template_t * t,
964 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
/* Release the template's packet data vector. */
966 vec_free (t->packet_data);
969 /* Set a buffer quickly into "uninitialized" state. We want this to
970 be extremely cheap and arrange for all fields that need to be
971 initialized to be in the first 128 bits of the buffer. */
973 vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
974 vlib_buffer_free_list_t * fl)
976 vlib_buffer_t *src = &fl->buffer_init_template;
978 /* Make sure vlib_buffer_t is cacheline aligned and sized */
979 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
980 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
981 CLIB_CACHE_LINE_BYTES);
982 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
983 CLIB_CACHE_LINE_BYTES * 2);
985 /* Make sure buffer template is sane. */
986 vlib_buffer_copy_template (dst, src);
988 /* Not in the first 16 octets. */
989 dst->n_add_refs = src->n_add_refs;
991 /* Make sure it really worked. */
992 #define _(f) ASSERT (dst->f == src->f);
997 /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
998 /* total_length_not_including_first_buffer is not in the template anymore
999 * so it may actually not be zeroed for some buffers. One option is to
1000 * uncomment the line lower (comes at a cost), the other, is to just not
1002 /* dst->total_length_not_including_first_buffer = 0; */
1003 ASSERT (dst->n_add_refs == 0);
/* Return a single buffer to free list 'f', optionally re-initializing it
   from the free list's template. */
1007 vlib_buffer_add_to_free_list (vlib_main_t * vm,
1008 vlib_buffer_free_list_t * f,
1009 u32 buffer_index, u8 do_init)
1011 vlib_buffer_pool_t *bp = vlib_buffer_pool_get (f->buffer_pool_index);
1013 b = vlib_get_buffer (vm, buffer_index);
1014 if (PREDICT_TRUE (do_init))
1015 vlib_buffer_init_for_free_list (b, f);
1016 vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
/* when the free list grows too large, return a frame's worth of
   buffers to the shared buffer pool (under the pool lock) */
1018 if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
1020 clib_spinlock_lock (&bp->lock);
1021 /* keep last stored buffers, as they are more likely hot in the cache */
1022 vec_add_aligned (bp->buffers, f->buffers, VLIB_FRAME_SIZE,
1023 CLIB_CACHE_LINE_BYTES);
1024 vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
1025 f->n_alloc -= VLIB_FRAME_SIZE;
1026 clib_spinlock_unlock (&bp->lock);
1031 extern u32 *vlib_buffer_state_validation_lock;
1032 extern uword *vlib_buffer_state_validation_hash;
1033 extern void *vlib_buffer_state_heap;
/* Debug aid: verify buffer 'b' is in the 'expected' busy/free state, as
   recorded in a dedicated validation hash kept on its own heap. */
1037 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
1043 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
/* spin until we own the validation lock */
1045 while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))
1048 p = hash_get (vlib_buffer_state_validation_hash, b);
1050 /* If we don't know about b, declare it to be in the expected state */
1053 hash_set (vlib_buffer_state_validation_hash, b, expected);
/* state mismatch: warn with the buffer's index and both states */
1057 if (p[0] != expected)
1059 void cj_stop (void);
1061 vlib_main_t *vm = &vlib_global_main;
1065 bi = vlib_get_buffer_index (vm, b);
1067 clib_mem_set_heap (oldheap);
1068 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
1069 vlib_time_now (vm), bi,
1070 p[0] ? "busy" : "free", expected ? "busy" : "free");
/* release the lock and restore the caller's heap */
1074 CLIB_MEMORY_BARRIER ();
1075 *vlib_buffer_state_validation_lock = 0;
1076 clib_mem_set_heap (oldheap);
/* Unconditionally record buffer 'b' as being in state 'expected' in the
   validation hash. */
1081 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
1086 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
/* spin until we own the validation lock */
1088 while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))
1091 hash_set (vlib_buffer_state_validation_hash, b, expected);
1093 CLIB_MEMORY_BARRIER ();
1094 *vlib_buffer_state_validation_lock = 0;
1095 clib_mem_set_heap (oldheap);
1099 /** minimum data size of first buffer in a buffer chain */
1100 #define VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE (256)
1103 * @brief compress buffer chain in a way where the first buffer is at least
1104 * VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE long
1106 * @param[in] vm - vlib_main
1107 * @param[in,out] first - first buffer in chain
1108 * @param[in,out] discard_vector - vector of buffer indexes which were removed
1112 vlib_buffer_chain_compress (vlib_main_t * vm,
1113 vlib_buffer_t * first, u32 ** discard_vector)
1115 if (first->current_length >= VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE ||
1116 !(first->flags & VLIB_BUFFER_NEXT_PRESENT))
1118 /* this is already big enough or not a chain */
1121 /* probe free list to find allocated buffer size to avoid overfill */
1122 vlib_buffer_free_list_t *free_list;
1124 free_list = pool_elt_at_index (vm->buffer_free_list_pool,
1125 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
/* never target more than the first buffer can physically hold */
1127 u32 want_first_size = clib_min (VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE,
1128 free_list->n_data_bytes -
1129 first->current_data);
/* pull bytes from the second buffer into the first until the first is
   big enough or the chain is exhausted */
1132 vlib_buffer_t *second = vlib_get_buffer (vm, first->next_buffer);
1133 u32 need = want_first_size - first->current_length;
1134 u32 amount_to_copy = clib_min (need, second->current_length);
1135 clib_memcpy_fast (((u8 *) vlib_buffer_get_current (first)) +
1136 first->current_length,
1137 vlib_buffer_get_current (second), amount_to_copy);
1138 first->current_length += amount_to_copy;
1139 second->current_data += amount_to_copy;
1140 second->current_length -= amount_to_copy;
1141 if (first->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
1143 first->total_length_not_including_first_buffer -= amount_to_copy;
/* second buffer fully drained: unlink it and queue it for discard */
1145 if (!second->current_length)
1147 vec_add1 (*discard_vector, first->next_buffer);
1148 if (second->flags & VLIB_BUFFER_NEXT_PRESENT)
1150 first->next_buffer = second->next_buffer;
1154 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1156 second->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1159 while ((first->current_length < want_first_size) &&
1160 (first->flags & VLIB_BUFFER_NEXT_PRESENT));
1164 * @brief linearize buffer chain - the first buffer is filled, if needed,
1165 * buffers are allocated and filled, returns free space in last buffer or
1166 * negative on failure
1168 * @param[in] vm - vlib_main
1169 * @param[in,out] first - first buffer in chain
1172 vlib_buffer_chain_linearize (vlib_main_t * vm, vlib_buffer_t * first)
1174 vlib_buffer_t *b = first;
1175 vlib_buffer_free_list_t *fl =
1176 vlib_buffer_get_free_list (vm, VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
1177 u32 buf_len = fl->n_data_bytes;
1178 // free buffer chain starting from the second buffer
1179 int free_count = (b->flags & VLIB_BUFFER_NEXT_PRESENT) != 0;
1180 u32 chain_to_free = b->next_buffer;
/* figure out how many full buffers the linearized payload needs,
   accounting for free space already available in the first buffer */
1182 u32 len = vlib_buffer_length_in_chain (vm, b);
1183 u32 free_len = buf_len - b->current_data - b->current_length;
1184 int alloc_len = clib_max (len - free_len, 0); //use the free len in the first buffer
1185 int n_buffers = (alloc_len + buf_len - 1) / buf_len;
1186 u32 new_buffers[n_buffers];
1188 u32 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
1189 if (n_alloc != n_buffers)
/* allocation shortfall: release what we got and report failure */
1191 vlib_buffer_free_no_next (vm, new_buffers, n_alloc);
/* walk the source chain, copying payload into the rebuilt chain */
1195 vlib_buffer_t *s = b;
1196 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
1198 s = vlib_get_buffer (vm, s->next_buffer);
1199 int d_free_len = buf_len - b->current_data - b->current_length;
1200 ASSERT (d_free_len >= 0);
1201 // chain buf and split write
1202 u32 copy_len = clib_min (d_free_len, s->current_length);
1203 u8 *d = vlib_buffer_put_uninit (b, copy_len);
1204 clib_memcpy (d, vlib_buffer_get_current (s), copy_len);
1205 int rest = s->current_length - copy_len;
/* destination full: chain the next pre-allocated buffer and write the
   remainder of this source buffer there */
1209 ASSERT (vlib_buffer_get_tail (b) == b->data + buf_len);
1210 ASSERT (n_buffers > 0);
1211 b = vlib_buffer_chain_buffer (vm, b, new_buffers[--n_buffers]);
1212 //make full use of the new buffers
1213 b->current_data = 0;
1214 d = vlib_buffer_put_uninit (b, rest);
1215 clib_memcpy (d, vlib_buffer_get_current (s) + copy_len, rest);
/* release the original (now copied) tail chain and fix up flags */
1218 vlib_buffer_free (vm, &chain_to_free, free_count);
1219 b->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1220 if (b == first) /* no buffers added */
1221 b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1222 ASSERT (len == vlib_buffer_length_in_chain (vm, first));
1223 ASSERT (n_buffers == 0);
1224 return buf_len - b->current_data - b->current_length;
1227 #endif /* included_vlib_buffer_funcs_h */
1230 * fd.io coding-style-patch-verification: ON
1233 * eval: (c-set-style "gnu")