2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
44 #include <vppinfra/fifo.h>
47 vlib buffer access methods.
51 /** \brief Translate buffer index into buffer pointer
53 @param vm - (vlib_main_t *) vlib main data structure pointer
54 @param buffer_index - (u32) buffer index
55 @return - (vlib_buffer_t *) buffer pointer
57 always_inline vlib_buffer_t *
58 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
60 vlib_buffer_main_t *bm = &buffer_main;
61 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
62 ASSERT (offset < bm->buffer_mem_size);
64 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
67 static_always_inline void
68 vlib_buffer_copy_template (vlib_buffer_t * b, vlib_buffer_t * bt)
70 clib_memcpy_fast (b, bt, STRUCT_OFFSET_OF (vlib_buffer_t, template_end));
73 /** \brief Translate array of buffer indices into buffer pointers with offset
75 @param vm - (vlib_main_t *) vlib main data structure pointer
76 @param bi - (u32 *) array of buffer indices
77 @param b - (void **) array to store buffer pointers
78 @param count - (uword) number of elements
79 @param offset - (i32) offset applied to each pointer
81 static_always_inline void
82 vlib_get_buffers_with_offset (vlib_main_t * vm, u32 * bi, void **b, int count,
85 #ifdef CLIB_HAVE_VEC256
/* Pointer = buffer_mem_start + (index << log2 cache line) + offset.
   Folding the caller's byte offset into the splatted base lets each
   index be converted with a single shift+add. */
86 u64x4 off = u64x4_splat (buffer_main.buffer_mem_start + offset);
87 /* if count is not const, compiler will not unroll while loop
88 so we maintain two-in-parallel variant */
/* 8-wide path: two unaligned u32x4 loads, each widened to u64x4. */
91 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
92 u64x4 b1 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi + 4));
93 /* shift and add to get vlib_buffer_t pointer */
94 u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
95 u64x4_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 4);
/* 4-at-a-time step for the remaining elements. */
103 #ifdef CLIB_HAVE_VEC256
104 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
105 /* shift and add to get vlib_buffer_t pointer */
106 u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
107 #elif defined (CLIB_HAVE_VEC128)
/* 128-bit path: widen low and high index pairs to two u64x2 vectors. */
108 u64x2 off = u64x2_splat (buffer_main.buffer_mem_start + offset);
109 u32x4 bi4 = u32x4_load_unaligned (bi);
110 u64x2 b0 = u32x4_extend_to_u64x2 ((u32x4) bi4);
111 #if defined (__aarch64__)
112 u64x2 b1 = u32x4_extend_to_u64x2_high ((u32x4) bi4);
/* Non-aarch64: rotate the lanes so the upper pair can be widened with
   the same low-half extend primitive. */
114 bi4 = u32x4_shuffle (bi4, 2, 3, 0, 1);
115 u64x2 b1 = u32x4_extend_to_u64x2 ((u32x4) bi4);
117 u64x2_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
118 u64x2_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 2);
/* Scalar fallback, four conversions per iteration. */
120 b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
121 b[1] = ((u8 *) vlib_get_buffer (vm, bi[1])) + offset;
122 b[2] = ((u8 *) vlib_get_buffer (vm, bi[2])) + offset;
123 b[3] = ((u8 *) vlib_get_buffer (vm, bi[3])) + offset;
/* Tail loop: one element at a time for count not divisible by 4. */
131 b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
138 /** \brief Translate array of buffer indices into buffer pointers
140 @param vm - (vlib_main_t *) vlib main data structure pointer
141 @param bi - (u32 *) array of buffer indices
142 @param b - (vlib_buffer_t **) array to store buffer pointers
143 @param count - (uword) number of elements
146 static_always_inline void
147 vlib_get_buffers (vlib_main_t * vm, u32 * bi, vlib_buffer_t ** b, int count)
149 vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
152 /** \brief Translate buffer pointer into buffer index
154 @param vm - (vlib_main_t *) vlib main data structure pointer
155 @param p - (void *) buffer pointer
156 @return - (u32) buffer index
160 vlib_get_buffer_index (vlib_main_t * vm, void *p)
162 vlib_buffer_main_t *bm = &buffer_main;
163 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
164 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
165 ASSERT (offset < bm->buffer_mem_size);
166 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
167 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
170 /** \brief Translate array of buffer pointers into buffer indices with offset
172 @param vm - (vlib_main_t *) vlib main data structure pointer
173 @param b - (void **) array of buffer pointers
174 @param bi - (u32 *) array to store buffer indices
175 @param count - (uword) number of elements
176 @param offset - (i32) offset applied to each pointer
178 static_always_inline void
179 vlib_get_buffer_indices_with_offset (vlib_main_t * vm, void **b, u32 * bi,
180 uword count, i32 offset)
182 #ifdef CLIB_HAVE_VEC256
/* mask gathers the low u32 of every u64 lane into the low 128 bits. */
183 u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
/* Subtracting offset here cancels the per-pointer offset added by the
   caller, so the vector path only needs a subtract and a shift. */
184 u64x4 off4 = u64x4_splat (buffer_main.buffer_mem_start - offset);
188 /* load 4 pointers into 256-bit register */
189 u64x4 v0 = u64x4_load_unaligned (b);
190 u64x4 v1 = u64x4_load_unaligned (b + 4);
/* NOTE(review): the subtraction of off4 from v0/v1 is on stripped lines
   not visible here; the shifts below assume it has happened. */
196 v0 >>= CLIB_LOG2_CACHE_LINE_BYTES;
197 v1 >>= CLIB_LOG2_CACHE_LINE_BYTES;
199 /* permute 256-bit register so lower u32s of each buffer index are
200 * placed into lower 128-bits */
201 v2 = u32x8_permute ((u32x8) v0, mask);
202 v3 = u32x8_permute ((u32x8) v1, mask);
204 /* extract lower 128-bits and save them to the array of buffer indices */
205 u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
206 u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
214 /* equivalent non-vector implementation, 4 per iteration */
215 bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
216 bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
217 bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
218 bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
/* Tail loop: one pointer at a time. */
225 bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
232 /** \brief Translate array of buffer pointers into buffer indices
234 @param vm - (vlib_main_t *) vlib main data structure pointer
235 @param b - (vlib_buffer_t **) array of buffer pointers
236 @param bi - (u32 *) array to store buffer indices
237 @param count - (uword) number of elements
239 static_always_inline void
240 vlib_get_buffer_indices (vlib_main_t * vm, vlib_buffer_t ** b, u32 * bi,
243 vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
246 /** \brief Get next buffer in buffer linklist, or zero for end of list.
248 @param vm - (vlib_main_t *) vlib main data structure pointer
249 @param b - (void *) buffer pointer
250 @return - (vlib_buffer_t *) next buffer, or NULL
252 always_inline vlib_buffer_t *
253 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
255 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
256 ? vlib_get_buffer (vm, b->next_buffer) : 0);
259 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
260 vlib_buffer_t * b_first);
262 /** \brief Get length in bytes of the buffer chain
264 @param vm - (vlib_main_t *) vlib main data structure pointer
265 @param b - (void *) buffer pointer
266 @return - (uword) length of buffer chain
269 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
271 uword len = b->current_length;
273 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
276 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
277 return len + b->total_length_not_including_first_buffer;
279 return vlib_buffer_length_in_chain_slow_path (vm, b);
282 /** \brief Get length in bytes of the buffer index buffer chain
284 @param vm - (vlib_main_t *) vlib main data structure pointer
285 @param bi - (u32) buffer index
286 @return - (uword) length of buffer chain
289 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
291 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
292 return vlib_buffer_length_in_chain (vm, b);
295 /** \brief Copy buffer contents to memory
297 @param vm - (vlib_main_t *) vlib main data structure pointer
298 @param buffer_index - (u32) buffer index
299 @param contents - (u8 *) memory, <strong>must be large enough</strong>
300 @return - (uword) length of buffer chain
303 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
305 uword content_len = 0;
311 b = vlib_get_buffer (vm, buffer_index);
312 l = b->current_length;
313 clib_memcpy_fast (contents + content_len, b->data + b->current_data, l);
315 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
317 buffer_index = b->next_buffer;
324 vlib_buffer_get_pa (vlib_main_t * vm, vlib_buffer_t * b)
326 return vlib_physmem_get_pa (vm, b->data);
330 vlib_buffer_get_current_pa (vlib_main_t * vm, vlib_buffer_t * b)
332 return vlib_buffer_get_pa (vm, b) + b->current_data;
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)		\
  do {								\
    vlib_buffer_t *_b = vlib_get_buffer (vm, bi);		\
    vlib_prefetch_buffer_header (_b, type);			\
  } while (0)
/* Tracked allocation state of a buffer index (debug bookkeeping). */
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
359 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
361 vlib_buffer_known_state_t
364 always_inline vlib_buffer_known_state_t
365 vlib_buffer_is_known (u32 buffer_index)
367 vlib_buffer_main_t *bm = &buffer_main;
369 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
370 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
371 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
372 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
376 vlib_buffer_set_known_state (u32 buffer_index,
377 vlib_buffer_known_state_t state)
379 vlib_buffer_main_t *bm = &buffer_main;
381 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
382 hash_set (bm->buffer_known_hash, buffer_index, state);
383 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
386 /* Validates sanity of a single buffer.
387 Returns format'ed vector with error message if any. */
388 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
392 vlib_buffer_round_size (u32 size)
394 return round_pow2 (size, sizeof (vlib_buffer_t));
397 always_inline vlib_buffer_free_list_index_t
398 vlib_buffer_get_free_list_index (vlib_buffer_t * b)
400 if (PREDICT_FALSE (b->flags & VLIB_BUFFER_NON_DEFAULT_FREELIST))
401 return b->free_list_index;
407 vlib_buffer_set_free_list_index (vlib_buffer_t * b,
408 vlib_buffer_free_list_index_t index)
410 if (PREDICT_FALSE (index))
412 b->flags |= VLIB_BUFFER_NON_DEFAULT_FREELIST;
413 b->free_list_index = index;
416 b->flags &= ~VLIB_BUFFER_NON_DEFAULT_FREELIST;
419 /** \brief Allocate buffers from specific freelist into supplied array
421 @param vm - (vlib_main_t *) vlib main data structure pointer
422 @param buffers - (u32 * ) buffer index array
423 @param n_buffers - (u32) number of buffers requested
424 @return - (u32) number of buffers actually allocated, may be
425 less than the number requested or zero
428 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
431 vlib_buffer_free_list_index_t index)
433 vlib_buffer_main_t *bm = &buffer_main;
434 vlib_buffer_free_list_t *fl;
/* Allocation is delegated to the registered fill callback when the
   free list runs short; it must have been installed at init time. */
438 ASSERT (bm->cb.vlib_buffer_fill_free_list_cb);
440 fl = pool_elt_at_index (vm->buffer_free_list_pool, index);
442 len = vec_len (fl->buffers);
/* Not enough cached buffers: ask the callback to refill, then re-read
   the length; if still empty, nothing can be allocated. */
444 if (PREDICT_FALSE (len < n_buffers))
446 bm->cb.vlib_buffer_fill_free_list_cb (vm, fl, n_buffers);
447 if (PREDICT_FALSE ((len = vec_len (fl->buffers)) == 0))
450 /* even if fill free list didn't manage to refill free list
451 we should give what we have */
452 n_buffers = clib_min (len, n_buffers);
454 /* following code is intentionaly duplicated to allow compiler
455 to optimize fast path when n_buffers is constant value */
/* Pop n_buffers indices off the tail of the free-list vector. */
456 src = fl->buffers + len - n_buffers;
457 clib_memcpy_fast (buffers, src, n_buffers * sizeof (u32));
458 _vec_len (fl->buffers) -= n_buffers;
460 /* Verify that buffers are known free. */
461 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
462 VLIB_BUFFER_KNOWN_FREE);
/* Fast-path duplicate of the pop above (see comment at line 454). */
467 src = fl->buffers + len - n_buffers;
468 clib_memcpy_fast (buffers, src, n_buffers * sizeof (u32));
469 _vec_len (fl->buffers) -= n_buffers;
471 /* Verify that buffers are known free. */
472 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
473 VLIB_BUFFER_KNOWN_FREE);
478 /** \brief Allocate buffers into supplied array
480 @param vm - (vlib_main_t *) vlib main data structure pointer
481 @param buffers - (u32 * ) buffer index array
482 @param n_buffers - (u32) number of buffers requested
483 @return - (u32) number of buffers actually allocated, may be
484 less than the number requested or zero
487 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
489 return vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
490 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
493 /** \brief Allocate buffers into ring
495 @param vm - (vlib_main_t *) vlib main data structure pointer
496 @param buffers - (u32 * ) buffer index ring
497 @param start - (u32) first slot in the ring
498 @param ring_size - (u32) ring size
499 @param n_buffers - (u32) number of buffers requested
500 @return - (u32) number of buffers actually allocated, may be
501 less than the number requested or zero
504 vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
505 u32 ring_size, u32 n_buffers)
509 ASSERT (n_buffers <= ring_size);
511 if (PREDICT_TRUE (start + n_buffers <= ring_size))
512 return vlib_buffer_alloc (vm, ring + start, n_buffers);
514 n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
516 if (PREDICT_TRUE (n_alloc == ring_size - start))
517 n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
522 /** \brief Free buffers
523 Frees the entire buffer chain for each buffer
525 @param vm - (vlib_main_t *) vlib main data structure pointer
526 @param buffers - (u32 * ) buffer index array
527 @param n_buffers - (u32) number of buffers to free
531 vlib_buffer_free (vlib_main_t * vm,
532 /* pointer to first buffer */
534 /* number of buffers to free */
537 vlib_buffer_main_t *bm = &buffer_main;
539 ASSERT (bm->cb.vlib_buffer_free_cb);
541 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
544 /** \brief Free buffers, does not free the buffer chain for each buffer
546 @param vm - (vlib_main_t *) vlib main data structure pointer
547 @param buffers - (u32 * ) buffer index array
548 @param n_buffers - (u32) number of buffers to free
552 vlib_buffer_free_no_next (vlib_main_t * vm,
553 /* pointer to first buffer */
555 /* number of buffers to free */
558 vlib_buffer_main_t *bm = &buffer_main;
560 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
562 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
565 /** \brief Free one buffer
566 Shorthand to free a single buffer chain.
568 @param vm - (vlib_main_t *) vlib main data structure pointer
569 @param buffer_index - (u32) buffer index to free
572 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
574 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
577 /** \brief Free buffers from ring
579 @param vm - (vlib_main_t *) vlib main data structure pointer
580 @param buffers - (u32 * ) buffer index ring
581 @param start - (u32) first slot in the ring
582 @param ring_size - (u32) ring size
583 @param n_buffers - (u32) number of buffers
586 vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
587 u32 ring_size, u32 n_buffers)
589 ASSERT (n_buffers <= ring_size);
591 if (PREDICT_TRUE (start + n_buffers <= ring_size))
593 vlib_buffer_free (vm, ring + start, n_buffers);
597 vlib_buffer_free (vm, ring + start, ring_size - start);
598 vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
602 /** \brief Free buffers from ring without freeing tail buffers
604 @param vm - (vlib_main_t *) vlib main data structure pointer
605 @param buffers - (u32 * ) buffer index ring
606 @param start - (u32) first slot in the ring
607 @param ring_size - (u32) ring size
608 @param n_buffers - (u32) number of buffers
611 vlib_buffer_free_from_ring_no_next (vlib_main_t * vm, u32 * ring, u32 start,
612 u32 ring_size, u32 n_buffers)
614 ASSERT (n_buffers <= ring_size);
616 if (PREDICT_TRUE (start + n_buffers <= ring_size))
618 vlib_buffer_free_no_next (vm, ring + start, n_buffers);
622 vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
623 vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
627 /* Add/delete buffer free lists. */
628 vlib_buffer_free_list_index_t vlib_buffer_create_free_list (vlib_main_t * vm,
632 vlib_buffer_delete_free_list (vlib_main_t * vm,
633 vlib_buffer_free_list_index_t free_list_index)
635 vlib_buffer_main_t *bm = &buffer_main;
637 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
639 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
642 /* Make sure we have at least given number of unaligned buffers. */
643 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
644 vlib_buffer_free_list_t *
646 uword n_unaligned_buffers);
648 always_inline vlib_buffer_free_list_t *
649 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
650 vlib_buffer_free_list_index_t * index)
652 vlib_buffer_free_list_index_t i;
654 *index = i = vlib_buffer_get_free_list_index (b);
655 return pool_elt_at_index (vm->buffer_free_list_pool, i);
658 always_inline vlib_buffer_free_list_t *
659 vlib_buffer_get_free_list (vlib_main_t * vm,
660 vlib_buffer_free_list_index_t free_list_index)
662 vlib_buffer_free_list_t *f;
664 f = pool_elt_at_index (vm->buffer_free_list_pool, free_list_index);
666 /* Sanity: indices must match. */
667 ASSERT (f->index == free_list_index);
673 vlib_buffer_free_list_buffer_size (vlib_main_t * vm,
674 vlib_buffer_free_list_index_t index)
676 vlib_buffer_free_list_t *f = vlib_buffer_get_free_list (vm, index);
677 return f->n_data_bytes;
680 /* Append given data to end of buffer, possibly allocating new buffers. */
681 int vlib_buffer_add_data (vlib_main_t * vm,
682 vlib_buffer_free_list_index_t free_list_index,
683 u32 * buffer_index, void *data, u32 n_data_bytes);
685 /* duplicate all buffers in chain */
686 always_inline vlib_buffer_t *
687 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
689 vlib_buffer_t *s, *d, *fd;
690 uword n_alloc, n_buffers = 1;
/* Only chain/total-length flags are propagated to the copies. */
691 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
/* First pass: count the segments so the right number of new buffers
   can be allocated up front (VLA sized by n_buffers below). */
695 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
698 s = vlib_get_buffer (vm, s->next_buffer);
700 u32 new_buffers[n_buffers];
702 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
704 /* No guarantee that we'll get all the buffers we asked for */
/* On partial allocation everything obtained is released and the copy
   fails as a whole (caller sees a null result). */
705 if (PREDICT_FALSE (n_alloc < n_buffers))
708 vlib_buffer_free (vm, new_buffers, n_alloc);
/* Head segment: replicate metadata, opaque words and payload. */
714 fd = d = vlib_get_buffer (vm, new_buffers[0]);
715 d->current_data = s->current_data;
716 d->current_length = s->current_length;
717 d->flags = s->flags & flag_mask;
718 d->total_length_not_including_first_buffer =
719 s->total_length_not_including_first_buffer;
720 clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
721 clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
722 clib_memcpy_fast (vlib_buffer_get_current (d),
723 vlib_buffer_get_current (s), s->current_length);
/* Remaining segments: link each new buffer to the previous one and
   copy metadata + payload from the matching source segment. */
726 for (i = 1; i < n_buffers; i++)
729 d->next_buffer = new_buffers[i];
731 s = vlib_get_buffer (vm, s->next_buffer);
732 d = vlib_get_buffer (vm, new_buffers[i]);
733 d->current_data = s->current_data;
734 d->current_length = s->current_length;
735 clib_memcpy_fast (vlib_buffer_get_current (d),
736 vlib_buffer_get_current (s), s->current_length);
737 d->flags = s->flags & flag_mask;
743 /** \brief Create a maximum of 256 clones of buffer and store them
744 in the supplied array
746 @param vm - (vlib_main_t *) vlib main data structure pointer
747 @param src_buffer - (u32) source buffer index
748 @param buffers - (u32 * ) buffer index array
749 @param n_buffers - (u16) number of buffer clones requested (<=256)
750 @param head_end_offset - (u16) offset relative to current position
751 where packet head ends
752 @return - (u16) number of buffers actually cloned, may be
753 less than the number requested or zero
756 vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
757 u16 n_buffers, u16 head_end_offset)
760 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
/* Source must not already be shared; refcount fits in one byte, hence
   the 256-clone ceiling. */
762 ASSERT (s->n_add_refs == 0);
764 ASSERT (n_buffers <= 256);
/* Tiny head: cloning would not pay off, fall back to full copies.
   The source itself is handed out as clone 0. */
766 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
768 buffers[0] = src_buffer;
769 for (i = 1; i < n_buffers; i++)
772 d = vlib_buffer_copy (vm, s);
775 buffers[i] = vlib_get_buffer_index (vm, d);
/* Single clone requested: no work, return the source as-is. */
781 if (PREDICT_FALSE (n_buffers == 1))
783 buffers[0] = src_buffer;
/* Allocate one head buffer per clone from the source's free list. */
787 n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
788 vlib_buffer_get_free_list_index
/* Build each clone head: copy packet head bytes, then chain the clone
   to the (shared) source buffer. */
791 for (i = 0; i < n_buffers; i++)
793 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
794 d->current_data = s->current_data;
795 d->current_length = head_end_offset;
796 vlib_buffer_set_free_list_index (d,
797 vlib_buffer_get_free_list_index (s));
/* Clone's "rest of packet" is whatever follows the head in s. */
799 d->total_length_not_including_first_buffer = s->current_length -
801 if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
803 d->total_length_not_including_first_buffer +=
804 s->total_length_not_including_first_buffer;
806 d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
807 d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
808 clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
809 clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
810 clib_memcpy_fast (vlib_buffer_get_current (d),
811 vlib_buffer_get_current (s), head_end_offset);
812 d->next_buffer = src_buffer;
/* Advance the source past the head and bump refcounts on every shared
   segment: n_buffers - 1 extra references per segment. */
814 vlib_buffer_advance (s, head_end_offset);
815 s->n_add_refs = n_buffers - 1;
816 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
818 s = vlib_get_buffer (vm, s->next_buffer);
819 s->n_add_refs = n_buffers - 1;
825 /** \brief Create multiple clones of buffer and store them
826 in the supplied array
828 @param vm - (vlib_main_t *) vlib main data structure pointer
829 @param src_buffer - (u32) source buffer index
830 @param buffers - (u32 * ) buffer index array
831 @param n_buffers - (u16) number of buffer clones requested (<=256)
832 @param head_end_offset - (u16) offset relative to current position
833 where packet head ends
834 @return - (u16) number of buffers actually cloned, may be
835 less than the number requested or zero
838 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
839 u16 n_buffers, u16 head_end_offset)
841 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
844 while (n_buffers > 256)
847 copy = vlib_buffer_copy (vm, s);
848 n_cloned += vlib_buffer_clone_256 (vm,
849 vlib_get_buffer_index (vm, copy),
850 (buffers + n_cloned),
851 256, head_end_offset);
854 n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
856 n_buffers, head_end_offset);
861 /** \brief Attach cloned tail to the buffer
863 @param vm - (vlib_main_t *) vlib main data structure pointer
864 @param head - (vlib_buffer_t *) head buffer
865 @param tail - (vlib_buffer_t *) tail buffer to clone and attach to head
869 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
870 vlib_buffer_t * tail)
/* Head must not already have a chain, and both buffers must come from
   the same free list. */
872 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
873 ASSERT (vlib_buffer_get_free_list_index (head) ==
874 vlib_buffer_get_free_list_index (tail));
/* Link head -> tail; head's total-length validity is inherited from
   the tail, and any external header on head is invalidated. */
876 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
877 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
878 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
879 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
880 head->next_buffer = vlib_get_buffer_index (vm, tail);
881 head->total_length_not_including_first_buffer = tail->current_length +
882 tail->total_length_not_including_first_buffer;
/* Walk the tail chain, taking one extra reference on every segment so
   the shared tail outlives either owner. */
885 clib_atomic_add_fetch (&tail->n_add_refs, 1);
887 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
889 tail = vlib_get_buffer (vm, tail->next_buffer);
894 /* Initializes the buffer as an empty packet with no chained buffers. */
896 vlib_buffer_chain_init (vlib_buffer_t * first)
898 first->total_length_not_including_first_buffer = 0;
899 first->current_length = 0;
900 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
901 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
904 /* The provided next_bi buffer index is appended to the end of the packet. */
905 always_inline vlib_buffer_t *
906 vlib_buffer_chain_buffer (vlib_main_t * vm, vlib_buffer_t * last, u32 next_bi)
908 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
909 last->next_buffer = next_bi;
910 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
911 next_buffer->current_length = 0;
912 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
916 /* Increases or decreases the packet length.
917 * It does not allocate or deallocate new buffers.
918 * Therefore, the added length must be compatible
919 * with the last buffer. */
921 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
922 vlib_buffer_t * last, i32 len)
924 last->current_length += len;
926 first->total_length_not_including_first_buffer += len;
929 /* Copy data to the end of the packet and increases its length.
930 * It does not allocate new buffers.
931 * Returns the number of copied bytes. */
933 vlib_buffer_chain_append_data (vlib_main_t * vm,
934 vlib_buffer_free_list_index_t free_list_index,
935 vlib_buffer_t * first,
936 vlib_buffer_t * last, void *data, u16 data_len)
939 vlib_buffer_free_list_buffer_size (vm, free_list_index);
940 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
941 u16 len = clib_min (data_len,
942 n_buffer_bytes - last->current_length -
944 clib_memcpy_fast (vlib_buffer_get_current (last) + last->current_length,
946 vlib_buffer_chain_increase_length (first, last, len);
950 /* Copy data to the end of the packet and increases its length.
951 * Allocates additional buffers from the free list if necessary.
952 * Returns the number of copied bytes.
953 * 'last' value is modified whenever new buffers are allocated and
954 * chained and points to the last buffer in the chain. */
956 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
957 vlib_buffer_free_list_index_t
959 vlib_buffer_t * first,
960 vlib_buffer_t ** last, void *data,
962 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
964 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
965 format_vlib_buffer_contents;
969 /* Vector of packet data. */
/* NOTE(review): the field declaration for the packet-data vector and the
   struct opener are on lines not visible in this listing. */
972 /* Number of buffers to allocate in each call to allocator. */
973 u32 min_n_buffers_each_alloc;
975 /* Buffer free list for this template. */
976 vlib_buffer_free_list_index_t free_list_index;
981 } vlib_packet_template_t;
983 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
984 vlib_packet_template_t * t);
986 void vlib_packet_template_init (vlib_main_t * vm,
987 vlib_packet_template_t * t,
989 uword n_packet_data_bytes,
990 uword min_n_buffers_each_alloc,
993 void *vlib_packet_template_get_packet (vlib_main_t * vm,
994 vlib_packet_template_t * t,
998 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
1000 vec_free (t->packet_data);
1003 /* Set a buffer quickly into "uninitialized" state. We want this to
1004 be extremely cheap and arrange for all fields that need to be
1005 initialized to be in the first 128 bits of the buffer. */
1007 vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
1008 vlib_buffer_free_list_t * fl)
/* Template carrying the canonical "freshly freed" field values. */
1010 vlib_buffer_t *src = &fl->buffer_init_template;
1012 /* Make sure vlib_buffer_t is cacheline aligned and sized */
1013 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
1014 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
1015 CLIB_CACHE_LINE_BYTES);
1016 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
1017 CLIB_CACHE_LINE_BYTES * 2);
1019 /* Make sure buffer template is sane. */
/* Bulk-copy the template region of the buffer metadata. */
1020 vlib_buffer_copy_template (dst, src);
1022 /* Not in the first 16 octets. */
1023 dst->n_add_refs = src->n_add_refs;
1024 vlib_buffer_set_free_list_index (dst, fl->index);
1026 /* Make sure it really worked. */
/* Debug cross-check: each templated field must equal the template's. */
1027 #define _(f) ASSERT (dst->f == src->f);
1032 /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
1033 /* total_length_not_including_first_buffer is not in the template anymore
1034 * so it may actually not zeroed for some buffers. One option is to
1035 * uncomment the line lower (comes at a cost), the other, is to just not
1037 /* dst->total_length_not_including_first_buffer = 0; */
1038 ASSERT (dst->n_add_refs == 0);
1042 vlib_buffer_add_to_free_list (vlib_main_t * vm,
1043 vlib_buffer_free_list_t * f,
1044 u32 buffer_index, u8 do_init)
1046 vlib_buffer_pool_t *bp = vlib_buffer_pool_get (f->buffer_pool_index);
1048 b = vlib_get_buffer (vm, buffer_index);
/* Optionally reset the buffer to template state before caching it. */
1049 if (PREDICT_TRUE (do_init))
1050 vlib_buffer_init_for_free_list (b, f);
1051 vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
/* When the per-free-list cache grows beyond 4 frames, return the
   oldest frame's worth of buffers to the shared (locked) pool. */
1053 if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
1055 clib_spinlock_lock (&bp->lock);
1056 /* keep last stored buffers, as they are more likely hot in the cache */
1057 vec_add_aligned (bp->buffers, f->buffers, VLIB_FRAME_SIZE,
1058 CLIB_CACHE_LINE_BYTES);
1059 vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
1060 f->n_alloc -= VLIB_FRAME_SIZE;
1061 clib_spinlock_unlock (&bp->lock);
1066 extern u32 *vlib_buffer_state_validation_lock;
1067 extern uword *vlib_buffer_state_validation_hash;
1068 extern void *vlib_buffer_state_heap;
/* Debug aid: check (and record) that buffer 'b' is in the 'expected'
   busy/free state, warning when it is not. */
1072 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
/* State hash lives on its own heap; switch to it for hash operations. */
1078 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
/* Spin until the validation lock is acquired. */
1080 while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))
1083 p = hash_get (vlib_buffer_state_validation_hash, b);
1085 /* If we don't know about b, declare it to be in the expected state */
1088 hash_set (vlib_buffer_state_validation_hash, b, expected);
/* Known but in the wrong state: stop tracing and report. */
1092 if (p[0] != expected)
1094 void cj_stop (void);
1096 vlib_main_t *vm = &vlib_global_main;
1100 bi = vlib_get_buffer_index (vm, b);
1102 clib_mem_set_heap (oldheap);
1103 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
1104 vlib_time_now (vm), bi,
1105 p[0] ? "busy" : "free", expected ? "busy" : "free");
/* Publish hash updates before dropping the lock, then restore heap. */
1109 CLIB_MEMORY_BARRIER ();
1110 *vlib_buffer_state_validation_lock = 0;
1111 clib_mem_set_heap (oldheap);
/* Debug aid: unconditionally record buffer 'b' as being in the
   'expected' busy/free state. */
1116 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
/* State hash lives on its own heap; switch to it for the update. */
1121 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
/* Spin until the validation lock is acquired. */
1123 while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))
1126 hash_set (vlib_buffer_state_validation_hash, b, expected);
/* Publish the update before dropping the lock, then restore heap. */
1128 CLIB_MEMORY_BARRIER ();
1129 *vlib_buffer_state_validation_lock = 0;
1130 clib_mem_set_heap (oldheap);
1134 /** minimum data size of first buffer in a buffer chain */
1135 #define VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE (256)
1138 * @brief compress buffer chain in a way where the first buffer is at least
1139 * VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE long
1141 * @param[in] vm - vlib_main
1142 * @param[in,out] first - first buffer in chain
1143 * @param[in,out] discard_vector - vector of buffer indexes which were removed
/* Pull data forward from successor buffers until the head buffer holds
 * at least VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE bytes (or the chain is
 * exhausted / the head is full). Fully-drained successors are unlinked
 * and their indices appended to *discard_vector for the caller to free.
 *
 * NOTE(review): dropped lines in this dump include the return-type
 * line, braces, an early 'return', and the 'do' that pairs with the
 * trailing 'while' -- confirm against the pristine source. */
1147 vlib_buffer_chain_compress (vlib_main_t * vm,
1148 vlib_buffer_t * first, u32 ** discard_vector)
/* fast path: nothing to do for a big-enough head or a single buffer */
1150 if (first->current_length >= VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE ||
1151 !(first->flags & VLIB_BUFFER_NEXT_PRESENT))
1153 /* this is already big enough or not a chain */
1156 /* probe free list to find allocated buffer size to avoid overfill */
1157 vlib_buffer_free_list_index_t index;
1158 vlib_buffer_free_list_t *free_list =
1159 vlib_buffer_get_buffer_free_list (vm, first, &index);
/* cap the target by the space actually left in the head buffer */
1161 u32 want_first_size = clib_min (VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE,
1162 free_list->n_data_bytes -
1163 first->current_data);
/* loop body: shift bytes from 'second' into the tail of 'first' */
1166 vlib_buffer_t *second = vlib_get_buffer (vm, first->next_buffer);
1167 u32 need = want_first_size - first->current_length;
1168 u32 amount_to_copy = clib_min (need, second->current_length);
1169 clib_memcpy_fast (((u8 *) vlib_buffer_get_current (first)) +
1170 first->current_length,
1171 vlib_buffer_get_current (second), amount_to_copy);
1172 first->current_length += amount_to_copy;
1173 second->current_data += amount_to_copy;
1174 second->current_length -= amount_to_copy;
/* bytes moved into 'first' no longer count as "not including first" */
1175 if (first->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
1177 first->total_length_not_including_first_buffer -= amount_to_copy;
/* 'second' fully drained: unlink it and queue it for discard */
1179 if (!second->current_length)
1181 vec_add1 (*discard_vector, first->next_buffer);
1182 if (second->flags & VLIB_BUFFER_NEXT_PRESENT)
/* splice 'second' out of the chain, keeping its successor */
1184 first->next_buffer = second->next_buffer;
/* 'second' was the tail: chain now ends at 'first' */
1188 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1190 second->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
/* keep pulling while the head is short and successors remain */
1193 while ((first->current_length < want_first_size) &&
1194 (first->flags & VLIB_BUFFER_NEXT_PRESENT));
1198 * @brief linearize buffer chain - the first buffer is filled, if needed,
1199 * buffers are allocated and filled, returns free space in last buffer or
1200 * negative on failure
1202 * @param[in] vm - vlib_main
1203 * @param[in,out] first - first buffer in chain
/* Rewrite the chain starting at 'first' so every buffer is packed full
 * (data copied forward into freshly allocated buffers as needed), then
 * free the original second-and-onward buffers. Returns the free space
 * remaining in the last buffer, or negative on allocation failure.
 *
 * NOTE(review): this dump has dropped lines (return type, braces, the
 * early-exit 'return' after the failed-alloc cleanup, loop braces) --
 * confirm against the pristine source before modifying. */
1206 vlib_buffer_chain_linearize (vlib_main_t * vm, vlib_buffer_t * first)
1208 vlib_buffer_t *b = first;
1209 vlib_buffer_free_list_t *fl =
1210 vlib_buffer_get_free_list (vm, vlib_buffer_get_free_list_index (b));
1211 u32 buf_len = fl->n_data_bytes;
1212 // free buffer chain starting from the second buffer
1213 int free_count = (b->flags & VLIB_BUFFER_NEXT_PRESENT) != 0;
/* NOTE(review): next_buffer is read even when NEXT_PRESENT is clear;
 * harmless because free_count is 0 then, but it reads a stale field */
1214 u32 chain_to_free = b->next_buffer;
1216 u32 len = vlib_buffer_length_in_chain (vm, b);
1217 u32 free_len = buf_len - b->current_data - b->current_length;
1218 int alloc_len = clib_max (len - free_len, 0); //use the free len in the first buffer
/* round up: number of extra full-size buffers needed to hold the rest */
1219 int n_buffers = (alloc_len + buf_len - 1) / buf_len;
/* NOTE(review): VLA of size 0 when the chain already fits in 'first'
 * is undefined behavior in C -- worth guarding upstream */
1220 u32 new_buffers[n_buffers];
1222 u32 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
/* partial allocation: give back what we got and fail (negative return) */
1223 if (n_alloc != n_buffers)
1225 vlib_buffer_free_no_next (vm, new_buffers, n_alloc);
/* walk source buffers 's' through the old chain, packing into 'b' */
1229 vlib_buffer_t *s = b;
1230 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
1232 s = vlib_get_buffer (vm, s->next_buffer);
1233 int d_free_len = buf_len - b->current_data - b->current_length;
1234 ASSERT (d_free_len >= 0);
1235 // chain buf and split write
/* copy what fits into the current destination buffer... */
1236 u32 copy_len = clib_min (d_free_len, s->current_length);
1237 u8 *d = vlib_buffer_put_uninit (b, copy_len);
1238 clib_memcpy (d, vlib_buffer_get_current (s), copy_len);
1239 int rest = s->current_length - copy_len;
/* ...and spill the remainder into the next pre-allocated buffer */
1243 ASSERT (vlib_buffer_get_tail (b) == b->data + buf_len);
1244 ASSERT (n_buffers > 0);
1245 b = vlib_buffer_chain_buffer (vm, b, new_buffers[--n_buffers]);
1246 //make full use of the new buffers
1247 b->current_data = 0;
1248 d = vlib_buffer_put_uninit (b, rest);
1249 clib_memcpy (d, vlib_buffer_get_current (s) + copy_len, rest);
/* release the original tail chain (no-op when free_count == 0) */
1252 vlib_buffer_free (vm, &chain_to_free, free_count);
/* cached total is stale after repacking */
1253 b->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1254 if (b == first) /* no buffers added */
1255 b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
/* invariants: payload length preserved, every new buffer consumed */
1256 ASSERT (len == vlib_buffer_length_in_chain (vm, first));
1257 ASSERT (n_buffers == 0);
1258 return buf_len - b->current_data - b->current_length;
1261 #endif /* included_vlib_buffer_funcs_h */
1264 * fd.io coding-style-patch-verification: ON
1267 * eval: (c-set-style "gnu")