/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * buffer_funcs.h: VLIB buffer related functions/inlines
 *
 * Copyright (c) 2008 Eliot Dresselhaus
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
#ifndef included_vlib_buffer_funcs_h
#define included_vlib_buffer_funcs_h

#include <vppinfra/hash.h>
#include <vppinfra/fifo.h>
#include <vlib/buffer.h>
#include <vlib/physmem_funcs.h>
#include <vlib/main.h>
#include <vlib/node.h>
/** \file
    vlib buffer access methods.
*/
55 /** \brief Translate buffer index into buffer pointer
57 @param vm - (vlib_main_t *) vlib main data structure pointer
58 @param buffer_index - (u32) buffer index
59 @return - (vlib_buffer_t *) buffer pointer
61 always_inline vlib_buffer_t *
62 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
64 vlib_buffer_main_t *bm = &buffer_main;
65 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
66 ASSERT (offset < bm->buffer_mem_size);
68 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
71 /** \brief Translate array of buffer indices into buffer pointers with offset
73 @param vm - (vlib_main_t *) vlib main data structure pointer
74 @param bi - (u32 *) array of buffer indices
75 @param b - (void **) array to store buffer pointers
76 @param count - (uword) number of elements
77 @param offset - (i32) offset applied to each pointer
79 static_always_inline void
80 vlib_get_buffers_with_offset (vlib_main_t * vm, u32 * bi, void **b, int count,
83 #ifdef CLIB_HAVE_VEC256
84 u64x4 off = u64x4_splat (buffer_main.buffer_mem_start + offset);
85 /* if count is not const, compiler will not unroll while loop
86 se we maintain two-in-parallel variant */
89 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
90 u64x4 b1 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi + 4));
91 /* shift and add to get vlib_buffer_t pointer */
92 u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
93 u64x4_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 4);
101 #ifdef CLIB_HAVE_VEC256
102 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
103 /* shift and add to get vlib_buffer_t pointer */
104 u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
105 #elif defined (CLIB_HAVE_VEC128)
106 u64x2 off = u64x2_splat (buffer_main.buffer_mem_start + offset);
107 u32x4 bi4 = u32x4_load_unaligned (bi);
108 u64x2 b0 = u32x4_extend_to_u64x2 ((u32x4) bi4);
109 #if defined (__aarch64__)
110 u64x2 b1 = u32x4_extend_to_u64x2_high ((u32x4) bi4);
112 bi4 = u32x4_shuffle (bi4, 2, 3, 0, 1);
113 u64x2 b1 = u32x4_extend_to_u64x2 ((u32x4) bi4);
115 u64x2_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
116 u64x2_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 2);
118 b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
119 b[1] = ((u8 *) vlib_get_buffer (vm, bi[1])) + offset;
120 b[2] = ((u8 *) vlib_get_buffer (vm, bi[2])) + offset;
121 b[3] = ((u8 *) vlib_get_buffer (vm, bi[3])) + offset;
129 b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
136 /** \brief Translate array of buffer indices into buffer pointers
138 @param vm - (vlib_main_t *) vlib main data structure pointer
139 @param bi - (u32 *) array of buffer indices
140 @param b - (vlib_buffer_t **) array to store buffer pointers
141 @param count - (uword) number of elements
144 static_always_inline void
145 vlib_get_buffers (vlib_main_t * vm, u32 * bi, vlib_buffer_t ** b, int count)
147 vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
150 /** \brief Translate buffer pointer into buffer index
152 @param vm - (vlib_main_t *) vlib main data structure pointer
153 @param p - (void *) buffer pointer
154 @return - (u32) buffer index
158 vlib_get_buffer_index (vlib_main_t * vm, void *p)
160 vlib_buffer_main_t *bm = &buffer_main;
161 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
162 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
163 ASSERT (offset < bm->buffer_mem_size);
164 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
165 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
168 /** \brief Translate array of buffer pointers into buffer indices with offset
170 @param vm - (vlib_main_t *) vlib main data structure pointer
171 @param b - (void **) array of buffer pointers
172 @param bi - (u32 *) array to store buffer indices
173 @param count - (uword) number of elements
174 @param offset - (i32) offset applied to each pointer
176 static_always_inline void
177 vlib_get_buffer_indices_with_offset (vlib_main_t * vm, void **b, u32 * bi,
178 uword count, i32 offset)
180 #ifdef CLIB_HAVE_VEC256
181 u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
182 u64x4 off4 = u64x4_splat (buffer_main.buffer_mem_start - offset);
186 /* load 4 pointers into 256-bit register */
187 u64x4 v0 = u64x4_load_unaligned (b);
188 u64x4 v1 = u64x4_load_unaligned (b + 4);
194 v0 >>= CLIB_LOG2_CACHE_LINE_BYTES;
195 v1 >>= CLIB_LOG2_CACHE_LINE_BYTES;
197 /* permute 256-bit register so lower u32s of each buffer index are
198 * placed into lower 128-bits */
199 v2 = u32x8_permute ((u32x8) v0, mask);
200 v3 = u32x8_permute ((u32x8) v1, mask);
202 /* extract lower 128-bits and save them to the array of buffer indices */
203 u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
204 u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
212 /* equivalent non-nector implementation */
213 bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
214 bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
215 bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
216 bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
223 bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
230 /** \brief Translate array of buffer pointers into buffer indices
232 @param vm - (vlib_main_t *) vlib main data structure pointer
233 @param b - (vlib_buffer_t **) array of buffer pointers
234 @param bi - (u32 *) array to store buffer indices
235 @param count - (uword) number of elements
237 static_always_inline void
238 vlib_get_buffer_indices (vlib_main_t * vm, vlib_buffer_t ** b, u32 * bi,
241 vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
244 /** \brief Get next buffer in buffer linklist, or zero for end of list.
246 @param vm - (vlib_main_t *) vlib main data structure pointer
247 @param b - (void *) buffer pointer
248 @return - (vlib_buffer_t *) next buffer, or NULL
250 always_inline vlib_buffer_t *
251 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
253 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
254 ? vlib_get_buffer (vm, b->next_buffer) : 0);
257 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
258 vlib_buffer_t * b_first);
260 /** \brief Get length in bytes of the buffer chain
262 @param vm - (vlib_main_t *) vlib main data structure pointer
263 @param b - (void *) buffer pointer
264 @return - (uword) length of buffer chain
267 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
269 uword len = b->current_length;
271 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
274 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
275 return len + b->total_length_not_including_first_buffer;
277 return vlib_buffer_length_in_chain_slow_path (vm, b);
280 /** \brief Get length in bytes of the buffer index buffer chain
282 @param vm - (vlib_main_t *) vlib main data structure pointer
283 @param bi - (u32) buffer index
284 @return - (uword) length of buffer chain
287 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
289 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
290 return vlib_buffer_length_in_chain (vm, b);
293 /** \brief Copy buffer contents to memory
295 @param vm - (vlib_main_t *) vlib main data structure pointer
296 @param buffer_index - (u32) buffer index
297 @param contents - (u8 *) memory, <strong>must be large enough</strong>
298 @return - (uword) length of buffer chain
301 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
303 uword content_len = 0;
309 b = vlib_get_buffer (vm, buffer_index);
310 l = b->current_length;
311 clib_memcpy_fast (contents + content_len, b->data + b->current_data, l);
313 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
315 buffer_index = b->next_buffer;
322 vlib_buffer_get_pa (vlib_main_t * vm, vlib_buffer_t * b)
324 return vlib_physmem_get_pa (vm, b->data);
328 vlib_buffer_get_current_pa (vlib_main_t * vm, vlib_buffer_t * b)
330 return vlib_buffer_get_pa (vm, b) + b->current_data;
333 /** \brief Prefetch buffer metadata by buffer index
334 The first 64 bytes of buffer contains most header information
336 @param vm - (vlib_main_t *) vlib main data structure pointer
337 @param bi - (u32) buffer index
338 @param type - LOAD, STORE. In most cases, STORE is the right answer
340 /* Prefetch buffer header given index. */
341 #define vlib_prefetch_buffer_with_index(vm,bi,type) \
343 vlib_buffer_t * _b = vlib_get_buffer (vm, bi); \
344 vlib_prefetch_buffer_header (_b, type); \
349 /* Index is unknown. */
352 /* Index is known and free/allocated. */
353 VLIB_BUFFER_KNOWN_FREE,
354 VLIB_BUFFER_KNOWN_ALLOCATED,
355 } vlib_buffer_known_state_t;
357 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
359 vlib_buffer_known_state_t
362 always_inline vlib_buffer_known_state_t
363 vlib_buffer_is_known (u32 buffer_index)
365 vlib_buffer_main_t *bm = &buffer_main;
367 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
368 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
369 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
370 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
374 vlib_buffer_set_known_state (u32 buffer_index,
375 vlib_buffer_known_state_t state)
377 vlib_buffer_main_t *bm = &buffer_main;
379 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
380 hash_set (bm->buffer_known_hash, buffer_index, state);
381 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
384 /* Validates sanity of a single buffer.
385 Returns format'ed vector with error message if any. */
386 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
390 vlib_buffer_round_size (u32 size)
392 return round_pow2 (size, sizeof (vlib_buffer_t));
395 always_inline vlib_buffer_free_list_index_t
396 vlib_buffer_get_free_list_index (vlib_buffer_t * b)
398 if (PREDICT_FALSE (b->flags & VLIB_BUFFER_NON_DEFAULT_FREELIST))
399 return b->free_list_index;
405 vlib_buffer_set_free_list_index (vlib_buffer_t * b,
406 vlib_buffer_free_list_index_t index)
408 if (PREDICT_FALSE (index))
410 b->flags |= VLIB_BUFFER_NON_DEFAULT_FREELIST;
411 b->free_list_index = index;
414 b->flags &= ~VLIB_BUFFER_NON_DEFAULT_FREELIST;
417 /** \brief Allocate buffers from specific freelist into supplied array
419 @param vm - (vlib_main_t *) vlib main data structure pointer
420 @param buffers - (u32 * ) buffer index array
421 @param n_buffers - (u32) number of buffers requested
422 @return - (u32) number of buffers actually allocated, may be
423 less than the number requested or zero
426 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
429 vlib_buffer_free_list_index_t index)
431 vlib_buffer_main_t *bm = &buffer_main;
432 vlib_buffer_free_list_t *fl;
436 ASSERT (bm->cb.vlib_buffer_fill_free_list_cb);
438 fl = pool_elt_at_index (vm->buffer_free_list_pool, index);
440 len = vec_len (fl->buffers);
442 if (PREDICT_FALSE (len < n_buffers))
444 bm->cb.vlib_buffer_fill_free_list_cb (vm, fl, n_buffers);
445 if (PREDICT_FALSE ((len = vec_len (fl->buffers)) == 0))
448 /* even if fill free list didn't manage to refill free list
449 we should give what we have */
450 n_buffers = clib_min (len, n_buffers);
452 /* following code is intentionaly duplicated to allow compiler
453 to optimize fast path when n_buffers is constant value */
454 src = fl->buffers + len - n_buffers;
455 clib_memcpy_fast (buffers, src, n_buffers * sizeof (u32));
456 _vec_len (fl->buffers) -= n_buffers;
458 /* Verify that buffers are known free. */
459 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
460 VLIB_BUFFER_KNOWN_FREE);
465 src = fl->buffers + len - n_buffers;
466 clib_memcpy_fast (buffers, src, n_buffers * sizeof (u32));
467 _vec_len (fl->buffers) -= n_buffers;
469 /* Verify that buffers are known free. */
470 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
471 VLIB_BUFFER_KNOWN_FREE);
476 /** \brief Allocate buffers into supplied array
478 @param vm - (vlib_main_t *) vlib main data structure pointer
479 @param buffers - (u32 * ) buffer index array
480 @param n_buffers - (u32) number of buffers requested
481 @return - (u32) number of buffers actually allocated, may be
482 less than the number requested or zero
485 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
487 return vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
488 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
491 /** \brief Allocate buffers into ring
493 @param vm - (vlib_main_t *) vlib main data structure pointer
494 @param buffers - (u32 * ) buffer index ring
495 @param start - (u32) first slot in the ring
496 @param ring_size - (u32) ring size
497 @param n_buffers - (u32) number of buffers requested
498 @return - (u32) number of buffers actually allocated, may be
499 less than the number requested or zero
502 vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
503 u32 ring_size, u32 n_buffers)
507 ASSERT (n_buffers <= ring_size);
509 if (PREDICT_TRUE (start + n_buffers <= ring_size))
510 return vlib_buffer_alloc (vm, ring + start, n_buffers);
512 n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
514 if (PREDICT_TRUE (n_alloc == ring_size - start))
515 n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
520 /** \brief Free buffers
521 Frees the entire buffer chain for each buffer
523 @param vm - (vlib_main_t *) vlib main data structure pointer
524 @param buffers - (u32 * ) buffer index array
525 @param n_buffers - (u32) number of buffers to free
529 vlib_buffer_free (vlib_main_t * vm,
530 /* pointer to first buffer */
532 /* number of buffers to free */
535 vlib_buffer_main_t *bm = &buffer_main;
537 ASSERT (bm->cb.vlib_buffer_free_cb);
539 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
542 /** \brief Free buffers, does not free the buffer chain for each buffer
544 @param vm - (vlib_main_t *) vlib main data structure pointer
545 @param buffers - (u32 * ) buffer index array
546 @param n_buffers - (u32) number of buffers to free
550 vlib_buffer_free_no_next (vlib_main_t * vm,
551 /* pointer to first buffer */
553 /* number of buffers to free */
556 vlib_buffer_main_t *bm = &buffer_main;
558 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
560 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
563 /** \brief Free one buffer
564 Shorthand to free a single buffer chain.
566 @param vm - (vlib_main_t *) vlib main data structure pointer
567 @param buffer_index - (u32) buffer index to free
570 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
572 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
575 /** \brief Free buffers from ring
577 @param vm - (vlib_main_t *) vlib main data structure pointer
578 @param buffers - (u32 * ) buffer index ring
579 @param start - (u32) first slot in the ring
580 @param ring_size - (u32) ring size
581 @param n_buffers - (u32) number of buffers
584 vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
585 u32 ring_size, u32 n_buffers)
587 ASSERT (n_buffers <= ring_size);
589 if (PREDICT_TRUE (start + n_buffers <= ring_size))
591 vlib_buffer_free (vm, ring + start, n_buffers);
595 vlib_buffer_free (vm, ring + start, ring_size - start);
596 vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
600 /** \brief Free buffers from ring without freeing tail buffers
602 @param vm - (vlib_main_t *) vlib main data structure pointer
603 @param buffers - (u32 * ) buffer index ring
604 @param start - (u32) first slot in the ring
605 @param ring_size - (u32) ring size
606 @param n_buffers - (u32) number of buffers
609 vlib_buffer_free_from_ring_no_next (vlib_main_t * vm, u32 * ring, u32 start,
610 u32 ring_size, u32 n_buffers)
612 ASSERT (n_buffers <= ring_size);
614 if (PREDICT_TRUE (start + n_buffers <= ring_size))
616 vlib_buffer_free_no_next (vm, ring + start, n_buffers);
620 vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
621 vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
625 /* Add/delete buffer free lists. */
626 vlib_buffer_free_list_index_t vlib_buffer_create_free_list (vlib_main_t * vm,
630 vlib_buffer_delete_free_list (vlib_main_t * vm,
631 vlib_buffer_free_list_index_t free_list_index)
633 vlib_buffer_main_t *bm = &buffer_main;
635 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
637 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
640 /* Make sure we have at least given number of unaligned buffers. */
641 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
642 vlib_buffer_free_list_t *
644 uword n_unaligned_buffers);
646 always_inline vlib_buffer_free_list_t *
647 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
648 vlib_buffer_free_list_index_t * index)
650 vlib_buffer_free_list_index_t i;
652 *index = i = vlib_buffer_get_free_list_index (b);
653 return pool_elt_at_index (vm->buffer_free_list_pool, i);
656 always_inline vlib_buffer_free_list_t *
657 vlib_buffer_get_free_list (vlib_main_t * vm,
658 vlib_buffer_free_list_index_t free_list_index)
660 vlib_buffer_free_list_t *f;
662 f = pool_elt_at_index (vm->buffer_free_list_pool, free_list_index);
664 /* Sanity: indices must match. */
665 ASSERT (f->index == free_list_index);
671 vlib_buffer_free_list_buffer_size (vlib_main_t * vm,
672 vlib_buffer_free_list_index_t index)
674 vlib_buffer_free_list_t *f = vlib_buffer_get_free_list (vm, index);
675 return f->n_data_bytes;
678 /* Append given data to end of buffer, possibly allocating new buffers. */
679 int vlib_buffer_add_data (vlib_main_t * vm,
680 vlib_buffer_free_list_index_t free_list_index,
681 u32 * buffer_index, void *data, u32 n_data_bytes);
683 /* duplicate all buffers in chain */
684 always_inline vlib_buffer_t *
685 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
687 vlib_buffer_t *s, *d, *fd;
688 uword n_alloc, n_buffers = 1;
689 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
693 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
696 s = vlib_get_buffer (vm, s->next_buffer);
698 u32 new_buffers[n_buffers];
700 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
702 /* No guarantee that we'll get all the buffers we asked for */
703 if (PREDICT_FALSE (n_alloc < n_buffers))
706 vlib_buffer_free (vm, new_buffers, n_alloc);
712 fd = d = vlib_get_buffer (vm, new_buffers[0]);
713 d->current_data = s->current_data;
714 d->current_length = s->current_length;
715 d->flags = s->flags & flag_mask;
716 d->total_length_not_including_first_buffer =
717 s->total_length_not_including_first_buffer;
718 clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
719 clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
720 clib_memcpy_fast (vlib_buffer_get_current (d),
721 vlib_buffer_get_current (s), s->current_length);
724 for (i = 1; i < n_buffers; i++)
727 d->next_buffer = new_buffers[i];
729 s = vlib_get_buffer (vm, s->next_buffer);
730 d = vlib_get_buffer (vm, new_buffers[i]);
731 d->current_data = s->current_data;
732 d->current_length = s->current_length;
733 clib_memcpy_fast (vlib_buffer_get_current (d),
734 vlib_buffer_get_current (s), s->current_length);
735 d->flags = s->flags & flag_mask;
741 /** \brief Create a maximum of 256 clones of buffer and store them
742 in the supplied array
744 @param vm - (vlib_main_t *) vlib main data structure pointer
745 @param src_buffer - (u32) source buffer index
746 @param buffers - (u32 * ) buffer index array
747 @param n_buffers - (u16) number of buffer clones requested (<=256)
748 @param head_end_offset - (u16) offset relative to current position
749 where packet head ends
750 @return - (u16) number of buffers actually cloned, may be
751 less than the number requested or zero
754 vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
755 u16 n_buffers, u16 head_end_offset)
758 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
760 ASSERT (s->n_add_refs == 0);
762 ASSERT (n_buffers <= 256);
764 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
766 buffers[0] = src_buffer;
767 for (i = 1; i < n_buffers; i++)
770 d = vlib_buffer_copy (vm, s);
773 buffers[i] = vlib_get_buffer_index (vm, d);
779 if (PREDICT_FALSE (n_buffers == 1))
781 buffers[0] = src_buffer;
785 n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
786 vlib_buffer_get_free_list_index
789 for (i = 0; i < n_buffers; i++)
791 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
792 d->current_data = s->current_data;
793 d->current_length = head_end_offset;
794 vlib_buffer_set_free_list_index (d,
795 vlib_buffer_get_free_list_index (s));
797 d->total_length_not_including_first_buffer = s->current_length -
799 if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
801 d->total_length_not_including_first_buffer +=
802 s->total_length_not_including_first_buffer;
804 d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
805 d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
806 clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
807 clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
808 clib_memcpy_fast (vlib_buffer_get_current (d),
809 vlib_buffer_get_current (s), head_end_offset);
810 d->next_buffer = src_buffer;
812 vlib_buffer_advance (s, head_end_offset);
813 s->n_add_refs = n_buffers - 1;
814 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
816 s = vlib_get_buffer (vm, s->next_buffer);
817 s->n_add_refs = n_buffers - 1;
823 /** \brief Create multiple clones of buffer and store them
824 in the supplied array
826 @param vm - (vlib_main_t *) vlib main data structure pointer
827 @param src_buffer - (u32) source buffer index
828 @param buffers - (u32 * ) buffer index array
829 @param n_buffers - (u16) number of buffer clones requested (<=256)
830 @param head_end_offset - (u16) offset relative to current position
831 where packet head ends
832 @return - (u16) number of buffers actually cloned, may be
833 less than the number requested or zero
836 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
837 u16 n_buffers, u16 head_end_offset)
839 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
842 while (n_buffers > 256)
845 copy = vlib_buffer_copy (vm, s);
846 n_cloned += vlib_buffer_clone_256 (vm,
847 vlib_get_buffer_index (vm, copy),
848 (buffers + n_cloned),
849 256, head_end_offset);
852 n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
854 n_buffers, head_end_offset);
859 /** \brief Attach cloned tail to the buffer
861 @param vm - (vlib_main_t *) vlib main data structure pointer
862 @param head - (vlib_buffer_t *) head buffer
863 @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
867 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
868 vlib_buffer_t * tail)
870 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
871 ASSERT (vlib_buffer_get_free_list_index (head) ==
872 vlib_buffer_get_free_list_index (tail));
874 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
875 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
876 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
877 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
878 head->next_buffer = vlib_get_buffer_index (vm, tail);
879 head->total_length_not_including_first_buffer = tail->current_length +
880 tail->total_length_not_including_first_buffer;
883 clib_atomic_add_fetch (&tail->n_add_refs, 1);
885 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
887 tail = vlib_get_buffer (vm, tail->next_buffer);
892 /* Initializes the buffer as an empty packet with no chained buffers. */
894 vlib_buffer_chain_init (vlib_buffer_t * first)
896 first->total_length_not_including_first_buffer = 0;
897 first->current_length = 0;
898 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
899 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
902 /* The provided next_bi buffer index is appended to the end of the packet. */
903 always_inline vlib_buffer_t *
904 vlib_buffer_chain_buffer (vlib_main_t * vm, vlib_buffer_t * last, u32 next_bi)
906 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
907 last->next_buffer = next_bi;
908 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
909 next_buffer->current_length = 0;
910 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
914 /* Increases or decreases the packet length.
915 * It does not allocate or deallocate new buffers.
916 * Therefore, the added length must be compatible
917 * with the last buffer. */
919 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
920 vlib_buffer_t * last, i32 len)
922 last->current_length += len;
924 first->total_length_not_including_first_buffer += len;
927 /* Copy data to the end of the packet and increases its length.
928 * It does not allocate new buffers.
929 * Returns the number of copied bytes. */
931 vlib_buffer_chain_append_data (vlib_main_t * vm,
932 vlib_buffer_free_list_index_t free_list_index,
933 vlib_buffer_t * first,
934 vlib_buffer_t * last, void *data, u16 data_len)
937 vlib_buffer_free_list_buffer_size (vm, free_list_index);
938 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
939 u16 len = clib_min (data_len,
940 n_buffer_bytes - last->current_length -
942 clib_memcpy_fast (vlib_buffer_get_current (last) + last->current_length,
944 vlib_buffer_chain_increase_length (first, last, len);
948 /* Copy data to the end of the packet and increases its length.
949 * Allocates additional buffers from the free list if necessary.
950 * Returns the number of copied bytes.
951 * 'last' value is modified whenever new buffers are allocated and
952 * chained and points to the last buffer in the chain. */
954 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
955 vlib_buffer_free_list_index_t
957 vlib_buffer_t * first,
958 vlib_buffer_t ** last, void *data,
960 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
962 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
963 format_vlib_buffer_contents;
967 /* Vector of packet data. */
970 /* Number of buffers to allocate in each call to allocator. */
971 u32 min_n_buffers_each_alloc;
973 /* Buffer free list for this template. */
974 vlib_buffer_free_list_index_t free_list_index;
979 } vlib_packet_template_t;
981 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
982 vlib_packet_template_t * t);
984 void vlib_packet_template_init (vlib_main_t * vm,
985 vlib_packet_template_t * t,
987 uword n_packet_data_bytes,
988 uword min_n_buffers_each_alloc,
991 void *vlib_packet_template_get_packet (vlib_main_t * vm,
992 vlib_packet_template_t * t,
996 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
998 vec_free (t->packet_data);
1001 /* Set a buffer quickly into "uninitialized" state. We want this to
1002 be extremely cheap and arrange for all fields that need to be
1003 initialized to be in the first 128 bits of the buffer. */
1005 vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
1006 vlib_buffer_free_list_t * fl)
1008 vlib_buffer_t *src = &fl->buffer_init_template;
1010 /* Make sure vlib_buffer_t is cacheline aligned and sized */
1011 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
1012 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
1013 CLIB_CACHE_LINE_BYTES);
1014 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
1015 CLIB_CACHE_LINE_BYTES * 2);
1017 /* Make sure buffer template is sane. */
1018 ASSERT (fl->index == vlib_buffer_get_free_list_index (src));
1020 clib_memcpy_fast (STRUCT_MARK_PTR (dst, template_start),
1021 STRUCT_MARK_PTR (src, template_start),
1022 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
1023 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
1025 /* Not in the first 16 octets. */
1026 dst->n_add_refs = src->n_add_refs;
1027 vlib_buffer_set_free_list_index (dst, fl->index);
1029 /* Make sure it really worked. */
1030 #define _(f) ASSERT (dst->f == src->f);
1035 /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
1036 /* total_length_not_including_first_buffer is not in the template anymore
1037 * so it may actually not zeroed for some buffers. One option is to
1038 * uncomment the line lower (comes at a cost), the other, is to just not
1040 /* dst->total_length_not_including_first_buffer = 0; */
1041 ASSERT (dst->n_add_refs == 0);
1045 vlib_buffer_add_to_free_list (vlib_main_t * vm,
1046 vlib_buffer_free_list_t * f,
1047 u32 buffer_index, u8 do_init)
1049 vlib_buffer_pool_t *bp = vlib_buffer_pool_get (f->buffer_pool_index);
1051 b = vlib_get_buffer (vm, buffer_index);
1052 if (PREDICT_TRUE (do_init))
1053 vlib_buffer_init_for_free_list (b, f);
1054 vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
1056 if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
1058 clib_spinlock_lock (&bp->lock);
1059 /* keep last stored buffers, as they are more likely hot in the cache */
1060 vec_add_aligned (bp->buffers, f->buffers, VLIB_FRAME_SIZE,
1061 CLIB_CACHE_LINE_BYTES);
1062 vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
1063 f->n_alloc -= VLIB_FRAME_SIZE;
1064 clib_spinlock_unlock (&bp->lock);
1069 extern u32 *vlib_buffer_state_validation_lock;
1070 extern uword *vlib_buffer_state_validation_hash;
1071 extern void *vlib_buffer_state_heap;
1075 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
1081 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
1083 while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))
1086 p = hash_get (vlib_buffer_state_validation_hash, b);
1088 /* If we don't know about b, declare it to be in the expected state */
1091 hash_set (vlib_buffer_state_validation_hash, b, expected);
1095 if (p[0] != expected)
1097 void cj_stop (void);
1099 vlib_main_t *vm = &vlib_global_main;
1103 bi = vlib_get_buffer_index (vm, b);
1105 clib_mem_set_heap (oldheap);
1106 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
1107 vlib_time_now (vm), bi,
1108 p[0] ? "busy" : "free", expected ? "busy" : "free");
1112 CLIB_MEMORY_BARRIER ();
1113 *vlib_buffer_state_validation_lock = 0;
1114 clib_mem_set_heap (oldheap);
1119 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
1124 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
1126 while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))
1129 hash_set (vlib_buffer_state_validation_hash, b, expected);
1131 CLIB_MEMORY_BARRIER ();
1132 *vlib_buffer_state_validation_lock = 0;
1133 clib_mem_set_heap (oldheap);
1138 vlib_buffer_space_left_at_end (vlib_main_t * vm, vlib_buffer_t * b)
1140 return b->data + VLIB_BUFFER_DATA_SIZE -
1141 ((u8 *) vlib_buffer_get_current (b) + b->current_length);
1145 vlib_buffer_chain_linearize (vlib_main_t * vm, vlib_buffer_t * b)
1147 vlib_buffer_t *db = b, *sb, *first = b;
1149 u32 bytes_left = 0, data_size;
1150 u16 src_left, dst_left, n_buffers = 1;
1154 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
1157 data_size = VLIB_BUFFER_DATA_SIZE;
1159 dst_left = vlib_buffer_space_left_at_end (vm, b);
1161 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
1163 b = vlib_get_buffer (vm, b->next_buffer);
1164 if (b->n_add_refs > 0)
1166 bytes_left += b->current_length;
1170 /* if buffer is cloned, create completely new chain - unless everything fits
1171 * into one buffer */
1172 if (is_cloned && bytes_left >= dst_left)
1175 u32 space_needed = bytes_left - dst_left;
1178 if (vlib_buffer_alloc (vm, &tail, 1) == 0)
1183 b = vlib_get_buffer (vm, tail);
1185 while (len < space_needed)
1188 if (vlib_buffer_alloc (vm, &bi, 1) == 0)
1190 vlib_buffer_free_one (vm, tail);
1193 b->flags = VLIB_BUFFER_NEXT_PRESENT;
1194 b->next_buffer = bi;
1195 b = vlib_get_buffer (vm, bi);
1199 sb = vlib_get_buffer (vm, first->next_buffer);
1200 to_free = first->next_buffer;
1201 first->next_buffer = tail;
1204 sb = vlib_get_buffer (vm, first->next_buffer);
1206 src_left = sb->current_length;
1207 sp = vlib_buffer_get_current (sb);
1208 dp = vlib_buffer_get_tail (db);
1217 db->current_data = 0;
1218 db->current_length = dp - (u8 *) vlib_buffer_get_current (db);
1219 ASSERT (db->flags & VLIB_BUFFER_NEXT_PRESENT);
1220 db = vlib_get_buffer (vm, db->next_buffer);
1221 dst_left = data_size;
1225 while (src_left == 0)
1227 ASSERT (sb->flags & VLIB_BUFFER_NEXT_PRESENT);
1228 sb = vlib_get_buffer (vm, sb->next_buffer);
1229 src_left = sb->current_length;
1230 sp = vlib_buffer_get_current (sb);
1233 bytes_to_copy = clib_min (dst_left, src_left);
1238 bytes_to_copy = clib_min (bytes_to_copy, sp - dp);
1240 clib_memcpy_fast (dp, sp, bytes_to_copy);
1243 src_left -= bytes_to_copy;
1244 dst_left -= bytes_to_copy;
1245 dp += bytes_to_copy;
1246 sp += bytes_to_copy;
1247 bytes_left -= bytes_to_copy;
1250 db->current_data = 0;
1251 db->current_length = dp - (u8 *) vlib_buffer_get_current (db);
1253 if (is_cloned && to_free)
1254 vlib_buffer_free_one (vm, to_free);
1257 if (db->flags & VLIB_BUFFER_NEXT_PRESENT)
1258 vlib_buffer_free_one (vm, db->next_buffer);
1259 db->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1262 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
1264 b = vlib_get_buffer (vm, b->next_buffer);
1269 first->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
#endif /* included_vlib_buffer_funcs_h */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */