/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * buffer_funcs.h: VLIB buffer related functions/inlines
 *
 * Copyright (c) 2008 Eliot Dresselhaus
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
/* vlib buffer access methods. */
50 /** \brief Translate buffer index into buffer pointer
52 @param vm - (vlib_main_t *) vlib main data structure pointer
53 @param buffer_index - (u32) buffer index
54 @return - (vlib_buffer_t *) buffer pointer
56 always_inline vlib_buffer_t *
57 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
59 vlib_buffer_main_t *bm = vm->buffer_main;
60 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
61 ASSERT (offset < bm->buffer_mem_size);
63 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
66 /** \brief Translate buffer pointer into buffer index
68 @param vm - (vlib_main_t *) vlib main data structure pointer
69 @param p - (void *) buffer pointer
70 @return - (u32) buffer index
74 vlib_get_buffer_index (vlib_main_t * vm, void *p)
76 vlib_buffer_main_t *bm = vm->buffer_main;
77 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
78 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
79 ASSERT (offset < bm->buffer_mem_size);
80 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
81 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
84 /** \brief Get next buffer in buffer linklist, or zero for end of list.
86 @param vm - (vlib_main_t *) vlib main data structure pointer
87 @param b - (void *) buffer pointer
88 @return - (vlib_buffer_t *) next buffer, or NULL
90 always_inline vlib_buffer_t *
91 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
93 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
94 ? vlib_get_buffer (vm, b->next_buffer) : 0);
97 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
98 vlib_buffer_t * b_first);
100 /** \brief Get length in bytes of the buffer chain
102 @param vm - (vlib_main_t *) vlib main data structure pointer
103 @param b - (void *) buffer pointer
104 @return - (uword) length of buffer chain
107 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
109 uword len = b->current_length;
111 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
114 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
115 return len + b->total_length_not_including_first_buffer;
117 return vlib_buffer_length_in_chain_slow_path (vm, b);
120 /** \brief Get length in bytes of the buffer index buffer chain
122 @param vm - (vlib_main_t *) vlib main data structure pointer
123 @param bi - (u32) buffer index
124 @return - (uword) length of buffer chain
127 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
129 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
130 return vlib_buffer_length_in_chain (vm, b);
133 /** \brief Copy buffer contents to memory
135 @param vm - (vlib_main_t *) vlib main data structure pointer
136 @param buffer_index - (u32) buffer index
137 @param contents - (u8 *) memory, <strong>must be large enough</strong>
138 @return - (uword) length of buffer chain
141 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
143 uword content_len = 0;
149 b = vlib_get_buffer (vm, buffer_index);
150 l = b->current_length;
151 clib_memcpy (contents + content_len, b->data + b->current_data, l);
153 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
155 buffer_index = b->next_buffer;
161 /* Return physical address of buffer->data start. */
163 vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
165 vlib_buffer_main_t *bm = vm->buffer_main;
166 vlib_buffer_t *b = vlib_get_buffer (vm, buffer_index);
167 vlib_buffer_pool_t *pool = vec_elt_at_index (bm->buffer_pools,
168 b->buffer_pool_index);
170 return vlib_physmem_virtual_to_physical (vm, pool->physmem_region, b->data);
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)		\
  do {								\
    vlib_buffer_t *_b = vlib_get_buffer (vm, bi);		\
    vlib_prefetch_buffer_header (_b, type);			\
  } while (0)
/* Iterate over known allocated vlib bufs. You probably do not want
   to do this!
   @param vm the vlib_main_t
   @param bi found allocated buffer index
   @param body operation to perform on buffer index
   function executes body for each allocated buffer index
   NOTE(review): walks the debug known-state hash, so it only sees
   buffers that have been recorded there — confirm before relying on it.
*/
#define vlib_buffer_foreach_allocated(vm,bi,body)                \
do {                                                             \
  vlib_main_t * _vmain = (vm);                                   \
  vlib_buffer_main_t * _bmain = &_vmain->buffer_main;            \
  hash_pair_t * _vbpair;                                         \
  hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({       \
    if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) {      \
      (bi) = _vbpair->key;                                       \
/* Known allocation state of a buffer index, tracked (in debug builds)
   in bm->buffer_known_hash and checked by vlib_buffer_validate_alloc_free.
   The truncated declaration is restored here: the 'typedef enum'
   opener and the VLIB_BUFFER_UNKNOWN member (referenced by
   vlib_buffer_is_known below) were lost. */
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
219 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
221 vlib_buffer_known_state_t
224 always_inline vlib_buffer_known_state_t
225 vlib_buffer_is_known (u32 buffer_index)
227 vlib_buffer_main_t *bm = vlib_global_main.buffer_main;
229 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
230 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
231 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
232 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
236 vlib_buffer_set_known_state (u32 buffer_index,
237 vlib_buffer_known_state_t state)
239 vlib_buffer_main_t *bm = vlib_global_main.buffer_main;
241 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
242 hash_set (bm->buffer_known_hash, buffer_index, state);
243 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
246 /* Validates sanity of a single buffer.
247 Returns format'ed vector with error message if any. */
248 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
252 vlib_buffer_round_size (u32 size)
254 return round_pow2 (size, sizeof (vlib_buffer_t));
258 vlib_buffer_get_free_list_index (vlib_buffer_t * b)
260 return b->flags & VLIB_BUFFER_FREE_LIST_INDEX_MASK;
264 vlib_buffer_set_free_list_index (vlib_buffer_t * b, u32 index)
266 /* if there is an need for more free lists we should consider
267 storig data in the 2nd cacheline */
268 ASSERT (VLIB_BUFFER_FREE_LIST_INDEX_MASK & 1);
269 ASSERT (index <= VLIB_BUFFER_FREE_LIST_INDEX_MASK);
271 b->flags &= ~VLIB_BUFFER_FREE_LIST_INDEX_MASK;
272 b->flags |= index & VLIB_BUFFER_FREE_LIST_INDEX_MASK;
/** \brief Allocate buffers from specific freelist into supplied array

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u32) number of buffers requested
    @return - (u32) number of buffers actually allocated, may be
    less than the number requested or zero
*/
vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
				  u32 n_buffers, u32 free_list_index)
  vlib_buffer_main_t *bm = vm->buffer_main;
  vlib_buffer_free_list_t *fl;

  /* Refill is delegated to the registered buffer-manager callback. */
  ASSERT (bm->cb.vlib_buffer_fill_free_list_cb);

  fl = pool_elt_at_index (bm->buffer_free_list_pool, free_list_index);

  len = vec_len (fl->buffers);

  /* Slow path: not enough cached indices, ask the callback to refill
     and hand out whatever is available afterwards. */
  if (PREDICT_FALSE (len < n_buffers))
      bm->cb.vlib_buffer_fill_free_list_cb (vm, fl, n_buffers);
      len = vec_len (fl->buffers);

      /* even if fill free list didn't manage to refill free list
         we should give what we have */
      n_buffers = clib_min (len, n_buffers);

      /* following code is intentionally duplicated to allow compiler
         to optimize fast path when n_buffers is constant value */
      /* Pop n_buffers indices off the tail of the free vector. */
      src = fl->buffers + len - n_buffers;
      clib_memcpy (buffers, src, n_buffers * sizeof (u32));
      _vec_len (fl->buffers) -= n_buffers;

      /* Verify that buffers are known free. */
      vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
				       VLIB_BUFFER_KNOWN_FREE);

  /* Fast path: same pop sequence, duplicated on purpose (see above). */
  src = fl->buffers + len - n_buffers;
  clib_memcpy (buffers, src, n_buffers * sizeof (u32));
  _vec_len (fl->buffers) -= n_buffers;

  /* Verify that buffers are known free. */
  vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
				   VLIB_BUFFER_KNOWN_FREE);
332 /** \brief Allocate buffers into supplied array
334 @param vm - (vlib_main_t *) vlib main data structure pointer
335 @param buffers - (u32 * ) buffer index array
336 @param n_buffers - (u32) number of buffers requested
337 @return - (u32) number of buffers actually allocated, may be
338 less than the number requested or zero
341 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
343 return vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
344 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
347 /** \brief Allocate buffers into ring
349 @param vm - (vlib_main_t *) vlib main data structure pointer
350 @param buffers - (u32 * ) buffer index ring
351 @param start - (u32) first slot in the ring
352 @param ring_size - (u32) ring size
353 @param n_buffers - (u32) number of buffers requested
354 @return - (u32) number of buffers actually allocated, may be
355 less than the number requested or zero
358 vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
359 u32 ring_size, u32 n_buffers)
363 ASSERT (n_buffers <= ring_size);
365 if (PREDICT_TRUE (start + n_buffers <= ring_size))
366 return vlib_buffer_alloc (vm, ring + start, n_buffers);
368 n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
370 if (PREDICT_TRUE (n_alloc == ring_size - start))
371 n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
376 /** \brief Free buffers
377 Frees the entire buffer chain for each buffer
379 @param vm - (vlib_main_t *) vlib main data structure pointer
380 @param buffers - (u32 * ) buffer index array
381 @param n_buffers - (u32) number of buffers to free
385 vlib_buffer_free (vlib_main_t * vm,
386 /* pointer to first buffer */
388 /* number of buffers to free */
391 vlib_buffer_main_t *bm = vm->buffer_main;
393 ASSERT (bm->cb.vlib_buffer_free_cb);
395 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
398 /** \brief Free buffers, does not free the buffer chain for each buffer
400 @param vm - (vlib_main_t *) vlib main data structure pointer
401 @param buffers - (u32 * ) buffer index array
402 @param n_buffers - (u32) number of buffers to free
406 vlib_buffer_free_no_next (vlib_main_t * vm,
407 /* pointer to first buffer */
409 /* number of buffers to free */
412 vlib_buffer_main_t *bm = vm->buffer_main;
414 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
416 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
419 /** \brief Free one buffer
420 Shorthand to free a single buffer chain.
422 @param vm - (vlib_main_t *) vlib main data structure pointer
423 @param buffer_index - (u32) buffer index to free
426 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
428 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
431 /* Add/delete buffer free lists. */
432 u32 vlib_buffer_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
435 vlib_buffer_delete_free_list (vlib_main_t * vm, u32 free_list_index)
437 vlib_buffer_main_t *bm = vm->buffer_main;
439 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
441 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
444 /* Find already existing public free list with given size or create one. */
445 u32 vlib_buffer_get_or_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
448 /* Merge two free lists */
449 void vlib_buffer_merge_free_lists (vlib_buffer_free_list_t * dst,
450 vlib_buffer_free_list_t * src);
452 /* Make sure we have at least given number of unaligned buffers. */
453 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
454 vlib_buffer_free_list_t *
456 uword n_unaligned_buffers);
459 vlib_buffer_get_free_list_with_size (vlib_main_t * vm, u32 size)
461 vlib_buffer_main_t *bm = vm->buffer_main;
463 size = vlib_buffer_round_size (size);
464 uword *p = hash_get (bm->free_list_by_size, size);
465 return p ? p[0] : ~0;
468 always_inline vlib_buffer_free_list_t *
469 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
472 vlib_buffer_main_t *bm = vm->buffer_main;
475 *index = i = vlib_buffer_get_free_list_index (b);
476 return pool_elt_at_index (bm->buffer_free_list_pool, i);
479 always_inline vlib_buffer_free_list_t *
480 vlib_buffer_get_free_list (vlib_main_t * vm, u32 free_list_index)
482 vlib_buffer_main_t *bm = vm->buffer_main;
483 vlib_buffer_free_list_t *f;
485 f = pool_elt_at_index (bm->buffer_free_list_pool, free_list_index);
487 /* Sanity: indices must match. */
488 ASSERT (f->index == free_list_index);
494 vlib_buffer_free_list_buffer_size (vlib_main_t * vm, u32 free_list_index)
496 vlib_buffer_free_list_t *f =
497 vlib_buffer_get_free_list (vm, free_list_index);
498 return f->n_data_bytes;
501 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
503 /* Reasonably fast buffer copy routine. */
505 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
526 /* Append given data to end of buffer, possibly allocating new buffers. */
527 u32 vlib_buffer_add_data (vlib_main_t * vm,
529 u32 buffer_index, void *data, u32 n_data_bytes);
/* duplicate all buffers in chain: deep-copies metadata, opaque area and
   payload of every segment into freshly allocated buffers.
   Returns the head of the new chain, or 0 when allocation falls short. */
always_inline vlib_buffer_t *
vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
  vlib_buffer_t *s, *d, *fd;
  uword n_alloc, n_buffers = 1;
  /* Only chain-related flags are propagated to the copies. */
  u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;

  /* Count segments in the source chain. */
  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
      s = vlib_get_buffer (vm, s->next_buffer);

  /* One new index per segment (C99 VLA). */
  u32 new_buffers[n_buffers];

  n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);

  /* No guarantee that we'll get all the buffers we asked for */
  if (PREDICT_FALSE (n_alloc < n_buffers))
      vlib_buffer_free (vm, new_buffers, n_alloc);

  /* First segment: copy metadata, opaque area and payload;
     'fd' remembers the head of the new chain for the return value. */
  fd = d = vlib_get_buffer (vm, new_buffers[0]);
  d->current_data = s->current_data;
  d->current_length = s->current_length;
  d->flags = s->flags & flag_mask;
  d->total_length_not_including_first_buffer =
    s->total_length_not_including_first_buffer;
  clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
  clib_memcpy (vlib_buffer_get_current (d),
	       vlib_buffer_get_current (s), s->current_length);

  /* Remaining segments: link each new buffer and copy its payload. */
  for (i = 1; i < n_buffers; i++)
      d->next_buffer = new_buffers[i];
      s = vlib_get_buffer (vm, s->next_buffer);
      d = vlib_get_buffer (vm, new_buffers[i]);
      d->current_data = s->current_data;
      d->current_length = s->current_length;
      clib_memcpy (vlib_buffer_get_current (d),
		   vlib_buffer_get_current (s), s->current_length);
      d->flags = s->flags & flag_mask;
/** \brief Create a maximum of 256 clones of buffer and store them
    in the supplied array

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param src_buffer - (u32) source buffer index
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u16) number of buffer clones requested (<=256)
    @param head_end_offset - (u16) offset relative to current position
    where packet head ends
    @return - (u16) number of buffers actually cloned, may be
    less than the number requested or zero
*/
vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
		       u16 n_buffers, u16 head_end_offset)
  vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);

  /* Source must not already be shared; refcount fan-out is set below. */
  ASSERT (s->n_add_refs == 0);
  ASSERT (n_buffers <= 256);

  /* Head too small to be worth sharing: fall back to deep copies. */
  if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
      buffers[0] = src_buffer;
      for (i = 1; i < n_buffers; i++)
	  d = vlib_buffer_copy (vm, s);
	  buffers[i] = vlib_get_buffer_index (vm, d);

  /* Single "clone" requested: just hand back the source itself. */
  if (PREDICT_FALSE (n_buffers == 1))
      buffers[0] = src_buffer;

  /* Allocate one new head buffer per clone from the source's free list. */
  n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
						vlib_buffer_get_free_list_index

  /* Each clone gets a private copy of the packet head and chains the
     shared source buffer as its tail. */
  for (i = 0; i < n_buffers; i++)
      vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
      d->current_data = s->current_data;
      d->current_length = head_end_offset;
      vlib_buffer_set_free_list_index (d,
				       vlib_buffer_get_free_list_index (s));
      d->total_length_not_including_first_buffer =
	s->total_length_not_including_first_buffer + s->current_length -
      d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
      d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
      clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
      clib_memcpy (vlib_buffer_get_current (d), vlib_buffer_get_current (s),
      d->next_buffer = src_buffer;

  /* Source now starts where the private heads end, and every segment of
     the shared tail carries the clone reference count. */
  vlib_buffer_advance (s, head_end_offset);
  s->n_add_refs = n_buffers - 1;
  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
      s = vlib_get_buffer (vm, s->next_buffer);
      s->n_add_refs = n_buffers - 1;
/** \brief Create multiple clones of buffer and store them
    in the supplied array

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param src_buffer - (u32) source buffer index
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u16) number of buffer clones requested (<=256)
    @param head_end_offset - (u16) offset relative to current position
    where packet head ends
    @return - (u16) number of buffers actually cloned, may be
    less than the number requested or zero
*/
vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
		   u16 n_buffers, u16 head_end_offset)
  vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);

  /* n_add_refs is an 8-bit counter, so clone in batches of at most 256:
     each extra batch clones a fresh deep copy of the source. */
  while (n_buffers > 256)
      copy = vlib_buffer_copy (vm, s);
      n_cloned += vlib_buffer_clone_256 (vm,
					 vlib_get_buffer_index (vm, copy),
					 (buffers + n_cloned),
					 256, head_end_offset);

  /* Final (or only) batch clones the original source buffer. */
  n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
				     n_buffers, head_end_offset);
/** \brief Attach cloned tail to the buffer

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param head - (vlib_buffer_t *) head buffer
    @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
*/
vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
			  vlib_buffer_t * tail)
  /* Head must not already have a tail, and both must share a free list. */
  ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
  ASSERT (vlib_buffer_get_free_list_index (head) ==
	  vlib_buffer_get_free_list_index (tail));

  head->flags |= VLIB_BUFFER_NEXT_PRESENT;
  head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
  head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
  /* Head's cached total is valid only if the tail's already was. */
  head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
  head->next_buffer = vlib_get_buffer_index (vm, tail);
  head->total_length_not_including_first_buffer = tail->current_length +
    tail->total_length_not_including_first_buffer;

  /* Atomically bump the refcount of every segment in the shared tail. */
  __sync_add_and_fetch (&tail->n_add_refs, 1);

  if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
      tail = vlib_get_buffer (vm, tail->next_buffer);
733 /* Initializes the buffer as an empty packet with no chained buffers. */
735 vlib_buffer_chain_init (vlib_buffer_t * first)
737 first->total_length_not_including_first_buffer = 0;
738 first->current_length = 0;
739 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
740 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
743 /* The provided next_bi buffer index is appended to the end of the packet. */
744 always_inline vlib_buffer_t *
745 vlib_buffer_chain_buffer (vlib_main_t * vm,
746 vlib_buffer_t * first,
747 vlib_buffer_t * last, u32 next_bi)
749 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
750 last->next_buffer = next_bi;
751 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
752 next_buffer->current_length = 0;
753 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
757 /* Increases or decreases the packet length.
758 * It does not allocate or deallocate new buffers.
759 * Therefore, the added length must be compatible
760 * with the last buffer. */
762 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
763 vlib_buffer_t * last, i32 len)
765 last->current_length += len;
767 first->total_length_not_including_first_buffer += len;
770 /* Copy data to the end of the packet and increases its length.
771 * It does not allocate new buffers.
772 * Returns the number of copied bytes. */
774 vlib_buffer_chain_append_data (vlib_main_t * vm,
776 vlib_buffer_t * first,
777 vlib_buffer_t * last, void *data, u16 data_len)
780 vlib_buffer_free_list_buffer_size (vm, free_list_index);
781 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
782 u16 len = clib_min (data_len,
783 n_buffer_bytes - last->current_length -
785 clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
787 vlib_buffer_chain_increase_length (first, last, len);
791 /* Copy data to the end of the packet and increases its length.
792 * Allocates additional buffers from the free list if necessary.
793 * Returns the number of copied bytes.
794 * 'last' value is modified whenever new buffers are allocated and
795 * chained and points to the last buffer in the chain. */
797 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
799 vlib_buffer_t * first,
800 vlib_buffer_t ** last,
801 void *data, u16 data_len);
802 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
804 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
805 format_vlib_buffer_contents;
809 /* Vector of packet data. */
812 /* Number of buffers to allocate in each call to physmem
814 u32 min_n_buffers_each_physmem_alloc;
816 /* Buffer free list for this template. */
820 } vlib_packet_template_t;
822 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
823 vlib_packet_template_t * t);
825 void vlib_packet_template_init (vlib_main_t * vm,
826 vlib_packet_template_t * t,
828 uword n_packet_data_bytes,
829 uword min_n_buffers_each_physmem_alloc,
832 void *vlib_packet_template_get_packet (vlib_main_t * vm,
833 vlib_packet_template_t * t,
837 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
839 vec_free (t->packet_data);
/* Number of bytes still available to unserialize: the remainder of the
   current stream buffer, plus the unread tail of the last chain, plus
   every chain queued in the rx fifo. */
unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
  serialize_stream_t *s = &m->stream;
  /* Serialize main stores its vlib context in the opaque slot. */
  vlib_serialize_buffer_main_t *sm
    = uword_to_pointer (m->stream.data_function_opaque,
			vlib_serialize_buffer_main_t *);
  vlib_main_t *vm = sm->vlib_main;

  n = s->n_buffer_bytes - s->current_buffer_index;
  if (sm->last_buffer != ~0)
      vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
      /* Add the lengths of the segments after the current position. */
      while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
	  b = vlib_get_buffer (vm, b->next_buffer);
	  n += b->current_length;

  /* Add every buffered-but-unread chain. */
  clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
    n += vlib_buffer_index_length_in_chain (vm, f[0]);
/* Set a buffer quickly into "uninitialized" state. We want this to
   be extremely cheap and arrange for all fields that need to be
   initialized to be in the first 128 bits of the buffer. */
vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
				vlib_buffer_free_list_t * fl)
  /* Free list carries a template buffer stamped onto each freed buffer. */
  vlib_buffer_t *src = &fl->buffer_init_template;

  /* Make sure vlib_buffer_t is cacheline aligned and sized */
  ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
  ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
	  CLIB_CACHE_LINE_BYTES);
  ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
	  CLIB_CACHE_LINE_BYTES * 2);

  /* Make sure buffer template is sane. */
  ASSERT (fl->index == vlib_buffer_get_free_list_index (src));

  /* Bulk-copy the template span [template_start, template_end). */
  clib_memcpy (STRUCT_MARK_PTR (dst, template_start),
	       STRUCT_MARK_PTR (src, template_start),
	       STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
	       STRUCT_OFFSET_OF (vlib_buffer_t, template_start));

  /* Not in the first 16 octets. */
  dst->n_add_refs = src->n_add_refs;

  /* Make sure it really worked. */
#define _(f) ASSERT (dst->f == src->f);

  /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
  /* total_length_not_including_first_buffer is not in the template anymore
   * so it may actually not be zeroed for some buffers. One option is to
   * uncomment the line lower (comes at a cost), the other, is to just not
   */
  /* dst->total_length_not_including_first_buffer = 0; */
  ASSERT (dst->n_add_refs == 0);
/* Return one buffer index to a free list, optionally re-stamping it
   from the free list's init template first. */
vlib_buffer_add_to_free_list (vlib_main_t * vm,
			      vlib_buffer_free_list_t * f,
			      u32 buffer_index, u8 do_init)
  b = vlib_get_buffer (vm, buffer_index);
  if (PREDICT_TRUE (do_init))
    vlib_buffer_init_for_free_list (b, f);
  vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);

  /* Per-thread cache grew past 4 frames: spill one frame of the oldest
     entries to the main thread's global list under its spinlock. */
  if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
      vlib_buffer_free_list_t *mf;
      mf = vlib_buffer_get_free_list (vlib_mains[0], f->index);
      clib_spinlock_lock (&mf->global_buffers_lock);
      /* keep last stored buffers, as they are more likely hot in the cache */
      vec_add_aligned (mf->global_buffers, f->buffers, VLIB_FRAME_SIZE,
		       CLIB_CACHE_LINE_BYTES);
      vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
      f->n_alloc -= VLIB_FRAME_SIZE;
      clib_spinlock_unlock (&mf->global_buffers_lock);
/* Dual-buffer variant of vlib_buffer_init_for_free_list: stamps the
   free list's init template onto two buffers at once (pairwise form
   used by unrolled free paths). */
vlib_buffer_init_two_for_free_list (vlib_buffer_t * dst0,
				    vlib_buffer_t * dst1,
				    vlib_buffer_free_list_t * fl)
  vlib_buffer_t *src = &fl->buffer_init_template;

  /* Make sure buffer template is sane. */
  ASSERT (fl->index == vlib_buffer_get_free_list_index (src));

  /* Bulk-copy the template span onto each destination. */
  clib_memcpy (STRUCT_MARK_PTR (dst0, template_start),
	       STRUCT_MARK_PTR (src, template_start),
	       STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
	       STRUCT_OFFSET_OF (vlib_buffer_t, template_start));

  clib_memcpy (STRUCT_MARK_PTR (dst1, template_start),
	       STRUCT_MARK_PTR (src, template_start),
	       STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
	       STRUCT_OFFSET_OF (vlib_buffer_t, template_start));

  /* Not in the first 16 octets. */
  dst0->n_add_refs = src->n_add_refs;
  dst1->n_add_refs = src->n_add_refs;

  /* Make sure it really worked. */
#define _(f) ASSERT (dst0->f == src->f); ASSERT( dst1->f == src->f)

  ASSERT (dst0->total_length_not_including_first_buffer == 0);
  ASSERT (dst1->total_length_not_including_first_buffer == 0);
  ASSERT (dst0->n_add_refs == 0);
  ASSERT (dst1->n_add_refs == 0);
977 extern u32 *vlib_buffer_state_validation_lock;
978 extern uword *vlib_buffer_state_validation_hash;
979 extern void *vlib_buffer_state_heap;
/* Debug check: warn if buffer b is not in the 'expected' busy/free state
   according to the buffer-state validation hash. */
vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
  /* Validation hash lives on its own heap; switch temporarily. */
  oldheap = clib_mem_set_heap (vlib_buffer_state_heap);

  /* Spin on the simple test-and-set lock guarding the hash. */
  while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))

  p = hash_get (vlib_buffer_state_validation_hash, b);

  /* If we don't know about b, declare it to be in the expected state */
    hash_set (vlib_buffer_state_validation_hash, b, expected);

  if (p[0] != expected)
      /* Stop the circular-journal trace so the mismatch can be inspected. */
      void cj_stop (void);
      vlib_main_t *vm = &vlib_global_main;

      bi = vlib_get_buffer_index (vm, b);

      clib_mem_set_heap (oldheap);
      clib_warning ("%.6f buffer %llx (%d): %s, not %s",
		    vlib_time_now (vm), bi,
		    p[0] ? "busy" : "free", expected ? "busy" : "free");

  /* Release the lock and restore the caller's heap. */
  CLIB_MEMORY_BARRIER ();
  *vlib_buffer_state_validation_lock = 0;
  clib_mem_set_heap (oldheap);
/* Debug helper: record buffer b's busy/free state in the validation hash
   (no check, unconditional set). */
vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
  /* Validation hash lives on its own heap; switch temporarily. */
  oldheap = clib_mem_set_heap (vlib_buffer_state_heap);

  /* Spin on the simple test-and-set lock guarding the hash. */
  while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))

  hash_set (vlib_buffer_state_validation_hash, b, expected);

  /* Release the lock and restore the caller's heap. */
  CLIB_MEMORY_BARRIER ();
  *vlib_buffer_state_validation_lock = 0;
  clib_mem_set_heap (oldheap);
/** minimum data size of first buffer in a buffer chain */
#define VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE (256)

/**
 * @brief compress buffer chain in a way where the first buffer is at least
 * VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE long
 *
 * @param[in] vm - vlib_main
 * @param[in,out] first - first buffer in chain
 * @param[in,out] discard_vector - vector of buffer indexes which were removed
 * from the chain in the process
 */
vlib_buffer_chain_compress (vlib_main_t * vm,
			    vlib_buffer_t * first, u32 ** discard_vector)
  if (first->current_length >= VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE ||
      !(first->flags & VLIB_BUFFER_NEXT_PRESENT))
      /* this is already big enough or not a chain */

  /* probe free list to find allocated buffer size to avoid overfill */
  vlib_buffer_free_list_t *free_list =
    vlib_buffer_get_buffer_free_list (vm, first, &index);

  /* Target size: capped by what physically fits in the first buffer. */
  u32 want_first_size = clib_min (VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE,
				  free_list->n_data_bytes -
				  first->current_data);

      /* Pull bytes from the second segment into the first. */
      vlib_buffer_t *second = vlib_get_buffer (vm, first->next_buffer);
      u32 need = want_first_size - first->current_length;
      u32 amount_to_copy = clib_min (need, second->current_length);
      clib_memcpy (((u8 *) vlib_buffer_get_current (first)) +
		   first->current_length,
		   vlib_buffer_get_current (second), amount_to_copy);
      first->current_length += amount_to_copy;
      vlib_buffer_advance (second, amount_to_copy);
      /* Moved bytes now live in 'first', so the chain's
         "not including first" total shrinks accordingly. */
      if (first->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
	  first->total_length_not_including_first_buffer -= amount_to_copy;
      /* Second segment fully drained: unlink it and queue it for discard. */
      if (!second->current_length)
	  vec_add1 (*discard_vector, first->next_buffer);
	  if (second->flags & VLIB_BUFFER_NEXT_PRESENT)
	      first->next_buffer = second->next_buffer;
	    first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
	  second->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
  /* Repeat until the first buffer is big enough or the chain ends. */
  while ((first->current_length < want_first_size) &&
	 (first->flags & VLIB_BUFFER_NEXT_PRESENT));
1107 #endif /* included_vlib_buffer_funcs_h */
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */